diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..2b48c8b --- /dev/null +++ b/.npmignore @@ -0,0 +1,6 @@ +# package directories +node_modules +jspm_packages + +# Serverless directories +.serverless \ No newline at end of file diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..6a1fccf --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +6.9.0 \ No newline at end of file diff --git a/.serverless/cloudformation-template-create-stack.json b/.serverless/cloudformation-template-create-stack.json new file mode 100644 index 0000000..3be3c87 --- /dev/null +++ b/.serverless/cloudformation-template-create-stack.json @@ -0,0 +1,16 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "The AWS CloudFormation template for this Serverless application", + "Resources": { + "ServerlessDeploymentBucket": { + "Type": "AWS::S3::Bucket" + } + }, + "Outputs": { + "ServerlessDeploymentBucketName": { + "Value": { + "Ref": "ServerlessDeploymentBucket" + } + } + } +} \ No newline at end of file diff --git a/.serverless/cloudformation-template-update-stack.json b/.serverless/cloudformation-template-update-stack.json new file mode 100644 index 0000000..d29f2ab --- /dev/null +++ b/.serverless/cloudformation-template-update-stack.json @@ -0,0 +1,416 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Description": "The AWS CloudFormation template for this Serverless application", + "Resources": { + "ServerlessDeploymentBucket": { + "Type": "AWS::S3::Bucket" + }, + "IamRoleLambdaExecution": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Service": [ + "lambda.amazonaws.com" + ] + }, + "Action": [ + "sts:AssumeRole" + ] + } + ] + }, + "Path": "/" + } + }, + "IamPolicyLambdaExecution": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyName": "dev-lti-lambda-lambda", + "PolicyDocument": { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Resource": "arn:aws:logs:us-east-1:*:*" + } + ] + }, + "Roles": [ + { + "Ref": "IamRoleLambdaExecution" + } + ] + } + }, + "LtiLambdaFunction": { + "Type": "AWS::Lambda::Function", + "Properties": { + "Code": { + "S3Bucket": { + "Ref": "ServerlessDeploymentBucket" + }, + "S3Key": "serverless/lti-lambda/dev/1477516476677-2016-10-26T21:14:36.677Z/lti-lambda.zip" + }, + "FunctionName": "lti-lambda-dev-lti", + "Handler": "lti.handler", + "MemorySize": 1024, + "Role": { + "Fn::GetAtt": [ + "IamRoleLambdaExecution", + "Arn" + ] + }, + "Runtime": "nodejs4.3", + "Timeout": 10 + } + }, + "ApiGatewayRestApi": { + "Type": "AWS::ApiGateway::RestApi", + "Properties": { + "Name": "dev-lti-lambda" + } + }, + "ApiGatewayResourceLti": { + "Type": "AWS::ApiGateway::Resource", + "Properties": { + "ParentId": { + "Fn::GetAtt": [ + "ApiGatewayRestApi", + "RootResourceId" + ] + }, + "PathPart": "lti", + "RestApiId": { + "Ref": "ApiGatewayRestApi" + } + } + }, + "ApiGatewayMethodLtiPost": { + "Type": "AWS::ApiGateway::Method", + "Properties": { + "AuthorizationType": "NONE", + "HttpMethod": "POST", + "MethodResponses": [ + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 200 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + 
"method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 400 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 401 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 403 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 404 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 422 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 500 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 502 + }, + { + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Content-Type": true + }, + "ResponseModels": {}, + "StatusCode": 504 + } + ], + "RequestParameters": {}, + "Integration": { + "IntegrationHttpMethod": "POST", + "Type": "AWS", + "Uri": { + "Fn::Join": [ + "", + [ + "arn:aws:apigateway:", + { + "Ref": "AWS::Region" + }, + ":lambda:path/2015-03-31/functions/", + { + "Fn::GetAtt": [ + "LtiLambdaFunction", + "Arn" + ] + }, + "/invocations" + ] + ] + }, + "RequestTemplates": { + "application/json": "\n #define( $loop )\n {\n #foreach($key in $map.keySet())\n \"$util.escapeJavaScript($key)\":\n \"$util.escapeJavaScript($map.get($key))\"\n #if( $foreach.hasNext ) , #end\n #end\n }\n #end\n\n {\n \"body\": $input.json(\"$\"),\n \"method\": \"$context.httpMethod\",\n \"principalId\": \"$context.authorizer.principalId\",\n \"stage\": \"$context.stage\",\n\n #set( $map = $input.params().header )\n \"headers\": $loop,\n\n #set( $map = $input.params().querystring )\n \"query\": $loop,\n\n #set( $map = $input.params().path )\n \"path\": $loop,\n\n #set( $map = $context.identity )\n \"identity\": $loop,\n\n #set( $map = $stageVariables )\n \"stageVariables\": $loop\n }\n ", + "application/x-www-form-urlencoded": "\n #define( $body )\n {\n #foreach( $token in $input.path('$').split('&') )\n #set( $keyVal = $token.split('=') )\n #set( $keyValSize = $keyVal.size() )\n #if( $keyValSize >= 1 )\n #set( $key = $util.escapeJavaScript($util.urlDecode($keyVal[0])) )\n #if( $keyValSize >= 2 )\n #set( $val = $util.escapeJavaScript($util.urlDecode($keyVal[1])) )\n #else\n #set( $val = '' )\n #end\n \"$key\": \"$val\"#if($foreach.hasNext),#end\n #end\n #end\n }\n #end\n\n #define( $loop )\n {\n #foreach($key in $map.keySet())\n \"$util.escapeJavaScript($key)\":\n \"$util.escapeJavaScript($map.get($key))\"\n #if( $foreach.hasNext ) , #end\n #end\n }\n #end\n\n {\n \"body\": $body,\n \"method\": \"$context.httpMethod\",\n \"principalId\": \"$context.authorizer.principalId\",\n \"stage\": \"$context.stage\",\n\n #set( $map = $input.params().header )\n \"headers\": $loop,\n\n #set( $map = $input.params().querystring )\n \"query\": $loop,\n\n #set( $map = $input.params().path )\n \"path\": $loop,\n\n #set( $map = $context.identity 
)\n \"identity\": $loop,\n\n #set( $map = $stageVariables )\n \"stageVariables\": $loop\n }\n " + }, + "PassthroughBehavior": "NEVER", + "IntegrationResponses": [ + { + "StatusCode": 200, + "SelectionPattern": "", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 400, + "SelectionPattern": ".*\\[400\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 401, + "SelectionPattern": ".*\\[401\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 403, + "SelectionPattern": ".*\\[403\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 404, + "SelectionPattern": ".*\\[404\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 422, + "SelectionPattern": ".*\\[422\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 500, + "SelectionPattern": ".*(Process\\s?exited\\s?before\\s?completing\\s?request|\\[500\\]).*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 502, + "SelectionPattern": ".*\\[502\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + }, + { + "StatusCode": 504, + "SelectionPattern": ".*\\[504\\].*", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Content-Type": "'text/html'" + }, + "ResponseTemplates": { + "application/json": "$input.path('$').body" + } + } + ] + }, + "ResourceId": { + "Ref": "ApiGatewayResourceLti" + }, + "RestApiId": { + "Ref": "ApiGatewayRestApi" + } + } + }, + "ApiGatewayMethodLtiOptions": { + "Type": "AWS::ApiGateway::Method", + "Properties": { + "AuthorizationType": "NONE", + "HttpMethod": "OPTIONS", + "MethodResponses": [ + { + "ResponseModels": {}, + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": true, + "method.response.header.Access-Control-Allow-Headers": true, + "method.response.header.Access-Control-Allow-Methods": true + }, + "StatusCode": "200" + } + ], + "RequestParameters": {}, + "Integration": { + "Type": "MOCK", + "RequestTemplates": { + "application/json": "{statusCode:200}" + }, + 
"IntegrationResponses": [ + { + "StatusCode": "200", + "ResponseParameters": { + "method.response.header.Access-Control-Allow-Origin": "'*'", + "method.response.header.Access-Control-Allow-Headers": "'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token'", + "method.response.header.Access-Control-Allow-Methods": "'OPTIONS,POST'" + }, + "ResponseTemplates": { + "application/json": "" + } + } + ] + }, + "ResourceId": { + "Ref": "ApiGatewayResourceLti" + }, + "RestApiId": { + "Ref": "ApiGatewayRestApi" + } + } + }, + "ApiGatewayDeployment1477516476686": { + "Type": "AWS::ApiGateway::Deployment", + "Properties": { + "RestApiId": { + "Ref": "ApiGatewayRestApi" + }, + "StageName": "dev" + }, + "DependsOn": [ + "ApiGatewayMethodLtiPost" + ] + }, + "LtiLambdaPermissionApiGateway": { + "Type": "AWS::Lambda::Permission", + "Properties": { + "FunctionName": { + "Fn::GetAtt": [ + "LtiLambdaFunction", + "Arn" + ] + }, + "Action": "lambda:InvokeFunction", + "Principal": "apigateway.amazonaws.com" + } + } + }, + "Outputs": { + "ServerlessDeploymentBucketName": { + "Value": { + "Ref": "ServerlessDeploymentBucket" + } + }, + "LtiLambdaFunctionArn": { + "Description": "Lambda function info", + "Value": { + "Fn::GetAtt": [ + "LtiLambdaFunction", + "Arn" + ] + } + }, + "ServiceEndpoint": { + "Description": "URL of the service endpoint", + "Value": { + "Fn::Join": [ + "", + [ + "https://", + { + "Ref": "ApiGatewayRestApi" + }, + ".execute-api.us-east-1.amazonaws.com/dev" + ] + ] + } + } + } +} \ No newline at end of file diff --git a/.serverless/lti-lambda.zip b/.serverless/lti-lambda.zip new file mode 100644 index 0000000..6fedb7b Binary files /dev/null and b/.serverless/lti-lambda.zip differ diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..59dc654 --- /dev/null +++ b/LICENSE @@ -0,0 +1,33 @@ +The MIT License (MIT) + +Copyright (c) 2016 Atomic Jolt, Inc. http://www.atomicjolt.com + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +==== + +All files located in the node_modules and external directories are +externally maintained libraries used by this software which have their +own licenses; we recommend you read them, as their terms may differ from +the terms above. diff --git a/README b/README new file mode 100644 index 0000000..9dc5a88 --- /dev/null +++ b/README @@ -0,0 +1,41 @@ +# LTI Lambda +----------------------- +This project demonstrates how to do a basic LTI launch using AWS Lambda. 
This uses the (Serverless framework)[https://serverless.com]. + +# Getting started +----------------------- +You will need an AWS account and credentials to run the function. + +## Follow the Serverless getting started documentation + +https://serverless.com/framework/docs/providers/aws/guide/intro/ + +## Deploy + + `serverless deploy` + +## LTI Setup +----------------------- +Add a new LTI tool to your LMS. Use any value for the lti key. The value for the lti secret is hard coded to 'secret'. + +Here's an example XML configuration that can be used to add a course navigation tool to Instructure Canvas. Be sure to replace the +urls with your own urls: + + + + Attendance + LTI Lambda + https://ngdjwzl0v6.execute-api.us-east-1.amazonaws.com/dev/lti + https://ngdjwzl0v6.execute-api.us-east-1.amazonaws.com/dev/images/oauth_icon.png + + + enabled + true + LTI Lambda + https://ngdjwzl0v6.execute-api.us-east-1.amazonaws.com/dev/lti + public + + https://ngdjwzl0v6.execute-api.us-east-1.amazonaws.com + public + + diff --git a/event.json b/event.json new file mode 100644 index 0000000..57fc6cc --- /dev/null +++ b/event.json @@ -0,0 +1,87 @@ +{ + "body": { + "oauth_consumer_key": "atomic-lti-demo", + "oauth_signature_method": "HMAC-SHA1", + "oauth_timestamp": "1472793890", + "oauth_nonce": "zOE0Ch5gn7RkWEeR4zzv0XByoKTOPCPGZ4TTDuGs4", + "oauth_version": "1.0", + "context_id": "a8a76fb8fbcc2d09787dafd28564e2ecdab51f11", + "context_label": "Grade 8 Math", + "context_title": "8th Grade Math - Ball", + "custom_canvas_api_domain": "atomicjolt.instructure.com", + "custom_canvas_course_id": "253", + "custom_canvas_enrollment_state": "active", + "custom_canvas_user_id": "1", + "custom_canvas_user_login_id": "justinball@gmail.com", + "ext_roles": "urn:lti:instrole:ims/lis/Administrator,urn:lti:instrole:ims/lis/Instructor,urn:lti:role:ims/lis/Instructor,urn:lti:sysrole:ims/lis/User", + "launch_presentation_document_target": "iframe", + "launch_presentation_height": "400", + "launch_presentation_locale": "en", + "launch_presentation_return_url": "https://atomicjolt.instructure.com/courses/253/external_content/success/external_tool_redirect", + "launch_presentation_width": "800", + "lis_course_offering_sourcedid": "MATH1010032016S", + "lis_person_contact_email_primary": "justinball@gmail.com", + "lis_person_name_family": "", + "lis_person_name_full": "justinball@gmail.com", + "lis_person_name_given": "justinball@gmail.com", + "lis_person_sourcedid": "1234", + "lti_message_type": "basic-lti-launch-request", + "lti_version": "LTI-1p0", + "oauth_callback": "about:blank", + "resource_link_id": "a8a76fb8fbcc2d09787dafd28564e2ecdab51f11", + "resource_link_title": "Atomic LTI", + "roles": "Instructor,urn:lti:instrole:ims/lis/Administrator", + "tool_consumer_info_product_family_code": "canvas", + "tool_consumer_info_version": "cloud", + "tool_consumer_instance_contact_email": "notifications@instructure.com", + "tool_consumer_instance_guid": "4MRcxnx6vQbFXxhLb8005m5WXFM2Z2i8lQwhJ1QT:canvas-lms", + "tool_consumer_instance_name": "Atomic Jolt", + "user_id": "0340cde37624c04979a6c3fdd0afc2479f8405ad", + "user_image": "https://secure.gravatar.com/avatar/bba664c75a2d85d1493a96dadbdedf1b?s=50&d=https%3A%2F%2Fatomicjolt.instructure.com%2Fimages%2Fmessages%2Favatar-50.png", + "oauth_signature": "yhW84qeoswxfs1VlbrDxyCRBoeQ=" + }, + "method": "POST", + "principalId": "", + "stage": "dev", + "resourcePath": "/lti", + "resourceId": "6y0epe", + "headers": { + "accept": 
"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", + "accept-encoding": "gzip, deflate, br", + "accept-language": "en-US,en;q=0.8", + "cache-control": "max-age=0", + "cloudfront-forwarded-proto": "https", + "cloudfront-is-desktop-viewer": "true", + "cloudfront-is-mobile-viewer": "false", + "cloudfront-is-smarttv-viewer": "false", + "cloudfront-is-tablet-viewer": "false", + "cloudfront-viewer-country": "US", + "content-type": "application/x-www-form-urlencoded", + "host": "8048qxq9dh.execute-api.us-east-1.amazonaws.com", + "origin": "https://atomicjolt.instructure.com", + "referer": "https://atomicjolt.instructure.com/courses/253/external_tools/948", + "upgrade-insecure-requests": "1", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", + "via": "1.1 296faebadd40feee8b2eb0e10d5786d2.cloudfront.net (CloudFront)", + "x-amz-cf-id": "hn7sEgJnGCLwlK5ckBSTabqM0EtWTLKxqlLrsXbNVxRyIg3vsjRb5A==", + "x-forwarded-for": "76.27.110.110, 54.239.203.101", + "x-forwarded-port": "443", + "x-forwarded-proto": "https" + }, + "query": {}, + "path": {}, + "identity": { + "cognitoidentitypoolid": "", + "accountid": "", + "cognitoidentityid": "", + "caller": "", + "apikey": "", + "sourceip": "76.27.110.110", + "cognitoauthenticationtype": "", + "cognitoauthenticationprovider": "", + "userarn": "", + "useragent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", + "user": "" + }, + "stageVariables": {} +} \ No newline at end of file diff --git a/handler.js b/handler.js new file mode 100644 index 0000000..92749f2 --- /dev/null +++ b/handler.js @@ -0,0 +1,30 @@ +"use strict"; + +import lti from 'ims-lti'; +import querystring from 'querystring'; + +const lti_shared_secret = "secret"; + +module.exports.handler = function(event, context, callback) { + + const protocol = event.headers['X-Forwarded-Proto']; + + // Fix up the event to look like a node request for the ims-lti library + event.headers.host = event.headers.Host + event.url = `${protocol}://${event.headers.Host}/${event.stage}/lti`; // Note that we hard code the path of 'lti' here. This must match the 'path' set in serverless.yml. + event.protocol = protocol; + + // Validate the lti launch + const provider = new lti.Provider(event.body.oauth_consumer_key, lti_shared_secret); + + provider.valid_request(event, (error, isValid) => { + + const body = isValid ? + `

  <html>
    <body>
      Hello ${event.body.lis_person_name_full}
    </body>
  </html>
` :
`
  <html>
    <body>
      Error: Failed LTI Launch
      ${error}
    </body>
  </html>

`; + + callback(null, { body: body }); + + }); + +}; \ No newline at end of file diff --git a/node_modules/.bin/webpack b/node_modules/.bin/webpack new file mode 120000 index 0000000..d462c1d --- /dev/null +++ b/node_modules/.bin/webpack @@ -0,0 +1 @@ +../webpack/bin/webpack.js \ No newline at end of file diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity new file mode 100644 index 0000000..87e3e63 --- /dev/null +++ b/node_modules/.yarn-integrity @@ -0,0 +1 @@ +65a625794086bc7c03fc335d1643128fd1f237d3d2d128d5b3a0a0ad0a710fd1 \ No newline at end of file diff --git a/node_modules/Base64/.npmignore b/node_modules/Base64/.npmignore new file mode 100644 index 0000000..cba87a3 --- /dev/null +++ b/node_modules/Base64/.npmignore @@ -0,0 +1,2 @@ +/coverage/ +/node_modules/ diff --git a/node_modules/Base64/.travis.yml b/node_modules/Base64/.travis.yml new file mode 100644 index 0000000..fbde051 --- /dev/null +++ b/node_modules/Base64/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.11" +install: make setup +script: make test diff --git a/node_modules/Base64/LICENSE b/node_modules/Base64/LICENSE new file mode 100644 index 0000000..4832767 --- /dev/null +++ b/node_modules/Base64/LICENSE @@ -0,0 +1,14 @@ + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright (c) 2011..2012 David Chambers + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. diff --git a/node_modules/Base64/Makefile b/node_modules/Base64/Makefile new file mode 100644 index 0000000..5c475df --- /dev/null +++ b/node_modules/Base64/Makefile @@ -0,0 +1,42 @@ +ISTANBUL = node_modules/.bin/istanbul +UGLIFYJS = node_modules/.bin/uglifyjs +XYZ = node_modules/.bin/xyz --message X.Y.Z --tag X.Y.Z + +SRC = base64.js +MIN = $(patsubst %.js,%.min.js,$(SRC)) + + +.PHONY: all +all: $(MIN) + +%.min.js: %.js + $(UGLIFYJS) $< --compress --mangle > $@ + + +.PHONY: bytes +bytes: base64.min.js + gzip --best --stdout $< | wc -c | tr -d ' ' + + +.PHONY: clean +clean: + rm -f -- $(MIN) + + +.PHONY: release-major release-minor release-patch +release-major: + $(XYZ) --increment major +release-minor: + $(XYZ) --increment minor +release-patch: + $(XYZ) --increment patch + + +.PHONY: setup +setup: + npm install + + +.PHONY: test +test: + $(ISTANBUL) cover node_modules/.bin/_mocha -- --compilers coffee:coffee-script/register diff --git a/node_modules/Base64/README.md b/node_modules/Base64/README.md new file mode 100644 index 0000000..e5dafed --- /dev/null +++ b/node_modules/Base64/README.md @@ -0,0 +1,34 @@ +# Base64.js + +≈ 500 byte* polyfill for browsers which don't provide [`window.btoa`][1] and +[`window.atob`][2]. + +Although the script does no harm in browsers which do provide these functions, +a conditional script loader such as [yepnope][3] can prevent unnecessary HTTP +requests. + +```javascript +yepnope({ + test: window.btoa && window.atob, + nope: 'base64.js', + callback: function () { + // `btoa` and `atob` are now safe to use + } +}) +``` + +Base64.js stems from a [gist][4] by [yahiko][5]. + +### Running the test suite + + make setup + make test + +\* Minified and gzipped. Run `make bytes` to verify. 
+ + +[1]: https://developer.mozilla.org/en/DOM/window.btoa +[2]: https://developer.mozilla.org/en/DOM/window.atob +[3]: http://yepnopejs.com/ +[4]: https://gist.github.com/229984 +[5]: https://github.com/yahiko diff --git a/node_modules/Base64/base64.js b/node_modules/Base64/base64.js new file mode 100644 index 0000000..15d3a8a --- /dev/null +++ b/node_modules/Base64/base64.js @@ -0,0 +1,60 @@ +;(function () { + + var object = typeof exports != 'undefined' ? exports : this; // #8: web workers + var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='; + + function InvalidCharacterError(message) { + this.message = message; + } + InvalidCharacterError.prototype = new Error; + InvalidCharacterError.prototype.name = 'InvalidCharacterError'; + + // encoder + // [https://gist.github.com/999166] by [https://github.com/nignag] + object.btoa || ( + object.btoa = function (input) { + for ( + // initialize result and counter + var block, charCode, idx = 0, map = chars, output = ''; + // if the next input index does not exist: + // change the mapping table to "=" + // check if d has no fractional digits + input.charAt(idx | 0) || (map = '=', idx % 1); + // "8 - idx % 1 * 8" generates the sequence 2, 4, 6, 8 + output += map.charAt(63 & block >> 8 - idx % 1 * 8) + ) { + charCode = input.charCodeAt(idx += 3/4); + if (charCode > 0xFF) { + throw new InvalidCharacterError("'btoa' failed: The string to be encoded contains characters outside of the Latin1 range."); + } + block = block << 8 | charCode; + } + return output; + }); + + // decoder + // [https://gist.github.com/1020396] by [https://github.com/atk] + object.atob || ( + object.atob = function (input) { + input = input.replace(/=+$/, ''); + if (input.length % 4 == 1) { + throw new InvalidCharacterError("'atob' failed: The string to be decoded is not correctly encoded."); + } + for ( + // initialize result and counters + var bc = 0, bs, buffer, idx = 0, output = ''; + // get next character + buffer = input.charAt(idx++); + // character found in table? initialize bit storage and add its ascii value; + ~buffer && (bs = bc % 4 ? bs * 64 + buffer : buffer, + // and if not first of each 4 characters, + // convert the first 8 bits to one ascii character + bc++ % 4) ? 
output += String.fromCharCode(255 & bs >> (-2 * bc & 6)) : 0 + ) { + // try to find character in table (0-63, not found => -1) + buffer = chars.indexOf(buffer); + } + return output; + }); + +}()); diff --git a/node_modules/Base64/base64.min.js b/node_modules/Base64/base64.min.js new file mode 100644 index 0000000..33c7aa4 --- /dev/null +++ b/node_modules/Base64/base64.min.js @@ -0,0 +1 @@ +!function(){function t(t){this.message=t}var e="undefined"!=typeof exports?exports:this,r="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";t.prototype=new Error,t.prototype.name="InvalidCharacterError",e.btoa||(e.btoa=function(e){for(var o,n,a=0,i=r,c="";e.charAt(0|a)||(i="=",a%1);c+=i.charAt(63&o>>8-a%1*8)){if(n=e.charCodeAt(a+=.75),n>255)throw new t("'btoa' failed: The string to be encoded contains characters outside of the Latin1 range.");o=o<<8|n}return c}),e.atob||(e.atob=function(e){if(e=e.replace(/=+$/,""),e.length%4==1)throw new t("'atob' failed: The string to be decoded is not correctly encoded.");for(var o,n,a=0,i=0,c="";n=e.charAt(i++);~n&&(o=a%4?64*o+n:n,a++%4)?c+=String.fromCharCode(255&o>>(-2*a&6)):0)n=r.indexOf(n);return c})}(); \ No newline at end of file diff --git a/node_modules/Base64/package.json b/node_modules/Base64/package.json new file mode 100644 index 0000000..e453567 --- /dev/null +++ b/node_modules/Base64/package.json @@ -0,0 +1,24 @@ +{ + "name": "Base64", + "version": "0.2.1", + "description": "Base64 encoding and decoding", + "author": "David Chambers ", + "main": "./base64.js", + "licenses": [ + { + "type": "WTFPL", + "url": "https://raw.github.com/davidchambers/Base64.js/master/LICENSE" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/davidchambers/Base64.js.git" + }, + "devDependencies": { + "coffee-script": "1.7.x", + "istanbul": "0.2.x", + "mocha": "1.18.x", + "uglify-js": "2.4.x", + "xyz": "0.1.x" + } +} diff --git a/node_modules/Base64/test/base64.coffee b/node_modules/Base64/test/base64.coffee new file mode 100644 index 0000000..e25cf5c --- /dev/null +++ b/node_modules/Base64/test/base64.coffee @@ -0,0 +1,52 @@ +assert = require 'assert' + +{btoa, atob} = require '..' + + +describe 'Base64.js', -> + + it 'can encode ASCII input', -> + assert.strictEqual btoa(''), '' + assert.strictEqual btoa('f'), 'Zg==' + assert.strictEqual btoa('fo'), 'Zm8=' + assert.strictEqual btoa('foo'), 'Zm9v' + assert.strictEqual btoa('quux'), 'cXV1eA==' + assert.strictEqual btoa('!"#$%'), 'ISIjJCU=' + assert.strictEqual btoa("&'()*+"), 'JicoKSor' + assert.strictEqual btoa(',-./012'), 'LC0uLzAxMg==' + assert.strictEqual btoa('3456789:'), 'MzQ1Njc4OTo=' + assert.strictEqual btoa(';<=>?@ABC'), 'Ozw9Pj9AQUJD' + assert.strictEqual btoa('DEFGHIJKLM'), 'REVGR0hJSktMTQ==' + assert.strictEqual btoa('NOPQRSTUVWX'), 'Tk9QUVJTVFVWV1g=' + assert.strictEqual btoa('YZ[\\]^_`abc'), 'WVpbXF1eX2BhYmM=' + assert.strictEqual btoa('defghijklmnop'), 'ZGVmZ2hpamtsbW5vcA==' + assert.strictEqual btoa('qrstuvwxyz{|}~'), 'cXJzdHV2d3h5ent8fX4=' + + it 'cannot encode non-ASCII input', -> + assert.throws (-> btoa '✈'), (err) -> + err instanceof Error and + err.name is 'InvalidCharacterError' and + err.message is "'btoa' failed: The string to be encoded contains characters outside of the Latin1 range." 
+ + it 'can decode Base64-encoded input', -> + assert.strictEqual atob(''), '' + assert.strictEqual atob('Zg=='), 'f' + assert.strictEqual atob('Zm8='), 'fo' + assert.strictEqual atob('Zm9v'), 'foo' + assert.strictEqual atob('cXV1eA=='), 'quux' + assert.strictEqual atob('ISIjJCU='), '!"#$%' + assert.strictEqual atob('JicoKSor'), "&'()*+" + assert.strictEqual atob('LC0uLzAxMg=='), ',-./012' + assert.strictEqual atob('MzQ1Njc4OTo='), '3456789:' + assert.strictEqual atob('Ozw9Pj9AQUJD'), ';<=>?@ABC' + assert.strictEqual atob('REVGR0hJSktMTQ=='), 'DEFGHIJKLM' + assert.strictEqual atob('Tk9QUVJTVFVWV1g='), 'NOPQRSTUVWX' + assert.strictEqual atob('WVpbXF1eX2BhYmM='), 'YZ[\\]^_`abc' + assert.strictEqual atob('ZGVmZ2hpamtsbW5vcA=='), 'defghijklmnop' + assert.strictEqual atob('cXJzdHV2d3h5ent8fX4='), 'qrstuvwxyz{|}~' + + it 'cannot decode invalid input', -> + assert.throws (-> atob 'a'), (err) -> + err instanceof Error and + err.name is 'InvalidCharacterError' and + err.message is "'atob' failed: The string to be decoded is not correctly encoded." diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/abbrev/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md new file mode 100644 index 0000000..99746fe --- /dev/null +++ b/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. 
diff --git a/node_modules/abbrev/abbrev.js b/node_modules/abbrev/abbrev.js new file mode 100644 index 0000000..69cfeac --- /dev/null +++ b/node_modules/abbrev/abbrev.js @@ -0,0 +1,62 @@ + +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json new file mode 100644 index 0000000..d794c03 --- /dev/null +++ b/node_modules/abbrev/package.json @@ -0,0 +1,18 @@ +{ + "name": "abbrev", + "version": "1.0.9", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. 
Schlueter ", + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --cov" + }, + "repository": "http://github.com/isaacs/abbrev-js", + "license": "ISC", + "devDependencies": { + "tap": "^5.7.2" + }, + "files": [ + "abbrev.js" + ] +} diff --git a/node_modules/accepts/HISTORY.md b/node_modules/accepts/HISTORY.md new file mode 100644 index 0000000..0477ed7 --- /dev/null +++ b/node_modules/accepts/HISTORY.md @@ -0,0 +1,212 @@ +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding 
when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/node_modules/accepts/LICENSE b/node_modules/accepts/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/accepts/README.md b/node_modules/accepts/README.md new file mode 100644 index 0000000..ae36676 --- /dev/null +++ b/node_modules/accepts/README.md @@ -0,0 +1,135 @@ +# accepts + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). Extracted from [koa](https://www.npmjs.com/package/koa) for general use. 
+ +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +```sh +npm install accepts +``` + +## API + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. + +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME types is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a different typed +respond body based on what the client wants to accept. The server lists it's +preferences in order and will get back the best match between the client and +server. 
+ +```js +var accepts = require('accepts') +var http = require('http') + +function app(req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch(accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/accepts.svg +[npm-url]: https://npmjs.org/package/accepts +[node-version-image]: https://img.shields.io/node/v/accepts.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/accepts/master.svg +[travis-url]: https://travis-ci.org/jshttp/accepts +[coveralls-image]: https://img.shields.io/coveralls/jshttp/accepts/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/accepts +[downloads-image]: https://img.shields.io/npm/dm/accepts.svg +[downloads-url]: https://npmjs.org/package/accepts diff --git a/node_modules/accepts/index.js b/node_modules/accepts/index.js new file mode 100644 index 0000000..e80192a --- /dev/null +++ b/node_modules/accepts/index.js @@ -0,0 +1,231 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts(req) { + if (!(this instanceof Accepts)) + return new Accepts(req) + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. + * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... 
+ * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + if (!this.headers.accept) return types[0]; + var mimes = types.map(extToMime); + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)); + var first = accepts[0]; + if (!first) return false; + return types[mimes.indexOf(first)]; +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... + * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... + * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime(type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. 
+ * + * @param {String} type + * @return {String} + * @private + */ + +function validMime(type) { + return typeof type === 'string'; +} diff --git a/node_modules/accepts/package.json b/node_modules/accepts/package.json new file mode 100644 index 0000000..ed422bd --- /dev/null +++ b/node_modules/accepts/package.json @@ -0,0 +1,38 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.3", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.11", + "negotiator": "0.6.1" + }, + "devDependencies": { + "istanbul": "0.4.3", + "mocha": "~1.21.5" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/node_modules/acorn/.editorconfig b/node_modules/acorn/.editorconfig new file mode 100644 index 0000000..c14d5c6 --- /dev/null +++ b/node_modules/acorn/.editorconfig @@ -0,0 +1,7 @@ +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +insert_final_newline = true diff --git a/node_modules/acorn/.gitattributes b/node_modules/acorn/.gitattributes new file mode 100644 index 0000000..fcadb2c --- /dev/null +++ b/node_modules/acorn/.gitattributes @@ -0,0 +1 @@ +* text eol=lf diff --git a/node_modules/acorn/.npmignore b/node_modules/acorn/.npmignore new file mode 100644 index 0000000..ecba291 --- /dev/null +++ b/node_modules/acorn/.npmignore @@ -0,0 +1,3 @@ +/.tern-port +/test +/local diff --git a/node_modules/acorn/.tern-project b/node_modules/acorn/.tern-project new file mode 100644 index 0000000..6718ce0 --- /dev/null +++ b/node_modules/acorn/.tern-project @@ -0,0 +1,6 @@ +{ + "plugins": { + "node": true, + "es_modules": true + } +} \ No newline at end of file diff --git a/node_modules/acorn/.travis.yml b/node_modules/acorn/.travis.yml new file mode 100644 index 0000000..d9ee88b --- /dev/null +++ b/node_modules/acorn/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +sudo: false +node_js: + - '0.12' + - '4' + - '5' + - '6' diff --git a/node_modules/acorn/AUTHORS b/node_modules/acorn/AUTHORS new file mode 100644 index 0000000..1b2061c --- /dev/null +++ b/node_modules/acorn/AUTHORS @@ -0,0 +1,59 @@ +List of Acorn contributors. Updated before every release. + +Adrian Rakovsky +Alistair Braidwood +Amila Welihinda +Andres Suarez +Angelo +Aparajita Fishman +Arian Stolwijk +Artem Govorov +Brandon Mills +Charles Hughes +Conrad Irwin +Daniel Tschinder +David Bonnet +Domenico Matteo +ForbesLindesay +Forbes Lindesay +Gilad Peleg +impinball +Ingvar Stepanyan +Jackson Ray Hamilton +Jesse McCarthy +Jiaxing Wang +Joel Kemp +Johannes Herr +Jordan Klassen +Jürg Lehni +keeyipchan +Keheliya Gallaba +Kevin Irish +Kevin Kwok +krator +Marijn Haverbeke +Martin Carlberg +Mathias Bynens +Mathieu 'p01' Henri +Matthew Bastien +Max Schaefer +Max Zerzouri +Mihai Bazon +Mike Rennie +Nicholas C. 
Zakas +Nick Fitzgerald +Olivier Thomann +Oskar Schöldström +Paul Harper +Peter Rust +PlNG +Prayag Verma +ReadmeCritic +r-e-d +Richard Gibson +Rich Harris +Rich-Harris +Sebastian McKenzie +Timothy Gu +Toru Nagashima +zsjforcn diff --git a/node_modules/acorn/CHANGELOG.md b/node_modules/acorn/CHANGELOG.md new file mode 100644 index 0000000..16b8212 --- /dev/null +++ b/node_modules/acorn/CHANGELOG.md @@ -0,0 +1,159 @@ +## 3.3.0 (2016-07-25) + +### Bug fixes + +Fix bug in tokenizing of regexp operator after a function declaration. + +Fix parser crash when parsing an array pattern with a hole. + +### New features + +Implement check against complex argument lists in functions that +enable strict mode in ES7. + +## 3.2.0 (2016-06-07) + +### Bug fixes + +Improve handling of lack of unicode regexp support in host +environment. + +Properly reject shorthand properties whose name is a keyword. + +Don't crash when the loose parser is called without options object. + +### New features + +Visitors created with `visit.make` now have their base as _prototype_, +rather than copying properties into a fresh object. + +Make it possible to use `visit.ancestor` with a walk state. + +## 3.1.0 (2016-04-18) + +### Bug fixes + +Fix issue where the loose parser created invalid TemplateElement nodes +for unclosed template literals. + +Properly tokenize the division operator directly after a function +expression. + +Allow trailing comma in destructuring arrays. + +### New features + +The walker now allows defining handlers for `CatchClause` nodes. + +## 3.0.4 (2016-02-25) + +### Fixes + +Allow update expressions as left-hand-side of the ES7 exponential +operator. + +## 3.0.2 (2016-02-10) + +### Fixes + +Fix bug that accidentally made `undefined` a reserved word when +parsing ES7. + +## 3.0.0 (2016-02-10) + +### Breaking changes + +The default value of the `ecmaVersion` option is now 6 (used to be 5). + +Support for comprehension syntax (which was dropped from the draft +spec) has been removed. + +### Fixes + +`let` and `yield` are now “contextual keywords”, meaning you can +mostly use them as identifiers in ES5 non-strict code. + +A parenthesized class or function expression after `export default` is +now parsed correctly. + +### New features + +When `ecmaVersion` is set to 7, Acorn will parse the exponentiation +operator (`**`). + +The identifier character ranges are now based on Unicode 8.0.0. + +Plugins can now override the `raiseRecoverable` method to override the +way non-critical errors are handled. + +## 2.7.0 (2016-01-04) + +### Fixes + +Stop allowing rest parameters in setters. + +Make sure the loose parser always attaches a `local` property to +`ImportNamespaceSpecifier` nodes. + +Disallow `y` rexexp flag in ES5. + +Disallow `\00` and `\000` escapes in strict mode. + +Raise an error when an import name is a reserved word. + +## 2.6.4 (2015-11-12) + +### Fixes + +Fix crash in loose parser when parsing invalid object pattern. + +### New features + +Support plugins in the loose parser. + +## 2.6.2 (2015-11-10) + +### Fixes + +Don't crash when no options object is passed. + +## 2.6.0 (2015-11-09) + +### Fixes + +Add `await` as a reserved word in module sources. + +Disallow `yield` in a parameter default value for a generator. + +Forbid using a comma after a rest pattern in an array destructuring. + +### New features + +Support parsing stdin in command-line tool. + +## 2.5.2 (2015-10-27) + +### Fixes + +Fix bug where the walker walked an exported `let` statement as an +expression. 
+ +## 2.5.0 (2015-10-27) + +### Fixes + +Fix tokenizer support in the command-line tool. + +In the loose parser, don't allow non-string-literals as import +sources. + +Stop allowing `new.target` outside of functions. + +Remove legacy `guard` and `guardedHandler` properties from try nodes. + +Stop allowing multiple `__proto__` properties on an object literal in +strict mode. + +Don't allow rest parameters to be non-identifier patterns. + +Check for duplicate paramter names in arrow functions. diff --git a/node_modules/acorn/LICENSE b/node_modules/acorn/LICENSE new file mode 100644 index 0000000..a35ebf4 --- /dev/null +++ b/node_modules/acorn/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2012-2016 by various contributors (see AUTHORS) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/acorn/README.md b/node_modules/acorn/README.md new file mode 100644 index 0000000..0c514d5 --- /dev/null +++ b/node_modules/acorn/README.md @@ -0,0 +1,407 @@ +# Acorn + +[![Build Status](https://travis-ci.org/ternjs/acorn.svg?branch=master)](https://travis-ci.org/ternjs/acorn) +[![NPM version](https://img.shields.io/npm/v/acorn.svg)](https://www.npmjs.com/package/acorn) +[Author funding status: ![maintainer happiness](https://marijnhaverbeke.nl/fund/status_s.png?force)](https://marijnhaverbeke.nl/fund/) + +A tiny, fast JavaScript parser, written completely in JavaScript. + +## Community + +Acorn is open source software released under an +[MIT license](https://github.com/ternjs/acorn/blob/master/LICENSE). + +You are welcome to +[report bugs](https://github.com/ternjs/acorn/issues) or create pull +requests on [github](https://github.com/ternjs/acorn). For questions +and discussion, please use the +[Tern discussion forum](https://discuss.ternjs.net). + +## Installation + +The easiest way to install acorn is with [`npm`][npm]. + +[npm]: https://www.npmjs.com/ + +```sh +npm install acorn +``` + +Alternately, download the source. + +```sh +git clone https://github.com/ternjs/acorn.git +``` + +## Components + +When run in a CommonJS (node.js) or AMD environment, exported values +appear in the interfaces exposed by the individual files, as usual. +When loaded in the browser (Acorn works in any JS-enabled browser more +recent than IE5) without any kind of module management, a single +global object `acorn` will be defined, and all the exported properties +will be added to that. + +### Main parser + +This is implemented in `dist/acorn.js`, and is what you get when you +`require("acorn")` in node.js. 
+ +**parse**`(input, options)` is used to parse a JavaScript program. +The `input` parameter is a string, `options` can be undefined or an +object setting some of the options listed below. The return value will +be an abstract syntax tree object as specified by the +[ESTree spec][estree]. + +When encountering a syntax error, the parser will raise a +`SyntaxError` object with a meaningful message. The error object will +have a `pos` property that indicates the character offset at which the +error occurred, and a `loc` object that contains a `{line, column}` +object referring to that same position. + +[estree]: https://github.com/estree/estree + +- **ecmaVersion**: Indicates the ECMAScript version to parse. Must be + either 3, 5, 6, or 7. This influences support for strict mode, the set + of reserved words, and support for new syntax features. Default is 6. + + **NOTE**: Only 'stage 4' (finalized) ECMAScript 7 features are being + implemented by Acorn. That means that most of the draft standard is + not yet being parsed. + +- **sourceType**: Indicate the mode the code should be parsed in. Can be + either `"script"` or `"module"`. + +- **onInsertedSemicolon**: If given a callback, that callback will be + called whenever a missing semicolon is inserted by the parser. The + callback will be given the character offset of the point where the + semicolon is inserted as argument, and if `locations` is on, also a + `{line, column}` object representing this position. + +- **onTrailingComma**: Like `onInsertedSemicolon`, but for trailing + commas. + +- **allowReserved**: If `false`, using a reserved word will generate + an error. Defaults to `true` for `ecmaVersion` 3, `false` for higher + versions. When given the value `"never"`, reserved words and + keywords can also not be used as property names (as in Internet + Explorer's old parser). + +- **allowReturnOutsideFunction**: By default, a return statement at + the top level raises an error. Set this to `true` to accept such + code. + +- **allowImportExportEverywhere**: By default, `import` and `export` + declarations can only appear at a program's top level. Setting this + option to `true` allows them anywhere where a statement is allowed. + +- **allowHashBang**: When this is enabled (off by default), if the + code starts with the characters `#!` (as in a shellscript), the + first line will be treated as a comment. + +- **locations**: When `true`, each node has a `loc` object attached + with `start` and `end` subobjects, each of which contains the + one-based line and zero-based column numbers in `{line, column}` + form. Default is `false`. + +- **onToken**: If a function is passed for this option, each found + token will be passed in same format as tokens returned from + `tokenizer().getToken()`. + + If array is passed, each found token is pushed to it. + + Note that you are not allowed to call the parser from the + callback—that will corrupt its internal state. + +- **onComment**: If a function is passed for this option, whenever a + comment is encountered the function will be called with the + following parameters: + + - `block`: `true` if the comment is a block comment, false if it + is a line comment. + - `text`: The content of the comment. + - `start`: Character offset of the start of the comment. + - `end`: Character offset of the end of the comment. + + When the `locations` options is on, the `{line, column}` locations + of the comment’s start and end are passed as two additional + parameters. 
+ + If array is passed for this option, each found comment is pushed + to it as object in Esprima format: + + ```javascript + { + "type": "Line" | "Block", + "value": "comment text", + "start": Number, + "end": Number, + // If `locations` option is on: + "loc": { + "start": {line: Number, column: Number} + "end": {line: Number, column: Number} + }, + // If `ranges` option is on: + "range": [Number, Number] + } + ``` + + Note that you are not allowed to call the parser from the + callback—that will corrupt its internal state. + +- **ranges**: Nodes have their start and end characters offsets + recorded in `start` and `end` properties (directly on the node, + rather than the `loc` object, which holds line/column data. To also + add a [semi-standardized][range] `range` property holding a + `[start, end]` array with the same numbers, set the `ranges` option + to `true`. + +- **program**: It is possible to parse multiple files into a single + AST by passing the tree produced by parsing the first file as the + `program` option in subsequent parses. This will add the toplevel + forms of the parsed file to the "Program" (top) node of an existing + parse tree. + +- **sourceFile**: When the `locations` option is `true`, you can pass + this option to add a `source` attribute in every node’s `loc` + object. Note that the contents of this option are not examined or + processed in any way; you are free to use whatever format you + choose. + +- **directSourceFile**: Like `sourceFile`, but a `sourceFile` property + will be added (regardless of the `location` option) directly to the + nodes, rather than the `loc` object. + +- **preserveParens**: If this option is `true`, parenthesized expressions + are represented by (non-standard) `ParenthesizedExpression` nodes + that have a single `expression` property containing the expression + inside parentheses. + +[range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678 + +**parseExpressionAt**`(input, offset, options)` will parse a single +expression in a string, and return its AST. It will not complain if +there is more of the string left after the expression. + +**getLineInfo**`(input, offset)` can be used to get a `{line, +column}` object for a given program string and character offset. + +**tokenizer**`(input, options)` returns an object with a `getToken` +method that can be called repeatedly to get the next token, a `{start, +end, type, value}` object (with added `loc` property when the +`locations` option is enabled and `range` property when the `ranges` +option is enabled). When the token's type is `tokTypes.eof`, you +should stop calling the method, since it will keep returning that same +token forever. + +In ES6 environment, returned result can be used as any other +protocol-compliant iterable: + +```javascript +for (let token of acorn.tokenizer(str)) { + // iterate over the tokens +} + +// transform code to array of tokens: +var tokens = [...acorn.tokenizer(str)]; +``` + +**tokTypes** holds an object mapping names to the token type objects +that end up in the `type` properties of tokens. + +#### Note on using with [Escodegen][escodegen] + +Escodegen supports generating comments from AST, attached in +Esprima-specific format. 
In order to simulate same format in +Acorn, consider following example: + +```javascript +var comments = [], tokens = []; + +var ast = acorn.parse('var x = 42; // answer', { + // collect ranges for each node + ranges: true, + // collect comments in Esprima's format + onComment: comments, + // collect token ranges + onToken: tokens +}); + +// attach comments using collected information +escodegen.attachComments(ast, comments, tokens); + +// generate code +console.log(escodegen.generate(ast, {comment: true})); +// > 'var x = 42; // answer' +``` + +[escodegen]: https://github.com/estools/escodegen + +### dist/acorn_loose.js ### + +This file implements an error-tolerant parser. It exposes a single +function. The loose parser is accessible in node.js via `require("acorn/dist/acorn_loose")`. + +**parse_dammit**`(input, options)` takes the same arguments and +returns the same syntax tree as the `parse` function in `acorn.js`, +but never raises an error, and will do its best to parse syntactically +invalid code in as meaningful a way as it can. It'll insert identifier +nodes with name `"✖"` as placeholders in places where it can't make +sense of the input. Depends on `acorn.js`, because it uses the same +tokenizer. + +### dist/walk.js ### + +Implements an abstract syntax tree walker. Will store its interface in +`acorn.walk` when loaded without a module system. + +**simple**`(node, visitors, base, state)` does a 'simple' walk over +a tree. `node` should be the AST node to walk, and `visitors` an +object with properties whose names correspond to node types in the +[ESTree spec][estree]. The properties should contain functions +that will be called with the node object and, if applicable the state +at that point. The last two arguments are optional. `base` is a walker +algorithm, and `state` is a start state. The default walker will +simply visit all statements and expressions and not produce a +meaningful state. (An example of a use of state is to track scope at +each point in the tree.) + +**ancestor**`(node, visitors, base, state)` does a 'simple' walk over +a tree, building up an array of ancestor nodes (including the current node) +and passing the array to the callbacks as a third parameter. + +**recursive**`(node, state, functions, base)` does a 'recursive' +walk, where the walker functions are responsible for continuing the +walk on the child nodes of their target node. `state` is the start +state, and `functions` should contain an object that maps node types +to walker functions. Such functions are called with `(node, state, c)` +arguments, and can cause the walk to continue on a sub-node by calling +the `c` argument on it with `(node, state)` arguments. The optional +`base` argument provides the fallback walker functions for node types +that aren't handled in the `functions` object. If not given, the +default walkers will be used. + +**make**`(functions, base)` builds a new walker object by using the +walker functions in `functions` and filling in the missing ones by +taking defaults from `base`. + +**findNodeAt**`(node, start, end, test, base, state)` tries to +locate a node in a tree at the given start and/or end offsets, which +satisfies the predicate `test`. `start` and `end` can be either `null` +(as wildcard) or a number. `test` may be a string (indicating a node +type) or a function that takes `(nodeType, node)` arguments and +returns a boolean indicating whether this node is interesting. `base` +and `state` are optional, and can be used to specify a custom walker. 
+Nodes are tested from inner to outer, so if two nodes match the +boundaries, the inner one will be preferred. + +**findNodeAround**`(node, pos, test, base, state)` is a lot like +`findNodeAt`, but will match any node that exists 'around' (spanning) +the given position. + +**findNodeAfter**`(node, pos, test, base, state)` is similar to +`findNodeAround`, but will match all nodes *after* the given position +(testing outer nodes before inner nodes). + +## Command line interface + +The `bin/acorn` utility can be used to parse a file from the command +line. It accepts as arguments its input file and the following +options: + +- `--ecma3|--ecma5|--ecma6|--ecma7`: Sets the ECMAScript version to parse. Default is + version 5. + +- `--module`: Sets the parsing mode to `"module"`. Is set to `"script"` otherwise. + +- `--locations`: Attaches a "loc" object to each node with "start" and + "end" subobjects, each of which contains the one-based line and + zero-based column numbers in `{line, column}` form. + +- `--allow-hash-bang`: If the code starts with the characters #! (as in a shellscript), the first line will be treated as a comment. + +- `--compact`: No whitespace is used in the AST output. + +- `--silent`: Do not output the AST, just return the exit status. + +- `--help`: Print the usage information and quit. + +The utility spits out the syntax tree as JSON data. + +## Build system + +Acorn is written in ECMAScript 6, as a set of small modules, in the +project's `src` directory, and compiled down to bigger ECMAScript 3 +files in `dist` using [Browserify](http://browserify.org) and +[Babel](http://babeljs.io/). If you are already using Babel, you can +consider including the modules directly. + +The command-line test runner (`npm test`) uses the ES6 modules. The +browser-based test page (`test/index.html`) uses the compiled modules. +The `bin/build-acorn.js` script builds the latter from the former. + +If you are working on Acorn, you'll probably want to try the code out +directly, without an intermediate build step. In your scripts, you can +register the Babel require shim like this: + + require("babel-core/register") + +That will allow you to directly `require` the ES6 modules. + +## Plugins + +Acorn is designed support allow plugins which, within reasonable +bounds, redefine the way the parser works. Plugins can add new token +types and new tokenizer contexts (if necessary), and extend methods in +the parser object. This is not a clean, elegant API—using it requires +an understanding of Acorn's internals, and plugins are likely to break +whenever those internals are significantly changed. But still, it is +_possible_, in this way, to create parsers for JavaScript dialects +without forking all of Acorn. And in principle it is even possible to +combine such plugins, so that if you have, for example, a plugin for +parsing types and a plugin for parsing JSX-style XML literals, you +could load them both and parse code with both JSX tags and types. + +A plugin should register itself by adding a property to +`acorn.plugins`, which holds a function. Calling `acorn.parse`, a +`plugins` option can be passed, holding an object mapping plugin names +to configuration values (or just `true` for plugins that don't take +options). After the parser object has been created, the initialization +functions for the chosen plugins are called with `(parser, +configValue)` arguments. They are expected to use the `parser.extend` +method to extend parser methods. 
For example, the `readToken` method +could be extended like this: + +```javascript +parser.extend("readToken", function(nextMethod) { + return function(code) { + console.log("Reading a token!") + return nextMethod.call(this, code) + } +}) +``` + +The `nextMethod` argument passed to `extend`'s second argument is the +previous value of this method, and should usually be called through to +whenever the extended method does not handle the call itself. + +Similarly, the loose parser allows plugins to register themselves via +`acorn.pluginsLoose`. The extension mechanism is the same as for the +normal parser: + +```javascript +looseParser.extend("readToken", function(nextMethod) { + return function() { + console.log("Reading a token in the loose parser!") + return nextMethod.call(this) + } +}) +``` + +### Existing plugins + + - [`acorn-jsx`](https://github.com/RReverser/acorn-jsx): Parse [Facebook JSX syntax extensions](https://github.com/facebook/jsx) + - [`acorn-es7-plugin`](https://github.com/MatAtBread/acorn-es7-plugin/): Parse [async/await syntax proposal](https://github.com/tc39/ecmascript-asyncawait) + - [`acorn-object-spread`](https://github.com/UXtemple/acorn-object-spread): Parse [object spread syntax proposal](https://github.com/sebmarkbage/ecmascript-rest-spread) + - [`acorn-es7`](https://www.npmjs.com/package/acorn-es7): Parse [decorator syntax proposal](https://github.com/wycats/javascript-decorators) + - [`acorn-objj`](https://www.npmjs.com/package/acorn-objj): [Objective-J](http://www.cappuccino-project.org/learn/objective-j.html) language parser built as Acorn plugin diff --git a/node_modules/acorn/bin/acorn b/node_modules/acorn/bin/acorn new file mode 100755 index 0000000..cf4acd5 --- /dev/null +++ b/node_modules/acorn/bin/acorn @@ -0,0 +1,65 @@ +#!/usr/bin/env node +'use strict'; + +var path = require('path'); +var fs = require('fs'); +var acorn = require('../dist/acorn.js'); + +var infile; +var forceFile; +var silent = false; +var compact = false; +var tokenize = false; +var options = {} + +function help(status) { + var print = (status == 0) ? 
console.log : console.error + print("usage: " + path.basename(process.argv[1]) + " [--ecma3|--ecma5|--ecma6|--ecma7]") + print(" [--tokenize] [--locations] [---allow-hash-bang] [--compact] [--silent] [--module] [--help] [--] [infile]") + process.exit(status) +} + +for (var i = 2; i < process.argv.length; ++i) { + var arg = process.argv[i] + if ((arg == "-" || arg[0] != "-") && !infile) infile = arg + else if (arg == "--" && !infile && i + 2 == process.argv.length) forceFile = infile = process.argv[++i] + else if (arg == "--ecma3") options.ecmaVersion = 3 + else if (arg == "--ecma5") options.ecmaVersion = 5 + else if (arg == "--ecma6") options.ecmaVersion = 6 + else if (arg == "--ecma7") options.ecmaVersion = 7 + else if (arg == "--locations") options.locations = true + else if (arg == "--allow-hash-bang") options.allowHashBang = true + else if (arg == "--silent") silent = true + else if (arg == "--compact") compact = true + else if (arg == "--help") help(0) + else if (arg == "--tokenize") tokenize = true + else if (arg == "--module") options.sourceType = 'module' + else help(1) +} + +function run(code) { + var result + if (!tokenize) { + try { result = acorn.parse(code, options) } + catch(e) { console.error(e.message); process.exit(1) } + } else { + result = [] + var tokenizer = acorn.tokenizer(code, options), token + while (true) { + try { token = tokenizer.getToken() } + catch(e) { console.error(e.message); process.exit(1) } + result.push(token) + if (token.type == acorn.tokTypes.eof) break + } + } + if (!silent) console.log(JSON.stringify(result, null, compact ? null : 2)) +} + +if (forceFile || infile && infile != "-") { + run(fs.readFileSync(infile, "utf8")) +} else { + var code = "" + process.stdin.resume() + process.stdin.on("data", function (chunk) { return code += chunk; }) + process.stdin.on("end", function () { return run(code); }) +} \ No newline at end of file diff --git a/node_modules/acorn/bin/generate-identifier-regex.js b/node_modules/acorn/bin/generate-identifier-regex.js new file mode 100644 index 0000000..100e8cf --- /dev/null +++ b/node_modules/acorn/bin/generate-identifier-regex.js @@ -0,0 +1,55 @@ +'use strict'; + +// Which Unicode version should be used? 
+var version = '9.0.0'; + +var start = require('unicode-' + version + '/Binary_Property/ID_Start/code-points.js') + .filter(function(ch) { return ch > 0x7f; }); +var last = -1; +var cont = [0x200c, 0x200d].concat(require('unicode-' + version + '/Binary_Property/ID_Continue/code-points.js') + .filter(function(ch) { return ch > 0x7f && search(start, ch, last + 1) == -1; })); + +function search(arr, ch, starting) { + for (var i = starting; arr[i] <= ch && i < arr.length; last = i++) + if (arr[i] === ch) + return i; + return -1; +} + +function pad(str, width) { + while (str.length < width) str = "0" + str; + return str; +} + +function esc(code) { + var hex = code.toString(16); + if (hex.length <= 2) return "\\x" + pad(hex, 2); + else return "\\u" + pad(hex, 4); +} + +function generate(chars) { + var astral = [], re = ""; + for (var i = 0, at = 0x10000; i < chars.length; i++) { + var from = chars[i], to = from; + while (i < chars.length - 1 && chars[i + 1] == to + 1) { + i++; + to++; + } + if (to <= 0xffff) { + if (from == to) re += esc(from); + else if (from + 1 == to) re += esc(from) + esc(to); + else re += esc(from) + "-" + esc(to); + } else { + astral.push(from - at, to - from); + at = to; + } + } + return {nonASCII: re, astral: astral}; +} + +var startData = generate(start), contData = generate(cont); + +console.log("let nonASCIIidentifierStartChars = \"" + startData.nonASCII + "\""); +console.log("let nonASCIIidentifierChars = \"" + contData.nonASCII + "\""); +console.log("const astralIdentifierStartCodes = " + JSON.stringify(startData.astral)); +console.log("const astralIdentifierCodes = " + JSON.stringify(contData.astral)); diff --git a/node_modules/acorn/bin/update_authors.sh b/node_modules/acorn/bin/update_authors.sh new file mode 100755 index 0000000..466c8db --- /dev/null +++ b/node_modules/acorn/bin/update_authors.sh @@ -0,0 +1,6 @@ +# Combine existing list of authors with everyone known in git, sort, add header. +tail --lines=+3 AUTHORS > AUTHORS.tmp +git log --format='%aN' | grep -v abraidwood >> AUTHORS.tmp +echo -e "List of Acorn contributors. Updated before every release.\n" > AUTHORS +sort -u AUTHORS.tmp >> AUTHORS +rm -f AUTHORS.tmp diff --git a/node_modules/acorn/dist/.keep b/node_modules/acorn/dist/.keep new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/acorn/dist/acorn.es.js b/node_modules/acorn/dist/acorn.es.js new file mode 100644 index 0000000..4460957 --- /dev/null +++ b/node_modules/acorn/dist/acorn.es.js @@ -0,0 +1,3112 @@ +// Reserved word lists for various dialects of the language + +var reservedWords = { + 3: "abstract boolean byte char class double enum export extends final float goto implements import int interface long native package private protected public short static super synchronized throws transient volatile", + 5: "class enum extends super const export import", + 6: "enum", + 7: "enum", + strict: "implements interface let package private protected public static yield", + strictBind: "eval arguments" +} + +// And the keywords + +var ecma5AndLessKeywords = "break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this" + +var keywords = { + 5: ecma5AndLessKeywords, + 6: ecma5AndLessKeywords + " const class extends export import super" +} + +// ## Character categories + +// Big ugly regular expressions that match characters in the +// whitespace, identifier, and identifier-start categories. 
These +// are only applied when a character is found to actually have a +// code point above 128. +// Generated by `bin/generate-identifier-regex.js`. + +var nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fd5\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua6
1f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ae\ua7b0-\ua7b7\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc" +var nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d4-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d01-\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf8\u1cf9\u1dc0-\u1df5\u1dfb-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f" + +var nonASCIIidentifierStart = 
new RegExp("[" + nonASCIIidentifierStartChars + "]") +var nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]") + +nonASCIIidentifierStartChars = nonASCIIidentifierChars = null + +// These are a run-length and offset encoded representation of the +// >0xffff code points that are a valid part of identifiers. The +// offset starts at 0x10000, and each pair of numbers represents an +// offset to the next range, and then a size of the range. They were +// generated by bin/generate-identifier-regex.js +var astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,17,26,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,157,310,10,21,11,7,153,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,26,45,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,785,52,76,44,33,24,27,35,42,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,85,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,54,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,86,25,391,63,32,0,449,56,264,8,2,36,18,0,50,29,881,921,103,110,18,195,2749,1070,4050,582,8634,568,8,30,114,29,19,47,17,3,32,20,6,18,881,68,12,0,67,12,65,0,32,6124,20,754,9486,1,3071,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,4149,196,60,67,1213,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42710,42,4148,12,221,3,5761,10591,541] +var astralIdentifierCodes = [509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,1306,2,54,14,32,9,16,3,46,10,54,9,7,2,37,13,2,9,52,0,13,2,49,13,10,2,4,9,83,11,7,0,161,11,6,9,7,3,57,0,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,87,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,423,9,838,7,2,7,17,9,57,21,2,13,19882,9,135,4,60,6,26,9,1016,45,17,3,19723,1,5319,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,2214,6,110,6,6,9,792487,239] + +// This has a complexity linear to the value of the code. The +// assumption is that looking up astral identifier characters is +// rare. +function isInAstralSet(code, set) { + var pos = 0x10000 + for (var i = 0; i < set.length; i += 2) { + pos += set[i] + if (pos > code) return false + pos += set[i + 1] + if (pos >= code) return true + } +} + +// Test whether a given character code starts an identifier. + +function isIdentifierStart(code, astral) { + if (code < 65) return code === 36 + if (code < 91) return true + if (code < 97) return code === 95 + if (code < 123) return true + if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code)) + if (astral === false) return false + return isInAstralSet(code, astralIdentifierStartCodes) +} + +// Test whether a given character is part of an identifier. 
+ +function isIdentifierChar(code, astral) { + if (code < 48) return code === 36 + if (code < 58) return true + if (code < 65) return false + if (code < 91) return true + if (code < 97) return code === 95 + if (code < 123) return true + if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code)) + if (astral === false) return false + return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes) +} + +// ## Token types + +// The assignment of fine-grained, information-carrying type objects +// allows the tokenizer to store the information it has about a +// token in a way that is very cheap for the parser to look up. + +// All token type variables start with an underscore, to make them +// easy to recognize. + +// The `beforeExpr` property is used to disambiguate between regular +// expressions and divisions. It is set on all token types that can +// be followed by an expression (thus, a slash after them would be a +// regular expression). +// +// The `startsExpr` property is used to check if the token ends a +// `yield` expression. It is set on all token types that either can +// directly start an expression (like a quotation mark) or can +// continue an expression (like the body of a string). +// +// `isLoop` marks a keyword as starting a loop, which is important +// to know when parsing a label, in order to allow or disallow +// continue jumps to that label. + +var TokenType = function TokenType(label, conf) { + if ( conf === void 0 ) conf = {}; + + this.label = label + this.keyword = conf.keyword + this.beforeExpr = !!conf.beforeExpr + this.startsExpr = !!conf.startsExpr + this.isLoop = !!conf.isLoop + this.isAssign = !!conf.isAssign + this.prefix = !!conf.prefix + this.postfix = !!conf.postfix + this.binop = conf.binop || null + this.updateContext = null +}; + +function binop(name, prec) { + return new TokenType(name, {beforeExpr: true, binop: prec}) +} +var beforeExpr = {beforeExpr: true}; +var startsExpr = {startsExpr: true}; +// Map keyword names to token types. + +var keywordTypes = {} + +// Succinct definitions of keyword token types +function kw(name, options) { + if ( options === void 0 ) options = {}; + + options.keyword = name + return keywordTypes[name] = new TokenType(name, options) +} + +var tt = { + num: new TokenType("num", startsExpr), + regexp: new TokenType("regexp", startsExpr), + string: new TokenType("string", startsExpr), + name: new TokenType("name", startsExpr), + eof: new TokenType("eof"), + + // Punctuation token types. + bracketL: new TokenType("[", {beforeExpr: true, startsExpr: true}), + bracketR: new TokenType("]"), + braceL: new TokenType("{", {beforeExpr: true, startsExpr: true}), + braceR: new TokenType("}"), + parenL: new TokenType("(", {beforeExpr: true, startsExpr: true}), + parenR: new TokenType(")"), + comma: new TokenType(",", beforeExpr), + semi: new TokenType(";", beforeExpr), + colon: new TokenType(":", beforeExpr), + dot: new TokenType("."), + question: new TokenType("?", beforeExpr), + arrow: new TokenType("=>", beforeExpr), + template: new TokenType("template"), + ellipsis: new TokenType("...", beforeExpr), + backQuote: new TokenType("`", startsExpr), + dollarBraceL: new TokenType("${", {beforeExpr: true, startsExpr: true}), + + // Operators. These carry several kinds of properties to help the + // parser use them properly (the presence of these properties is + // what categorizes them as operators). 
+ // + // `binop`, when present, specifies that this operator is a binary + // operator, and will refer to its precedence. + // + // `prefix` and `postfix` mark the operator as a prefix or postfix + // unary operator. + // + // `isAssign` marks all of `=`, `+=`, `-=` etcetera, which act as + // binary operators with a very low precedence, that should result + // in AssignmentExpression nodes. + + eq: new TokenType("=", {beforeExpr: true, isAssign: true}), + assign: new TokenType("_=", {beforeExpr: true, isAssign: true}), + incDec: new TokenType("++/--", {prefix: true, postfix: true, startsExpr: true}), + prefix: new TokenType("prefix", {beforeExpr: true, prefix: true, startsExpr: true}), + logicalOR: binop("||", 1), + logicalAND: binop("&&", 2), + bitwiseOR: binop("|", 3), + bitwiseXOR: binop("^", 4), + bitwiseAND: binop("&", 5), + equality: binop("==/!=", 6), + relational: binop("", 7), + bitShift: binop("<>", 8), + plusMin: new TokenType("+/-", {beforeExpr: true, binop: 9, prefix: true, startsExpr: true}), + modulo: binop("%", 10), + star: binop("*", 10), + slash: binop("/", 10), + starstar: new TokenType("**", {beforeExpr: true}), + + // Keyword token types. + _break: kw("break"), + _case: kw("case", beforeExpr), + _catch: kw("catch"), + _continue: kw("continue"), + _debugger: kw("debugger"), + _default: kw("default", beforeExpr), + _do: kw("do", {isLoop: true, beforeExpr: true}), + _else: kw("else", beforeExpr), + _finally: kw("finally"), + _for: kw("for", {isLoop: true}), + _function: kw("function", startsExpr), + _if: kw("if"), + _return: kw("return", beforeExpr), + _switch: kw("switch"), + _throw: kw("throw", beforeExpr), + _try: kw("try"), + _var: kw("var"), + _const: kw("const"), + _while: kw("while", {isLoop: true}), + _with: kw("with"), + _new: kw("new", {beforeExpr: true, startsExpr: true}), + _this: kw("this", startsExpr), + _super: kw("super", startsExpr), + _class: kw("class"), + _extends: kw("extends", beforeExpr), + _export: kw("export"), + _import: kw("import"), + _null: kw("null", startsExpr), + _true: kw("true", startsExpr), + _false: kw("false", startsExpr), + _in: kw("in", {beforeExpr: true, binop: 7}), + _instanceof: kw("instanceof", {beforeExpr: true, binop: 7}), + _typeof: kw("typeof", {beforeExpr: true, prefix: true, startsExpr: true}), + _void: kw("void", {beforeExpr: true, prefix: true, startsExpr: true}), + _delete: kw("delete", {beforeExpr: true, prefix: true, startsExpr: true}) +} + +// Matches a whole line break (where CRLF is considered a single +// line break). Used to count lines. + +var lineBreak = /\r\n?|\n|\u2028|\u2029/ +var lineBreakG = new RegExp(lineBreak.source, "g") + +function isNewLine(code) { + return code === 10 || code === 13 || code === 0x2028 || code == 0x2029 +} + +var nonASCIIwhitespace = /[\u1680\u180e\u2000-\u200a\u202f\u205f\u3000\ufeff]/ + +var skipWhiteSpace = /(?:\s|\/\/.*|\/\*[^]*?\*\/)*/g + +function isArray(obj) { + return Object.prototype.toString.call(obj) === "[object Array]" +} + +// Checks if an object has a property. + +function has(obj, propName) { + return Object.prototype.hasOwnProperty.call(obj, propName) +} + +// These are used when `options.locations` is on, for the +// `startLoc` and `endLoc` properties. 
+ +var Position = function Position(line, col) { + this.line = line + this.column = col +}; + +Position.prototype.offset = function offset (n) { + return new Position(this.line, this.column + n) +}; + +var SourceLocation = function SourceLocation(p, start, end) { + this.start = start + this.end = end + if (p.sourceFile !== null) this.source = p.sourceFile +}; + +// The `getLineInfo` function is mostly useful when the +// `locations` option is off (for performance reasons) and you +// want to find the line/column position for a given character +// offset. `input` should be the code string that the offset refers +// into. + +function getLineInfo(input, offset) { + for (var line = 1, cur = 0;;) { + lineBreakG.lastIndex = cur + var match = lineBreakG.exec(input) + if (match && match.index < offset) { + ++line + cur = match.index + match[0].length + } else { + return new Position(line, offset - cur) + } + } +} + +// A second optional argument can be given to further configure +// the parser process. These options are recognized: + +var defaultOptions = { + // `ecmaVersion` indicates the ECMAScript version to parse. Must + // be either 3, or 5, or 6. This influences support for strict + // mode, the set of reserved words, support for getters and + // setters and other features. The default is 6. + ecmaVersion: 6, + // Source type ("script" or "module") for different semantics + sourceType: "script", + // `onInsertedSemicolon` can be a callback that will be called + // when a semicolon is automatically inserted. It will be passed + // th position of the comma as an offset, and if `locations` is + // enabled, it is given the location as a `{line, column}` object + // as second argument. + onInsertedSemicolon: null, + // `onTrailingComma` is similar to `onInsertedSemicolon`, but for + // trailing commas. + onTrailingComma: null, + // By default, reserved words are only enforced if ecmaVersion >= 5. + // Set `allowReserved` to a boolean value to explicitly turn this on + // an off. When this option has the value "never", reserved words + // and keywords can also not be used as property names. + allowReserved: null, + // When enabled, a return at the top level is not considered an + // error. + allowReturnOutsideFunction: false, + // When enabled, import/export statements are not constrained to + // appearing at the top of the program. + allowImportExportEverywhere: false, + // When enabled, hashbang directive in the beginning of file + // is allowed and treated as a line comment. + allowHashBang: false, + // When `locations` is on, `loc` properties holding objects with + // `start` and `end` properties in `{line, column}` form (with + // line being 1-based and column 0-based) will be attached to the + // nodes. + locations: false, + // A function can be passed as `onToken` option, which will + // cause Acorn to call that function with object in the same + // format as tokens returned from `tokenizer().getToken()`. Note + // that you are not allowed to call the parser from the + // callback—that will corrupt its internal state. + onToken: null, + // A function can be passed as `onComment` option, which will + // cause Acorn to call that function with `(block, text, start, + // end)` parameters whenever a comment is skipped. `block` is a + // boolean indicating whether this is a block (`/* */`) comment, + // `text` is the content of the comment, and `start` and `end` are + // character offsets that denote the start and end of the comment. 
+ // When the `locations` option is on, two more parameters are + // passed, the full `{line, column}` locations of the start and + // end of the comments. Note that you are not allowed to call the + // parser from the callback—that will corrupt its internal state. + onComment: null, + // Nodes have their start and end characters offsets recorded in + // `start` and `end` properties (directly on the node, rather than + // the `loc` object, which holds line/column data. To also add a + // [semi-standardized][range] `range` property holding a `[start, + // end]` array with the same numbers, set the `ranges` option to + // `true`. + // + // [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678 + ranges: false, + // It is possible to parse multiple files into a single AST by + // passing the tree produced by parsing the first file as + // `program` option in subsequent parses. This will add the + // toplevel forms of the parsed file to the `Program` (top) node + // of an existing parse tree. + program: null, + // When `locations` is on, you can pass this to record the source + // file in every node's `loc` object. + sourceFile: null, + // This value, if given, is stored in every node, whether + // `locations` is on or off. + directSourceFile: null, + // When enabled, parenthesized expressions are represented by + // (non-standard) ParenthesizedExpression nodes + preserveParens: false, + plugins: {} +} + +// Interpret and default an options object + +function getOptions(opts) { + var options = {} + for (var opt in defaultOptions) + options[opt] = opts && has(opts, opt) ? opts[opt] : defaultOptions[opt] + if (options.allowReserved == null) + options.allowReserved = options.ecmaVersion < 5 + + if (isArray(options.onToken)) { + var tokens = options.onToken + options.onToken = function (token) { return tokens.push(token); } + } + if (isArray(options.onComment)) + options.onComment = pushComment(options, options.onComment) + + return options +} + +function pushComment(options, array) { + return function (block, text, start, end, startLoc, endLoc) { + var comment = { + type: block ? 'Block' : 'Line', + value: text, + start: start, + end: end + } + if (options.locations) + comment.loc = new SourceLocation(this, startLoc, endLoc) + if (options.ranges) + comment.range = [start, end] + array.push(comment) + } +} + +// Registered plugins +var plugins = {} + +function keywordRegexp(words) { + return new RegExp("^(" + words.replace(/ /g, "|") + ")$") +} + +var Parser = function Parser(options, input, startPos) { + this.options = options = getOptions(options) + this.sourceFile = options.sourceFile + this.keywords = keywordRegexp(keywords[options.ecmaVersion >= 6 ? 6 : 5]) + var reserved = options.allowReserved ? "" : + reservedWords[options.ecmaVersion] + (options.sourceType == "module" ? " await" : "") + this.reservedWords = keywordRegexp(reserved) + var reservedStrict = (reserved ? reserved + " " : "") + reservedWords.strict + this.reservedWordsStrict = keywordRegexp(reservedStrict) + this.reservedWordsStrictBind = keywordRegexp(reservedStrict + " " + reservedWords.strictBind) + this.input = String(input) + + // Used to signal to callers of `readWord1` whether the word + // contained any escape sequences. This is needed because words with + // escape sequences must not be interpreted as keywords. + this.containsEsc = false + + // Load plugins + this.loadPlugins(options.plugins) + + // Set up token state + + // The current position of the tokenizer in the input. 
+ if (startPos) { + this.pos = startPos + this.lineStart = Math.max(0, this.input.lastIndexOf("\n", startPos)) + this.curLine = this.input.slice(0, this.lineStart).split(lineBreak).length + } else { + this.pos = this.lineStart = 0 + this.curLine = 1 + } + + // Properties of the current token: + // Its type + this.type = tt.eof + // For tokens that include more information than their type, the value + this.value = null + // Its start and end offset + this.start = this.end = this.pos + // And, if locations are used, the {line, column} object + // corresponding to those offsets + this.startLoc = this.endLoc = this.curPosition() + + // Position information for the previous token + this.lastTokEndLoc = this.lastTokStartLoc = null + this.lastTokStart = this.lastTokEnd = this.pos + + // The context stack is used to superficially track syntactic + // context to predict whether a regular expression is allowed in a + // given position. + this.context = this.initialContext() + this.exprAllowed = true + + // Figure out if it's a module code. + this.strict = this.inModule = options.sourceType === "module" + + // Used to signify the start of a potential arrow function + this.potentialArrowAt = -1 + + // Flags to track whether we are in a function, a generator. + this.inFunction = this.inGenerator = false + // Labels in scope. + this.labels = [] + + // If enabled, skip leading hashbang line. + if (this.pos === 0 && options.allowHashBang && this.input.slice(0, 2) === '#!') + this.skipLineComment(2) +}; + +// DEPRECATED Kept for backwards compatibility until 3.0 in case a plugin uses them +Parser.prototype.isKeyword = function isKeyword (word) { return this.keywords.test(word) }; +Parser.prototype.isReservedWord = function isReservedWord (word) { return this.reservedWords.test(word) }; + +Parser.prototype.extend = function extend (name, f) { + this[name] = f(this[name]) +}; + +Parser.prototype.loadPlugins = function loadPlugins (pluginConfigs) { + var this$1 = this; + + for (var name in pluginConfigs) { + var plugin = plugins[name] + if (!plugin) throw new Error("Plugin '" + name + "' not found") + plugin(this$1, pluginConfigs[name]) + } +}; + +Parser.prototype.parse = function parse () { + var node = this.options.program || this.startNode() + this.nextToken() + return this.parseTopLevel(node) +}; + +var pp = Parser.prototype + +// ## Parser utilities + +// Test whether a statement node is the string literal `"use strict"`. + +pp.isUseStrict = function(stmt) { + return this.options.ecmaVersion >= 5 && stmt.type === "ExpressionStatement" && + stmt.expression.type === "Literal" && + stmt.expression.raw.slice(1, -1) === "use strict" +} + +// Predicate that tests whether the next token is of the given +// type, and if yes, consumes it as a side effect. + +pp.eat = function(type) { + if (this.type === type) { + this.next() + return true + } else { + return false + } +} + +// Tests whether parsed token is a contextual keyword. + +pp.isContextual = function(name) { + return this.type === tt.name && this.value === name +} + +// Consumes contextual keyword if possible. + +pp.eatContextual = function(name) { + return this.value === name && this.eat(tt.name) +} + +// Asserts that following token is given contextual keyword. + +pp.expectContextual = function(name) { + if (!this.eatContextual(name)) this.unexpected() +} + +// Test whether a semicolon can be inserted at the current position. 
+ +pp.canInsertSemicolon = function() { + return this.type === tt.eof || + this.type === tt.braceR || + lineBreak.test(this.input.slice(this.lastTokEnd, this.start)) +} + +pp.insertSemicolon = function() { + if (this.canInsertSemicolon()) { + if (this.options.onInsertedSemicolon) + this.options.onInsertedSemicolon(this.lastTokEnd, this.lastTokEndLoc) + return true + } +} + +// Consume a semicolon, or, failing that, see if we are allowed to +// pretend that there is a semicolon at this position. + +pp.semicolon = function() { + if (!this.eat(tt.semi) && !this.insertSemicolon()) this.unexpected() +} + +pp.afterTrailingComma = function(tokType) { + if (this.type == tokType) { + if (this.options.onTrailingComma) + this.options.onTrailingComma(this.lastTokStart, this.lastTokStartLoc) + this.next() + return true + } +} + +// Expect a token of a given type. If found, consume it, otherwise, +// raise an unexpected token error. + +pp.expect = function(type) { + this.eat(type) || this.unexpected() +} + +// Raise an unexpected token error. + +pp.unexpected = function(pos) { + this.raise(pos != null ? pos : this.start, "Unexpected token") +} + +var DestructuringErrors = function DestructuringErrors() { + this.shorthandAssign = 0 + this.trailingComma = 0 +}; + +pp.checkPatternErrors = function(refDestructuringErrors, andThrow) { + var trailing = refDestructuringErrors && refDestructuringErrors.trailingComma + if (!andThrow) return !!trailing + if (trailing) this.raise(trailing, "Comma is not permitted after the rest element") +} + +pp.checkExpressionErrors = function(refDestructuringErrors, andThrow) { + var pos = refDestructuringErrors && refDestructuringErrors.shorthandAssign + if (!andThrow) return !!pos + if (pos) this.raise(pos, "Shorthand property assignments are valid only in destructuring patterns") +} + +var pp$1 = Parser.prototype + +// ### Statement parsing + +// Parse a program. Initializes the parser, reads any number of +// statements, and wraps them in a Program node. Optionally takes a +// `program` argument. If present, the statements will be appended +// to its body instead of creating a new node. + +pp$1.parseTopLevel = function(node) { + var this$1 = this; + + var first = true + if (!node.body) node.body = [] + while (this.type !== tt.eof) { + var stmt = this$1.parseStatement(true, true) + node.body.push(stmt) + if (first) { + if (this$1.isUseStrict(stmt)) this$1.setStrict(true) + first = false + } + } + this.next() + if (this.options.ecmaVersion >= 6) { + node.sourceType = this.options.sourceType + } + return this.finishNode(node, "Program") +} + +var loopLabel = {kind: "loop"}; +var switchLabel = {kind: "switch"}; +pp$1.isLet = function() { + if (this.type !== tt.name || this.options.ecmaVersion < 6 || this.value != "let") return false + skipWhiteSpace.lastIndex = this.pos + var skip = skipWhiteSpace.exec(this.input) + var next = this.pos + skip[0].length, nextCh = this.input.charCodeAt(next) + if (nextCh === 91 || nextCh == 123) return true // '{' and '[' + if (isIdentifierStart(nextCh, true)) { + for (var pos = next + 1; isIdentifierChar(this.input.charCodeAt(pos), true); ++pos) {} + var ident = this.input.slice(next, pos) + if (!this.isKeyword(ident)) return true + } + return false +} + +// Parse a single statement. +// +// If expecting a statement and finding a slash operator, parse a +// regular expression literal. This is to handle cases like +// `if (foo) /blah/.exec(foo)`, where looking at the previous token +// does not help. 
+ +pp$1.parseStatement = function(declaration, topLevel) { + var starttype = this.type, node = this.startNode(), kind + + if (this.isLet()) { + starttype = tt._var + kind = "let" + } + + // Most types of statements are recognized by the keyword they + // start with. Many are trivial to parse, some require a bit of + // complexity. + + switch (starttype) { + case tt._break: case tt._continue: return this.parseBreakContinueStatement(node, starttype.keyword) + case tt._debugger: return this.parseDebuggerStatement(node) + case tt._do: return this.parseDoStatement(node) + case tt._for: return this.parseForStatement(node) + case tt._function: + if (!declaration && this.options.ecmaVersion >= 6) this.unexpected() + return this.parseFunctionStatement(node) + case tt._class: + if (!declaration) this.unexpected() + return this.parseClass(node, true) + case tt._if: return this.parseIfStatement(node) + case tt._return: return this.parseReturnStatement(node) + case tt._switch: return this.parseSwitchStatement(node) + case tt._throw: return this.parseThrowStatement(node) + case tt._try: return this.parseTryStatement(node) + case tt._const: case tt._var: + kind = kind || this.value + if (!declaration && kind != "var") this.unexpected() + return this.parseVarStatement(node, kind) + case tt._while: return this.parseWhileStatement(node) + case tt._with: return this.parseWithStatement(node) + case tt.braceL: return this.parseBlock() + case tt.semi: return this.parseEmptyStatement(node) + case tt._export: + case tt._import: + if (!this.options.allowImportExportEverywhere) { + if (!topLevel) + this.raise(this.start, "'import' and 'export' may only appear at the top level") + if (!this.inModule) + this.raise(this.start, "'import' and 'export' may appear only with 'sourceType: module'") + } + return starttype === tt._import ? this.parseImport(node) : this.parseExport(node) + + // If the statement does not start with a statement keyword or a + // brace, it's an ExpressionStatement or LabeledStatement. We + // simply start parsing an expression, and afterwards, if the + // next token is a colon and the expression was a simple + // Identifier node, we switch to interpreting it as a label. + default: + var maybeName = this.value, expr = this.parseExpression() + if (starttype === tt.name && expr.type === "Identifier" && this.eat(tt.colon)) + return this.parseLabeledStatement(node, maybeName, expr) + else return this.parseExpressionStatement(node, expr) + } +} + +pp$1.parseBreakContinueStatement = function(node, keyword) { + var this$1 = this; + + var isBreak = keyword == "break" + this.next() + if (this.eat(tt.semi) || this.insertSemicolon()) node.label = null + else if (this.type !== tt.name) this.unexpected() + else { + node.label = this.parseIdent() + this.semicolon() + } + + // Verify that there is an actual destination to break or + // continue to. + for (var i = 0; i < this.labels.length; ++i) { + var lab = this$1.labels[i] + if (node.label == null || lab.name === node.label.name) { + if (lab.kind != null && (isBreak || lab.kind === "loop")) break + if (node.label && isBreak) break + } + } + if (i === this.labels.length) this.raise(node.start, "Unsyntactic " + keyword) + return this.finishNode(node, isBreak ? 
"BreakStatement" : "ContinueStatement") +} + +pp$1.parseDebuggerStatement = function(node) { + this.next() + this.semicolon() + return this.finishNode(node, "DebuggerStatement") +} + +pp$1.parseDoStatement = function(node) { + this.next() + this.labels.push(loopLabel) + node.body = this.parseStatement(false) + this.labels.pop() + this.expect(tt._while) + node.test = this.parseParenExpression() + if (this.options.ecmaVersion >= 6) + this.eat(tt.semi) + else + this.semicolon() + return this.finishNode(node, "DoWhileStatement") +} + +// Disambiguating between a `for` and a `for`/`in` or `for`/`of` +// loop is non-trivial. Basically, we have to parse the init `var` +// statement or expression, disallowing the `in` operator (see +// the second parameter to `parseExpression`), and then check +// whether the next token is `in` or `of`. When there is no init +// part (semicolon immediately after the opening parenthesis), it +// is a regular `for` loop. + +pp$1.parseForStatement = function(node) { + this.next() + this.labels.push(loopLabel) + this.expect(tt.parenL) + if (this.type === tt.semi) return this.parseFor(node, null) + var isLet = this.isLet() + if (this.type === tt._var || this.type === tt._const || isLet) { + var init$1 = this.startNode(), kind = isLet ? "let" : this.value + this.next() + this.parseVar(init$1, true, kind) + this.finishNode(init$1, "VariableDeclaration") + if ((this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) && init$1.declarations.length === 1 && + !(kind !== "var" && init$1.declarations[0].init)) + return this.parseForIn(node, init$1) + return this.parseFor(node, init$1) + } + var refDestructuringErrors = new DestructuringErrors + var init = this.parseExpression(true, refDestructuringErrors) + if (this.type === tt._in || (this.options.ecmaVersion >= 6 && this.isContextual("of"))) { + this.checkPatternErrors(refDestructuringErrors, true) + this.toAssignable(init) + this.checkLVal(init) + return this.parseForIn(node, init) + } else { + this.checkExpressionErrors(refDestructuringErrors, true) + } + return this.parseFor(node, init) +} + +pp$1.parseFunctionStatement = function(node) { + this.next() + return this.parseFunction(node, true) +} + +pp$1.parseIfStatement = function(node) { + this.next() + node.test = this.parseParenExpression() + node.consequent = this.parseStatement(false) + node.alternate = this.eat(tt._else) ? this.parseStatement(false) : null + return this.finishNode(node, "IfStatement") +} + +pp$1.parseReturnStatement = function(node) { + if (!this.inFunction && !this.options.allowReturnOutsideFunction) + this.raise(this.start, "'return' outside of function") + this.next() + + // In `return` (and `break`/`continue`), the keywords with + // optional arguments, we eagerly look for a semicolon or the + // possibility to insert one. + + if (this.eat(tt.semi) || this.insertSemicolon()) node.argument = null + else { node.argument = this.parseExpression(); this.semicolon() } + return this.finishNode(node, "ReturnStatement") +} + +pp$1.parseSwitchStatement = function(node) { + var this$1 = this; + + this.next() + node.discriminant = this.parseParenExpression() + node.cases = [] + this.expect(tt.braceL) + this.labels.push(switchLabel) + + // Statements under must be grouped (by label) in SwitchCase + // nodes. `cur` is used to keep the node that we are currently + // adding statements to. 
+ + for (var cur, sawDefault = false; this.type != tt.braceR;) { + if (this$1.type === tt._case || this$1.type === tt._default) { + var isCase = this$1.type === tt._case + if (cur) this$1.finishNode(cur, "SwitchCase") + node.cases.push(cur = this$1.startNode()) + cur.consequent = [] + this$1.next() + if (isCase) { + cur.test = this$1.parseExpression() + } else { + if (sawDefault) this$1.raiseRecoverable(this$1.lastTokStart, "Multiple default clauses") + sawDefault = true + cur.test = null + } + this$1.expect(tt.colon) + } else { + if (!cur) this$1.unexpected() + cur.consequent.push(this$1.parseStatement(true)) + } + } + if (cur) this.finishNode(cur, "SwitchCase") + this.next() // Closing brace + this.labels.pop() + return this.finishNode(node, "SwitchStatement") +} + +pp$1.parseThrowStatement = function(node) { + this.next() + if (lineBreak.test(this.input.slice(this.lastTokEnd, this.start))) + this.raise(this.lastTokEnd, "Illegal newline after throw") + node.argument = this.parseExpression() + this.semicolon() + return this.finishNode(node, "ThrowStatement") +} + +// Reused empty array added for node fields that are always empty. + +var empty = [] + +pp$1.parseTryStatement = function(node) { + this.next() + node.block = this.parseBlock() + node.handler = null + if (this.type === tt._catch) { + var clause = this.startNode() + this.next() + this.expect(tt.parenL) + clause.param = this.parseBindingAtom() + this.checkLVal(clause.param, true) + this.expect(tt.parenR) + clause.body = this.parseBlock() + node.handler = this.finishNode(clause, "CatchClause") + } + node.finalizer = this.eat(tt._finally) ? this.parseBlock() : null + if (!node.handler && !node.finalizer) + this.raise(node.start, "Missing catch or finally clause") + return this.finishNode(node, "TryStatement") +} + +pp$1.parseVarStatement = function(node, kind) { + this.next() + this.parseVar(node, false, kind) + this.semicolon() + return this.finishNode(node, "VariableDeclaration") +} + +pp$1.parseWhileStatement = function(node) { + this.next() + node.test = this.parseParenExpression() + this.labels.push(loopLabel) + node.body = this.parseStatement(false) + this.labels.pop() + return this.finishNode(node, "WhileStatement") +} + +pp$1.parseWithStatement = function(node) { + if (this.strict) this.raise(this.start, "'with' in strict mode") + this.next() + node.object = this.parseParenExpression() + node.body = this.parseStatement(false) + return this.finishNode(node, "WithStatement") +} + +pp$1.parseEmptyStatement = function(node) { + this.next() + return this.finishNode(node, "EmptyStatement") +} + +pp$1.parseLabeledStatement = function(node, maybeName, expr) { + var this$1 = this; + + for (var i = 0; i < this.labels.length; ++i) + if (this$1.labels[i].name === maybeName) this$1.raise(expr.start, "Label '" + maybeName + "' is already declared") + var kind = this.type.isLoop ? "loop" : this.type === tt._switch ? 
"switch" : null + for (var i$1 = this.labels.length - 1; i$1 >= 0; i$1--) { + var label = this$1.labels[i$1] + if (label.statementStart == node.start) { + label.statementStart = this$1.start + label.kind = kind + } else break + } + this.labels.push({name: maybeName, kind: kind, statementStart: this.start}) + node.body = this.parseStatement(true) + this.labels.pop() + node.label = expr + return this.finishNode(node, "LabeledStatement") +} + +pp$1.parseExpressionStatement = function(node, expr) { + node.expression = expr + this.semicolon() + return this.finishNode(node, "ExpressionStatement") +} + +// Parse a semicolon-enclosed block of statements, handling `"use +// strict"` declarations when `allowStrict` is true (used for +// function bodies). + +pp$1.parseBlock = function(allowStrict) { + var this$1 = this; + + var node = this.startNode(), first = true, oldStrict + node.body = [] + this.expect(tt.braceL) + while (!this.eat(tt.braceR)) { + var stmt = this$1.parseStatement(true) + node.body.push(stmt) + if (first && allowStrict && this$1.isUseStrict(stmt)) { + oldStrict = this$1.strict + this$1.setStrict(this$1.strict = true) + } + first = false + } + if (oldStrict === false) this.setStrict(false) + return this.finishNode(node, "BlockStatement") +} + +// Parse a regular `for` loop. The disambiguation code in +// `parseStatement` will already have parsed the init statement or +// expression. + +pp$1.parseFor = function(node, init) { + node.init = init + this.expect(tt.semi) + node.test = this.type === tt.semi ? null : this.parseExpression() + this.expect(tt.semi) + node.update = this.type === tt.parenR ? null : this.parseExpression() + this.expect(tt.parenR) + node.body = this.parseStatement(false) + this.labels.pop() + return this.finishNode(node, "ForStatement") +} + +// Parse a `for`/`in` and `for`/`of` loop, which are almost +// same from parser's perspective. + +pp$1.parseForIn = function(node, init) { + var type = this.type === tt._in ? "ForInStatement" : "ForOfStatement" + this.next() + node.left = init + node.right = this.parseExpression() + this.expect(tt.parenR) + node.body = this.parseStatement(false) + this.labels.pop() + return this.finishNode(node, type) +} + +// Parse a list of variable declarations. + +pp$1.parseVar = function(node, isFor, kind) { + var this$1 = this; + + node.declarations = [] + node.kind = kind + for (;;) { + var decl = this$1.startNode() + this$1.parseVarId(decl) + if (this$1.eat(tt.eq)) { + decl.init = this$1.parseMaybeAssign(isFor) + } else if (kind === "const" && !(this$1.type === tt._in || (this$1.options.ecmaVersion >= 6 && this$1.isContextual("of")))) { + this$1.unexpected() + } else if (decl.id.type != "Identifier" && !(isFor && (this$1.type === tt._in || this$1.isContextual("of")))) { + this$1.raise(this$1.lastTokEnd, "Complex binding patterns require an initialization value") + } else { + decl.init = null + } + node.declarations.push(this$1.finishNode(decl, "VariableDeclarator")) + if (!this$1.eat(tt.comma)) break + } + return node +} + +pp$1.parseVarId = function(decl) { + decl.id = this.parseBindingAtom() + this.checkLVal(decl.id, true) +} + +// Parse a function declaration or literal (depending on the +// `isStatement` parameter). 
+ +pp$1.parseFunction = function(node, isStatement, allowExpressionBody) { + this.initFunction(node) + if (this.options.ecmaVersion >= 6) + node.generator = this.eat(tt.star) + var oldInGen = this.inGenerator + this.inGenerator = node.generator + if (isStatement || this.type === tt.name) + node.id = this.parseIdent() + this.parseFunctionParams(node) + this.parseFunctionBody(node, allowExpressionBody) + this.inGenerator = oldInGen + return this.finishNode(node, isStatement ? "FunctionDeclaration" : "FunctionExpression") +} + +pp$1.parseFunctionParams = function(node) { + this.expect(tt.parenL) + node.params = this.parseBindingList(tt.parenR, false, false, true) +} + +// Parse a class declaration or literal (depending on the +// `isStatement` parameter). + +pp$1.parseClass = function(node, isStatement) { + var this$1 = this; + + this.next() + this.parseClassId(node, isStatement) + this.parseClassSuper(node) + var classBody = this.startNode() + var hadConstructor = false + classBody.body = [] + this.expect(tt.braceL) + while (!this.eat(tt.braceR)) { + if (this$1.eat(tt.semi)) continue + var method = this$1.startNode() + var isGenerator = this$1.eat(tt.star) + var isMaybeStatic = this$1.type === tt.name && this$1.value === "static" + this$1.parsePropertyName(method) + method.static = isMaybeStatic && this$1.type !== tt.parenL + if (method.static) { + if (isGenerator) this$1.unexpected() + isGenerator = this$1.eat(tt.star) + this$1.parsePropertyName(method) + } + method.kind = "method" + var isGetSet = false + if (!method.computed) { + var key = method.key; + if (!isGenerator && key.type === "Identifier" && this$1.type !== tt.parenL && (key.name === "get" || key.name === "set")) { + isGetSet = true + method.kind = key.name + key = this$1.parsePropertyName(method) + } + if (!method.static && (key.type === "Identifier" && key.name === "constructor" || + key.type === "Literal" && key.value === "constructor")) { + if (hadConstructor) this$1.raise(key.start, "Duplicate constructor in the same class") + if (isGetSet) this$1.raise(key.start, "Constructor can't have get/set modifier") + if (isGenerator) this$1.raise(key.start, "Constructor can't be a generator") + method.kind = "constructor" + hadConstructor = true + } + } + this$1.parseClassMethod(classBody, method, isGenerator) + if (isGetSet) { + var paramCount = method.kind === "get" ? 0 : 1 + if (method.value.params.length !== paramCount) { + var start = method.value.start + if (method.kind === "get") + this$1.raiseRecoverable(start, "getter should have no params") + else + this$1.raiseRecoverable(start, "setter should have exactly one param") + } + if (method.kind === "set" && method.value.params[0].type === "RestElement") + this$1.raise(method.value.params[0].start, "Setter cannot use rest params") + } + } + node.body = this.finishNode(classBody, "ClassBody") + return this.finishNode(node, isStatement ? "ClassDeclaration" : "ClassExpression") +} + +pp$1.parseClassMethod = function(classBody, method, isGenerator) { + method.value = this.parseMethod(isGenerator) + classBody.body.push(this.finishNode(method, "MethodDefinition")) +} + +pp$1.parseClassId = function(node, isStatement) { + node.id = this.type === tt.name ? this.parseIdent() : isStatement ? this.unexpected() : null +} + +pp$1.parseClassSuper = function(node) { + node.superClass = this.eat(tt._extends) ? this.parseExprSubscripts() : null +} + +// Parses module export declaration. + +pp$1.parseExport = function(node) { + var this$1 = this; + + this.next() + // export * from '...' 
+ if (this.eat(tt.star)) { + this.expectContextual("from") + node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected() + this.semicolon() + return this.finishNode(node, "ExportAllDeclaration") + } + if (this.eat(tt._default)) { // export default ... + var parens = this.type == tt.parenL + var expr = this.parseMaybeAssign() + var needsSemi = true + if (!parens && (expr.type == "FunctionExpression" || + expr.type == "ClassExpression")) { + needsSemi = false + if (expr.id) { + expr.type = expr.type == "FunctionExpression" + ? "FunctionDeclaration" + : "ClassDeclaration" + } + } + node.declaration = expr + if (needsSemi) this.semicolon() + return this.finishNode(node, "ExportDefaultDeclaration") + } + // export var|const|let|function|class ... + if (this.shouldParseExportStatement()) { + node.declaration = this.parseStatement(true) + node.specifiers = [] + node.source = null + } else { // export { x, y as z } [from '...'] + node.declaration = null + node.specifiers = this.parseExportSpecifiers() + if (this.eatContextual("from")) { + node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected() + } else { + // check for keywords used as local names + for (var i = 0; i < node.specifiers.length; i++) { + if (this$1.keywords.test(node.specifiers[i].local.name) || this$1.reservedWords.test(node.specifiers[i].local.name)) { + this$1.unexpected(node.specifiers[i].local.start) + } + } + + node.source = null + } + this.semicolon() + } + return this.finishNode(node, "ExportNamedDeclaration") +} + +pp$1.shouldParseExportStatement = function() { + return this.type.keyword || this.isLet() +} + +// Parses a comma-separated list of module exports. + +pp$1.parseExportSpecifiers = function() { + var this$1 = this; + + var nodes = [], first = true + // export { x, y as z } [from '...'] + this.expect(tt.braceL) + while (!this.eat(tt.braceR)) { + if (!first) { + this$1.expect(tt.comma) + if (this$1.afterTrailingComma(tt.braceR)) break + } else first = false + + var node = this$1.startNode() + node.local = this$1.parseIdent(this$1.type === tt._default) + node.exported = this$1.eatContextual("as") ? this$1.parseIdent(true) : node.local + nodes.push(this$1.finishNode(node, "ExportSpecifier")) + } + return nodes +} + +// Parses import declaration. + +pp$1.parseImport = function(node) { + this.next() + // import '...' + if (this.type === tt.string) { + node.specifiers = empty + node.source = this.parseExprAtom() + } else { + node.specifiers = this.parseImportSpecifiers() + this.expectContextual("from") + node.source = this.type === tt.string ? this.parseExprAtom() : this.unexpected() + } + this.semicolon() + return this.finishNode(node, "ImportDeclaration") +} + +// Parses a comma-separated list of module imports. + +pp$1.parseImportSpecifiers = function() { + var this$1 = this; + + var nodes = [], first = true + if (this.type === tt.name) { + // import defaultObj, { x, y as z } from '...' 
+ var node = this.startNode() + node.local = this.parseIdent() + this.checkLVal(node.local, true) + nodes.push(this.finishNode(node, "ImportDefaultSpecifier")) + if (!this.eat(tt.comma)) return nodes + } + if (this.type === tt.star) { + var node$1 = this.startNode() + this.next() + this.expectContextual("as") + node$1.local = this.parseIdent() + this.checkLVal(node$1.local, true) + nodes.push(this.finishNode(node$1, "ImportNamespaceSpecifier")) + return nodes + } + this.expect(tt.braceL) + while (!this.eat(tt.braceR)) { + if (!first) { + this$1.expect(tt.comma) + if (this$1.afterTrailingComma(tt.braceR)) break + } else first = false + + var node$2 = this$1.startNode() + node$2.imported = this$1.parseIdent(true) + if (this$1.eatContextual("as")) { + node$2.local = this$1.parseIdent() + } else { + node$2.local = node$2.imported + if (this$1.isKeyword(node$2.local.name)) this$1.unexpected(node$2.local.start) + if (this$1.reservedWordsStrict.test(node$2.local.name)) this$1.raise(node$2.local.start, "The keyword '" + node$2.local.name + "' is reserved") + } + this$1.checkLVal(node$2.local, true) + nodes.push(this$1.finishNode(node$2, "ImportSpecifier")) + } + return nodes +} + +var pp$2 = Parser.prototype + +// Convert existing expression atom to assignable pattern +// if possible. + +pp$2.toAssignable = function(node, isBinding) { + var this$1 = this; + + if (this.options.ecmaVersion >= 6 && node) { + switch (node.type) { + case "Identifier": + case "ObjectPattern": + case "ArrayPattern": + break + + case "ObjectExpression": + node.type = "ObjectPattern" + for (var i = 0; i < node.properties.length; i++) { + var prop = node.properties[i] + if (prop.kind !== "init") this$1.raise(prop.key.start, "Object pattern can't contain getter or setter") + this$1.toAssignable(prop.value, isBinding) + } + break + + case "ArrayExpression": + node.type = "ArrayPattern" + this.toAssignableList(node.elements, isBinding) + break + + case "AssignmentExpression": + if (node.operator === "=") { + node.type = "AssignmentPattern" + delete node.operator + // falls through to AssignmentPattern + } else { + this.raise(node.left.end, "Only '=' operator can be used for specifying default value.") + break + } + + case "AssignmentPattern": + if (node.right.type === "YieldExpression") + this.raise(node.right.start, "Yield expression cannot be a default value") + break + + case "ParenthesizedExpression": + node.expression = this.toAssignable(node.expression, isBinding) + break + + case "MemberExpression": + if (!isBinding) break + + default: + this.raise(node.start, "Assigning to rvalue") + } + } + return node +} + +// Convert list of expression atoms to binding list. + +pp$2.toAssignableList = function(exprList, isBinding) { + var this$1 = this; + + var end = exprList.length + if (end) { + var last = exprList[end - 1] + if (last && last.type == "RestElement") { + --end + } else if (last && last.type == "SpreadElement") { + last.type = "RestElement" + var arg = last.argument + this.toAssignable(arg, isBinding) + if (arg.type !== "Identifier" && arg.type !== "MemberExpression" && arg.type !== "ArrayPattern") + this.unexpected(arg.start) + --end + } + + if (isBinding && last && last.type === "RestElement" && last.argument.type !== "Identifier") + this.unexpected(last.argument.start) + } + for (var i = 0; i < end; i++) { + var elt = exprList[i] + if (elt) this$1.toAssignable(elt, isBinding) + } + return exprList +} + +// Parses spread element. 
+ +pp$2.parseSpread = function(refDestructuringErrors) { + var node = this.startNode() + this.next() + node.argument = this.parseMaybeAssign(false, refDestructuringErrors) + return this.finishNode(node, "SpreadElement") +} + +pp$2.parseRest = function(allowNonIdent) { + var node = this.startNode() + this.next() + + // RestElement inside of a function parameter must be an identifier + if (allowNonIdent) node.argument = this.type === tt.name ? this.parseIdent() : this.unexpected() + else node.argument = this.type === tt.name || this.type === tt.bracketL ? this.parseBindingAtom() : this.unexpected() + + return this.finishNode(node, "RestElement") +} + +// Parses lvalue (assignable) atom. + +pp$2.parseBindingAtom = function() { + if (this.options.ecmaVersion < 6) return this.parseIdent() + switch (this.type) { + case tt.name: + return this.parseIdent() + + case tt.bracketL: + var node = this.startNode() + this.next() + node.elements = this.parseBindingList(tt.bracketR, true, true) + return this.finishNode(node, "ArrayPattern") + + case tt.braceL: + return this.parseObj(true) + + default: + this.unexpected() + } +} + +pp$2.parseBindingList = function(close, allowEmpty, allowTrailingComma, allowNonIdent) { + var this$1 = this; + + var elts = [], first = true + while (!this.eat(close)) { + if (first) first = false + else this$1.expect(tt.comma) + if (allowEmpty && this$1.type === tt.comma) { + elts.push(null) + } else if (allowTrailingComma && this$1.afterTrailingComma(close)) { + break + } else if (this$1.type === tt.ellipsis) { + var rest = this$1.parseRest(allowNonIdent) + this$1.parseBindingListItem(rest) + elts.push(rest) + if (this$1.type === tt.comma) this$1.raise(this$1.start, "Comma is not permitted after the rest element") + this$1.expect(close) + break + } else { + var elem = this$1.parseMaybeDefault(this$1.start, this$1.startLoc) + this$1.parseBindingListItem(elem) + elts.push(elem) + } + } + return elts +} + +pp$2.parseBindingListItem = function(param) { + return param +} + +// Parses assignment pattern around given atom if possible. + +pp$2.parseMaybeDefault = function(startPos, startLoc, left) { + left = left || this.parseBindingAtom() + if (this.options.ecmaVersion < 6 || !this.eat(tt.eq)) return left + var node = this.startNodeAt(startPos, startLoc) + node.left = left + node.right = this.parseMaybeAssign() + return this.finishNode(node, "AssignmentPattern") +} + +// Verify that a node is an lval — something that can be assigned +// to. + +pp$2.checkLVal = function(expr, isBinding, checkClashes) { + var this$1 = this; + + switch (expr.type) { + case "Identifier": + if (this.strict && this.reservedWordsStrictBind.test(expr.name)) + this.raiseRecoverable(expr.start, (isBinding ? "Binding " : "Assigning to ") + expr.name + " in strict mode") + if (checkClashes) { + if (has(checkClashes, expr.name)) + this.raiseRecoverable(expr.start, "Argument name clash") + checkClashes[expr.name] = true + } + break + + case "MemberExpression": + if (isBinding) this.raiseRecoverable(expr.start, (isBinding ? 
"Binding" : "Assigning to") + " member expression") + break + + case "ObjectPattern": + for (var i = 0; i < expr.properties.length; i++) + this$1.checkLVal(expr.properties[i].value, isBinding, checkClashes) + break + + case "ArrayPattern": + for (var i$1 = 0; i$1 < expr.elements.length; i$1++) { + var elem = expr.elements[i$1] + if (elem) this$1.checkLVal(elem, isBinding, checkClashes) + } + break + + case "AssignmentPattern": + this.checkLVal(expr.left, isBinding, checkClashes) + break + + case "RestElement": + this.checkLVal(expr.argument, isBinding, checkClashes) + break + + case "ParenthesizedExpression": + this.checkLVal(expr.expression, isBinding, checkClashes) + break + + default: + this.raise(expr.start, (isBinding ? "Binding" : "Assigning to") + " rvalue") + } +} + +var pp$3 = Parser.prototype + +// Check if property name clashes with already added. +// Object/class getters and setters are not allowed to clash — +// either with each other or with an init property — and in +// strict mode, init properties are also not allowed to be repeated. + +pp$3.checkPropClash = function(prop, propHash) { + if (this.options.ecmaVersion >= 6 && (prop.computed || prop.method || prop.shorthand)) + return + var key = prop.key; + var name + switch (key.type) { + case "Identifier": name = key.name; break + case "Literal": name = String(key.value); break + default: return + } + var kind = prop.kind; + if (this.options.ecmaVersion >= 6) { + if (name === "__proto__" && kind === "init") { + if (propHash.proto) this.raiseRecoverable(key.start, "Redefinition of __proto__ property") + propHash.proto = true + } + return + } + name = "$" + name + var other = propHash[name] + if (other) { + var isGetSet = kind !== "init" + if ((this.strict || isGetSet) && other[kind] || !(isGetSet ^ other.init)) + this.raiseRecoverable(key.start, "Redefinition of property") + } else { + other = propHash[name] = { + init: false, + get: false, + set: false + } + } + other[kind] = true +} + +// ### Expression parsing + +// These nest, from the most general expression type at the top to +// 'atomic', nondivisible expression types at the bottom. Most of +// the functions will simply let the function(s) below them parse, +// and, *if* the syntactic construct they handle is present, wrap +// the AST node that the inner parser gave them in another node. + +// Parse a full expression. The optional arguments are used to +// forbid the `in` operator (in for loops initalization expressions) +// and provide reference for storing '=' operator inside shorthand +// property assignment in contexts where both object expression +// and object pattern might appear (so it's possible to raise +// delayed syntax error at correct position). + +pp$3.parseExpression = function(noIn, refDestructuringErrors) { + var this$1 = this; + + var startPos = this.start, startLoc = this.startLoc + var expr = this.parseMaybeAssign(noIn, refDestructuringErrors) + if (this.type === tt.comma) { + var node = this.startNodeAt(startPos, startLoc) + node.expressions = [expr] + while (this.eat(tt.comma)) node.expressions.push(this$1.parseMaybeAssign(noIn, refDestructuringErrors)) + return this.finishNode(node, "SequenceExpression") + } + return expr +} + +// Parse an assignment expression. This includes applications of +// operators like `+=`. 
+ +pp$3.parseMaybeAssign = function(noIn, refDestructuringErrors, afterLeftParse) { + if (this.inGenerator && this.isContextual("yield")) return this.parseYield() + + var ownDestructuringErrors = false + if (!refDestructuringErrors) { + refDestructuringErrors = new DestructuringErrors + ownDestructuringErrors = true + } + var startPos = this.start, startLoc = this.startLoc + if (this.type == tt.parenL || this.type == tt.name) + this.potentialArrowAt = this.start + var left = this.parseMaybeConditional(noIn, refDestructuringErrors) + if (afterLeftParse) left = afterLeftParse.call(this, left, startPos, startLoc) + if (this.type.isAssign) { + this.checkPatternErrors(refDestructuringErrors, true) + if (!ownDestructuringErrors) DestructuringErrors.call(refDestructuringErrors) + var node = this.startNodeAt(startPos, startLoc) + node.operator = this.value + node.left = this.type === tt.eq ? this.toAssignable(left) : left + refDestructuringErrors.shorthandAssign = 0 // reset because shorthand default was used correctly + this.checkLVal(left) + this.next() + node.right = this.parseMaybeAssign(noIn) + return this.finishNode(node, "AssignmentExpression") + } else { + if (ownDestructuringErrors) this.checkExpressionErrors(refDestructuringErrors, true) + } + return left +} + +// Parse a ternary conditional (`?:`) operator. + +pp$3.parseMaybeConditional = function(noIn, refDestructuringErrors) { + var startPos = this.start, startLoc = this.startLoc + var expr = this.parseExprOps(noIn, refDestructuringErrors) + if (this.checkExpressionErrors(refDestructuringErrors)) return expr + if (this.eat(tt.question)) { + var node = this.startNodeAt(startPos, startLoc) + node.test = expr + node.consequent = this.parseMaybeAssign() + this.expect(tt.colon) + node.alternate = this.parseMaybeAssign(noIn) + return this.finishNode(node, "ConditionalExpression") + } + return expr +} + +// Start the precedence parser. + +pp$3.parseExprOps = function(noIn, refDestructuringErrors) { + var startPos = this.start, startLoc = this.startLoc + var expr = this.parseMaybeUnary(refDestructuringErrors, false) + if (this.checkExpressionErrors(refDestructuringErrors)) return expr + return this.parseExprOp(expr, startPos, startLoc, -1, noIn) +} + +// Parse binary operators with the operator precedence parsing +// algorithm. `left` is the left-hand side of the operator. +// `minPrec` provides context that allows the function to stop and +// defer further parser to one of its callers when it encounters an +// operator that has a lower precedence than the set it is parsing. + +pp$3.parseExprOp = function(left, leftStartPos, leftStartLoc, minPrec, noIn) { + var prec = this.type.binop + if (prec != null && (!noIn || this.type !== tt._in)) { + if (prec > minPrec) { + var logical = this.type === tt.logicalOR || this.type === tt.logicalAND + var op = this.value + this.next() + var startPos = this.start, startLoc = this.startLoc + var right = this.parseExprOp(this.parseMaybeUnary(null, false), startPos, startLoc, prec, noIn) + var node = this.buildBinary(leftStartPos, leftStartLoc, left, right, op, logical) + return this.parseExprOp(node, leftStartPos, leftStartLoc, minPrec, noIn) + } + } + return left +} + +pp$3.buildBinary = function(startPos, startLoc, left, right, op, logical) { + var node = this.startNodeAt(startPos, startLoc) + node.left = left + node.operator = op + node.right = right + return this.finishNode(node, logical ? "LogicalExpression" : "BinaryExpression") +} + +// Parse unary operators, both prefix and postfix. 
+ +pp$3.parseMaybeUnary = function(refDestructuringErrors, sawUnary) { + var this$1 = this; + + var startPos = this.start, startLoc = this.startLoc, expr + if (this.type.prefix) { + var node = this.startNode(), update = this.type === tt.incDec + node.operator = this.value + node.prefix = true + this.next() + node.argument = this.parseMaybeUnary(null, true) + this.checkExpressionErrors(refDestructuringErrors, true) + if (update) this.checkLVal(node.argument) + else if (this.strict && node.operator === "delete" && + node.argument.type === "Identifier") + this.raiseRecoverable(node.start, "Deleting local variable in strict mode") + else sawUnary = true + expr = this.finishNode(node, update ? "UpdateExpression" : "UnaryExpression") + } else { + expr = this.parseExprSubscripts(refDestructuringErrors) + if (this.checkExpressionErrors(refDestructuringErrors)) return expr + while (this.type.postfix && !this.canInsertSemicolon()) { + var node$1 = this$1.startNodeAt(startPos, startLoc) + node$1.operator = this$1.value + node$1.prefix = false + node$1.argument = expr + this$1.checkLVal(expr) + this$1.next() + expr = this$1.finishNode(node$1, "UpdateExpression") + } + } + + if (!sawUnary && this.eat(tt.starstar)) + return this.buildBinary(startPos, startLoc, expr, this.parseMaybeUnary(null, false), "**", false) + else + return expr +} + +// Parse call, dot, and `[]`-subscript expressions. + +pp$3.parseExprSubscripts = function(refDestructuringErrors) { + var startPos = this.start, startLoc = this.startLoc + var expr = this.parseExprAtom(refDestructuringErrors) + var skipArrowSubscripts = expr.type === "ArrowFunctionExpression" && this.input.slice(this.lastTokStart, this.lastTokEnd) !== ")" + if (this.checkExpressionErrors(refDestructuringErrors) || skipArrowSubscripts) return expr + return this.parseSubscripts(expr, startPos, startLoc) +} + +pp$3.parseSubscripts = function(base, startPos, startLoc, noCalls) { + var this$1 = this; + + for (;;) { + if (this$1.eat(tt.dot)) { + var node = this$1.startNodeAt(startPos, startLoc) + node.object = base + node.property = this$1.parseIdent(true) + node.computed = false + base = this$1.finishNode(node, "MemberExpression") + } else if (this$1.eat(tt.bracketL)) { + var node$1 = this$1.startNodeAt(startPos, startLoc) + node$1.object = base + node$1.property = this$1.parseExpression() + node$1.computed = true + this$1.expect(tt.bracketR) + base = this$1.finishNode(node$1, "MemberExpression") + } else if (!noCalls && this$1.eat(tt.parenL)) { + var node$2 = this$1.startNodeAt(startPos, startLoc) + node$2.callee = base + node$2.arguments = this$1.parseExprList(tt.parenR, false) + base = this$1.finishNode(node$2, "CallExpression") + } else if (this$1.type === tt.backQuote) { + var node$3 = this$1.startNodeAt(startPos, startLoc) + node$3.tag = base + node$3.quasi = this$1.parseTemplate() + base = this$1.finishNode(node$3, "TaggedTemplateExpression") + } else { + return base + } + } +} + +// Parse an atomic expression — either a single token that is an +// expression, an expression started by a keyword like `function` or +// `new`, or an expression wrapped in punctuation like `()`, `[]`, +// or `{}`. + +pp$3.parseExprAtom = function(refDestructuringErrors) { + var node, canBeArrow = this.potentialArrowAt == this.start + switch (this.type) { + case tt._super: + if (!this.inFunction) + this.raise(this.start, "'super' outside of function or class") + + case tt._this: + var type = this.type === tt._this ? 
"ThisExpression" : "Super" + node = this.startNode() + this.next() + return this.finishNode(node, type) + + case tt.name: + var startPos = this.start, startLoc = this.startLoc + var id = this.parseIdent(this.type !== tt.name) + if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow)) + return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), [id]) + return id + + case tt.regexp: + var value = this.value + node = this.parseLiteral(value.value) + node.regex = {pattern: value.pattern, flags: value.flags} + return node + + case tt.num: case tt.string: + return this.parseLiteral(this.value) + + case tt._null: case tt._true: case tt._false: + node = this.startNode() + node.value = this.type === tt._null ? null : this.type === tt._true + node.raw = this.type.keyword + this.next() + return this.finishNode(node, "Literal") + + case tt.parenL: + return this.parseParenAndDistinguishExpression(canBeArrow) + + case tt.bracketL: + node = this.startNode() + this.next() + node.elements = this.parseExprList(tt.bracketR, true, true, refDestructuringErrors) + return this.finishNode(node, "ArrayExpression") + + case tt.braceL: + return this.parseObj(false, refDestructuringErrors) + + case tt._function: + node = this.startNode() + this.next() + return this.parseFunction(node, false) + + case tt._class: + return this.parseClass(this.startNode(), false) + + case tt._new: + return this.parseNew() + + case tt.backQuote: + return this.parseTemplate() + + default: + this.unexpected() + } +} + +pp$3.parseLiteral = function(value) { + var node = this.startNode() + node.value = value + node.raw = this.input.slice(this.start, this.end) + this.next() + return this.finishNode(node, "Literal") +} + +pp$3.parseParenExpression = function() { + this.expect(tt.parenL) + var val = this.parseExpression() + this.expect(tt.parenR) + return val +} + +pp$3.parseParenAndDistinguishExpression = function(canBeArrow) { + var this$1 = this; + + var startPos = this.start, startLoc = this.startLoc, val + if (this.options.ecmaVersion >= 6) { + this.next() + + var innerStartPos = this.start, innerStartLoc = this.startLoc + var exprList = [], first = true + var refDestructuringErrors = new DestructuringErrors, spreadStart, innerParenStart + while (this.type !== tt.parenR) { + first ? 
first = false : this$1.expect(tt.comma) + if (this$1.type === tt.ellipsis) { + spreadStart = this$1.start + exprList.push(this$1.parseParenItem(this$1.parseRest())) + break + } else { + if (this$1.type === tt.parenL && !innerParenStart) { + innerParenStart = this$1.start + } + exprList.push(this$1.parseMaybeAssign(false, refDestructuringErrors, this$1.parseParenItem)) + } + } + var innerEndPos = this.start, innerEndLoc = this.startLoc + this.expect(tt.parenR) + + if (canBeArrow && !this.canInsertSemicolon() && this.eat(tt.arrow)) { + this.checkPatternErrors(refDestructuringErrors, true) + if (innerParenStart) this.unexpected(innerParenStart) + return this.parseParenArrowList(startPos, startLoc, exprList) + } + + if (!exprList.length) this.unexpected(this.lastTokStart) + if (spreadStart) this.unexpected(spreadStart) + this.checkExpressionErrors(refDestructuringErrors, true) + + if (exprList.length > 1) { + val = this.startNodeAt(innerStartPos, innerStartLoc) + val.expressions = exprList + this.finishNodeAt(val, "SequenceExpression", innerEndPos, innerEndLoc) + } else { + val = exprList[0] + } + } else { + val = this.parseParenExpression() + } + + if (this.options.preserveParens) { + var par = this.startNodeAt(startPos, startLoc) + par.expression = val + return this.finishNode(par, "ParenthesizedExpression") + } else { + return val + } +} + +pp$3.parseParenItem = function(item) { + return item +} + +pp$3.parseParenArrowList = function(startPos, startLoc, exprList) { + return this.parseArrowExpression(this.startNodeAt(startPos, startLoc), exprList) +} + +// New's precedence is slightly tricky. It must allow its argument to +// be a `[]` or dot subscript expression, but not a call — at least, +// not without wrapping it in parentheses. Thus, it uses the noCalls +// argument to parseSubscripts to prevent it from consuming the +// argument list. + +var empty$1 = [] + +pp$3.parseNew = function() { + var node = this.startNode() + var meta = this.parseIdent(true) + if (this.options.ecmaVersion >= 6 && this.eat(tt.dot)) { + node.meta = meta + node.property = this.parseIdent(true) + if (node.property.name !== "target") + this.raiseRecoverable(node.property.start, "The only valid meta property for new is new.target") + if (!this.inFunction) + this.raiseRecoverable(node.start, "new.target can only be used in functions") + return this.finishNode(node, "MetaProperty") + } + var startPos = this.start, startLoc = this.startLoc + node.callee = this.parseSubscripts(this.parseExprAtom(), startPos, startLoc, true) + if (this.eat(tt.parenL)) node.arguments = this.parseExprList(tt.parenR, false) + else node.arguments = empty$1 + return this.finishNode(node, "NewExpression") +} + +// Parse template expression. + +pp$3.parseTemplateElement = function() { + var elem = this.startNode() + elem.value = { + raw: this.input.slice(this.start, this.end).replace(/\r\n?/g, '\n'), + cooked: this.value + } + this.next() + elem.tail = this.type === tt.backQuote + return this.finishNode(elem, "TemplateElement") +} + +pp$3.parseTemplate = function() { + var this$1 = this; + + var node = this.startNode() + this.next() + node.expressions = [] + var curElt = this.parseTemplateElement() + node.quasis = [curElt] + while (!curElt.tail) { + this$1.expect(tt.dollarBraceL) + node.expressions.push(this$1.parseExpression()) + this$1.expect(tt.braceR) + node.quasis.push(curElt = this$1.parseTemplateElement()) + } + this.next() + return this.finishNode(node, "TemplateLiteral") +} + +// Parse an object literal or binding pattern. 
+ +pp$3.parseObj = function(isPattern, refDestructuringErrors) { + var this$1 = this; + + var node = this.startNode(), first = true, propHash = {} + node.properties = [] + this.next() + while (!this.eat(tt.braceR)) { + if (!first) { + this$1.expect(tt.comma) + if (this$1.afterTrailingComma(tt.braceR)) break + } else first = false + + var prop = this$1.startNode(), isGenerator, startPos, startLoc + if (this$1.options.ecmaVersion >= 6) { + prop.method = false + prop.shorthand = false + if (isPattern || refDestructuringErrors) { + startPos = this$1.start + startLoc = this$1.startLoc + } + if (!isPattern) + isGenerator = this$1.eat(tt.star) + } + this$1.parsePropertyName(prop) + this$1.parsePropertyValue(prop, isPattern, isGenerator, startPos, startLoc, refDestructuringErrors) + this$1.checkPropClash(prop, propHash) + node.properties.push(this$1.finishNode(prop, "Property")) + } + return this.finishNode(node, isPattern ? "ObjectPattern" : "ObjectExpression") +} + +pp$3.parsePropertyValue = function(prop, isPattern, isGenerator, startPos, startLoc, refDestructuringErrors) { + if (this.eat(tt.colon)) { + prop.value = isPattern ? this.parseMaybeDefault(this.start, this.startLoc) : this.parseMaybeAssign(false, refDestructuringErrors) + prop.kind = "init" + } else if (this.options.ecmaVersion >= 6 && this.type === tt.parenL) { + if (isPattern) this.unexpected() + prop.kind = "init" + prop.method = true + prop.value = this.parseMethod(isGenerator) + } else if (this.options.ecmaVersion >= 5 && !prop.computed && prop.key.type === "Identifier" && + (prop.key.name === "get" || prop.key.name === "set") && + (this.type != tt.comma && this.type != tt.braceR)) { + if (isGenerator || isPattern) this.unexpected() + prop.kind = prop.key.name + this.parsePropertyName(prop) + prop.value = this.parseMethod(false) + var paramCount = prop.kind === "get" ? 0 : 1 + if (prop.value.params.length !== paramCount) { + var start = prop.value.start + if (prop.kind === "get") + this.raiseRecoverable(start, "getter should have no params") + else + this.raiseRecoverable(start, "setter should have exactly one param") + } + if (prop.kind === "set" && prop.value.params[0].type === "RestElement") + this.raiseRecoverable(prop.value.params[0].start, "Setter cannot use rest params") + } else if (this.options.ecmaVersion >= 6 && !prop.computed && prop.key.type === "Identifier") { + if (this.keywords.test(prop.key.name) || + (this.strict ? this.reservedWordsStrictBind : this.reservedWords).test(prop.key.name) || + (this.inGenerator && prop.key.name == "yield")) + this.raiseRecoverable(prop.key.start, "'" + prop.key.name + "' can not be used as shorthand property") + prop.kind = "init" + if (isPattern) { + prop.value = this.parseMaybeDefault(startPos, startLoc, prop.key) + } else if (this.type === tt.eq && refDestructuringErrors) { + if (!refDestructuringErrors.shorthandAssign) + refDestructuringErrors.shorthandAssign = this.start + prop.value = this.parseMaybeDefault(startPos, startLoc, prop.key) + } else { + prop.value = prop.key + } + prop.shorthand = true + } else this.unexpected() +} + +pp$3.parsePropertyName = function(prop) { + if (this.options.ecmaVersion >= 6) { + if (this.eat(tt.bracketL)) { + prop.computed = true + prop.key = this.parseMaybeAssign() + this.expect(tt.bracketR) + return prop.key + } else { + prop.computed = false + } + } + return prop.key = this.type === tt.num || this.type === tt.string ? this.parseExprAtom() : this.parseIdent(true) +} + +// Initialize empty function node. 
+ +pp$3.initFunction = function(node) { + node.id = null + if (this.options.ecmaVersion >= 6) { + node.generator = false + node.expression = false + } +} + +// Parse object or class method. + +pp$3.parseMethod = function(isGenerator) { + var node = this.startNode(), oldInGen = this.inGenerator + this.inGenerator = isGenerator + this.initFunction(node) + this.expect(tt.parenL) + node.params = this.parseBindingList(tt.parenR, false, false) + if (this.options.ecmaVersion >= 6) + node.generator = isGenerator + this.parseFunctionBody(node, false) + this.inGenerator = oldInGen + return this.finishNode(node, "FunctionExpression") +} + +// Parse arrow function expression with given parameters. + +pp$3.parseArrowExpression = function(node, params) { + var oldInGen = this.inGenerator + this.inGenerator = false + this.initFunction(node) + node.params = this.toAssignableList(params, true) + this.parseFunctionBody(node, true) + this.inGenerator = oldInGen + return this.finishNode(node, "ArrowFunctionExpression") +} + +// Parse function body and check parameters. + +pp$3.parseFunctionBody = function(node, isArrowFunction) { + var isExpression = isArrowFunction && this.type !== tt.braceL + + if (isExpression) { + node.body = this.parseMaybeAssign() + node.expression = true + } else { + // Start a new scope with regard to labels and the `inFunction` + // flag (restore them to their old value afterwards). + var oldInFunc = this.inFunction, oldLabels = this.labels + this.inFunction = true; this.labels = [] + node.body = this.parseBlock(true) + node.expression = false + this.inFunction = oldInFunc; this.labels = oldLabels + } + + // If this is a strict mode function, verify that argument names + // are not repeated, and it does not try to bind the words `eval` + // or `arguments`. + var useStrict = (!isExpression && node.body.body.length && this.isUseStrict(node.body.body[0])) ? node.body.body[0] : null; + if (this.strict || useStrict) { + var oldStrict = this.strict + this.strict = true + if (node.id) + this.checkLVal(node.id, true) + this.checkParams(node, useStrict) + this.strict = oldStrict + } else if (isArrowFunction) { + this.checkParams(node, useStrict) + } +} + +// Checks function params for various disallowed patterns such as using "eval" +// or "arguments" and duplicate parameters. + +pp$3.checkParams = function(node, useStrict) { + var this$1 = this; + + var nameHash = {} + for (var i = 0; i < node.params.length; i++) { + if (useStrict && this$1.options.ecmaVersion >= 7 && node.params[i].type !== "Identifier") + this$1.raiseRecoverable(useStrict.start, "Illegal 'use strict' directive in function with non-simple parameter list"); + this$1.checkLVal(node.params[i], true, nameHash) + } +} + +// Parses a comma-separated list of expressions, and returns them as +// an array. `close` is the token type that ends the list, and +// `allowEmpty` can be turned on to allow subsequent commas with +// nothing in between them to be parsed as `null` (which is needed +// for array literals). 
+ +pp$3.parseExprList = function(close, allowTrailingComma, allowEmpty, refDestructuringErrors) { + var this$1 = this; + + var elts = [], first = true + while (!this.eat(close)) { + if (!first) { + this$1.expect(tt.comma) + if (allowTrailingComma && this$1.afterTrailingComma(close)) break + } else first = false + + var elt + if (allowEmpty && this$1.type === tt.comma) + elt = null + else if (this$1.type === tt.ellipsis) { + elt = this$1.parseSpread(refDestructuringErrors) + if (this$1.type === tt.comma && refDestructuringErrors && !refDestructuringErrors.trailingComma) { + refDestructuringErrors.trailingComma = this$1.lastTokStart + } + } else + elt = this$1.parseMaybeAssign(false, refDestructuringErrors) + elts.push(elt) + } + return elts +} + +// Parse the next token as an identifier. If `liberal` is true (used +// when parsing properties), it will also convert keywords into +// identifiers. + +pp$3.parseIdent = function(liberal) { + var node = this.startNode() + if (liberal && this.options.allowReserved == "never") liberal = false + if (this.type === tt.name) { + if (!liberal && (this.strict ? this.reservedWordsStrict : this.reservedWords).test(this.value) && + (this.options.ecmaVersion >= 6 || + this.input.slice(this.start, this.end).indexOf("\\") == -1)) + this.raiseRecoverable(this.start, "The keyword '" + this.value + "' is reserved") + if (!liberal && this.inGenerator && this.value === "yield") + this.raiseRecoverable(this.start, "Can not use 'yield' as identifier inside a generator") + node.name = this.value + } else if (liberal && this.type.keyword) { + node.name = this.type.keyword + } else { + this.unexpected() + } + this.next() + return this.finishNode(node, "Identifier") +} + +// Parses yield expression inside generator. + +pp$3.parseYield = function() { + var node = this.startNode() + this.next() + if (this.type == tt.semi || this.canInsertSemicolon() || (this.type != tt.star && !this.type.startsExpr)) { + node.delegate = false + node.argument = null + } else { + node.delegate = this.eat(tt.star) + node.argument = this.parseMaybeAssign() + } + return this.finishNode(node, "YieldExpression") +} + +var pp$4 = Parser.prototype + +// This function is used to raise exceptions on parse errors. It +// takes an offset integer (into the current `input`) to indicate +// the location of the error, attaches the position to the end +// of the error message, and then raises a `SyntaxError` with that +// message. + +pp$4.raise = function(pos, message) { + var loc = getLineInfo(this.input, pos) + message += " (" + loc.line + ":" + loc.column + ")" + var err = new SyntaxError(message) + err.pos = pos; err.loc = loc; err.raisedAt = this.pos + throw err +} + +pp$4.raiseRecoverable = pp$4.raise + +pp$4.curPosition = function() { + if (this.options.locations) { + return new Position(this.curLine, this.pos - this.lineStart) + } +} + +var Node = function Node(parser, pos, loc) { + this.type = "" + this.start = pos + this.end = 0 + if (parser.options.locations) + this.loc = new SourceLocation(parser, loc) + if (parser.options.directSourceFile) + this.sourceFile = parser.options.directSourceFile + if (parser.options.ranges) + this.range = [pos, 0] +}; + +// Start an AST node, attaching a start offset. + +var pp$5 = Parser.prototype + +pp$5.startNode = function() { + return new Node(this, this.start, this.startLoc) +} + +pp$5.startNodeAt = function(pos, loc) { + return new Node(this, pos, loc) +} + +// Finish an AST node, adding `type` and `end` properties. 
+ +function finishNodeAt(node, type, pos, loc) { + node.type = type + node.end = pos + if (this.options.locations) + node.loc.end = loc + if (this.options.ranges) + node.range[1] = pos + return node +} + +pp$5.finishNode = function(node, type) { + return finishNodeAt.call(this, node, type, this.lastTokEnd, this.lastTokEndLoc) +} + +// Finish node at given position + +pp$5.finishNodeAt = function(node, type, pos, loc) { + return finishNodeAt.call(this, node, type, pos, loc) +} + +var TokContext = function TokContext(token, isExpr, preserveSpace, override) { + this.token = token + this.isExpr = !!isExpr + this.preserveSpace = !!preserveSpace + this.override = override +}; + +var types = { + b_stat: new TokContext("{", false), + b_expr: new TokContext("{", true), + b_tmpl: new TokContext("${", true), + p_stat: new TokContext("(", false), + p_expr: new TokContext("(", true), + q_tmpl: new TokContext("`", true, true, function (p) { return p.readTmplToken(); }), + f_expr: new TokContext("function", true) +} + +var pp$6 = Parser.prototype + +pp$6.initialContext = function() { + return [types.b_stat] +} + +pp$6.braceIsBlock = function(prevType) { + if (prevType === tt.colon) { + var parent = this.curContext() + if (parent === types.b_stat || parent === types.b_expr) + return !parent.isExpr + } + if (prevType === tt._return) + return lineBreak.test(this.input.slice(this.lastTokEnd, this.start)) + if (prevType === tt._else || prevType === tt.semi || prevType === tt.eof || prevType === tt.parenR) + return true + if (prevType == tt.braceL) + return this.curContext() === types.b_stat + return !this.exprAllowed +} + +pp$6.updateContext = function(prevType) { + var update, type = this.type + if (type.keyword && prevType == tt.dot) + this.exprAllowed = false + else if (update = type.updateContext) + update.call(this, prevType) + else + this.exprAllowed = type.beforeExpr +} + +// Token-specific context update code + +tt.parenR.updateContext = tt.braceR.updateContext = function() { + if (this.context.length == 1) { + this.exprAllowed = true + return + } + var out = this.context.pop() + if (out === types.b_stat && this.curContext() === types.f_expr) { + this.context.pop() + this.exprAllowed = false + } else if (out === types.b_tmpl) { + this.exprAllowed = true + } else { + this.exprAllowed = !out.isExpr + } +} + +tt.braceL.updateContext = function(prevType) { + this.context.push(this.braceIsBlock(prevType) ? types.b_stat : types.b_expr) + this.exprAllowed = true +} + +tt.dollarBraceL.updateContext = function() { + this.context.push(types.b_tmpl) + this.exprAllowed = true +} + +tt.parenL.updateContext = function(prevType) { + var statementParens = prevType === tt._if || prevType === tt._for || prevType === tt._with || prevType === tt._while + this.context.push(statementParens ? types.p_stat : types.p_expr) + this.exprAllowed = true +} + +tt.incDec.updateContext = function() { + // tokExprAllowed stays unchanged +} + +tt._function.updateContext = function(prevType) { + if (prevType.beforeExpr && prevType !== tt.semi && prevType !== tt._else && + !((prevType === tt.colon || prevType === tt.braceL) && this.curContext() === types.b_stat)) + this.context.push(types.f_expr) + this.exprAllowed = false +} + +tt.backQuote.updateContext = function() { + if (this.curContext() === types.q_tmpl) + this.context.pop() + else + this.context.push(types.q_tmpl) + this.exprAllowed = false +} + +// Object type used to represent tokens. Note that normally, tokens +// simply exist as properties on the parser object. 
This is only +// used for the onToken callback and the external tokenizer. + +var Token = function Token(p) { + this.type = p.type + this.value = p.value + this.start = p.start + this.end = p.end + if (p.options.locations) + this.loc = new SourceLocation(p, p.startLoc, p.endLoc) + if (p.options.ranges) + this.range = [p.start, p.end] +}; + +// ## Tokenizer + +var pp$7 = Parser.prototype + +// Are we running under Rhino? +var isRhino = typeof Packages == "object" && Object.prototype.toString.call(Packages) == "[object JavaPackage]" + +// Move to the next token + +pp$7.next = function() { + if (this.options.onToken) + this.options.onToken(new Token(this)) + + this.lastTokEnd = this.end + this.lastTokStart = this.start + this.lastTokEndLoc = this.endLoc + this.lastTokStartLoc = this.startLoc + this.nextToken() +} + +pp$7.getToken = function() { + this.next() + return new Token(this) +} + +// If we're in an ES6 environment, make parsers iterable +if (typeof Symbol !== "undefined") + pp$7[Symbol.iterator] = function () { + var self = this + return {next: function () { + var token = self.getToken() + return { + done: token.type === tt.eof, + value: token + } + }} + } + +// Toggle strict mode. Re-reads the next number or string to please +// pedantic tests (`"use strict"; 010;` should fail). + +pp$7.setStrict = function(strict) { + var this$1 = this; + + this.strict = strict + if (this.type !== tt.num && this.type !== tt.string) return + this.pos = this.start + if (this.options.locations) { + while (this.pos < this.lineStart) { + this$1.lineStart = this$1.input.lastIndexOf("\n", this$1.lineStart - 2) + 1 + --this$1.curLine + } + } + this.nextToken() +} + +pp$7.curContext = function() { + return this.context[this.context.length - 1] +} + +// Read a single token, updating the parser object's token-related +// properties. + +pp$7.nextToken = function() { + var curContext = this.curContext() + if (!curContext || !curContext.preserveSpace) this.skipSpace() + + this.start = this.pos + if (this.options.locations) this.startLoc = this.curPosition() + if (this.pos >= this.input.length) return this.finishToken(tt.eof) + + if (curContext.override) return curContext.override(this) + else this.readToken(this.fullCharCodeAtPos()) +} + +pp$7.readToken = function(code) { + // Identifier or keyword. '\uXXXX' sequences are allowed in + // identifiers, so '\' also dispatches to that. 
+ if (isIdentifierStart(code, this.options.ecmaVersion >= 6) || code === 92 /* '\' */) + return this.readWord() + + return this.getTokenFromCode(code) +} + +pp$7.fullCharCodeAtPos = function() { + var code = this.input.charCodeAt(this.pos) + if (code <= 0xd7ff || code >= 0xe000) return code + var next = this.input.charCodeAt(this.pos + 1) + return (code << 10) + next - 0x35fdc00 +} + +pp$7.skipBlockComment = function() { + var this$1 = this; + + var startLoc = this.options.onComment && this.curPosition() + var start = this.pos, end = this.input.indexOf("*/", this.pos += 2) + if (end === -1) this.raise(this.pos - 2, "Unterminated comment") + this.pos = end + 2 + if (this.options.locations) { + lineBreakG.lastIndex = start + var match + while ((match = lineBreakG.exec(this.input)) && match.index < this.pos) { + ++this$1.curLine + this$1.lineStart = match.index + match[0].length + } + } + if (this.options.onComment) + this.options.onComment(true, this.input.slice(start + 2, end), start, this.pos, + startLoc, this.curPosition()) +} + +pp$7.skipLineComment = function(startSkip) { + var this$1 = this; + + var start = this.pos + var startLoc = this.options.onComment && this.curPosition() + var ch = this.input.charCodeAt(this.pos+=startSkip) + while (this.pos < this.input.length && ch !== 10 && ch !== 13 && ch !== 8232 && ch !== 8233) { + ++this$1.pos + ch = this$1.input.charCodeAt(this$1.pos) + } + if (this.options.onComment) + this.options.onComment(false, this.input.slice(start + startSkip, this.pos), start, this.pos, + startLoc, this.curPosition()) +} + +// Called at the start of the parse and after every token. Skips +// whitespace and comments, and. + +pp$7.skipSpace = function() { + var this$1 = this; + + loop: while (this.pos < this.input.length) { + var ch = this$1.input.charCodeAt(this$1.pos) + switch (ch) { + case 32: case 160: // ' ' + ++this$1.pos + break + case 13: + if (this$1.input.charCodeAt(this$1.pos + 1) === 10) { + ++this$1.pos + } + case 10: case 8232: case 8233: + ++this$1.pos + if (this$1.options.locations) { + ++this$1.curLine + this$1.lineStart = this$1.pos + } + break + case 47: // '/' + switch (this$1.input.charCodeAt(this$1.pos + 1)) { + case 42: // '*' + this$1.skipBlockComment() + break + case 47: + this$1.skipLineComment(2) + break + default: + break loop + } + break + default: + if (ch > 8 && ch < 14 || ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch))) { + ++this$1.pos + } else { + break loop + } + } + } +} + +// Called at the end of every token. Sets `end`, `val`, and +// maintains `context` and `exprAllowed`, and skips the space after +// the token, so that the next one's `start` will point at the +// right position. + +pp$7.finishToken = function(type, val) { + this.end = this.pos + if (this.options.locations) this.endLoc = this.curPosition() + var prevType = this.type + this.type = type + this.value = val + + this.updateContext(prevType) +} + +// ### Token reading + +// This is the function that is called to fetch the next token. It +// is somewhat obscure, because it works in character codes rather +// than characters, and because operator parsing has been inlined +// into it. +// +// All in the name of speed. +// +pp$7.readToken_dot = function() { + var next = this.input.charCodeAt(this.pos + 1) + if (next >= 48 && next <= 57) return this.readNumber(true) + var next2 = this.input.charCodeAt(this.pos + 2) + if (this.options.ecmaVersion >= 6 && next === 46 && next2 === 46) { // 46 = dot '.' 
+    this.pos += 3
+    return this.finishToken(tt.ellipsis)
+  } else {
+    ++this.pos
+    return this.finishToken(tt.dot)
+  }
+}
+
+pp$7.readToken_slash = function() { // '/'
+  var next = this.input.charCodeAt(this.pos + 1)
+  if (this.exprAllowed) {++this.pos; return this.readRegexp()}
+  if (next === 61) return this.finishOp(tt.assign, 2)
+  return this.finishOp(tt.slash, 1)
+}
+
+pp$7.readToken_mult_modulo_exp = function(code) { // '%*'
+  var next = this.input.charCodeAt(this.pos + 1)
+  var size = 1
+  var tokentype = code === 42 ? tt.star : tt.modulo
+
+  // exponentiation operator ** and **=
+  if (this.options.ecmaVersion >= 7 && next === 42) {
+    ++size
+    tokentype = tt.starstar
+    next = this.input.charCodeAt(this.pos + 2)
+  }
+
+  if (next === 61) return this.finishOp(tt.assign, size + 1)
+  return this.finishOp(tokentype, size)
+}
+
+pp$7.readToken_pipe_amp = function(code) { // '|&'
+  var next = this.input.charCodeAt(this.pos + 1)
+  if (next === code) return this.finishOp(code === 124 ? tt.logicalOR : tt.logicalAND, 2)
+  if (next === 61) return this.finishOp(tt.assign, 2)
+  return this.finishOp(code === 124 ? tt.bitwiseOR : tt.bitwiseAND, 1)
+}
+
+pp$7.readToken_caret = function() { // '^'
+  var next = this.input.charCodeAt(this.pos + 1)
+  if (next === 61) return this.finishOp(tt.assign, 2)
+  return this.finishOp(tt.bitwiseXOR, 1)
+}
+
+pp$7.readToken_plus_min = function(code) { // '+-'
+  var next = this.input.charCodeAt(this.pos + 1)
+  if (next === code) {
+    if (next == 45 && this.input.charCodeAt(this.pos + 2) == 62 &&
+        lineBreak.test(this.input.slice(this.lastTokEnd, this.pos))) {
+      // A `-->` line comment
+      this.skipLineComment(3)
+      this.skipSpace()
+      return this.nextToken()
+    }
+    return this.finishOp(tt.incDec, 2)
+  }
+  if (next === 61) return this.finishOp(tt.assign, 2)
+  return this.finishOp(tt.plusMin, 1)
+}
+
+pp$7.readToken_lt_gt = function(code) { // '<>'
+  var next = this.input.charCodeAt(this.pos + 1)
+  var size = 1
+  if (next === code) {
+    size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 3 : 2
+    if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)
+    return this.finishOp(tt.bitShift, size)
+  }
+  if (next == 33 && code == 60 && this.input.charCodeAt(this.pos + 2) == 45 &&
+      this.input.charCodeAt(this.pos + 3) == 45) {
+    if (this.inModule) this.unexpected()
+    // `<!--`, an XML-style comment that should be interpreted as a line comment
+    this.skipLineComment(4)
+    this.skipSpace()
+    return this.nextToken()
+  }
+  if (next === 61) size = 2
+  return this.finishOp(tt.relational, size)
+}
+
+Both [Writable][] and [Readable][] streams will store data in an internal
+buffer that can be retrieved using `writable._writableState.getBuffer()` or
+`readable._readableState.buffer`, respectively.
+
+The amount of data potentially buffered depends on the `highWaterMark` option
+passed into the streams constructor. For normal streams, the `highWaterMark`
+option specifies a total number of bytes. For streams operating in object mode,
+the `highWaterMark` specifies a total number of objects.
+
+Data is buffered in Readable streams when the implementation calls
+[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not
+call [`stream.read()`][stream-read], the data will sit in the internal
+queue until it is consumed.
+
+Once the total size of the internal read buffer reaches the threshold specified
+by `highWaterMark`, the stream will temporarily stop reading data from the
+underlying resource until the data currently buffered can be consumed (that is,
+the stream will stop calling the internal `readable._read()` method that is
+used to fill the read buffer).
+
+Data is buffered in Writable streams when the
+[`writable.write(chunk)`][stream-write] method is called repeatedly. While the
+total size of the internal write buffer is below the threshold set by
+`highWaterMark`, calls to `writable.write()` will return `true`. Once the size
+of the internal buffer reaches or exceeds the `highWaterMark`, `false` will be
+returned.
+
+A key goal of the `stream` API, and in particular the [`stream.pipe()`] method,
+is to limit the buffering of data to acceptable levels such that sources and
+destinations of differing speeds will not overwhelm the available memory.
+
+Because [Duplex][] and [Transform][] streams are both Readable and Writable,
+each maintains *two* separate internal buffers used for reading and writing,
+allowing each side to operate independently of the other while maintaining an
+appropriate and efficient flow of data. For example, [`net.Socket`][] instances
+are [Duplex][] streams whose Readable side allows consumption of data received
+*from* the socket and whose Writable side allows writing data *to* the socket.
+Because data may be written to the socket at a faster or slower rate than data
+is received, it is important that each side operate (and buffer) independently
+of the other.
+
+## API for Stream Consumers
+
+Almost all Node.js applications, no matter how simple, use streams in some
+manner. The following is an example of using streams in a Node.js application
+that implements an HTTP server:
+
+```js
+const http = require('http');
+
+const server = http.createServer((req, res) => {
+  // req is an http.IncomingMessage, which is a Readable Stream
+  // res is an http.ServerResponse, which is a Writable Stream
+
+  let body = '';
+  // Get the data as utf8 strings.
+  // If an encoding is not set, Buffer objects will be received.
+  req.setEncoding('utf8');
+
+  // Readable streams emit 'data' events once a listener is added
+  req.on('data', (chunk) => {
+    body += chunk;
+  });
+
+  // the end event indicates that the entire body has been received
+  req.on('end', () => {
+    let data;
+    try {
+      data = JSON.parse(body);
+    } catch (er) {
+      // uh oh! bad json!
bad json! + res.statusCode = 400; + return res.end(`error: ${er.message}`); + } + + // write back something interesting to the user: + res.write(typeof data); + res.end(); + }); +}); + +server.listen(1337); + +// $ curl localhost:1337 -d '{}' +// object +// $ curl localhost:1337 -d '"foo"' +// string +// $ curl localhost:1337 -d 'not json' +// error: Unexpected token o +``` + +[Writable][] streams (such as `res` in the example) expose methods such as +`write()` and `end()` that are used to write data onto the stream. + +[Readable][] streams use the [`EventEmitter`][] API for notifying application +code when data is available to be read off the stream. That available data can +be read from the stream in multiple ways. + +Both [Writable][] and [Readable][] streams use the [`EventEmitter`][] API in +various ways to communicate the current state of the stream. + +[Duplex][] and [Transform][] streams are both [Writable][] and [Readable][]. + +Applications that are either writing data to or consuming data from a stream +are not required to implement the stream interfaces directly and will generally +have no reason to call `require('stream')`. + +Developers wishing to implement new types of streams should refer to the +section [API for Stream Implementers][]. + +### Writable Streams + +Writable streams are an abstraction for a *destination* to which data is +written. + +Examples of [Writable][] streams include: + +* [HTTP requests, on the client][] +* [HTTP responses, on the server][] +* [fs write streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdin][] +* [`process.stdout`][], [`process.stderr`][] + +*Note*: Some of these examples are actually [Duplex][] streams that implement +the [Writable][] interface. + +All [Writable][] streams implement the interface defined by the +`stream.Writable` class. + +While specific instances of [Writable][] streams may differ in various ways, +all Writable streams follow the same fundamental usage pattern as illustrated +in the example below: + +```js +const myStream = getWritableStreamSomehow(); +myStream.write('some data'); +myStream.write('some more data'); +myStream.end('done writing data'); +``` + +#### Class: stream.Writable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all Writable streams will emit the `'close'` event. + +##### Event: 'drain' + + +If a call to [`stream.write(chunk)`][stream-write] returns `false`, the +`'drain'` event will be emitted when it is appropriate to resume writing data +to the stream. + +```js +// Write the data to the supplied writable stream one million times. +// Be attentive to back-pressure. +function writeOneMillionTimes(writer, data, encoding, callback) { + let i = 1000000; + write(); + function write() { + var ok = true; + do { + i--; + if (i === 0) { + // last time! + writer.write(data, encoding, callback); + } else { + // see if we should continue, or wait + // don't pass the callback, because we're not done yet. + ok = writer.write(data, encoding); + } + } while (i > 0 && ok); + if (i > 0) { + // had to stop early! + // write some more once it drains + writer.once('drain', write); + } + } +} +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event is emitted if an error occurred while writing or piping +data. 
The listener callback is passed a single `Error` argument when called. + +*Note*: The stream is not closed when the `'error'` event is emitted. + +##### Event: 'finish' + + +The `'finish'` event is emitted after the [`stream.end()`][stream-end] method +has been called, and all data has been flushed to the underlying system. + +```js +const writer = getWritableStreamSomehow(); +for (var i = 0; i < 100; i ++) { + writer.write('hello, #${i}!\n'); +} +writer.end('This is the end\n'); +writer.on('finish', () => { + console.error('All writes are now complete.'); +}); +``` + +##### Event: 'pipe' + + +* `src` {stream.Readable} source stream that is piping to this writable + +The `'pipe'` event is emitted when the [`stream.pipe()`][] method is called on +a readable stream, adding this writable to its set of destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('pipe', (src) => { + console.error('something is piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +``` + +##### Event: 'unpipe' + + +* `src` {[Readable][] Stream} The source stream that + [unpiped][`stream.unpipe()`] this writable + +The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called +on a [Readable][] stream, removing this [Writable][] from its set of +destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('unpipe', (src) => { + console.error('Something has stopped piping into the writer.'); + assert.equal(src, reader); +}); +reader.pipe(writer); +reader.unpipe(writer); +``` + +##### writable.cork() + + +The `writable.cork()` method forces all written data to be buffered in memory. +The buffered data will be flushed when either the [`stream.uncork()`][] or +[`stream.end()`][stream-end] methods are called. + +The primary intent of `writable.cork()` is to avoid a situation where writing +many small chunks of data to a stream do not cause an backup in the internal +buffer that would have an adverse impact on performance. In such situations, +implementations that implement the `writable._writev()` method can perform +buffered writes in a more optimized manner. + +##### writable.end([chunk][, encoding][, callback]) + + +* `chunk` {String|Buffer|any} Optional data to write. For streams not operating + in object mode, `chunk` must be a string or a `Buffer`. For object mode + streams, `chunk` may be any JavaScript value other than `null`. +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Optional callback for when the stream is finished + +Calling the `writable.end()` method signals that no more data will be written +to the [Writable][]. The optional `chunk` and `encoding` arguments allow one +final additional chunk of data to be written immediately before closing the +stream. If provided, the optional `callback` function is attached as a listener +for the [`'finish'`][] event. + +Calling the [`stream.write()`][stream-write] method after calling +[`stream.end()`][stream-end] will raise an error. + +```js +// write 'hello, ' and then end with 'world!' +const file = fs.createWriteStream('example.txt'); +file.write('hello, '); +file.end('world!'); +// writing more now is not allowed! +``` + +##### writable.setDefaultEncoding(encoding) + + +* `encoding` {String} The new default encoding +* Return: `this` + +The `writable.setDefaultEncoding()` method sets the default `encoding` for a +[Writable][] stream. 
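+
+As a brief sketch (reusing the hypothetical `getWritableStreamSomehow()`
+helper from the earlier examples), setting a default encoding lets subsequent
+string writes omit a per-call encoding argument:
+
+```js
+const writer = getWritableStreamSomehow();
+writer.setDefaultEncoding('utf8');
+// String chunks written from here on are interpreted as UTF-8.
+writer.write('some data');
+writer.end('done writing data');
+```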
+ +##### writable.uncork() + + +The `writable.uncork()` method flushes all data buffered since +[`stream.cork()`][] was called. + +When using `writable.cork()` and `writable.uncork()` to manage the buffering +of writes to a stream, it is recommended that calls to `writable.uncork()` be +deferred using `process.nextTick()`. Doing so allows batching of all +`writable.write()` calls that occur within a given Node.js event loop phase. + +```js +stream.cork(); +stream.write('some '); +stream.write('data '); +process.nextTick(() => stream.uncork()); +``` + +If the `writable.cork()` method is called multiple times on a stream, the same +number of calls to `writable.uncork()` must be called to flush the buffered +data. + +``` +stream.cork(); +stream.write('some '); +stream.cork(); +stream.write('data '); +process.nextTick(() => { + stream.uncork(); + // The data will not be flushed until uncork() is called a second time. + stream.uncork(); +}); +``` + +##### writable.write(chunk[, encoding][, callback]) + + +* `chunk` {String|Buffer} The data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Callback for when this chunk of data is flushed +* Returns: {Boolean} `false` if the stream wishes for the calling code to + wait for the `'drain'` event to be emitted before continuing to write + additional data; otherwise `true`. + +The `writable.write()` method writes some data to the stream, and calls the +supplied `callback` once the data has been fully handled. If an error +occurs, the `callback` *may or may not* be called with the error as its +first argument. To reliably detect write errors, add a listener for the +`'error'` event. + +The return value indicates whether the written `chunk` was buffered internally +and the buffer has exceeded the `highWaterMark` configured when the stream was +created. If `false` is returned, further attempts to write data to the stream +should be paused until the `'drain'` event is emitted. + +A Writable stream in object mode will always ignore the `encoding` argument. + +### Readable Streams + +Readable streams are an abstraction for a *source* from which data is +consumed. + +Examples of Readable streams include: + +* [HTTP responses, on the client][http-incoming-message] +* [HTTP requests, on the server][http-incoming-message] +* [fs read streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdout and stderr][] +* [`process.stdin`][] + +All [Readable][] streams implement the interface defined by the +`stream.Readable` class. + +#### Two Modes + +Readable streams effectively operate in one of two modes: flowing and paused. + +When in flowing mode, data is read from the underlying system automatically +and provided to an application as quickly as possible using events via the +[`EventEmitter`][] interface. + +In paused mode, the [`stream.read()`][stream-read] method must be called +explicitly to read chunks of data from the stream. + +All [Readable][] streams begin in paused mode but can be switched to flowing +mode in one of the following ways: + +* Adding a [`'data'`][] event handler. +* Calling the [`stream.resume()`][stream-resume] method. +* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. + +The Readable can switch back to paused mode using one of the following: + +* If there are no pipe destinations, by calling the + [`stream.pause()`][stream-pause] method. 
+* If there are pipe destinations, by removing any [`'data'`][] event + handlers, and removing all pipe destinations by calling the + [`stream.unpipe()`][] method. + +The important concept to remember is that a Readable will not generate data +until a mechanism for either consuming or ignoring that data is provided. If +the consuming mechanism is disabled or taken away, the Readable will *attempt* +to stop generating the data. + +*Note*: For backwards compatibility reasons, removing [`'data'`][] event +handlers will **not** automatically pause the stream. Also, if there are piped +destinations, then calling [`stream.pause()`][stream-pause] will not guarantee +that the stream will *remain* paused once those destinations drain and ask for +more data. + +*Note*: If a [Readable][] is switched into flowing mode and there are no +consumers available handle the data, that data will be lost. This can occur, +for instance, when the `readable.resume()` method is called without a listener +attached to the `'data'` event, or when a `'data'` event handler is removed +from the stream. + +#### Three States + +The "two modes" of operation for a Readable stream are a simplified abstraction +for the more complicated internal state management that is happening within the +Readable stream implementation. + +Specifically, at any given point in time, every Readable is in one of three +possible states: + +* `readable._readableState.flowing = null` +* `readable._readableState.flowing = false` +* `readable._readableState.flowing = true` + +When `readable._readableState.flowing` is `null`, no mechanism for consuming the +streams data is provided so the stream will not generate its data. + +Attaching a listener for the `'data'` event, calling the `readable.pipe()` +method, or calling the `readable.resume()` method will switch +`readable._readableState.flowing` to `true`, causing the Readable to begin +actively emitting events as data is generated. + +Calling `readable.pause()`, `readable.unpipe()`, or receiving "back pressure" +will cause the `readable._readableState.flowing` to be set as `false`, +temporarily halting the flowing of events but *not* halting the generation of +data. + +While `readable._readableState.flowing` is `false`, data may be accumulating +within the streams internal buffer. + +#### Choose One + +The Readable stream API evolved across multiple Node.js versions and provides +multiple methods of consuming stream data. In general, developers should choose +*one* of the methods of consuming data and *should never* use multiple methods +to consume data from a single stream. + +Use of the `readable.pipe()` method is recommended for most users as it has been +implemented to provide the easiest way of consuming stream data. Developers that +require more fine-grained control over the transfer and generation of data can +use the [`EventEmitter`][] and `readable.pause()`/`readable.resume()` APIs. + +#### Class: stream.Readable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all [Readable][] streams will emit the `'close'` event. + +##### Event: 'data' + + +* `chunk` {Buffer|String|any} The chunk of data. For streams that are not + operating in object mode, the chunk will be either a string or `Buffer`. 
+ For streams that are in object mode, the chunk can be any JavaScript value + other than `null`. + +The `'data'` event is emitted whenever the stream is relinquishing ownership of +a chunk of data to a consumer. This may occur whenever the stream is switched +in flowing mode by calling `readable.pipe()`, `readable.resume()`, or by +attaching a listener callback to the `'data'` event. The `'data'` event will +also be emitted whenever the `readable.read()` method is called and a chunk of +data is available to be returned. + +Attaching a `'data'` event listener to a stream that has not been explicitly +paused will switch the stream into flowing mode. Data will then be passed as +soon as it is available. + +The listener callback will be passed the chunk of data as a string if a default +encoding has been specified for the stream using the +`readable.setEncoding()` method; otherwise the data will be passed as a +`Buffer`. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +``` + +##### Event: 'end' + + +The `'end'` event is emitted when there is no more data to be consumed from +the stream. + +*Note*: The `'end'` event **will not be emitted** unless the data is +completely consumed. This can be accomplished by switching the stream into +flowing mode, or by calling [`stream.read()`][stream-read] repeatedly until +all data has been consumed. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +readable.on('end', () => { + console.log('There will be no more data.'); +}); +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event may be emitted by a Readable implementation at any time. +Typically, this may occur if the underlying stream in unable to generate data +due to an underlying internal failure, or when a stream implementation attempts +to push an invalid chunk of data. + +The listener callback will be passed a single `Error` object. + +##### Event: 'readable' + + +The `'readable'` event is emitted when there is data available to be read from +the stream. In some cases, attaching a listener for the `'readable'` event will +cause some amount of data to be read into an internal buffer. + +```javascript +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + // there is some data to read now +}); +``` +The `'readable'` event will also be emitted once the end of the stream data +has been reached but before the `'end'` event is emitted. + +Effectively, the `'readable'` event indicates that the stream has new +information: either new data is available or the end of the stream has been +reached. In the former case, [`stream.read()`][stream-read] will return the +available data. In the latter case, [`stream.read()`][stream-read] will return +`null`. For instance, in the following example, `foo.txt` is an empty file: + +```js +const fs = require('fs'); +const rr = fs.createReadStream('foo.txt'); +rr.on('readable', () => { + console.log('readable:', rr.read()); +}); +rr.on('end', () => { + console.log('end'); +}); +``` + +The output of running this script is: + +``` +$ node test.js +readable: null +end +``` + +*Note*: In general, the `readable.pipe()` and `'data'` event mechanisms are +preferred over the use of the `'readable'` event. 
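+
+For instance, a minimal sketch of the pipe-based approach (the destination
+file name is hypothetical); `readable.pipe()` manages the flow of data, so no
+`'readable'` handling or explicit `read()` calls are needed:
+
+```js
+const fs = require('fs');
+const readable = getReadableStreamSomehow();
+// pipe() switches the stream into flowing mode and handles backpressure.
+readable.pipe(fs.createWriteStream('output.txt'));
+```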
+ +##### readable.isPaused() + + +* Return: {Boolean} + +The `readable.isPaused()` method returns the current operating state of the +Readable. This is used primarily by the mechanism that underlies the +`readable.pipe()` method. In most typical cases, there will be no reason to +use this method directly. + +```js +const readable = new stream.Readable + +readable.isPaused() // === false +readable.pause() +readable.isPaused() // === true +readable.resume() +readable.isPaused() // === false +``` + +##### readable.pause() + + +* Return: `this` + +The `readable.pause()` method will cause a stream in flowing mode to stop +emitting [`'data'`][] events, switching out of flowing mode. Any data that +becomes available will remain in the internal buffer. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); + readable.pause(); + console.log('There will be no additional data for 1 second.'); + setTimeout(() => { + console.log('Now data will start flowing again.'); + readable.resume(); + }, 1000); +}); +``` + +##### readable.pipe(destination[, options]) + + +* `destination` {stream.Writable} The destination for writing data +* `options` {Object} Pipe options + * `end` {Boolean} End the writer when the reader ends. Defaults to `true`. + +The `readable.pipe()` method attaches a [Writable][] stream to the `readable`, +causing it to switch automatically into flowing mode and push all of its data +to the attached [Writable][]. The flow of data will be automatically managed so +that the destination Writable stream is not overwhelmed by a faster Readable +stream. + +The following example pipes all of the data from the `readable` into a file +named `file.txt`: + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt' +readable.pipe(writable); +``` +It is possible to attach multiple Writable streams to a single Readable stream. + +The `readable.pipe()` method returns a reference to the *destination* stream +making it possible to set up chains of piped streams: + +```js +const r = fs.createReadStream('file.txt'); +const z = zlib.createGzip(); +const w = fs.createWriteStream('file.txt.gz'); +r.pipe(z).pipe(w); +``` + +By default, [`stream.end()`][stream-end] is called on the destination Writable +stream when the source Readable stream emits [`'end'`][], so that the +destination is no longer writable. To disable this default behavior, the `end` +option can be passed as `false`, causing the destination stream to remain open, +as illustrated in the following example: + +```js +reader.pipe(writer, { end: false }); +reader.on('end', () => { + writer.end('Goodbye\n'); +}); +``` + +One important caveat is that if the Readable stream emits an error during +processing, the Writable destination *is not closed* automatically. If an +error occurs, it will be necessary to *manually* close each stream in order +to prevent memory leaks. + +*Note*: The [`process.stderr`][] and [`process.stdout`][] Writable streams are +never closed until the Node.js process exits, regardless of the specified +options. + +##### readable.read([size]) + + +* `size` {Number} Optional argument to specify how much data to read. +* Return {String|Buffer|Null} + +The `readable.read()` method pulls some data out of the internal buffer and +returns it. If no data available to be read, `null` is returned. 
By default, +the data will be returned as a `Buffer` object unless an encoding has been +specified using the `readable.setEncoding()` method or the stream is operating +in object mode. + +The optional `size` argument specifies a specific number of bytes to read. If +`size` bytes are not available to be read, `null` will be returned *unless* +the stream has ended, in which case all of the data remaining in the internal +buffer will be returned (*even if it exceeds `size` bytes*). + +If the `size` argument is not specified, all of the data contained in the +internal buffer will be returned. + +The `readable.read()` method should only be called on Readable streams operating +in paused mode. In flowing mode, `readable.read()` is called automatically until +the internal buffer is fully drained. + +```js +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + var chunk; + while (null !== (chunk = readable.read())) { + console.log(`Received ${chunk.length} bytes of data.`); + } +}); +``` + +In general, it is recommended that developers avoid the use of the `'readable'` +event and the `readable.read()` method in favor of using either +`readable.pipe()` or the `'data'` event. + +A Readable stream in object mode will always return a single item from +a call to [`readable.read(size)`][stream-read], regardless of the value of the +`size` argument. + +*Note:* If the `readable.read()` method returns a chunk of data, a `'data'` +event will also be emitted. + +*Note*: Calling [`stream.read([size])`][stream-read] after the [`'end'`][] +event has been emitted will return `null`. No runtime error will be raised. + +##### readable.resume() + + +* Return: `this` + +The `readable.resume()` method causes an explicitly paused Readable stream to +resume emitting [`'data'`][] events, switching the stream into flowing mode. + +The `readable.resume()` method can be used to fully consume the data from a +stream without actually processing any of that data as illustrated in the +following example: + +```js +getReadableStreamSomehow() + .resume() + .on('end', () => { + console.log('Reached the end, but did not read anything.'); + }); +``` + +##### readable.setEncoding(encoding) + + +* `encoding` {String} The encoding to use. +* Return: `this` + +The `readable.setEncoding()` method sets the default character encoding for +data read from the Readable stream. + +Setting an encoding causes the stream data +to be returned as string of the specified encoding rather than as `Buffer` +objects. For instance, calling `readable.setEncoding('utf8')` will cause the +output data will be interpreted as UTF-8 data, and passed as strings. Calling +`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal +string format. + +The Readable stream will properly handle multi-byte characters delivered through +the stream that would otherwise become improperly decoded if simply pulled from +the stream as `Buffer` objects. + +Encoding can be disabled by calling `readable.setEncoding(null)`. This approach +is useful when working with binary data or with large multi-byte strings spread +out over multiple chunks. 
+ +```js +const readable = getReadableStreamSomehow(); +readable.setEncoding('utf8'); +readable.on('data', (chunk) => { + assert.equal(typeof chunk, 'string'); + console.log('got %d characters of string data', chunk.length); +}); +``` + +##### readable.unpipe([destination]) + + +* `destination` {stream.Writable} Optional specific stream to unpipe + +The `readable.unpipe()` method detaches a Writable stream previously attached +using the [`stream.pipe()`][] method. + +If the `destination` is not specified, then *all* pipes are detached. + +If the `destination` is specified, but no pipe is set up for it, then +the method does nothing. + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt', +// but only for the first second +readable.pipe(writable); +setTimeout(() => { + console.log('Stop writing to file.txt'); + readable.unpipe(writable); + console.log('Manually close the file stream'); + writable.end(); +}, 1000); +``` + +##### readable.unshift(chunk) + + +* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue + +The `readable.unshift()` method pushes a chunk of data back into the internal +buffer. This is useful in certain situations where a stream is being consumed by +code that needs to "un-consume" some amount of data that it has optimistically +pulled out of the source, so that the data can be passed on to some other party. + +*Note*: The `stream.unshift(chunk)` method cannot be called after the +[`'end'`][] event has been emitted or a runtime error will be thrown. + +Developers using `stream.unshift()` often should consider switching to +use of a [Transform][] stream instead. See the [API for Stream Implementers][] +section for more information. + +```js +// Pull off a header delimited by \n\n +// use unshift() if we get too much +// Call the callback with (error, header, stream) +const StringDecoder = require('string_decoder').StringDecoder; +function parseHeader(stream, callback) { + stream.on('error', callback); + stream.on('readable', onReadable); + const decoder = new StringDecoder('utf8'); + var header = ''; + function onReadable() { + var chunk; + while (null !== (chunk = stream.read())) { + var str = decoder.write(chunk); + if (str.match(/\n\n/)) { + // found the header boundary + var split = str.split(/\n\n/); + header += split.shift(); + const remaining = split.join('\n\n'); + const buf = Buffer.from(remaining, 'utf8'); + if (buf.length) + stream.unshift(buf); + stream.removeListener('error', callback); + stream.removeListener('readable', onReadable); + // now the body of the message can be read from the stream. + callback(null, header, stream); + } else { + // still reading the header. + header += str; + } + } + } +} +``` + +*Note*: Unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` +will not end the reading process by resetting the internal reading state of the +stream. This can cause unexpected results if `readable.unshift()` is called +during a read (i.e. from within a [`stream._read()`][stream-_read] +implementation on a custom stream). Following the call to `readable.unshift()` +with an immediate [`stream.push('')`][stream-push] will reset the reading state +appropriately, however it is best to simply avoid calling `readable.unshift()` +while in the process of performing a read. 
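+
+Purely to illustrate the reset described above (and not as a recommendation,
+since avoiding `unshift()` inside `_read()` is preferable), a rough sketch with
+a hypothetical `pullChunkFromSource()` helper might look like:
+
+```js
+const Readable = require('stream').Readable;
+
+class MyReadable extends Readable {
+  _read(size) {
+    const chunk = pullChunkFromSource(size); // hypothetical helper
+    if (chunk === null) {
+      this.push(null); // no more data
+      return;
+    }
+    // Suppose this chunk should not be consumed yet: put it back, then
+    // reset the reading state with an immediate zero-length push.
+    this.unshift(chunk);
+    this.push('');
+  }
+}
+```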
+ +##### readable.wrap(stream) + + +* `stream` {Stream} An "old style" readable stream + +Versions of Node.js prior to v0.10 had streams that did not implement the +entire `stream` module API as it is currently defined. (See [Compatibility][] +for more information.) + +When using an older Node.js library that emits [`'data'`][] events and has a +[`stream.pause()`][stream-pause] method that is advisory only, the +`readable.wrap()` method can be used to create a [Readable][] stream that uses +the old stream as its data source. + +It will rarely be necessary to use `readable.wrap()` but the method has been +provided as a convenience for interacting with older Node.js applications and +libraries. + +For example: + +```js +const OldReader = require('./old-api-module.js').OldReader; +const Readable = require('stream').Readable; +const oreader = new OldReader; +const myReader = new Readable().wrap(oreader); + +myReader.on('readable', () => { + myReader.read(); // etc. +}); +``` + +### Duplex and Transform Streams + +#### Class: stream.Duplex + + + + +Duplex streams are streams that implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Duplex streams include: + +* [TCP sockets][] +* [zlib streams][zlib] +* [crypto streams][crypto] + +#### Class: stream.Transform + + + + +Transform streams are [Duplex][] streams where the output is in some way +related to the input. Like all [Duplex][] streams, Transform streams +implement both the [Readable][] and [Writable][] interfaces. + +Examples of Transform streams include: + +* [zlib streams][zlib] +* [crypto streams][crypto] + + +## API for Stream Implementers + + + +The `stream` module API has been designed to make it possible to easily +implement streams using JavaScript's prototypical inheritance model. + +First, a stream developer would declare a new JavaScript class that extends one +of the four basic stream classes (`stream.Writable`, `stream.Readable`, +`stream.Duplex`, or `stream.Transform`), making sure the call the appropriate +parent class constructor: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + super(options); + } +} +``` + +The new stream class must then implement one or more specific methods, depending +on the type of stream being created, as detailed in the chart below: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Use-case | Class | Method(s) to implement |
+| -------- | ----- | ----------------------- |
+| Reading only | [Readable](#stream_class_stream_readable) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform) | [_transform][stream-_transform], [_flush][stream-_flush] |
+ +*Note*: The implementation code for a stream should *never* call the "public" +methods of a stream that are intended for use by consumers (as described in +the [API for Stream Consumers][] section). Doing so may lead to adverse +side effects in application code consuming the stream. + +### Simplified Construction + +For many simple cases, it is possible to construct a stream without relying on +inheritance. This can be accomplished by directly creating instances of the +`stream.Writable`, `stream.Readable`, `stream.Duplex` or `stream.Transform` +objects and passing appropriate methods as constructor options. + +For example: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + } +}); +``` + +### Implementing a Writable Stream + +The `stream.Writable` class is extended to implement a [Writable][] stream. + +Custom Writable streams *must* call the `new stream.Writable([options])` +constructor and implement the `writable._write()` method. The +`writable._writev()` method *may* also be implemented. + +#### Constructor: new stream.Writable([options]) + +* `options` {Object} + * `highWaterMark` {Number} Buffer level when + [`stream.write()`][stream-write] starts returning `false`. Defaults to + `16384` (16kb), or `16` for `objectMode` streams. + * `decodeStrings` {Boolean} Whether or not to decode strings into + Buffers before passing them to [`stream._write()`][stream-_write]. + Defaults to `true` + * `objectMode` {Boolean} Whether or not the + [`stream.write(anyObj)`][stream-write] is a valid operation. When set, + it becomes possible to write JavaScript values other than string or + `Buffer` if supported by the stream implementation. Defaults to `false` + * `write` {Function} Implementation for the + [`stream._write()`][stream-_write] method. + * `writev` {Function} Implementation for the + [`stream._writev()`][stream-_writev] method. + +For example: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + // Calls the stream.Writable() constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Writable = require('stream').Writable; +const util = require('util'); + +function MyWritable(options) { + if (!(this instanceof MyWritable)) + return new MyWritable(options); + Writable.call(this, options); +} +util.inherits(MyWritable, Writable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + }, + writev(chunks, callback) { + // ... + } +}); +``` + +#### writable.\_write(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be written. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then `encoding` is the + character encoding of that string. If chunk is a `Buffer`, or if the + stream is operating in object mode, `encoding` may be ignored. +* `callback` {Function} Call this function (optionally with an error + argument) when processing is complete for the supplied chunk. + +All Writable stream implementations must provide a +[`writable._write()`][stream-_write] method to send data to the underlying +resource. + +*Note*: [Transform][] streams provide their own implementation of the +[`writable._write()`][stream-_write]. 
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called only by the internal
+Writable class methods.
+
+The `callback` method must be called to signal either that the write completed
+successfully or failed with an error. The first argument passed to the
+`callback` must be the `Error` object if the call failed or `null` if the
+write succeeded.
+
+It is important to note that all calls to `writable.write()` that occur between
+the time `writable._write()` is called and the `callback` is called will cause
+the written data to be buffered. Once the `callback` is invoked, the stream will
+emit a `'drain'` event. If a stream implementation is capable of processing
+multiple chunks of data at once, the `writable._writev()` method should be
+implemented.
+
+If the `decodeStrings` property is set in the constructor options, then
+`chunk` may be a string rather than a Buffer, and `encoding` will
+indicate the character encoding of the string. This is to support
+implementations that have optimized handling for certain string
+data encodings. If the `decodeStrings` property is explicitly set to `false`,
+the `encoding` argument can be safely ignored, and `chunk` will always be a
+`Buffer`.
+
+The `writable._write()` method is prefixed with an underscore because it is
+internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### writable.\_writev(chunks, callback)
+
+* `chunks` {Array} The chunks to be written. Each chunk has the following
+  format: `{ chunk: ..., encoding: ... }`.
+* `callback` {Function} A callback function (optionally with an error
+  argument) to be invoked when processing is complete for the supplied chunks.
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called only by the internal
+Writable class methods.
+
+The `writable._writev()` method may be implemented in addition to
+`writable._write()` in stream implementations that are capable of processing
+multiple chunks of data at once. If implemented, the method will be called with
+all chunks of data currently buffered in the write queue.
+
+The `writable._writev()` method is prefixed with an underscore because it is
+internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### Errors While Writing
+
+It is recommended that errors occurring during the processing of the
+`writable._write()` and `writable._writev()` methods are reported by invoking
+the callback and passing the error as the first argument. This will cause an
+`'error'` event to be emitted by the Writable. Throwing an Error from within
+`writable._write()` can result in unexpected and inconsistent behavior depending
+on how the stream is being used. Using the callback ensures consistent and
+predictable handling of errors.
+
+```js
+const Writable = require('stream').Writable;
+
+const myWritable = new Writable({
+  write(chunk, encoding, callback) {
+    if (chunk.toString().indexOf('a') >= 0) {
+      callback(new Error('chunk is invalid'));
+    } else {
+      callback();
+    }
+  }
+});
+```
+
+#### An Example Writable Stream
+
+The following illustrates a rather simplistic (and somewhat pointless) custom
+Writable stream implementation.
While this specific Writable stream instance +is not of any real particular usefulness, the example illustrates each of the +required elements of a custom [Writable][] stream instance: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + super(options); + } + + _write(chunk, encoding, callback) { + if (chunk.toString().indexOf('a') >= 0) { + callback(new Error('chunk is invalid')); + } else { + callback(); + } + } +} +``` + +### Implementing a Readable Stream + +The `stream.Readable` class is extended to implement a [Readable][] stream. + +Custom Readable streams *must* call the `new stream.Readable([options])` +constructor and implement the `readable._read()` method. + +#### new stream.Readable([options]) + +* `options` {Object} + * `highWaterMark` {Number} The maximum number of bytes to store in + the internal buffer before ceasing to read from the underlying + resource. Defaults to `16384` (16kb), or `16` for `objectMode` streams + * `encoding` {String} If specified, then buffers will be decoded to + strings using the specified encoding. Defaults to `null` + * `objectMode` {Boolean} Whether this stream should behave + as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns + a single value instead of a Buffer of size n. Defaults to `false` + * `read` {Function} Implementation for the [`stream._read()`][stream-_read] + method. + +For example: + +```js +const Readable = require('stream').Readable; + +class MyReadable extends Readable { + constructor(options) { + // Calls the stream.Readable(options) constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Readable = require('stream').Readable; +const util = require('util'); + +function MyReadable(options) { + if (!(this instanceof MyReadable)) + return new MyReadable(options); + Readable.call(this, options); +} +util.inherits(MyReadable, Readable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Readable = require('stream').Readable; + +const myReadable = new Readable({ + read(size) { + // ... + } +}); +``` + +#### readable.\_read(size) + +* `size` {Number} Number of bytes to read asynchronously + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Readable +class methods only. + +All Readable stream implementations must provide an implementation of the +`readable._read()` method to fetch data from the underlying resource. + +When `readable._read()` is called, if data is available from the resource, the +implementation should begin pushing that data into the read queue using the +[`this.push(dataChunk)`][stream-push] method. `_read()` should continue reading +from the resource and pushing data until `readable.push()` returns `false`. Only +when `_read()` is called again after it has stopped should it resume pushing +additional data onto the queue. + +*Note*: Once the `readable._read()` method has been called, it will not be +called again until the [`readable.push()`][stream-push] method is called. + +The `size` argument is advisory. For implementations where a "read" is a +single operation that returns data can use the `size` argument to determine how +much data to fetch. Other implementations may ignore this argument and simply +provide data whenever it becomes available. There is no need to "wait" until +`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. 
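+
+For example, a minimal sketch of a `_read()` implementation that treats `size`
+purely as a hint (the `fetchUpTo()` helper is hypothetical):
+
+```js
+const Readable = require('stream').Readable;
+
+class MySource extends Readable {
+  _read(size) {
+    // `size` is advisory; pushing a smaller or larger chunk is fine.
+    const chunk = fetchUpTo(size); // hypothetical helper returning a Buffer, or null at EOF
+    // Pushing null signals the end of the stream.
+    this.push(chunk);
+  }
+}
+```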
+ +The `readable._read()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### readable.push(chunk[, encoding]) + +* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue +* `encoding` {String} Encoding of String chunks. Must be a valid + Buffer encoding, such as `'utf8'` or `'ascii'` +* Returns {Boolean} `true` if additional chunks of data may continued to be + pushed; `false` otherwise. + +When `chunk` is a `Buffer` or `string`, the `chunk` of data will be added to the +internal queue for users of the stream to consume. Passing `chunk` as `null` +signals the end of the stream (EOF), after which no more data can be written. + +When the Readable is operating in paused mode, the data added with +`readable.push()` can be read out by calling the +[`readable.read()`][stream-read] method when the [`'readable'`][] event is +emitted. + +When the Readable is operating in flowing mode, the data added with +`readable.push()` will be delivered by emitting a `'data'` event. + +The `readable.push()` method is designed to be as flexible as possible. For +example, when wrapping a lower-level source that provides some form of +pause/resume mechanism, and a data callback, the low-level source can be wrapped +by the custom Readable instance as illustrated in the following example: + +```js +// source is an object with readStop() and readStart() methods, +// and an `ondata` member that gets called when it has data, and +// an `onend` member that gets called when the data is over. + +class SourceWrapper extends Readable { + constructor(options) { + super(options); + + this._source = getLowlevelSourceObject(); + + // Every time there's data, push it into the internal buffer. + this._source.ondata = (chunk) => { + // if push() returns false, then stop reading from source + if (!this.push(chunk)) + this._source.readStop(); + }; + + // When the source ends, push the EOF-signaling `null` chunk + this._source.onend = () => { + this.push(null); + }; + } + // _read will be called when the stream wants to pull more data in + // the advisory size argument is ignored in this case. + _read(size) { + this._source.readStart(); + } +} +``` +*Note*: The `readable.push()` method is intended be called only by Readable +Implementers, and only from within the `readable._read()` method. + +#### Errors While Reading + +It is recommended that errors occurring during the processing of the +`readable._read()` method are emitted using the `'error'` event rather than +being thrown. Throwing an Error from within `readable._read()` can result in +expected and inconsistent behavior depending on whether the stream is operating +in flowing or paused mode. Using the `'error'` event ensures consistent and +predictable handling of errors. + +```js +const Readable = require('stream').Readable; + +const myReadable = new Readable({ + read(size) { + if (checkSomeErrorCondition()) { + process.nextTick(() => this.emit('error', err)); + return; + } + // do some work + } +}); +``` + +#### An Example Counting Stream + + + +The following is a basic example of a Readable stream that emits the numerals +from 1 to 1,000,000 in ascending order, and then ends. 
+ +```js +const Readable = require('stream').Readable; + +class Counter extends Readable { + constructor(opt) { + super(opt); + this._max = 1000000; + this._index = 1; + } + + _read() { + var i = this._index++; + if (i > this._max) + this.push(null); + else { + var str = '' + i; + var buf = Buffer.from(str, 'ascii'); + this.push(buf); + } + } +} +``` + +### Implementing a Duplex Stream + +A [Duplex][] stream is one that implements both [Readable][] and [Writable][], +such as a TCP socket connection. + +Because Javascript does not have support for multiple inheritance, the +`stream.Duplex` class is extended to implement a [Duplex][] stream (as opposed +to extending the `stream.Readable` *and* `stream.Writable` classes). + +*Note*: The `stream.Duplex` class prototypically inherits from `stream.Readable` +and parasitically from `stream.Writable`. + +Custom Duplex streams *must* call the `new stream.Duplex([options])` +constructor and implement *both* the `readable._read()` and +`writable._write()` methods. + +#### new stream.Duplex(options) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `allowHalfOpen` {Boolean} Defaults to `true`. If set to `false`, then + the stream will automatically end the readable side when the + writable side ends and vice versa. + * `readableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` + for readable side of the stream. Has no effect if `objectMode` + is `true`. + * `writableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` + for writable side of the stream. Has no effect if `objectMode` + is `true`. + +For example: + +```js +const Duplex = require('stream').Duplex; + +class MyDuplex extends Duplex { + constructor(options) { + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Duplex = require('stream').Duplex; +const util = require('util'); + +function MyDuplex(options) { + if (!(this instanceof MyDuplex)) + return new MyDuplex(options); + Duplex.call(this, options); +} +util.inherits(MyDuplex, Duplex); +``` + +Or, using the Simplified Constructor approach: + +```js +const Duplex = require('stream').Duplex; + +const myDuplex = new Duplex({ + read(size) { + // ... + }, + write(chunk, encoding, callback) { + // ... + } +}); +``` + +#### An Example Duplex Stream + +The following illustrates a simple example of a Duplex stream that wraps a +hypothetical lower-level source object to which data can be written, and +from which data can be read, albeit using an API that is not compatible with +Node.js streams. +The following illustrates a simple example of a Duplex stream that buffers +incoming written data via the [Writable][] interface that is read back out +via the [Readable][] interface. + +```js +const Duplex = require('stream').Duplex; +const kSource = Symbol('source'); + +class MyDuplex extends Duplex { + constructor(source, options) { + super(options); + this[kSource] = source; + } + + _write(chunk, encoding, callback) { + // The underlying source only deals with strings + if (Buffer.isBuffer(chunk)) + chunk = chunk.toString(encoding); + this[kSource].writeSomeData(chunk, encoding); + callback(); + } + + _read(size) { + this[kSource].fetchSomeData(size, (data, encoding) => { + this.push(Buffer.from(data, encoding)); + }); + } +} +``` + +The most important aspect of a Duplex stream is that the Readable and Writable +sides operate independently of one another despite co-existing within a single +object instance. 
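+
+A short usage sketch for the `MyDuplex` class above (the `source` object and
+its `writeSomeData()`/`fetchSomeData()` methods are hypothetical, as in the
+example itself):
+
+```js
+const myDuplex = new MyDuplex(source);
+
+// The Writable side forwards chunks to the underlying source...
+myDuplex.write('some data');
+
+// ...while the Readable side independently pulls data back out of it.
+myDuplex.on('data', (chunk) => console.log(chunk.toString()));
+```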
+ +#### Object Mode Duplex Streams + +For Duplex streams, `objectMode` can be set exclusively for either the Readable +or Writable side using the `readableObjectMode` and `writableObjectMode` options +respectively. + +In the following example, for instance, a new Transform stream (which is a +type of [Duplex][] stream) is created that has an object mode Writable side +that accepts JavaScript numbers that are converted to hexidecimal strings on +the Readable side. + +```js +const Transform = require('stream').Transform; + +// All Transform streams are also Duplex Streams +const myTransform = new Transform({ + writableObjectMode: true, + + transform(chunk, encoding, callback) { + // Coerce the chunk to a number if necessary + chunk |= 0; + + // Transform the chunk into something else. + const data = chunk.toString(16); + + // Push the data onto the readable queue. + callback(null, '0'.repeat(data.length % 2) + data); + } +}); + +myTransform.setEncoding('ascii'); +myTransform.on('data', (chunk) => console.log(chunk)); + +myTransform.write(1); + // Prints: 01 +myTransform.write(10); + // Prints: 0a +myTransform.write(100); + // Prints: 64 +``` + +### Implementing a Transform Stream + +A [Transform][] stream is a [Duplex][] stream where the output is computed +in some way from the input. Examples include [zlib][] streams or [crypto][] +streams that compress, encrypt, or decrypt data. + +*Note*: There is no requirement that the output be the same size as the input, +the same number of chunks, or arrive at the same time. For example, a +Hash stream will only ever have a single chunk of output which is +provided when the input is ended. A `zlib` stream will produce output +that is either much smaller or much larger than its input. + +The `stream.Transform` class is extended to implement a [Transform][] stream. + +The `stream.Transform` class prototypically inherits from `stream.Duplex` and +implements its own versions of the `writable._write()` and `readable._read()` +methods. Custom Transform implementations *must* implement the +[`transform._transform()`][stream-_transform] method and *may* also implement +the [`transform._flush()`][stream-_flush] method. + +*Note*: Care must be taken when using Transform streams in that data written +to the stream can cause the Writable side of the stream to become paused if +the output on the Readable side is not consumed. + +#### new stream.Transform([options]) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `transform` {Function} Implementation for the + [`stream._transform()`][stream-_transform] method. + * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] + method. + +For example: + +```js +const Transform = require('stream').Transform; + +class MyTransform extends Transform { + constructor(options) { + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Transform = require('stream').Transform; +const util = require('util'); + +function MyTransform(options) { + if (!(this instanceof MyTransform)) + return new MyTransform(options); + Transform.call(this, options); +} +util.inherits(MyTransform, Transform); +``` + +Or, using the Simplified Constructor approach: + +```js +const Transform = require('stream').Transform; + +const myTransform = new Transform({ + transform(chunk, encoding, callback) { + // ... 
+ } +}); +``` + +#### Events: 'finish' and 'end' + +The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable` +and `stream.Readable` classes, respectively. The `'finish'` event is emitted +after [`stream.end()`][stream-end] is called and all chunks have been processed +by [`stream._transform()`][stream-_transform]. The `'end'` event is emitted +after all data has been output, which occurs after the callback in +[`transform._flush()`][stream-_flush] has been called. + +#### transform.\_flush(callback) + +* `callback` {Function} A callback function (optionally with an error + argument) to be called when remaining data has been flushed. + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Readable +class methods only. + +In some cases, a transform operation may need to emit an additional bit of +data at the end of the stream. For example, a `zlib` compression stream will +store an amount of internal state used to optimally compress the output. When +the stream ends, however, that additional data needs to be flushed so that the +compressed data will be complete. + +Custom [Transform][] implementations *may* implement the `transform._flush()` +method. This will be called when there is no more written data to be consumed, +but before the [`'end'`][] event is emitted signaling the end of the +[Readable][] stream. + +Within the `transform._flush()` implementation, the `readable.push()` method +may be called zero or more times, as appropriate. The `callback` function must +be called when the flush operation is complete. + +The `transform._flush()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### transform.\_transform(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be transformed. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then this is the + encoding type. If chunk is a buffer, then this is the special + value - 'buffer', ignore it in this case. +* `callback` {Function} A callback function (optionally with an error + argument and data) to be called after the supplied `chunk` has been + processed. + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Readable +class methods only. + +All Transform stream implementations must provide a `_transform()` +method to accept input and produce output. The `transform._transform()` +implementation handles the bytes being written, computes an output, then passes +that output off to the readable portion using the `readable.push()` method. + +The `transform.push()` method may be called zero or more times to generate +output from a single input chunk, depending on how much is to be output +as a result of the chunk. + +It is possible that no output is generated from any given chunk of input data. + +The `callback` function must be called only when the current chunk is completely +consumed. The first argument passed to the `callback` must be an `Error` object +if an error occurred while processing the input or `null` otherwise. If a second +argument is passed to the `callback`, it will be forwarded on to the +`readable.push()` method. 
+The following two implementations are therefore equivalent:
+
+```js
+transform.prototype._transform = function (data, encoding, callback) {
+  this.push(data);
+  callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+  callback(null, data);
+};
+```
+
+The `transform._transform()` method is prefixed with an underscore because it
+is internal to the class that defines it, and should never be called directly
+by user programs.
+
+#### Class: stream.PassThrough
+
+The `stream.PassThrough` class is a trivial implementation of a [Transform][]
+stream that simply passes the input bytes across to the output. Its purpose is
+primarily for examples and testing, but there are some use cases where
+`stream.PassThrough` is useful as a building block for novel sorts of streams.
+
+## Additional Notes
+
+### Compatibility with Older Node.js Versions
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for calls to the [`stream.read()`][stream-read] method,
+  [`'data'`][] events would begin emitting immediately. Applications that
+  needed to perform some amount of work to decide how to handle data were
+  required to store the read data in buffers so that it would not be lost.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+  guaranteed. This meant that it was still necessary to be prepared to receive
+  [`'data'`][] events *even when the stream was in a paused state*.
+
+In Node.js v0.10, the [Readable][] class was added. For backwards compatibility
+with older Node.js programs, Readable streams switch into "flowing mode" when a
+[`'data'`][] event handler is added, or when the
+[`stream.resume()`][stream-resume] method is called. The effect is that, even
+when not using the new [`stream.read()`][stream-read] method and
+[`'readable'`][] event, it is no longer necessary to worry about losing
+[`'data'`][] chunks.
+
+While most applications will continue to function normally, this introduces an
+edge case in the following conditions:
+
+* No [`'data'`][] event listener is added.
+* The [`stream.resume()`][stream-resume] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```js
+// WARNING! BROKEN!
+const net = require('net');
+
+net.createServer((socket) => {
+
+  // An 'end' listener is added, but the data is never consumed.
+  socket.on('end', () => {
+    // This handler is never called because the data is never consumed.
+    socket.end('The message was received but was not processed.\n');
+  });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would simply
+be discarded. However, in Node.js v0.10 and beyond, the socket remains paused
+forever.
+
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to begin the flow of data:
+
+```js
+// Workaround
+const net = require('net');
+
+net.createServer((socket) => {
+
+  socket.on('end', () => {
+    socket.end('The message was received but was not processed.\n');
+  });
+
+  // Start the flow of data, discarding it.
+  socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+[`readable.wrap()`][`stream.wrap()`] method.
+
+### `readable.read(0)`
+
+There are some cases where it is necessary to trigger a refresh of the
+underlying readable stream mechanisms without actually consuming any data.
In such cases, it is possible to call `readable.read(0)`, which will +always return `null`. + +If the internal read buffer is below the `highWaterMark`, and the +stream is not currently reading, then calling `stream.read(0)` will trigger +a low-level [`stream._read()`][stream-_read] call. + +While most applications will almost never need to do this, there are +situations within Node.js where this is done, particularly in the +Readable stream class internals. + +### `readable.push('')` + +Use of `readable.push('')` is not recommended. + +Pushing a zero-byte string or `Buffer` to a stream that is not in object mode +has an interesting side effect. Because it *is* a call to +[`readable.push()`][stream-push], the call will end the reading process. +However, because the argument is an empty string, no data is added to the +readable buffer so there is nothing for a user to consume. + +[`'data'`]: #stream_event_data +[`'drain'`]: #stream_event_drain +[`'end'`]: #stream_event_end +[`'finish'`]: #stream_event_finish +[`'readable'`]: #stream_event_readable +[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.3.1/api/buffer.html#buffer_buf_tostring_encoding_start_end +[`EventEmitter`]: https://nodejs.org/docs/v6.3.1/api/events.html#events_class_eventemitter +[`process.stderr`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stderr +[`process.stdin`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdin +[`process.stdout`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdout +[`stream.cork()`]: #stream_writable_cork +[`stream.pipe()`]: #stream_readable_pipe_destination_options +[`stream.uncork()`]: #stream_writable_uncork +[`stream.unpipe()`]: #stream_readable_unpipe_destination +[`stream.wrap()`]: #stream_readable_wrap_stream +[`tls.CryptoStream`]: https://nodejs.org/docs/v6.3.1/api/tls.html#tls_class_cryptostream +[API for Stream Consumers]: #stream_api_for_stream_consumers +[API for Stream Implementers]: #stream_api_for_stream_implementers +[child process stdin]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdin +[child process stdout and stderr]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdout +[Compatibility]: #stream_compatibility_with_older_node_js_versions +[crypto]: crypto.html +[Duplex]: #stream_class_stream_duplex +[fs read streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_readstream +[fs write streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_writestream +[`fs.createReadStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createreadstream_path_options +[`fs.createWriteStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createwritestream_path_options +[`net.Socket`]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[`zlib.createDeflate()`]: https://nodejs.org/docs/v6.3.1/api/zlib.html#zlib_zlib_createdeflate_options +[HTTP requests, on the client]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_clientrequest +[HTTP responses, on the server]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_serverresponse +[http-incoming-message]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_incomingmessage +[Object mode]: #stream_object_mode +[Readable]: #stream_class_stream_readable +[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 +[stream-_flush]: #stream_transform_flush_callback +[stream-_read]: #stream_readable_read_size_1 +[stream-_transform]: 
#stream_transform_transform_chunk_encoding_callback +[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 +[stream-_writev]: #stream_writable_writev_chunks_callback +[stream-end]: #stream_writable_end_chunk_encoding_callback +[stream-pause]: #stream_readable_pause +[stream-push]: #stream_readable_push_chunk_encoding +[stream-read]: #stream_readable_read_size +[stream-resume]: #stream_readable_resume +[stream-write]: #stream_writable_write_chunk_encoding_callback +[TCP sockets]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[Transform]: #stream_class_stream_transform +[Writable]: #stream_class_stream_writable +[zlib]: zlib.html diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/are-we-there-yet/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/duplex.js b/node_modules/are-we-there-yet/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..ca807af --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..736693b --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,75 @@ +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +var keys = objectKeys(Writable.prototype); +for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + processNextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..d06f71f --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,26 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..208cc65 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,937 @@ +'use strict'; + +module.exports = Readable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var isArray = require('isarray'); +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var StringDecoder; + +util.inherits(Readable, Stream); + +function prependListener(emitter, event, fn) { + if (typeof emitter.prependListener === 'function') { + return emitter.prependListener(event, fn); + } else { + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + } +} + +var Duplex; +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. 
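+  // Double bitwise NOT (`~ ~`) truncates the value to a 32-bit integer.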
+ this.highWaterMark = ~ ~this.highWaterMark; + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +var Duplex; +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = bufferShim.from(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var _e = new Error('stream.unshift() after end event'); + stream.emit('error', _e); + } else { + var skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. 
+ if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + processNextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+Readable.prototype._read = function (n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + }return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + processNextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + processNextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function (ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. 
+// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = bufferShim.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + processNextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..dbc996e --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,180 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) 
+// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function (er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. 
+ this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function () { + if (typeof this._flush === 'function') this._flush(function (er) { + done(stream, er); + });else done(stream); + }); +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('Not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) throw new Error('Calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..ed5efcb --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,526 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +util.inherits(Writable, Stream); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +var Duplex; +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. 
+ this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') + }); + } catch (_) {} +})(); + +var Duplex; +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + processNextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + // Always throw error if a null is written + // if we are not in object mode then throw + // if it is not a buffer, string, or undefined. 
+ if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + processNextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = bufferShim.from(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) processNextTick(cb, er);else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. 
+ // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) processNextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function (err) { + var entry = _this.entry; + _this.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..e4bfcf0 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,64 @@ +'use strict'; + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +module.exports = BufferList; + +function BufferList() { + this.head = null; + this.tail = null; + this.length = 0; +} + +BufferList.prototype.push = function (v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; +}; + +BufferList.prototype.unshift = function (v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; +}; + +BufferList.prototype.shift = function () { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; +}; 
+ +BufferList.prototype.clear = function () { + this.head = this.tail = null; + this.length = 0; +}; + +BufferList.prototype.join = function (s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; +}; + +BufferList.prototype.concat = function (n) { + if (this.length === 0) return bufferShim.alloc(0); + if (this.length === 1) return this.head.data; + var ret = bufferShim.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + p.data.copy(ret, i); + i += p.data.length; + p = p.next; + } + return ret; +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/package.json b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json new file mode 100644 index 0000000..4293090 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json @@ -0,0 +1,49 @@ +{ + "name": "readable-stream", + "version": "2.1.5", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "buffer-shims": "^1.0.0", + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "~1.4.0", + "babel-polyfill": "^6.9.1", + "nyc": "^6.4.0", + "tap": "~0.7.1", + "tape": "~4.5.1", + "zuul": "~3.10.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js", + "browser": "npm run write-zuul && zuul --browser-retries 2 -- test/browser.js", + "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml", + "local": "zuul --local 3000 --no-coverage -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/passthrough.js b/node_modules/are-we-there-yet/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..27e8d8a --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..be2688a --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = (function (){ + try { + return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify + } catch(_){} +}()); +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = Stream || exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); + +if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; +} diff --git 
a/node_modules/are-we-there-yet/node_modules/readable-stream/transform.js b/node_modules/are-we-there-yet/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..5d482f0 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/writable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..e1e9efd --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/are-we-there-yet/package.json b/node_modules/are-we-there-yet/package.json new file mode 100644 index 0000000..3b77c6b --- /dev/null +++ b/node_modules/are-we-there-yet/package.json @@ -0,0 +1,27 @@ +{ + "name": "are-we-there-yet", + "version": "1.1.2", + "description": "Keep track of the overall completion of many dispirate processes", + "main": "index.js", + "scripts": { + "test": "standard && tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/are-we-there-yet.git" + }, + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/are-we-there-yet/issues" + }, + "homepage": "https://github.com/iarna/are-we-there-yet", + "devDependencies": { + "standard": "^6.0.8", + "tap": "^5.7.0" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.0 || ^1.1.13" + } +} diff --git a/node_modules/are-we-there-yet/test/lib/test-event.js b/node_modules/are-we-there-yet/test/lib/test-event.js new file mode 100644 index 0000000..2aa7c05 --- /dev/null +++ b/node_modules/are-we-there-yet/test/lib/test-event.js @@ -0,0 +1,29 @@ +'use strict' +var util = require('util') + +module.exports = function (obj, event, next) { + var timeout = setTimeout(gotTimeout, 10) + obj.once(event, gotResult) + + function gotTimeout () { + obj.removeListener(event, gotResult) + next(new Error('Timeout listening for ' + event)) + } + var result = [] + function gotResult () { + result = Array.prototype.slice.call(arguments) + clearTimeout(timeout) + timeout = setTimeout(gotNoMoreResults, 10) + obj.once(event, gotTooManyResults) + } + function gotNoMoreResults () { + obj.removeListener(event, gotTooManyResults) + var args = [null].concat(result) + next.apply(null, args) + } + function gotTooManyResults () { + var secondResult = Array.prototype.slice.call(arguments) + clearTimeout(timeout) + next(new Error('Got too many results, first ' + util.inspect(result) + ' and then ' + util.inspect(secondResult))) + } +} diff --git a/node_modules/are-we-there-yet/test/tracker.js b/node_modules/are-we-there-yet/test/tracker.js new file mode 100644 index 0000000..374c773 --- /dev/null +++ b/node_modules/are-we-there-yet/test/tracker.js @@ -0,0 +1,57 @@ +'use strict' +var test = require('tap').test +var Tracker = require('../index.js').Tracker + +var testEvent = require('./lib/test-event.js') + +var name = 'test' + +test('initialization', function (t) { + var simple = new Tracker(name) + + t.is(simple.completed(), 0, 'Nothing todo is 0 completion') + t.done() +}) + +var track +var todo = 100 +test('completion', function (t) { + track = new Tracker(name, todo) + t.is(track.completed(), 0, 'Nothing done is 0 completion') + + testEvent(track, 'change', afterCompleteWork) + + track.completeWork(todo) + 
t.is(track.completed(), 1, 'completeWork: 100% completed') + + function afterCompleteWork (er, onChangeName) { + t.is(er, null, 'completeWork: on change event fired') + t.is(onChangeName, name, 'completeWork: on change emits the correct name') + t.done() + } +}) + +test('add more work', function (t) { + testEvent(track, 'change', afterAddWork) + track.addWork(todo) + t.is(track.completed(), 0.5, 'addWork: 50% completed') + function afterAddWork (er, onChangeName) { + t.is(er, null, 'addWork: on change event fired') + t.is(onChangeName, name, 'addWork: on change emits the correct name') + t.done() + } +}) + +test('complete more work', function (t) { + track.completeWork(200) + t.is(track.completed(), 1, 'completeWork: Over completion is still only 100% complete') + t.done() +}) + +test('finish is always 100%', function (t) { + var finishtest = new Tracker(name, todo) + finishtest.completeWork(50) + finishtest.finish() + t.is(finishtest.completed(), 1, 'finish: Explicitly finishing moves to 100%') + t.done() +}) diff --git a/node_modules/are-we-there-yet/test/trackergroup.js b/node_modules/are-we-there-yet/test/trackergroup.js new file mode 100644 index 0000000..799a767 --- /dev/null +++ b/node_modules/are-we-there-yet/test/trackergroup.js @@ -0,0 +1,96 @@ +'use strict' +var test = require('tap').test +var TrackerGroup = require('../index.js').TrackerGroup +var testEvent = require('./lib/test-event.js') + +test('TrackerGroup', function (t) { + var name = 'test' + + var track = new TrackerGroup(name) + t.is(track.completed(), 0, 'Nothing todo is 0 completion') + testEvent(track, 'change', afterFinishEmpty) + track.finish() + var a, b + function afterFinishEmpty (er, onChangeName, completion) { + t.is(er, null, 'finishEmpty: on change event fired') + t.is(onChangeName, name, 'finishEmpty: on change emits the correct name') + t.is(completion, 1, 'finishEmpty: passed through completion was correct') + t.is(track.completed(), 1, 'finishEmpty: Finishing an empty group actually finishes it') + + track = new TrackerGroup(name) + a = track.newItem('a', 10, 1) + b = track.newItem('b', 10, 1) + t.is(track.completed(), 0, 'Initially empty') + testEvent(track, 'change', afterCompleteWork) + a.completeWork(5) + } + function afterCompleteWork (er, onChangeName, completion) { + t.is(er, null, 'on change event fired') + t.is(onChangeName, 'a', 'on change emits the correct name') + t.is(completion, 0.25, 'Complete half of one is a quarter overall') + t.is(track.completed(), 0.25, 'Complete half of one is a quarter overall') + testEvent(track, 'change', afterFinishAll) + track.finish() + } + function afterFinishAll (er, onChangeName, completion) { + t.is(er, null, 'finishAll: on change event fired') + t.is(onChangeName, name, 'finishAll: on change emits the correct name') + t.is(completion, 1, 'Finishing everything ') + t.is(track.completed(), 1, 'Finishing everything ') + + track = new TrackerGroup(name) + a = track.newItem('a', 10, 2) + b = track.newItem('b', 10, 1) + t.is(track.completed(), 0, 'weighted: Initially empty') + testEvent(track, 'change', afterWeightedCompleteWork) + a.completeWork(5) + } + function afterWeightedCompleteWork (er, onChangeName, completion) { + t.is(er, null, 'weighted: on change event fired') + t.is(onChangeName, 'a', 'weighted: on change emits the correct name') + t.is(Math.floor(completion * 100), 33, 'weighted: Complete half of double weighted') + t.is(Math.floor(track.completed() * 100), 33, 'weighted: Complete half of double weighted') + testEvent(track, 'change', 
afterWeightedFinishAll) + track.finish() + } + function afterWeightedFinishAll (er, onChangeName, completion) { + t.is(er, null, 'weightedFinishAll: on change event fired') + t.is(onChangeName, name, 'weightedFinishAll: on change emits the correct name') + t.is(completion, 1, 'weightedFinishaAll: Finishing everything ') + t.is(track.completed(), 1, 'weightedFinishaAll: Finishing everything ') + + track = new TrackerGroup(name) + a = track.newGroup('a', 10) + b = track.newGroup('b', 10) + var a1 = a.newItem('a.1', 10) + a1.completeWork(5) + t.is(track.completed(), 0.25, 'nested: Initially quarter done') + testEvent(track, 'change', afterNestedComplete) + b.finish() + } + function afterNestedComplete (er, onChangeName, completion) { + t.is(er, null, 'nestedComplete: on change event fired') + t.is(onChangeName, 'b', 'nestedComplete: on change emits the correct name') + t.is(completion, 0.75, 'nestedComplete: Finishing everything ') + t.is(track.completed(), 0.75, 'nestedComplete: Finishing everything ') + t.end() + } +}) + +test('cycles', function (t) { + var track = new TrackerGroup('top') + testCycle(track, track) + var layer1 = track.newGroup('layer1') + testCycle(layer1, track) + t.end() + + function testCycle (addTo, toAdd) { + try { + addTo.addUnit(toAdd) + t.fail(toAdd.name) + } catch (ex) { + console.log(ex) + t.pass(toAdd.name) + } + } +}) diff --git a/node_modules/are-we-there-yet/test/trackerstream.js b/node_modules/are-we-there-yet/test/trackerstream.js new file mode 100644 index 0000000..65f04b0 --- /dev/null +++ b/node_modules/are-we-there-yet/test/trackerstream.js @@ -0,0 +1,51 @@ +'use strict' +var test = require('tap').test +var util = require('util') +var stream = require('readable-stream') +var TrackerStream = require('../index.js').TrackerStream +var testEvent = require('./lib/test-event.js') + +var Sink = function () { + stream.Writable.apply(this, arguments) +} +util.inherits(Sink, stream.Writable) +Sink.prototype._write = function (data, encoding, cb) { + cb() +} + +test('TrackerStream', function (t) { + t.plan(9) + + var name = 'test' + var track = new TrackerStream(name) + + t.is(track.completed(), 0, 'Nothing todo is 0 completion') + + var todo = 10 + track = new TrackerStream(name, todo) + t.is(track.completed(), 0, 'Nothing done is 0 completion') + + track.pipe(new Sink()) + + testEvent(track, 'change', afterCompleteWork) + track.write('0123456789') + function afterCompleteWork (er, onChangeName) { + t.is(er, null, 'write: on change event fired') + t.is(onChangeName, name, 'write: on change emits the correct name') + t.is(track.completed(), 1, 'write: 100% completed') + + testEvent(track, 'change', afterAddWork) + track.addWork(10) + } + function afterAddWork (er, onChangeName) { + t.is(er, null, 'addWork: on change event fired') + t.is(track.completed(), 0.5, 'addWork: 50% completed') + + testEvent(track, 'change', afterAllWork) + track.write('ABCDEFGHIJKLMNOPQRST') + } + function afterAllWork (er) { + t.is(er, null, 'allWork: on change event fired') + t.is(track.completed(), 1, 'allWork: 100% completed') + } +}) diff --git a/node_modules/are-we-there-yet/tracker-base.js b/node_modules/are-we-there-yet/tracker-base.js new file mode 100644 index 0000000..6f43687 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + 
this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/node_modules/are-we-there-yet/tracker-group.js b/node_modules/are-we-there-yet/tracker-group.js new file mode 100644 index 0000000..9759e12 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-group.js @@ -0,0 +1,107 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) return + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) return 0 + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? 
buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/node_modules/are-we-there-yet/tracker-stream.js b/node_modules/are-we-there-yet/tracker-stream.js new file mode 100644 index 0000000..fb9598e --- /dev/null +++ b/node_modules/are-we-there-yet/tracker-stream.js @@ -0,0 +1,35 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') diff --git a/node_modules/are-we-there-yet/tracker.js b/node_modules/are-we-there-yet/tracker.js new file mode 100644 index 0000000..68c2339 --- /dev/null +++ b/node_modules/are-we-there-yet/tracker.js @@ -0,0 +1,30 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) this.workDone = this.workTodo + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/node_modules/arr-diff/LICENSE b/node_modules/arr-diff/LICENSE new file mode 100755 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/arr-diff/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/arr-diff/README.md b/node_modules/arr-diff/README.md new file mode 100644 index 0000000..7705c6c --- /dev/null +++ b/node_modules/arr-diff/README.md @@ -0,0 +1,74 @@ +# arr-diff [![NPM version](https://img.shields.io/npm/v/arr-diff.svg)](https://www.npmjs.com/package/arr-diff) [![Build Status](https://img.shields.io/travis/jonschlinkert/base.svg)](https://travis-ci.org/jonschlinkert/base) + +> Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i arr-diff --save +``` +Install with [bower](http://bower.io/) + +```sh +$ bower install arr-diff --save +``` + +## API + +### [diff](index.js#L33) + +Return the difference between the first array and additional arrays. + +**Params** + +* `a` **{Array}** +* `b` **{Array}** +* `returns` **{Array}** + +**Example** + +```js +var diff = require('arr-diff'); + +var a = ['a', 'b', 'c', 'd']; +var b = ['b', 'c']; + +console.log(diff(a, b)) +//=> ['a', 'd'] +``` + +## Related projects + +* [arr-flatten](https://www.npmjs.com/package/arr-flatten): Recursively flatten an array or arrays. This is the fastest implementation of array flatten. | [homepage](https://github.com/jonschlinkert/arr-flatten) +* [array-filter](https://www.npmjs.com/package/array-filter): Array#filter for older browsers. | [homepage](https://github.com/juliangruber/array-filter) +* [array-intersection](https://www.npmjs.com/package/array-intersection): Return an array with the unique values present in _all_ given arrays using strict equality… [more](https://www.npmjs.com/package/array-intersection) | [homepage](https://github.com/jonschlinkert/array-intersection) + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/arr-diff/issues/new). + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 [Jon Schlinkert](https://github.com/jonschlinkert) +Released under the MIT license. + +*** + +_This file was generated by [verb](https://github.com/verbose/verb) on Sat Dec 05 2015 23:24:53 GMT-0500 (EST)._ diff --git a/node_modules/arr-diff/index.js b/node_modules/arr-diff/index.js new file mode 100644 index 0000000..bc7200d --- /dev/null +++ b/node_modules/arr-diff/index.js @@ -0,0 +1,58 @@ +/*! + * arr-diff + * + * Copyright (c) 2014 Jon Schlinkert, contributors. + * Licensed under the MIT License + */ + +'use strict'; + +var flatten = require('arr-flatten'); +var slice = [].slice; + +/** + * Return the difference between the first array and + * additional arrays. 
+ * + * ```js + * var diff = require('{%= name %}'); + * + * var a = ['a', 'b', 'c', 'd']; + * var b = ['b', 'c']; + * + * console.log(diff(a, b)) + * //=> ['a', 'd'] + * ``` + * + * @param {Array} `a` + * @param {Array} `b` + * @return {Array} + * @api public + */ + +function diff(arr, arrays) { + var argsLen = arguments.length; + var len = arr.length, i = -1; + var res = [], arrays; + + if (argsLen === 1) { + return arr; + } + + if (argsLen > 2) { + arrays = flatten(slice.call(arguments, 1)); + } + + while (++i < len) { + if (!~arrays.indexOf(arr[i])) { + res.push(arr[i]); + } + } + return res; +} + +/** + * Expose `diff` + */ + +module.exports = diff; diff --git a/node_modules/arr-diff/package.json b/node_modules/arr-diff/package.json new file mode 100644 index 0000000..d40c210 --- /dev/null +++ b/node_modules/arr-diff/package.json @@ -0,0 +1,49 @@ +{ + "name": "arr-diff", + "description": "Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons.", + "version": "2.0.0", + "homepage": "https://github.com/jonschlinkert/arr-diff", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/arr-diff", + "bugs": { + "url": "https://github.com/jonschlinkert/arr-diff/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "arr-flatten": "^1.0.1" + }, + "devDependencies": { + "array-differ": "^1.0.0", + "array-slice": "^0.2.3", + "benchmarked": "^0.1.4", + "chalk": "^1.1.1", + "mocha": "*", + "should": "*" + }, + "keywords": [ + "arr", + "array", + "diff", + "differ", + "difference" + ], + "verb": { + "related": { + "list": [ + "arr-flatten", + "array-filter", + "array-intersection" + ] + } + } +} \ No newline at end of file diff --git a/node_modules/arr-flatten/LICENSE b/node_modules/arr-flatten/LICENSE new file mode 100755 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/arr-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
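The arr-diff implementation above and the arr-flatten package whose files follow are directly related: when arr-diff is called with more than one exclusion array, it flattens the extra arguments with arr-flatten before filtering. A minimal usage sketch of that interaction, assuming the vendored arr-diff@2.0.0 and arr-flatten@1.0.1 are resolvable from node_modules (illustrative only, not part of the committed files):

```js
// Illustrative only: strict-equality difference, as implemented by the
// vendored arr-diff@2.0.0 above (which flattens any extra exclusion
// arrays with arr-flatten before filtering).
var diff = require('arr-diff');

// Single exclusion array: argsLen === 2, so the second argument is used as-is.
console.log(diff(['a', 'b', 'c', 'd'], ['b', 'c']));
//=> ['a', 'd']

// Multiple exclusion arrays: argsLen > 2, so the extra arguments are
// flattened first, which is why nested arrays behave like flat ones here.
console.log(diff(['a', 'b', 'c', 'd'], ['b'], [['c']]));
//=> ['a', 'd']
```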
diff --git a/node_modules/arr-flatten/README.md b/node_modules/arr-flatten/README.md new file mode 100755 index 0000000..bd696e6 --- /dev/null +++ b/node_modules/arr-flatten/README.md @@ -0,0 +1,73 @@ +# arr-flatten [![NPM version](https://badge.fury.io/js/arr-flatten.svg)](http://badge.fury.io/js/arr-flatten) [![Build Status](https://travis-ci.org/jonschlinkert/arr-flatten.svg)](https://travis-ci.org/jonschlinkert/arr-flatten) + +> Recursively flatten an array or arrays. This is the fastest implementation of array flatten. + +Why another flatten utility? I wanted the fastest implementation I could find, with implementation choices that should work for 95% of use cases, but no cruft to cover the other 5%. + +## Run benchmarks + +```bash +npm run benchmarks +``` + +Benchmark results comparing this library to [array-flatten]: + +```bash +#1: large.js + arr-flatten.js x 487,030 ops/sec ±0.67% (92 runs sampled) + array-flatten.js x 347,020 ops/sec ±0.57% (98 runs sampled) + +#2: medium.js + arr-flatten.js x 1,914,516 ops/sec ±0.76% (94 runs sampled) + array-flatten.js x 1,391,661 ops/sec ±0.63% (96 runs sampled) + +#3: small.js + arr-flatten.js x 5,158,980 ops/sec ±0.85% (94 runs sampled) + array-flatten.js x 3,683,173 ops/sec ±0.79% (97 runs sampled) +``` + +## Run tests + +Install dev dependencies: + +```bash +npm i -d && npm test +``` + +## Install with [npm](npmjs.org) + +```bash +npm i arr-flatten --save +``` +### Install with [bower](https://github.com/bower/bower) + +```bash +bower install arr-flatten --save +``` + + +## Usage + +```js +var flatten = require('arr-flatten'); + +flatten(['a', ['b', ['c']], 'd', ['e']]); +//=> ['a', 'b', 'c', 'd', 'e'] +``` + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2014-2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 11, 2015._ + +[array-flatten]: https://github.com/blakeembrey/array-flatten \ No newline at end of file diff --git a/node_modules/arr-flatten/index.js b/node_modules/arr-flatten/index.js new file mode 100755 index 0000000..f74e48c --- /dev/null +++ b/node_modules/arr-flatten/index.js @@ -0,0 +1,27 @@ +/*! + * arr-flatten + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function flatten(arr) { + return flat(arr, []); +}; + +function flat(arr, res) { + var len = arr.length; + var i = -1; + + while (len--) { + var cur = arr[++i]; + if (Array.isArray(cur)) { + flat(cur, res); + } else { + res.push(cur); + } + } + return res; +} \ No newline at end of file diff --git a/node_modules/arr-flatten/package.json b/node_modules/arr-flatten/package.json new file mode 100755 index 0000000..8d0322d --- /dev/null +++ b/node_modules/arr-flatten/package.json @@ -0,0 +1,51 @@ +{ + "name": "arr-flatten", + "description": "Recursively flatten an array or arrays. 
This is the fastest implementation of array flatten.", + "version": "1.0.1", + "homepage": "https://github.com/jonschlinkert/arr-flatten", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/arr-flatten.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/arr-flatten/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/arr-flatten/blob/master/LICENSE" + }, + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "benchmarks": "node benchmark" + }, + "devDependencies": { + "array-flatten": "^1.0.2", + "array-slice": "^0.2.2", + "benchmarked": "^0.1.3", + "chalk": "^0.5.1", + "glob": "^4.3.5", + "kind-of": "^1.0.0" + }, + "keywords": [ + "arr", + "array", + "elements", + "flat", + "flatten", + "nested", + "recurse", + "recursive", + "recursively" + ] +} diff --git a/node_modules/array-flatten/LICENSE b/node_modules/array-flatten/LICENSE new file mode 100644 index 0000000..983fbe8 --- /dev/null +++ b/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-flatten/README.md b/node_modules/array-flatten/README.md new file mode 100644 index 0000000..91fa5b6 --- /dev/null +++ b/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. 
+ +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/node_modules/array-flatten/array-flatten.js b/node_modules/array-flatten/array-flatten.js new file mode 100644 index 0000000..089117b --- /dev/null +++ b/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. + * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. 
+ * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/node_modules/array-flatten/package.json b/node_modules/array-flatten/package.json new file mode 100644 index 0000000..1a24e2a --- /dev/null +++ b/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/node_modules/array-unique/LICENSE b/node_modules/array-unique/LICENSE new file mode 100755 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/array-unique/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/array-unique/README.md b/node_modules/array-unique/README.md new file mode 100755 index 0000000..2e28774 --- /dev/null +++ b/node_modules/array-unique/README.md @@ -0,0 +1,51 @@ +# array-unique [![NPM version](https://badge.fury.io/js/array-unique.svg)](http://badge.fury.io/js/array-unique) [![Build Status](https://travis-ci.org/jonschlinkert/array-unique.svg)](https://travis-ci.org/jonschlinkert/array-unique) + +> Return an array free of duplicate values. Fastest ES5 implementation. + +## Install with [npm](npmjs.org) + +```bash +npm i array-unique --save +``` + +## Usage + +```js +var unique = require('array-unique'); + +unique(['a', 'b', 'c', 'c']); +//=> ['a', 'b', 'c'] +``` + +## Related +* [arr-diff](https://github.com/jonschlinkert/arr-diff): Returns an array with only the unique values from the first array, by excluding all values from additional arrays using strict equality for comparisons. 
+* [arr-union](https://github.com/jonschlinkert/arr-union): Returns an array of unique values using strict equality for comparisons. +* [arr-flatten](https://github.com/jonschlinkert/arr-flatten): Recursively flatten an array or arrays. This is the fastest implementation of array flatten. +* [arr-reduce](https://github.com/jonschlinkert/arr-reduce): Fast array reduce that also loops over sparse elements. +* [arr-map](https://github.com/jonschlinkert/arr-map): Faster, node.js focused alternative to JavaScript's native array map. +* [arr-pluck](https://github.com/jonschlinkert/arr-pluck): Retrieves the value of a specified property from all elements in the collection. + +## Run tests +Install dev dependencies. + +```bash +npm i -d && npm test +``` + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/array-unique/issues) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 24, 2015._ \ No newline at end of file diff --git a/node_modules/array-unique/index.js b/node_modules/array-unique/index.js new file mode 100755 index 0000000..7fa75af --- /dev/null +++ b/node_modules/array-unique/index.js @@ -0,0 +1,28 @@ +/*! + * array-unique + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function unique(arr) { + if (!Array.isArray(arr)) { + throw new TypeError('array-unique expects an array.'); + } + + var len = arr.length; + var i = -1; + + while (i++ < len) { + var j = i + 1; + + for (; j < arr.length; ++j) { + if (arr[i] === arr[j]) { + arr.splice(j--, 1); + } + } + } + return arr; +}; diff --git a/node_modules/array-unique/package.json b/node_modules/array-unique/package.json new file mode 100755 index 0000000..a493f69 --- /dev/null +++ b/node_modules/array-unique/package.json @@ -0,0 +1,37 @@ +{ + "name": "array-unique", + "description": "Return an array free of duplicate values. Fastest ES5 implementation.", + "version": "0.2.1", + "homepage": "https://github.com/jonschlinkert/array-unique", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/array-unique.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/array-unique/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/array-unique/blob/master/LICENSE" + }, + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "array-uniq": "^1.0.2", + "benchmarked": "^0.1.3", + "mocha": "*", + "should": "*" + } +} diff --git a/node_modules/arrify/index.js b/node_modules/arrify/index.js new file mode 100644 index 0000000..2a2fdee --- /dev/null +++ b/node_modules/arrify/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = function (val) { + if (val === null || val === undefined) { + return []; + } + + return Array.isArray(val) ? 
val : [val]; +}; diff --git a/node_modules/arrify/license b/node_modules/arrify/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/arrify/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/arrify/package.json b/node_modules/arrify/package.json new file mode 100644 index 0000000..1e63e59 --- /dev/null +++ b/node_modules/arrify/package.json @@ -0,0 +1,33 @@ +{ + "name": "arrify", + "version": "1.0.1", + "description": "Convert a value to an array", + "license": "MIT", + "repository": "sindresorhus/arrify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "array", + "arr", + "arrify", + "arrayify", + "convert", + "value" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/arrify/readme.md b/node_modules/arrify/readme.md new file mode 100644 index 0000000..183d075 --- /dev/null +++ b/node_modules/arrify/readme.md @@ -0,0 +1,36 @@ +# arrify [![Build Status](https://travis-ci.org/sindresorhus/arrify.svg?branch=master)](https://travis-ci.org/sindresorhus/arrify) + +> Convert a value to an array + + +## Install + +``` +$ npm install --save arrify +``` + + +## Usage + +```js +const arrify = require('arrify'); + +arrify('unicorn'); +//=> ['unicorn'] + +arrify(['unicorn']); +//=> ['unicorn'] + +arrify(null); +//=> [] + +arrify(undefined); +//=> [] +``` + +*Supplying `null` or `undefined` results in an empty array.* + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/asn1/.npmignore b/node_modules/asn1/.npmignore new file mode 100644 index 0000000..eb03e3e --- /dev/null +++ b/node_modules/asn1/.npmignore @@ -0,0 +1,2 @@ +node_modules +*.log diff --git a/node_modules/asn1/.travis.yml b/node_modules/asn1/.travis.yml new file mode 100644 index 0000000..09d3ef3 --- /dev/null +++ b/node_modules/asn1/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.8 + - 0.10 diff --git a/node_modules/asn1/LICENSE b/node_modules/asn1/LICENSE new file mode 100644 index 0000000..9b5dcdb --- /dev/null +++ b/node_modules/asn1/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011 Mark Cavage, All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/asn1/README.md b/node_modules/asn1/README.md new file mode 100644 index 0000000..7cebf7a --- /dev/null +++ b/node_modules/asn1/README.md @@ -0,0 +1,50 @@ +node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS. +Currently BER encoding is supported; at some point I'll likely have to do DER. + +## Usage + +Mostly, if you're *actually* needing to read and write ASN.1, you probably don't +need this readme to explain what and why. If you have no idea what ASN.1 is, +see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +The source is pretty much self-explanatory, and has read/write methods for the +common types out there. + +### Decoding + +The following reads an ASN.1 sequence with a boolean. + + var Ber = require('asn1').Ber; + + var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff])); + + reader.readSequence(); + console.log('Sequence len: ' + reader.length); + if (reader.peek() === Ber.Boolean) + console.log(reader.readBoolean()); + +### Encoding + +The following generates the same payload as above. + + var Ber = require('asn1').Ber; + + var writer = new Ber.Writer(); + + writer.startSequence(); + writer.writeBoolean(true); + writer.endSequence(); + + console.log(writer.buffer); + +## Installation + + npm install asn1 + +## License + +MIT. + +## Bugs + +See . diff --git a/node_modules/asn1/lib/ber/errors.js b/node_modules/asn1/lib/ber/errors.js new file mode 100644 index 0000000..ff21d4f --- /dev/null +++ b/node_modules/asn1/lib/ber/errors.js @@ -0,0 +1,13 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + + newInvalidAsn1Error: function(msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } + +}; diff --git a/node_modules/asn1/lib/ber/index.js b/node_modules/asn1/lib/ber/index.js new file mode 100644 index 0000000..4fb90ae --- /dev/null +++ b/node_modules/asn1/lib/ber/index.js @@ -0,0 +1,27 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var errors = require('./errors'); +var types = require('./types'); + +var Reader = require('./reader'); +var Writer = require('./writer'); + + +///--- Exports + +module.exports = { + + Reader: Reader, + + Writer: Writer + +}; + +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; +} +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; +} diff --git a/node_modules/asn1/lib/ber/reader.js b/node_modules/asn1/lib/ber/reader.js new file mode 100644 index 0000000..0a00e98 --- /dev/null +++ b/node_modules/asn1/lib/ber/reader.js @@ -0,0 +1,261 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var assert = require('assert'); + +var ASN1 = require('./types'); +var errors = require('./errors'); + + +///--- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + + + +///--- API + +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); + + this._buf = data; + this._size = data.length; + + // These hold the "current" state + this._len = 0; + this._offset = 0; +} + +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); + +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); + + +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. + */ +Reader.prototype.readByte = function(peek) { + if (this._size - this._offset < 1) + return null; + + var b = this._buf[this._offset] & 0xff; + + if (!peek) + this._offset += 1; + + return b; +}; + + +Reader.prototype.peek = function() { + return this.readByte(true); +}; + + +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function(offset) { + if (offset === undefined) + offset = this._offset; + + if (offset >= this._size) + return null; + + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; + + if ((lenB & 0x80) == 0x80) { + lenB &= 0x7f; + + if (lenB == 0) + throw newInvalidAsn1Error('Indefinite length not supported'); + + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); + + if (this._size - offset < lenB) + return null; + + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); + + } else { + // Wasn't a variable length + this._len = lenB; + } + + return offset; +}; + + +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. 
+ */ +Reader.prototype.readSequence = function(tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + this._offset = o; + return seq; +}; + + +Reader.prototype.readInt = function() { + return this._readTag(ASN1.Integer); +}; + + +Reader.prototype.readBoolean = function() { + return (this._readTag(ASN1.Boolean) === 0 ? false : true); +}; + + +Reader.prototype.readEnumeration = function() { + return this._readTag(ASN1.Enumeration); +}; + + +Reader.prototype.readString = function(tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; + + var b = this.peek(); + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + + if (o === null) + return null; + + if (this.length > this._size - o) + return null; + + this._offset = o; + + if (this.length === 0) + return retbuf ? new Buffer(0) : ''; + + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; + + return retbuf ? str : str.toString('utf8'); +}; + +Reader.prototype.readOID = function(tag) { + if (!tag) + tag = ASN1.OID; + + var b = this.readString(tag, true); + if (b === null) + return null; + + var values = []; + var value = 0; + + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; + + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) == 0) { + values.push(value); + value = 0; + } + } + + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); + + return values.join('.'); +}; + + +Reader.prototype._readTag = function(tag) { + assert.ok(tag !== undefined); + + var b = this.peek(); + + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + + if (this.length > this._size - o) + return null; + this._offset = o; + + var fb = this._buf[this._offset]; + var value = 0; + + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); + } + + if ((fb & 0x80) == 0x80 && i !== 4) + value -= (1 << (i * 8)); + + return value >> 0; +}; + + + +///--- Exported API + +module.exports = Reader; diff --git a/node_modules/asn1/lib/ber/types.js b/node_modules/asn1/lib/ber/types.js new file mode 100644 index 0000000..8aea000 --- /dev/null +++ b/node_modules/asn1/lib/ber/types.js @@ -0,0 +1,36 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ + +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; diff --git a/node_modules/asn1/lib/ber/writer.js b/node_modules/asn1/lib/ber/writer.js new file mode 100644 index 0000000..d9d99af --- /dev/null +++ b/node_modules/asn1/lib/ber/writer.js @@ -0,0 +1,316 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var assert = require('assert'); +var ASN1 = require('./types'); +var errors = require('./errors'); + + +///--- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 +}; + + +///--- Helpers + +function merge(from, to) { + assert.ok(from); + assert.equal(typeof(from), 'object'); + assert.ok(to); + assert.equal(typeof(to), 'object'); + + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function(key) { + if (to[key]) + return; + + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); + + return to; +} + + + +///--- API + +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); + + this._buf = new Buffer(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; + + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. + this._seq = []; +} + +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw new InvalidAsn1Error(this._seq.length + ' unended sequence(s)'); + + return (this._buf.slice(0, this._offset)); + } +}); + +Writer.prototype.writeByte = function(b) { + if (typeof(b) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(1); + this._buf[this._offset++] = b; +}; + + +Writer.prototype.writeInt = function(i, tag) { + if (typeof(i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof(tag) !== 'number') + tag = ASN1.Integer; + + var sz = 4; + + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; + } + + if (sz > 4) + throw new InvalidAsn1Error('BER ints cannot be > 0xffffffff'); + + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; + + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; + } + +}; + + +Writer.prototype.writeNull = function() { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; + + +Writer.prototype.writeEnumeration = function(i, tag) { + if (typeof(i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof(tag) !== 'number') + tag = ASN1.Enumeration; + + return this.writeInt(i, tag); +}; + + +Writer.prototype.writeBoolean = function(b, tag) { + if (typeof(b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof(tag) !== 'number') + tag = ASN1.Boolean; + + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 
0xff : 0x00; +}; + + +Writer.prototype.writeString = function(s, tag) { + if (typeof(s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof(s) + ')'); + if (typeof(tag) !== 'number') + tag = ASN1.OctetString; + + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; + + +Writer.prototype.writeBuffer = function(buf, tag) { + if (typeof(tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; + + +Writer.prototype.writeStringArray = function(strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + + var self = this; + strings.forEach(function(s) { + self.writeString(s); + }); +}; + +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function(s, tag) { + if (typeof(s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof(tag) !== 'number') + tag = ASN1.OID; + + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); + + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) | 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } + } + + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function(b) { + encodeOctet(bytes, parseInt(b, 10)); + }); + + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function(b) { + self.writeByte(b); + }); +}; + + +Writer.prototype.writeLength = function(len) { + if (typeof(len) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(4); + + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else { + throw new InvalidAsn1ERror('Length too long (> 4 bytes)'); + } +}; + +Writer.prototype.startSequence = function(tag) { + if (typeof(tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; + + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; + + +Writer.prototype.endSequence = function() { + var seq = this._seq.pop(); + var start = seq + 3; + var len 
= this._offset - start; + + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; + } else { + throw new InvalidAsn1Error('Sequence too long'); + } +}; + + +Writer.prototype._shift = function(start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); + + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; + +Writer.prototype._ensure = function(len) { + assert.ok(len); + + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; + + var buf = new Buffer(sz); + + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; + } +}; + + + +///--- Exported API + +module.exports = Writer; diff --git a/node_modules/asn1/lib/index.js b/node_modules/asn1/lib/index.js new file mode 100644 index 0000000..d1766e7 --- /dev/null +++ b/node_modules/asn1/lib/index.js @@ -0,0 +1,20 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +var Ber = require('./ber/index'); + + + +///--- Exported API + +module.exports = { + + Ber: Ber, + + BerReader: Ber.Reader, + + BerWriter: Ber.Writer + +}; diff --git a/node_modules/asn1/package.json b/node_modules/asn1/package.json new file mode 100644 index 0000000..f6dfb06 --- /dev/null +++ b/node_modules/asn1/package.json @@ -0,0 +1,24 @@ +{ + "author": "Mark Cavage ", + "contributors": [ + "David Gwynne ", + "Yunong Xiao ", + "Alex Wilson " + ], + "name": "asn1", + "description": "Contains parsers and serializers for ASN.1 (currently BER only)", + "version": "0.2.3", + "repository": { + "type": "git", + "url": "git://github.com/mcavage/node-asn1.git" + }, + "main": "lib/index.js", + "dependencies": {}, + "devDependencies": { + "tap": "0.4.8" + }, + "scripts": { + "test": "./node_modules/.bin/tap ./tst" + }, + "license": "MIT" +} diff --git a/node_modules/asn1/tst/ber/reader.test.js b/node_modules/asn1/tst/ber/reader.test.js new file mode 100644 index 0000000..062fd7e --- /dev/null +++ b/node_modules/asn1/tst/ber/reader.test.js @@ -0,0 +1,208 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var test = require('tap').test; + + + +///--- Globals + +var BerReader; + + + +///--- Tests + +test('load library', function(t) { + BerReader = require('../../lib/index').BerReader; + t.ok(BerReader); + try { + new BerReader(); + t.fail('Should have thrown'); + } catch (e) { + t.ok(e instanceof TypeError, 'Should have been a type error'); + } + t.end(); +}); + + +test('read byte', function(t) { + var reader = new BerReader(new Buffer([0xde])); + t.ok(reader); + t.equal(reader.readByte(), 0xde, 'wrong value'); + t.end(); +}); + + +test('read 1 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x01, 0x03])); + t.ok(reader); + t.equal(reader.readInt(), 0x03, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read 2 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x02, 0x7e, 0xde])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede, 'wrong value'); + t.equal(reader.length, 0x02, 'wrong length'); + t.end(); +}); + + +test('read 3 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x03, 0x7e, 0xde, 0x03])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede03, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.end(); +}); + + +test('read 4 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x04, 0x7e, 0xde, 0x03, 0x01])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede0301, 'wrong value'); + t.equal(reader.length, 0x04, 'wrong length'); + t.end(); +}); + + +test('read 1 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x01, 0xdc])); + t.ok(reader); + t.equal(reader.readInt(), -36, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read 2 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x02, 0xc0, 0x4e])); + t.ok(reader); + t.equal(reader.readInt(), -16306, 'wrong value'); + t.equal(reader.length, 0x02, 'wrong length'); + t.end(); +}); + + +test('read 3 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x03, 0xff, 0x00, 0x19])); + t.ok(reader); + t.equal(reader.readInt(), -65511, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.end(); +}); + + +test('read 4 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x04, 0x91, 0x7c, 0x22, 0x1f])); + t.ok(reader); + t.equal(reader.readInt(), -1854135777, 'wrong value'); + t.equal(reader.length, 0x04, 'wrong length'); + t.end(); +}); + + +test('read boolean true', function(t) { + var reader = new BerReader(new Buffer([0x01, 0x01, 0xff])); + t.ok(reader); + t.equal(reader.readBoolean(), true, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read boolean false', function(t) { + var reader = new BerReader(new Buffer([0x01, 0x01, 0x00])); + t.ok(reader); + t.equal(reader.readBoolean(), false, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read enumeration', function(t) { + var reader = new BerReader(new Buffer([0x0a, 0x01, 0x20])); + t.ok(reader); + t.equal(reader.readEnumeration(), 0x20, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read string', function(t) { + var dn = 'cn=foo,ou=unit,o=test'; + var buf = new Buffer(dn.length + 2); + buf[0] = 0x04; + buf[1] = Buffer.byteLength(dn); + buf.write(dn, 2); + var reader = new BerReader(buf); + t.ok(reader); + t.equal(reader.readString(), dn, 'wrong 
value'); + t.equal(reader.length, dn.length, 'wrong length'); + t.end(); +}); + + +test('read sequence', function(t) { + var reader = new BerReader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff])); + t.ok(reader); + t.equal(reader.readSequence(), 0x30, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.equal(reader.readBoolean(), true, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('anonymous LDAPv3 bind', function(t) { + var BIND = new Buffer(14); + BIND[0] = 0x30; // Sequence + BIND[1] = 12; // len + BIND[2] = 0x02; // ASN.1 Integer + BIND[3] = 1; // len + BIND[4] = 0x04; // msgid (make up 4) + BIND[5] = 0x60; // Bind Request + BIND[6] = 7; // len + BIND[7] = 0x02; // ASN.1 Integer + BIND[8] = 1; // len + BIND[9] = 0x03; // v3 + BIND[10] = 0x04; // String (bind dn) + BIND[11] = 0; // len + BIND[12] = 0x80; // ContextSpecific (choice) + BIND[13] = 0; // simple bind + + // Start testing ^^ + var ber = new BerReader(BIND); + t.equal(ber.readSequence(), 48, 'Not an ASN.1 Sequence'); + t.equal(ber.length, 12, 'Message length should be 12'); + t.equal(ber.readInt(), 4, 'Message id should have been 4'); + t.equal(ber.readSequence(), 96, 'Bind Request should have been 96'); + t.equal(ber.length, 7, 'Bind length should have been 7'); + t.equal(ber.readInt(), 3, 'LDAP version should have been 3'); + t.equal(ber.readString(), '', 'Bind DN should have been empty'); + t.equal(ber.length, 0, 'string length should have been 0'); + t.equal(ber.readByte(), 0x80, 'Should have been ContextSpecific (choice)'); + t.equal(ber.readByte(), 0, 'Should have been simple bind'); + t.equal(null, ber.readByte(), 'Should be out of data'); + t.end(); +}); + + +test('long string', function(t) { + var buf = new Buffer(256); + var o; + var s = + '2;649;CN=Red Hat CS 71GA Demo,O=Red Hat CS 71GA Demo,C=US;' + + 'CN=RHCS Agent - admin01,UID=admin01,O=redhat,C=US [1] This is ' + + 'Teena Vradmin\'s description.'; + buf[0] = 0x04; + buf[1] = 0x81; + buf[2] = 0x94; + buf.write(s, 3); + var ber = new BerReader(buf.slice(0, 3 + s.length)); + t.equal(ber.readString(), s); + t.end(); +}); diff --git a/node_modules/asn1/tst/ber/writer.test.js b/node_modules/asn1/tst/ber/writer.test.js new file mode 100644 index 0000000..d87cb7b --- /dev/null +++ b/node_modules/asn1/tst/ber/writer.test.js @@ -0,0 +1,370 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var test = require('tap').test; +var sys = require('sys'); + +///--- Globals + +var BerWriter; + +var BerReader; + + +///--- Tests + +test('load library', function(t) { + BerWriter = require('../../lib/index').BerWriter; + t.ok(BerWriter); + t.ok(new BerWriter()); + t.end(); +}); + + +test('write byte', function(t) { + var writer = new BerWriter(); + + writer.writeByte(0xC2); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 1, 'Wrong length'); + t.equal(ber[0], 0xC2, 'value wrong'); + + t.end(); +}); + + +test('write 1 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7f); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 3, 'Wrong length for an int: ' + ber.length); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong (2) -> ' + ber[0]); + t.equal(ber[1], 0x01, 'length wrong(1) -> ' + ber[1]); + t.equal(ber[2], 0x7f, 'value wrong(3) -> ' + ber[2]); + + t.end(); +}); + + +test('write 2 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffe); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 4, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x02, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xfe, 'value wrong (byte 2)'); + + t.end(); +}); + + +test('write 3 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffffe); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 5, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x03, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xff, 'value wrong (byte 2)'); + t.equal(ber[4], 0xfe, 'value wrong (byte 3)'); + + t.end(); +}); + + +test('write 4 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffffffe); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 6, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x04, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xff, 'value wrong (byte 2)'); + t.equal(ber[4], 0xff, 'value wrong (byte 3)'); + t.equal(ber[5], 0xfe, 'value wrong (byte 4)'); + + t.end(); +}); + + +test('write 1 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-128); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 3, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x01, 'length wrong'); + t.equal(ber[2], 0x80, 'value wrong (byte 1)'); + + t.end(); +}); + + +test('write 2 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-22400); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 4, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x02, 'length wrong'); + t.equal(ber[2], 0xa8, 'value wrong (byte 1)'); + t.equal(ber[3], 0x80, 'value wrong (byte 2)'); + + t.end(); +}); + + +test('write 3 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-481653); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 5, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x03, 'length wrong'); + t.equal(ber[2], 0xf8, 'value wrong (byte 1)'); + t.equal(ber[3], 0xa6, 'value wrong (byte 2)'); + t.equal(ber[4], 0x8b, 'value wrong (byte 3)'); + + t.end(); +}); + + +test('write 4 byte negative 
int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-1522904131); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 6, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x04, 'length wrong'); + t.equal(ber[2], 0xa5, 'value wrong (byte 1)'); + t.equal(ber[3], 0x3a, 'value wrong (byte 2)'); + t.equal(ber[4], 0x53, 'value wrong (byte 3)'); + t.equal(ber[5], 0xbd, 'value wrong (byte 4)'); + + t.end(); +}); + + +test('write boolean', function(t) { + var writer = new BerWriter(); + + writer.writeBoolean(true); + writer.writeBoolean(false); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 6, 'Wrong length'); + t.equal(ber[0], 0x01, 'tag wrong'); + t.equal(ber[1], 0x01, 'length wrong'); + t.equal(ber[2], 0xff, 'value wrong'); + t.equal(ber[3], 0x01, 'tag wrong'); + t.equal(ber[4], 0x01, 'length wrong'); + t.equal(ber[5], 0x00, 'value wrong'); + + t.end(); +}); + + +test('write string', function(t) { + var writer = new BerWriter(); + writer.writeString('hello world'); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 13, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + +test('write buffer', function(t) { + var writer = new BerWriter(); + // write some stuff to start with + writer.writeString('hello world'); + var ber = writer.buffer; + var buf = new Buffer([0x04, 0x0b, 0x30, 0x09, 0x02, 0x01, 0x0f, 0x01, 0x01, + 0xff, 0x01, 0x01, 0xff]); + writer.writeBuffer(buf.slice(2, buf.length), 0x04); + ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 26, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value'); + t.equal(ber[13], buf[0], 'wrong tag'); + t.equal(ber[14], buf[1], 'wrong length'); + for (var i = 13, j = 0; i < ber.length && j < buf.length; i++, j++) { + t.equal(ber[i], buf[j], 'buffer contents not identical'); + } + t.end(); +}); + +test('write string array', function(t) { + var writer = new BerWriter(); + writer.writeStringArray(['hello world', 'fubar!']); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 21, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value'); + + t.equal(ber[13], 0x04, 'wrong tag'); + t.equal(ber[14], 6, 'wrong length'); + t.equal(ber.slice(15).toString('utf8'), 'fubar!', 'wrong value'); + + t.end(); +}); + + +test('resize internal buffer', function(t) { + var writer = new BerWriter({size: 2}); + writer.writeString('hello world'); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 13, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + +test('sequence', function(t) { + var writer = new BerWriter({size: 25}); + writer.startSequence(); + writer.writeString('hello world'); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + console.log(ber); + t.equal(ber.length, 15, 'wrong length'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 13, 'wrong length'); + t.equal(ber[2], 0x04, 'wrong tag'); + t.equal(ber[3], 11, 'wrong length'); + t.equal(ber.slice(4).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + 
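// Note (editorial, not part of the vendored test file): the 'sequence' test above
// exercises Writer.startSequence()/endSequence() from lib/ber/writer.js shown earlier.
// startSequence() writes the tag byte and reserves three bytes for the length;
// endSequence() back-patches the real length and, via _shift(), slides the payload
// left when fewer length bytes suffice -- which is why the 13-byte payload above
// serializes to 15 bytes total (tag + one length byte + payload).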
+test('nested sequence', function(t) { + var writer = new BerWriter({size: 25}); + writer.startSequence(); + writer.writeString('hello world'); + writer.startSequence(); + writer.writeString('hello world'); + writer.endSequence(); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 30, 'wrong length'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 28, 'wrong length'); + t.equal(ber[2], 0x04, 'wrong tag'); + t.equal(ber[3], 11, 'wrong length'); + t.equal(ber.slice(4, 15).toString('utf8'), 'hello world', 'wrong value'); + t.equal(ber[15], 0x30, 'wrong tag'); + t.equal(ber[16], 13, 'wrong length'); + t.equal(ber[17], 0x04, 'wrong tag'); + t.equal(ber[18], 11, 'wrong length'); + t.equal(ber.slice(19, 30).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + +test('LDAP bind message', function(t) { + var dn = 'cn=foo,ou=unit,o=test'; + var writer = new BerWriter(); + writer.startSequence(); + writer.writeInt(3); // msgid = 3 + writer.startSequence(0x60); // ldap bind + writer.writeInt(3); // ldap v3 + writer.writeString(dn); + writer.writeByte(0x80); + writer.writeByte(0x00); + writer.endSequence(); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 35, 'wrong length (buffer)'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 33, 'wrong length'); + t.equal(ber[2], 0x02, 'wrong tag'); + t.equal(ber[3], 1, 'wrong length'); + t.equal(ber[4], 0x03, 'wrong value'); + t.equal(ber[5], 0x60, 'wrong tag'); + t.equal(ber[6], 28, 'wrong length'); + t.equal(ber[7], 0x02, 'wrong tag'); + t.equal(ber[8], 1, 'wrong length'); + t.equal(ber[9], 0x03, 'wrong value'); + t.equal(ber[10], 0x04, 'wrong tag'); + t.equal(ber[11], dn.length, 'wrong length'); + t.equal(ber.slice(12, 33).toString('utf8'), dn, 'wrong value'); + t.equal(ber[33], 0x80, 'wrong tag'); + t.equal(ber[34], 0x00, 'wrong len'); + + t.end(); +}); + + +test('Write OID', function(t) { + var oid = '1.2.840.113549.1.1.1'; + var writer = new BerWriter(); + writer.writeOID(oid); + + var ber = writer.buffer; + t.ok(ber); + console.log(require('util').inspect(ber)); + console.log(require('util').inspect(new Buffer([0x06, 0x09, 0x2a, 0x86, + 0x48, 0x86, 0xf7, 0x0d, + 0x01, 0x01, 0x01]))); + + t.end(); +}); diff --git a/node_modules/assert-plus/AUTHORS b/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/assert-plus/CHANGES.md b/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..d249d9b --- /dev/null +++ b/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,8 @@ +# assert-plus Changelog + +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/assert-plus/README.md b/node_modules/assert-plus/README.md new file mode 100644 index 0000000..0b39593 --- /dev/null +++ b/node_modules/assert-plus/README.md @@ -0,0 +1,155 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. 
As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. 
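For concreteness, here is a minimal sketch of the opt-out behavior described above (the `requireName` helper and its argument are purely illustrative, not part of assert-plus):

```javascript
var assert = require('assert-plus');

function requireName(opts) {
    // Throws "opts.name (string) is required" when assertions are enabled;
    // becomes a no-op if the process was started with NODE_NDEBUG=1.
    assert.string(opts.name, 'opts.name');
    return opts.name;
}

try {
    requireName({ name: 42 });
} catch (e) {
    console.log(e.name + ': ' + e.message);
}
```

Running the same script with `NODE_NDEBUG=1` set skips the check entirely, which is the production-mode behavior the paragraph above refers to.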
+ +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regex +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . diff --git a/node_modules/assert-plus/assert.js b/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..6bce4d8 --- /dev/null +++ b/node_modules/assert-plus/assert.js @@ -0,0 +1,206 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? 
+} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/assert-plus/package.json 
b/node_modules/assert-plus/package.json new file mode 100644 index 0000000..7654fa9 --- /dev/null +++ b/node_modules/assert-plus/package.json @@ -0,0 +1,23 @@ +{ + "author": "Mark Cavage ", + "name": "assert-plus", + "description": "Extra assertions on top of node's assert module", + "version": "0.2.0", + "license": "MIT", + "main": "./assert.js", + "devDependencies": { + "tape": "4.2.2", + "faucet": "0.0.1" + }, + "optionalDependencies": {}, + "scripts": { + "test": "./node_modules/.bin/tape tests/*.js | ./node_modules/.bin/faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/mcavage/node-assert-plus.git" + }, + "engines": { + "node": ">=0.8" + } +} diff --git a/node_modules/assert/.npmignore b/node_modules/assert/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/assert/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/assert/.travis.yml b/node_modules/assert/.travis.yml new file mode 100644 index 0000000..a690ffd --- /dev/null +++ b/node_modules/assert/.travis.yml @@ -0,0 +1,31 @@ +language: node_js +before_install: + - npm install -g npm@2 + - npm install -g npm +matrix: + include: + - node_js: '0.8' + env: TASK=test-node + - node_js: '0.10' + env: TASK=test-node + - node_js: '0.11' + env: TASK=test-node + - node_js: '0.12' + env: TASK=test-node + - node_js: 1 + env: TASK=test-node + - node_js: 2 + env: TASK=test-node + - node_js: 3 + env: TASK=test-node + - node_js: 4 + env: TASK=test-node + - node_js: 5 + env: TASK=test-node + - node_js: '0.10' + env: TASK=test-browser +script: "npm run $TASK" +env: + global: + - secure: qThuKBZQtkooAvzaYldECGNqvKGPRTnXx62IVyhSbFlsCY1VCmjhLldhyPDiZQ3JqL1XvSkK8OMDupiHqZnNE0nGijoO4M/kaEdjBB+jpjg3f8I6te2SNU935SbkfY9KHAaFXMZwdcq7Fk932AxWEu+FMSDM+080wNKpEATXDe4= + - secure: O/scKjHLRcPN5ILV5qsSkksQ7qcZQdHWEUUPItmj/4+vmCc28bHpicoUxXG5A96iHvkBbdmky/nGCg464ZaNLk68m6hfEMDAR3J6mhM2Pf5C4QI/LlFlR1fob9sQ8lztwSGOItwdK8Rfrgb30RRVV71f6FxnaJ6PKMuMNT5S1AQ= diff --git a/node_modules/assert/.zuul.yml b/node_modules/assert/.zuul.yml new file mode 100644 index 0000000..9353eb7 --- /dev/null +++ b/node_modules/assert/.zuul.yml @@ -0,0 +1,13 @@ +ui: mocha-qunit +tunnel: ngrok +browsers: + - name: chrome + version: latest + - name: firefox + version: latest + - name: safari + version: latest + - name: ie + version: 9..latest + - name: microsoftedge + version: latest diff --git a/node_modules/assert/LICENSE b/node_modules/assert/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/assert/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/assert/README.md b/node_modules/assert/README.md new file mode 100644 index 0000000..6d252ab --- /dev/null +++ b/node_modules/assert/README.md @@ -0,0 +1,64 @@ +# assert + +[![Build Status](https://travis-ci.org/defunctzombie/commonjs-assert.svg?branch=master)](https://travis-ci.org/defunctzombie/commonjs-assert) + +This module is used for writing unit tests for your applications, you can access it with require('assert'). + +The API is derived from the [commonjs 1.0 unit testing](http://wiki.commonjs.org/wiki/Unit_Testing/1.0) spec and the [node.js assert module](http://nodejs.org/api/assert.html) + +## assert.fail(actual, expected, message, operator) +Throws an exception that displays the values for actual and expected separated by the provided operator. + +## assert(value, message), assert.ok(value, [message]) +Tests if value is truthy, it is equivalent to assert.equal(true, !!value, message); + +## assert.equal(actual, expected, [message]) +Tests shallow, coercive equality with the equal comparison operator ( == ). + +## assert.notEqual(actual, expected, [message]) +Tests shallow, coercive non-equality with the not equal comparison operator ( != ). + +## assert.deepEqual(actual, expected, [message]) +Tests for deep equality. + +## assert.notDeepEqual(actual, expected, [message]) +Tests for any deep inequality. + +## assert.strictEqual(actual, expected, [message]) +Tests strict equality, as determined by the strict equality operator ( === ) + +## assert.notStrictEqual(actual, expected, [message]) +Tests strict non-equality, as determined by the strict not equal operator ( !== ) + +## assert.throws(block, [error], [message]) +Expects block to throw an error. error can be constructor, regexp or validation function. + +Validate instanceof using constructor: + +```javascript +assert.throws(function() { throw new Error("Wrong value"); }, Error); +``` + +Validate error message using RegExp: + +```javascript +assert.throws(function() { throw new Error("Wrong value"); }, /value/); +``` + +Custom error validation: + +```javascript +assert.throws(function() { + throw new Error("Wrong value"); +}, function(err) { + if ( (err instanceof Error) && /value/.test(err) ) { + return true; + } +}, "unexpected error"); +``` + +## assert.doesNotThrow(block, [message]) +Expects block not to throw an error, see assert.throws for details. + +## assert.ifError(value) +Tests if value is not a false value, throws if it is a true value. Useful when testing the first argument, error in callbacks. diff --git a/node_modules/assert/assert.js b/node_modules/assert/assert.js new file mode 100644 index 0000000..fa1b910 --- /dev/null +++ b/node_modules/assert/assert.js @@ -0,0 +1,490 @@ +'use strict'; + +// compare and isBuffer taken from https://github.com/feross/buffer/blob/680e9e5e488f22aac27599a57dc844a6315928dd/index.js +// original notice: + +/*! + * The buffer module from node.js, for the browser. 
+ * + * @author Feross Aboukhadijeh + * @license MIT + */ +function compare(a, b) { + if (a === b) { + return 0; + } + + var x = a.length; + var y = b.length; + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i]; + y = b[i]; + break; + } + } + + if (x < y) { + return -1; + } + if (y < x) { + return 1; + } + return 0; +} +function isBuffer(b) { + if (global.Buffer && typeof global.Buffer.isBuffer === 'function') { + return global.Buffer.isBuffer(b); + } + return !!(b != null && b._isBuffer); +} + +// based on node assert, original notice: + +// http://wiki.commonjs.org/wiki/Unit_Testing/1.0 +// +// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8! +// +// Originally from narwhal.js (http://narwhaljs.org) +// Copyright (c) 2009 Thomas Robinson <280north.com> +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to +// deal in the Software without restriction, including without limitation the +// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +// sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +var util = require('util/'); +var hasOwn = Object.prototype.hasOwnProperty; +var pSlice = Array.prototype.slice; +var functionsHaveNames = (function () { + return function foo() {}.name === 'foo'; +}()); +function pToString (obj) { + return Object.prototype.toString.call(obj); +} +function isView(arrbuf) { + if (isBuffer(arrbuf)) { + return false; + } + if (typeof global.ArrayBuffer !== 'function') { + return false; + } + if (typeof ArrayBuffer.isView === 'function') { + return ArrayBuffer.isView(arrbuf); + } + if (!arrbuf) { + return false; + } + if (arrbuf instanceof DataView) { + return true; + } + if (arrbuf.buffer && arrbuf.buffer instanceof ArrayBuffer) { + return true; + } + return false; +} +// 1. The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +var assert = module.exports = ok; + +// 2. The AssertionError is defined in assert. 
+// new assert.AssertionError({ message: message, +// actual: actual, +// expected: expected }) + +var regex = /\s*function\s+([^\(\s]*)\s*/; +// based on https://github.com/ljharb/function.prototype.name/blob/adeeeec8bfcc6068b187d7d9fb3d5bb1d3a30899/implementation.js +function getName(func) { + if (!util.isFunction(func)) { + return; + } + if (functionsHaveNames) { + return func.name; + } + var str = func.toString(); + var match = str.match(regex); + return match && match[1]; +} +assert.AssertionError = function AssertionError(options) { + this.name = 'AssertionError'; + this.actual = options.actual; + this.expected = options.expected; + this.operator = options.operator; + if (options.message) { + this.message = options.message; + this.generatedMessage = false; + } else { + this.message = getMessage(this); + this.generatedMessage = true; + } + var stackStartFunction = options.stackStartFunction || fail; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, stackStartFunction); + } else { + // non v8 browsers so we can have a stacktrace + var err = new Error(); + if (err.stack) { + var out = err.stack; + + // try to strip useless frames + var fn_name = getName(stackStartFunction); + var idx = out.indexOf('\n' + fn_name); + if (idx >= 0) { + // once we have located the function frame + // we need to strip out everything before it (and its line) + var next_line = out.indexOf('\n', idx + 1); + out = out.substring(next_line + 1); + } + + this.stack = out; + } + } +}; + +// assert.AssertionError instanceof Error +util.inherits(assert.AssertionError, Error); + +function truncate(s, n) { + if (typeof s === 'string') { + return s.length < n ? s : s.slice(0, n); + } else { + return s; + } +} +function inspect(something) { + if (functionsHaveNames || !util.isFunction(something)) { + return util.inspect(something); + } + var rawname = getName(something); + var name = rawname ? ': ' + rawname : ''; + return '[Function' + name + ']'; +} +function getMessage(self) { + return truncate(inspect(self.actual), 128) + ' ' + + self.operator + ' ' + + truncate(inspect(self.expected), 128); +} + +// At present only the three keys mentioned above are used and +// understood by the spec. Implementations or sub modules can pass +// other keys to the AssertionError's constructor - they will be +// ignored. + +// 3. All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function fail(actual, expected, message, operator, stackStartFunction) { + throw new assert.AssertionError({ + message: message, + actual: actual, + expected: expected, + operator: operator, + stackStartFunction: stackStartFunction + }); +} + +// EXTENSION! allows for well behaved errors defined elsewhere. +assert.fail = fail; + +// 4. Pure assertion tests whether a value is truthy, as determined +// by !!guard. +// assert.ok(guard, message_opt); +// This statement is equivalent to assert.equal(true, !!guard, +// message_opt);. To test strictly for the value true, use +// assert.strictEqual(true, guard, message_opt);. + +function ok(value, message) { + if (!value) fail(value, true, message, '==', assert.ok); +} +assert.ok = ok; + +// 5. The equality assertion tests shallow, coercive equality with +// ==. 
+// assert.equal(actual, expected, message_opt); + +assert.equal = function equal(actual, expected, message) { + if (actual != expected) fail(actual, expected, message, '==', assert.equal); +}; + +// 6. The non-equality assertion tests for whether two objects are not equal +// with != assert.notEqual(actual, expected, message_opt); + +assert.notEqual = function notEqual(actual, expected, message) { + if (actual == expected) { + fail(actual, expected, message, '!=', assert.notEqual); + } +}; + +// 7. The equivalence assertion tests a deep equality relation. +// assert.deepEqual(actual, expected, message_opt); + +assert.deepEqual = function deepEqual(actual, expected, message) { + if (!_deepEqual(actual, expected, false)) { + fail(actual, expected, message, 'deepEqual', assert.deepEqual); + } +}; + +assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { + if (!_deepEqual(actual, expected, true)) { + fail(actual, expected, message, 'deepStrictEqual', assert.deepStrictEqual); + } +}; + +function _deepEqual(actual, expected, strict, memos) { + // 7.1. All identical values are equivalent, as determined by ===. + if (actual === expected) { + return true; + } else if (isBuffer(actual) && isBuffer(expected)) { + return compare(actual, expected) === 0; + + // 7.2. If the expected value is a Date object, the actual value is + // equivalent if it is also a Date object that refers to the same time. + } else if (util.isDate(actual) && util.isDate(expected)) { + return actual.getTime() === expected.getTime(); + + // 7.3 If the expected value is a RegExp object, the actual value is + // equivalent if it is also a RegExp object with the same source and + // properties (`global`, `multiline`, `lastIndex`, `ignoreCase`). + } else if (util.isRegExp(actual) && util.isRegExp(expected)) { + return actual.source === expected.source && + actual.global === expected.global && + actual.multiline === expected.multiline && + actual.lastIndex === expected.lastIndex && + actual.ignoreCase === expected.ignoreCase; + + // 7.4. Other pairs that do not both pass typeof value == 'object', + // equivalence is determined by ==. + } else if ((actual === null || typeof actual !== 'object') && + (expected === null || typeof expected !== 'object')) { + return strict ? actual === expected : actual == expected; + + // If both values are instances of typed arrays, wrap their underlying + // ArrayBuffers in a Buffer each to increase performance + // This optimization requires the arrays to have the same type as checked by + // Object.prototype.toString (aka pToString). Never perform binary + // comparisons for Float*Arrays, though, since e.g. +0 === -0 but their + // bit patterns are not identical. + } else if (isView(actual) && isView(expected) && + pToString(actual) === pToString(expected) && + !(actual instanceof Float32Array || + actual instanceof Float64Array)) { + return compare(new Uint8Array(actual.buffer), + new Uint8Array(expected.buffer)) === 0; + + // 7.5 For all other Object pairs, including Array objects, equivalence is + // determined by having the same number of owned properties (as verified + // with Object.prototype.hasOwnProperty.call), the same set of keys + // (although not necessarily the same order), equivalent values for every + // corresponding key, and an identical 'prototype' property. Note: this + // accounts for both named and indexed properties on Arrays. 
+ } else if (isBuffer(actual) !== isBuffer(expected)) { + return false; + } else { + memos = memos || {actual: [], expected: []}; + + var actualIndex = memos.actual.indexOf(actual); + if (actualIndex !== -1) { + if (actualIndex === memos.expected.indexOf(expected)) { + return true; + } + } + + memos.actual.push(actual); + memos.expected.push(expected); + + return objEquiv(actual, expected, strict, memos); + } +} + +function isArguments(object) { + return Object.prototype.toString.call(object) == '[object Arguments]'; +} + +function objEquiv(a, b, strict, actualVisitedObjects) { + if (a === null || a === undefined || b === null || b === undefined) + return false; + // if one is a primitive, the other must be same + if (util.isPrimitive(a) || util.isPrimitive(b)) + return a === b; + if (strict && Object.getPrototypeOf(a) !== Object.getPrototypeOf(b)) + return false; + var aIsArgs = isArguments(a); + var bIsArgs = isArguments(b); + if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs)) + return false; + if (aIsArgs) { + a = pSlice.call(a); + b = pSlice.call(b); + return _deepEqual(a, b, strict); + } + var ka = objectKeys(a); + var kb = objectKeys(b); + var key, i; + // having the same number of owned properties (keys incorporates + // hasOwnProperty) + if (ka.length !== kb.length) + return false; + //the same set of keys (although not necessarily the same order), + ka.sort(); + kb.sort(); + //~~~cheap key test + for (i = ka.length - 1; i >= 0; i--) { + if (ka[i] !== kb[i]) + return false; + } + //equivalent values for every corresponding key, and + //~~~possibly expensive deep test + for (i = ka.length - 1; i >= 0; i--) { + key = ka[i]; + if (!_deepEqual(a[key], b[key], strict, actualVisitedObjects)) + return false; + } + return true; +} + +// 8. The non-equivalence assertion tests for any deep inequality. +// assert.notDeepEqual(actual, expected, message_opt); + +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (_deepEqual(actual, expected, false)) { + fail(actual, expected, message, 'notDeepEqual', assert.notDeepEqual); + } +}; + +assert.notDeepStrictEqual = notDeepStrictEqual; +function notDeepStrictEqual(actual, expected, message) { + if (_deepEqual(actual, expected, true)) { + fail(actual, expected, message, 'notDeepStrictEqual', notDeepStrictEqual); + } +} + + +// 9. The strict equality assertion tests strict equality, as determined by ===. +// assert.strictEqual(actual, expected, message_opt); + +assert.strictEqual = function strictEqual(actual, expected, message) { + if (actual !== expected) { + fail(actual, expected, message, '===', assert.strictEqual); + } +}; + +// 10. The strict non-equality assertion tests for strict inequality, as +// determined by !==. assert.notStrictEqual(actual, expected, message_opt); + +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (actual === expected) { + fail(actual, expected, message, '!==', assert.notStrictEqual); + } +}; + +function expectedException(actual, expected) { + if (!actual || !expected) { + return false; + } + + if (Object.prototype.toString.call(expected) == '[object RegExp]') { + return expected.test(actual); + } + + try { + if (actual instanceof expected) { + return true; + } + } catch (e) { + // Ignore. The instanceof check doesn't work for arrow functions. 
+ } + + if (Error.isPrototypeOf(expected)) { + return false; + } + + return expected.call({}, actual) === true; +} + +function _tryBlock(block) { + var error; + try { + block(); + } catch (e) { + error = e; + } + return error; +} + +function _throws(shouldThrow, block, expected, message) { + var actual; + + if (typeof block !== 'function') { + throw new TypeError('"block" argument must be a function'); + } + + if (typeof expected === 'string') { + message = expected; + expected = null; + } + + actual = _tryBlock(block); + + message = (expected && expected.name ? ' (' + expected.name + ').' : '.') + + (message ? ' ' + message : '.'); + + if (shouldThrow && !actual) { + fail(actual, expected, 'Missing expected exception' + message); + } + + var userProvidedMessage = typeof message === 'string'; + var isUnwantedException = !shouldThrow && util.isError(actual); + var isUnexpectedException = !shouldThrow && actual && !expected; + + if ((isUnwantedException && + userProvidedMessage && + expectedException(actual, expected)) || + isUnexpectedException) { + fail(actual, expected, 'Got unwanted exception' + message); + } + + if ((shouldThrow && actual && expected && + !expectedException(actual, expected)) || (!shouldThrow && actual)) { + throw actual; + } +} + +// 11. Expected to throw an error: +// assert.throws(block, Error_opt, message_opt); + +assert.throws = function(block, /*optional*/error, /*optional*/message) { + _throws(true, block, error, message); +}; + +// EXTENSION! This is annoying to write outside this module. +assert.doesNotThrow = function(block, /*optional*/error, /*optional*/message) { + _throws(false, block, error, message); +}; + +assert.ifError = function(err) { if (err) throw err; }; + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + if (hasOwn.call(obj, key)) keys.push(key); + } + return keys; +}; diff --git a/node_modules/assert/package.json b/node_modules/assert/package.json new file mode 100644 index 0000000..bc6a405 --- /dev/null +++ b/node_modules/assert/package.json @@ -0,0 +1,30 @@ +{ + "name": "assert", + "description": "commonjs assert - node.js api compatible", + "keywords": [ + "assert" + ], + "version": "1.4.1", + "homepage": "https://github.com/defunctzombie/commonjs-assert", + "repository": { + "type": "git", + "url": "git://github.com/defunctzombie/commonjs-assert.git" + }, + "main": "./assert.js", + "dependencies": { + "util": "0.10.3" + }, + "devDependencies": { + "mocha": "~1.21.4", + "zuul": "~3.10.0", + "zuul-ngrok": "^4.0.0" + }, + "license": "MIT", + "scripts": { + "test-node": "mocha --ui qunit test.js", + "test-browser": "zuul -- test.js", + "test": "npm run test-node && npm run test-browser", + "test-native": "TEST_NATIVE=true mocha --ui qunit test.js", + "browser-local": "zuul --no-coverage --local 8000 -- test.js" + } +} diff --git a/node_modules/assert/test.js b/node_modules/assert/test.js new file mode 100644 index 0000000..8c7c5a4 --- /dev/null +++ b/node_modules/assert/test.js @@ -0,0 +1,345 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var nodeAssert = require('assert'); +var ourAssert = require('./'); +var keys = Object.keys; +if (process.env.TEST_NATIVE === true) { + tests(nodeAssert, 'node assert'); +} else { + tests(ourAssert, 'our assert'); +} + +function makeBlock(f) { + var args = Array.prototype.slice.call(arguments, 1); + return function() { + return f.apply(this, args); + }; +} + +function tests (assert, what) { + test('assert.ok', function () { + assert.throws(makeBlock(assert, false), assert.AssertionError, 'ok(false)'); + + assert.doesNotThrow(makeBlock(assert, true), assert.AssertionError, 'ok(true)'); + + assert.doesNotThrow(makeBlock(assert, 'test', 'ok(\'test\')')); + + assert.throws(makeBlock(assert.ok, false), + assert.AssertionError, 'ok(false)'); + + assert.doesNotThrow(makeBlock(assert.ok, true), + assert.AssertionError, 'ok(true)'); + + assert.doesNotThrow(makeBlock(assert.ok, 'test'), 'ok(\'test\')'); + }); + + test('assert.equal', function () { + assert.throws(makeBlock(assert.equal, true, false), assert.AssertionError, 'equal'); + + assert.doesNotThrow(makeBlock(assert.equal, null, null), 'equal'); + + assert.doesNotThrow(makeBlock(assert.equal, undefined, undefined), 'equal'); + + assert.doesNotThrow(makeBlock(assert.equal, null, undefined), 'equal'); + + assert.doesNotThrow(makeBlock(assert.equal, true, true), 'equal'); + + assert.doesNotThrow(makeBlock(assert.equal, 2, '2'), 'equal'); + + assert.doesNotThrow(makeBlock(assert.notEqual, true, false), 'notEqual'); + + assert.throws(makeBlock(assert.notEqual, true, true), + assert.AssertionError, 'notEqual'); + }); + + test('assert.strictEqual', function () { + assert.throws(makeBlock(assert.strictEqual, 2, '2'), + assert.AssertionError, 'strictEqual'); + + assert.throws(makeBlock(assert.strictEqual, null, undefined), + assert.AssertionError, 'strictEqual'); + + assert.doesNotThrow(makeBlock(assert.notStrictEqual, 2, '2'), 'notStrictEqual'); + }); + + test('assert.deepStrictEqual', function () { + assert.throws(makeBlock(assert.deepStrictEqual, [2], ['2']), + assert.AssertionError, 'deepStrictEqual'); + + assert.throws(makeBlock(assert.deepStrictEqual, [null], [undefined]), + assert.AssertionError, 'deepStrictEqual'); + + assert.doesNotThrow(makeBlock(assert.notDeepStrictEqual, [2], ['2']), 'notDeepStrictEqual'); + }); + + test('assert.deepEqual - 7.2', function () { + assert.doesNotThrow(makeBlock(assert.deepEqual, new Date(2000, 3, 14), + new Date(2000, 3, 14)), 
'deepEqual date'); + + assert.throws(makeBlock(assert.deepEqual, new Date(), new Date(2000, 3, 14)), + assert.AssertionError, + 'deepEqual date'); + }); + + test('assert.deepEqual - 7.3', function () { + assert.doesNotThrow(makeBlock(assert.deepEqual, /a/, /a/)); + assert.doesNotThrow(makeBlock(assert.deepEqual, /a/g, /a/g)); + assert.doesNotThrow(makeBlock(assert.deepEqual, /a/i, /a/i)); + assert.doesNotThrow(makeBlock(assert.deepEqual, /a/m, /a/m)); + assert.doesNotThrow(makeBlock(assert.deepEqual, /a/igm, /a/igm)); + assert.throws(makeBlock(assert.deepEqual, /ab/, /a/)); + assert.throws(makeBlock(assert.deepEqual, /a/g, /a/)); + assert.throws(makeBlock(assert.deepEqual, /a/i, /a/)); + assert.throws(makeBlock(assert.deepEqual, /a/m, /a/)); + assert.throws(makeBlock(assert.deepEqual, /a/igm, /a/im)); + + var re1 = /a/; + re1.lastIndex = 3; + assert.throws(makeBlock(assert.deepEqual, re1, /a/)); + }); + + test('assert.deepEqual - 7.4', function () { + assert.doesNotThrow(makeBlock(assert.deepEqual, 4, '4'), 'deepEqual == check'); + assert.doesNotThrow(makeBlock(assert.deepEqual, true, 1), 'deepEqual == check'); + assert.throws(makeBlock(assert.deepEqual, 4, '5'), + assert.AssertionError, + 'deepEqual == check'); + }); + + test('assert.deepEqual - 7.5', function () { + // having the same number of owned properties && the same set of keys + assert.doesNotThrow(makeBlock(assert.deepEqual, {a: 4}, {a: 4})); + assert.doesNotThrow(makeBlock(assert.deepEqual, {a: 4, b: '2'}, {a: 4, b: '2'})); + assert.doesNotThrow(makeBlock(assert.deepEqual, [4], ['4'])); + assert.throws(makeBlock(assert.deepEqual, {a: 4}, {a: 4, b: true}), + assert.AssertionError); + assert.doesNotThrow(makeBlock(assert.deepEqual, ['a'], {0: 'a'})); + //(although not necessarily the same order), + assert.doesNotThrow(makeBlock(assert.deepEqual, {a: 4, b: '1'}, {b: '1', a: 4})); + var a1 = [1, 2, 3]; + var a2 = [1, 2, 3]; + a1.a = 'test'; + a1.b = true; + a2.b = true; + a2.a = 'test'; + assert.throws(makeBlock(assert.deepEqual, keys(a1), keys(a2)), + assert.AssertionError); + assert.doesNotThrow(makeBlock(assert.deepEqual, a1, a2)); + }); + + test('assert.deepEqual - ES6 primitives', function () { + assert.throws(makeBlock(assert.deepEqual, null, {}), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, undefined, {}), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, 'a', ['a']), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, 'a', {0: 'a'}), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, 1, {}), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, true, {}), assert.AssertionError); + if (typeof Symbol === 'symbol') { + assert.throws(makeBlock(assert.deepEqual, Symbol(), {}), assert.AssertionError); + } + }); + + test('assert.deepEqual - object wrappers', function () { + assert.doesNotThrow(makeBlock(assert.deepEqual, new String('a'), ['a'])); + assert.doesNotThrow(makeBlock(assert.deepEqual, new String('a'), {0: 'a'})); + assert.doesNotThrow(makeBlock(assert.deepEqual, new Number(1), {})); + assert.doesNotThrow(makeBlock(assert.deepEqual, new Boolean(true), {})); + }); + + test('assert.deepEqual - Buffers', function () { + assert.doesNotThrow(makeBlock(assert.deepEqual, new Buffer([1, 2, 3]), new Buffer([1, 2, 3]))); + if (typeof global.Uint8Array === 'function') { + assert.throws(makeBlock(assert.deepEqual, new Buffer([1, 2, 3]), new Uint8Array([1, 2, 3]))); + } + if (typeof global.Uint16Array === 'function') { + 
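+      // Typed arrays of the same concrete type are compared byte-for-byte via
+      // their underlying ArrayBuffers, so equal contents are deepEqual.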
assert.doesNotThrow(makeBlock(assert.deepEqual, new Uint16Array([1, 2, 3]), new Uint16Array([1, 2, 3]))); + } + }); + + function thrower(errorConstructor) { + throw new errorConstructor('test'); + } + + test('assert - Testing the throwing', function () { + var aethrow = makeBlock(thrower, assert.AssertionError); + aethrow = makeBlock(thrower, assert.AssertionError); + + // the basic calls work + assert.throws(makeBlock(thrower, assert.AssertionError), + assert.AssertionError, 'message'); + assert.throws(makeBlock(thrower, assert.AssertionError), assert.AssertionError); + assert.throws(makeBlock(thrower, assert.AssertionError)); + + // if not passing an error, catch all. + assert.throws(makeBlock(thrower, TypeError)); + + // when passing a type, only catch errors of the appropriate type + var threw = false; + try { + assert.throws(makeBlock(thrower, TypeError), assert.AssertionError); + } catch (e) { + threw = true; + assert.ok(e instanceof TypeError, 'type'); + } + assert.equal(true, threw, + 'a.throws with an explicit error is eating extra errors', + assert.AssertionError); + threw = false; + + // doesNotThrow should pass through all errors + try { + assert.doesNotThrow(makeBlock(thrower, TypeError), assert.AssertionError); + } catch (e) { + threw = true; + assert.ok(e instanceof TypeError); + } + assert.equal(true, threw, + 'a.doesNotThrow with an explicit error is eating extra errors'); + + // key difference is that throwing our correct error makes an assertion error + try { + assert.doesNotThrow(makeBlock(thrower, TypeError), TypeError); + } catch (e) { + threw = true; + assert.ok(e instanceof assert.AssertionError); + } + assert.equal(true, threw, + 'a.doesNotThrow is not catching type matching errors'); + }); + + test('assert.ifError', function () { + assert.throws(function() {assert.ifError(new Error('test error'))}); + assert.doesNotThrow(function() {assert.ifError(null)}); + assert.doesNotThrow(function() {assert.ifError()}); + }); + + test('assert - make sure that validating using constructor really works', function () { + var threw = false; + try { + assert.throws( + function() { + throw ({}); + }, + Array + ); + } catch (e) { + threw = true; + } + assert.ok(threw, 'wrong constructor validation'); + }); + + test('assert - use a RegExp to validate error message', function () { + assert.throws(makeBlock(thrower, TypeError), /test/); + }); + + test('assert - se a fn to validate error object', function () { + assert.throws(makeBlock(thrower, TypeError), function(err) { + if ((err instanceof TypeError) && /test/.test(err)) { + return true; + } + }); + }); + + test('assert - Make sure deepEqual doesn\'t loop forever on circular refs', function () { + var b = {}; + b.b = b; + + var c = {}; + c.b = c; + + var gotError = false; + var equal = true; + try { + equal = assert.deepEqual(b, c); + } catch (e) { + gotError = true; + } + assert.ok(gotError || !equal, gotError ? 
'got error': 'are equal'); + }); + + test('assert - Ensure reflexivity of deepEqual with `arguments` objects', function() { + var args = (function() { return arguments; })(); + assert.throws(makeBlock(assert.deepEqual, [], args), assert.AssertionError); + assert.throws(makeBlock(assert.deepEqual, args, []), assert.AssertionError); + }); + + test('assert - test assertion message', function () { + function testAssertionMessage(actual, expected) { + try { + assert.equal(actual, ''); + } catch (e) { + assert.equal(e.toString(), + ['AssertionError:', expected, '==', '\'\''].join(' ')); + } + } + testAssertionMessage(undefined, 'undefined'); + testAssertionMessage(null, 'null'); + testAssertionMessage(true, 'true'); + testAssertionMessage(false, 'false'); + testAssertionMessage(0, '0'); + testAssertionMessage(100, '100'); + testAssertionMessage(NaN, 'NaN'); + testAssertionMessage(Infinity, 'Infinity'); + testAssertionMessage(-Infinity, '-Infinity'); + testAssertionMessage('', '""'); + testAssertionMessage('foo', '\'foo\''); + testAssertionMessage([], '[]'); + testAssertionMessage([1, 2, 3], '[ 1, 2, 3 ]'); + testAssertionMessage(new Buffer([1, 2, 3]), ''); + if (typeof global.Uint8Array === 'function' && Object.getOwnPropertyNames( new Uint8Array([])).length === 0) { + // todo fix util.inspect + testAssertionMessage(new Uint8Array([1, 2, 3]), '{ \'0\': 1, \'1\': 2, \'2\': 3 }'); + } + testAssertionMessage(/a/, '/a/'); + testAssertionMessage(function f() {}, '[Function: f]'); + testAssertionMessage({}, '{}'); + testAssertionMessage({a: undefined, b: null}, '{ a: undefined, b: null }'); + testAssertionMessage({a: NaN, b: Infinity, c: -Infinity}, + '{ a: NaN, b: Infinity, c: -Infinity }'); + }); + + test('assert - regressions from node.js testcase', function () { + var threw = false; + + try { + assert.throws(function () { + assert.ifError(null); + }); + } catch (e) { + threw = true; + assert.equal(e.message, 'Missing expected exception..'); + } + assert.ok(threw); + + try { + assert.equal(1, 2); + } catch (e) { + assert.equal(e.toString().split('\n')[0], 'AssertionError: 1 == 2'); + } + + try { + assert.equal(1, 2, 'oh no'); + } catch (e) { + assert.equal(e.toString().split('\n')[0], 'AssertionError: oh no'); + } + }); +} diff --git a/node_modules/async-each/.npmignore b/node_modules/async-each/.npmignore new file mode 100644 index 0000000..3887b2b --- /dev/null +++ b/node_modules/async-each/.npmignore @@ -0,0 +1,3 @@ +bower.json +component.json +CHANGELOG.md diff --git a/node_modules/async-each/CHANGELOG.md b/node_modules/async-each/CHANGELOG.md new file mode 100644 index 0000000..bee2548 --- /dev/null +++ b/node_modules/async-each/CHANGELOG.md @@ -0,0 +1,23 @@ +# async-each 1.0.0 (26 November 2015) +* Bumped version to 1.0.0 (no functional changes) + +# async-each 0.1.6 (5 November 2014) +* Add license to package.json + +# async-each 0.1.5 (22 October 2014) +* Clean up package.json to fix npm warning about `repo` + +# async-each 0.1.4 (12 November 2013) +* Fixed AMD definition. + +# async-each 0.1.3 (25 July 2013) +* Fixed double wrapping of errors. + +# async-each 0.1.2 (7 July 2013) +* Fixed behaviour on empty arrays. + +# async-each 0.1.1 (14 June 2013) +* Wrapped function in closure, enabled strict mode. + +# async-each 0.1.0 (14 June 2013) +* Initial release. 
diff --git a/node_modules/async-each/README.md b/node_modules/async-each/README.md new file mode 100644 index 0000000..a79cbd7 --- /dev/null +++ b/node_modules/async-each/README.md @@ -0,0 +1,38 @@ +# async-each + +No-bullshit, ultra-simple, 35-lines-of-code async parallel forEach function for JavaScript. + +We don't need junky 30K async libs. Really. + +For browsers and node.js. + +## Installation +* Just include async-each before your scripts. +* `npm install async-each` if you’re using node.js. +* `bower install async-each` if you’re using [Bower](http://bower.io). + +## Usage + +* `each(array, iterator, callback);` — `Array`, `Function`, `(optional) Function` +* `iterator(item, next)` receives current item and a callback that will mark the item as done. `next` callback receives optional `error, transformedItem` arguments. +* `callback(error, transformedArray)` optionally receives first error and transformed result `Array`. + +Node.js: + +```javascript +var each = require('async-each'); +each(['a.js', 'b.js', 'c.js'], fs.readFile, function(error, contents) { + if (error) console.error(error); + console.log('Contents for a, b and c:', contents); +}); +``` + +Browser: + +```javascript +window.asyncEach(list, fn, callback); +``` + +## License + +[The MIT License](https://raw.githubusercontent.com/paulmillr/mit/master/README.md) diff --git a/node_modules/async-each/index.js b/node_modules/async-each/index.js new file mode 100644 index 0000000..1c51c95 --- /dev/null +++ b/node_modules/async-each/index.js @@ -0,0 +1,38 @@ +// async-each MIT license (by Paul Miller from http://paulmillr.com). +(function(globals) { + 'use strict'; + var each = function(items, next, callback) { + if (!Array.isArray(items)) throw new TypeError('each() expects array as first argument'); + if (typeof next !== 'function') throw new TypeError('each() expects function as second argument'); + if (typeof callback !== 'function') callback = Function.prototype; // no-op + + if (items.length === 0) return callback(undefined, items); + + var transformed = new Array(items.length); + var count = 0; + var returned = false; + + items.forEach(function(item, index) { + next(item, function(error, transformedItem) { + if (returned) return; + if (error) { + returned = true; + return callback(error); + } + transformed[index] = transformedItem; + count += 1; + if (count === items.length) return callback(undefined, transformed); + }); + }); + }; + + if (typeof define !== 'undefined' && define.amd) { + define([], function() { + return each; + }); // RequireJS + } else if (typeof module !== 'undefined' && module.exports) { + module.exports = each; // CommonJS + } else { + globals.asyncEach = each; // + +``` + +## Documentation + +Some functions are also available in the following forms: +* `Series` - the same as `` but runs only a single async operation at a time +* `Limit` - the same as `` but runs a maximum of `limit` async operations at a time + +### Collections + +* [`each`](#each), `eachSeries`, `eachLimit` +* [`forEachOf`](#forEachOf), `forEachOfSeries`, `forEachOfLimit` +* [`map`](#map), `mapSeries`, `mapLimit` +* [`filter`](#filter), `filterSeries`, `filterLimit` +* [`reject`](#reject), `rejectSeries`, `rejectLimit` +* [`reduce`](#reduce), [`reduceRight`](#reduceRight) +* [`detect`](#detect), `detectSeries`, `detectLimit` +* [`sortBy`](#sortBy) +* [`some`](#some), `someLimit` +* [`every`](#every), `everyLimit` +* [`concat`](#concat), `concatSeries` + +### Control Flow + +* [`series`](#seriestasks-callback) +* 
[`parallel`](#parallel), `parallelLimit` +* [`whilst`](#whilst), [`doWhilst`](#doWhilst) +* [`until`](#until), [`doUntil`](#doUntil) +* [`during`](#during), [`doDuring`](#doDuring) +* [`forever`](#forever) +* [`waterfall`](#waterfall) +* [`compose`](#compose) +* [`seq`](#seq) +* [`applyEach`](#applyEach), `applyEachSeries` +* [`queue`](#queue), [`priorityQueue`](#priorityQueue) +* [`cargo`](#cargo) +* [`auto`](#auto) +* [`retry`](#retry) +* [`iterator`](#iterator) +* [`times`](#times), `timesSeries`, `timesLimit` + +### Utils + +* [`apply`](#apply) +* [`nextTick`](#nextTick) +* [`memoize`](#memoize) +* [`unmemoize`](#unmemoize) +* [`ensureAsync`](#ensureAsync) +* [`constant`](#constant) +* [`asyncify`](#asyncify) +* [`wrapSync`](#wrapSync) +* [`log`](#log) +* [`dir`](#dir) +* [`noConflict`](#noConflict) + +## Collections + + + +### each(arr, iterator, [callback]) + +Applies the function `iterator` to each item in `arr`, in parallel. +The `iterator` is called with an item from the list, and a callback for when it +has finished. If the `iterator` passes an error to its `callback`, the main +`callback` (for the `each` function) is immediately called with the error. + +Note, that since this function applies `iterator` to each item in parallel, +there is no guarantee that the iterator functions will complete in order. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err)` which must be called once it has + completed. If no error has occurred, the `callback` should be run without + arguments or with an explicit `null` argument. The array index is not passed + to the iterator. If you need the index, use [`forEachOf`](#forEachOf). +* `callback(err)` - *Optional* A callback which is called when all `iterator` functions + have finished, or an error occurs. + +__Examples__ + + +```js +// assuming openFiles is an array of file names and saveFile is a function +// to save the modified contents of that file: + +async.each(openFiles, saveFile, function(err){ + // if any of the saves produced an error, err would equal that error +}); +``` + +```js +// assuming openFiles is an array of file names + +async.each(openFiles, function(file, callback) { + + // Perform operation on file here. + console.log('Processing file ' + file); + + if( file.length > 32 ) { + console.log('This file name is too long'); + callback('File name too long'); + } else { + // Do work to process file here + console.log('File processed'); + callback(); + } +}, function(err){ + // if any of the file processing produced an error, err would equal that error + if( err ) { + // One of the iterations produced an error. + // All processing will now stop. + console.log('A file failed to process'); + } else { + console.log('All files have been processed successfully'); + } +}); +``` + +__Related__ + +* eachSeries(arr, iterator, [callback]) +* eachLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + + + +### forEachOf(obj, iterator, [callback]) + +Like `each`, except that it iterates over objects, and passes the key as the second argument to the iterator. + +__Arguments__ + +* `obj` - An object or array to iterate over. +* `iterator(item, key, callback)` - A function to apply to each item in `obj`. +The `key` is the item's key, or index in the case of an array. The iterator is +passed a `callback(err)` which must be called once it has completed. 
If no +error has occurred, the callback should be run without arguments or with an +explicit `null` argument. +* `callback(err)` - *Optional* A callback which is called when all `iterator` functions have finished, or an error occurs. + +__Example__ + +```js +var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; +var configs = {}; + +async.forEachOf(obj, function (value, key, callback) { + fs.readFile(__dirname + value, "utf8", function (err, data) { + if (err) return callback(err); + try { + configs[key] = JSON.parse(data); + } catch (e) { + return callback(e); + } + callback(); + }) +}, function (err) { + if (err) console.error(err.message); + // configs is now a map of JSON data + doSomethingWith(configs); +}) +``` + +__Related__ + +* forEachOfSeries(obj, iterator, [callback]) +* forEachOfLimit(obj, limit, iterator, [callback]) + +--------------------------------------- + + +### map(arr, iterator, [callback]) + +Produces a new array of values by mapping each value in `arr` through +the `iterator` function. The `iterator` is called with an item from `arr` and a +callback for when it has finished processing. Each of these callback takes 2 arguments: +an `error`, and the transformed item from `arr`. If `iterator` passes an error to its +callback, the main `callback` (for the `map` function) is immediately called with the error. + +Note, that since this function applies the `iterator` to each item in parallel, +there is no guarantee that the `iterator` functions will complete in order. +However, the results array will be in the same order as the original `arr`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, transformed)` which must be called once + it has completed with an error (which can be `null`) and a transformed item. +* `callback(err, results)` - *Optional* A callback which is called when all `iterator` + functions have finished, or an error occurs. Results is an array of the + transformed items from the `arr`. + +__Example__ + +```js +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +__Related__ +* mapSeries(arr, iterator, [callback]) +* mapLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + + +### filter(arr, iterator, [callback]) + +__Alias:__ `select` + +Returns a new array of all the values in `arr` which pass an async truth test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. This operation is +performed in parallel, but the results array will be in the same order as the +original. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The `iterator` is passed a `callback(truthValue)`, which must be called with a + boolean argument once it has completed. +* `callback(results)` - *Optional* A callback which is called after all the `iterator` + functions have finished. 
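+
+Because each truth test reports only a boolean (there is no error argument), errback-style
+APIs usually need a small adapter inside the iterator. A minimal sketch, assuming two
+illustrative file names and an arbitrary 1 kB size threshold:
+
+```js
+var fs = require('fs');
+var async = require('async');
+
+// keep only files larger than 1 kB; a stat error simply excludes the file
+async.filter(['a.txt', 'b.txt'], function(file, callback) {
+    fs.stat(file, function(err, stats) {
+        callback(!err && stats.size > 1024);
+    });
+}, function(results) {
+    console.log(results); // the subset of the input that passed the test
+});
+```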
+ +__Example__ + +```js +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); +``` + +__Related__ + +* filterSeries(arr, iterator, [callback]) +* filterLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### reject(arr, iterator, [callback]) + +The opposite of [`filter`](#filter). Removes values that pass an `async` truth test. + +__Related__ + +* rejectSeries(arr, iterator, [callback]) +* rejectLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### reduce(arr, memo, iterator, [callback]) + +__Aliases:__ `inject`, `foldl` + +Reduces `arr` into a single value using an async `iterator` to return +each successive step. `memo` is the initial state of the reduction. +This function only operates in series. + +For performance reasons, it may make sense to split a call to this function into +a parallel map, and then use the normal `Array.prototype.reduce` on the results. +This function is for situations where each step in the reduction needs to be async; +if you can get the data before reducing it, then it's probably a good idea to do so. + +__Arguments__ + +* `arr` - An array to iterate over. +* `memo` - The initial state of the reduction. +* `iterator(memo, item, callback)` - A function applied to each item in the + array to produce the next step in the reduction. The `iterator` is passed a + `callback(err, reduction)` which accepts an optional error as its first + argument, and the state of the reduction as the second. If an error is + passed to the callback, the reduction is stopped and the main `callback` is + immediately called with the error. +* `callback(err, result)` - *Optional* A callback which is called after all the `iterator` + functions have finished. Result is the reduced value. + +__Example__ + +```js +async.reduce([1,2,3], 0, function(memo, item, callback){ + // pointless async: + process.nextTick(function(){ + callback(null, memo + item) + }); +}, function(err, result){ + // result is now equal to the last value of memo, which is 6 +}); +``` + +--------------------------------------- + + +### reduceRight(arr, memo, iterator, [callback]) + +__Alias:__ `foldr` + +Same as [`reduce`](#reduce), only operates on `arr` in reverse order. + + +--------------------------------------- + + +### detect(arr, iterator, [callback]) + +Returns the first value in `arr` that passes an async truth test. The +`iterator` is applied in parallel, meaning the first iterator to return `true` will +fire the detect `callback` with that result. That means the result might not be +the first item in the original `arr` (in terms of order) that passes the test. + +If order within the original `arr` is important, then look at [`detectSeries`](#detectSeries). + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The iterator is passed a `callback(truthValue)` which must be called with a + boolean argument once it has completed. **Note: this callback does not take an error as its first argument.** +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `true`, or after all the `iterator` functions have finished. Result will be + the first item in the array that passes the truth test (iterator) or the + value `undefined` if none passed. 
**Note: this callback does not take an error as its first argument.** + +__Example__ + +```js +async.detect(['file1','file2','file3'], fs.exists, function(result){ + // result now equals the first file in the list that exists +}); +``` + +__Related__ + +* detectSeries(arr, iterator, [callback]) +* detectLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### sortBy(arr, iterator, [callback]) + +Sorts a list by the results of running each `arr` value through an async `iterator`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, sortValue)` which must be called once it + has completed with an error (which can be `null`) and a value to use as the sort + criteria. +* `callback(err, results)` - *Optional* A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is the items from + the original `arr` sorted by the values returned by the `iterator` calls. + +__Example__ + +```js +async.sortBy(['file1','file2','file3'], function(file, callback){ + fs.stat(file, function(err, stats){ + callback(err, stats.mtime); + }); +}, function(err, results){ + // results is now the original array of files sorted by + // modified date +}); +``` + +__Sort Order__ + +By modifying the callback parameter the sorting order can be influenced: + +```js +//ascending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x); +}, function(err,result){ + //result callback +} ); + +//descending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x*-1); //<- x*-1 instead of x, turns the order around +}, function(err,result){ + //result callback +} ); +``` + +--------------------------------------- + + +### some(arr, iterator, [callback]) + +__Alias:__ `any` + +Returns `true` if at least one element in the `arr` satisfies an async test. +_The callback for each iterator call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. Once any iterator +call returns `true`, the main `callback` is immediately called. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. The iterator is passed a `callback(truthValue)`` which must be + called with a boolean argument once it has completed. +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `true`, or after all the iterator functions have finished. Result will be + either `true` or `false` depending on the values of the async tests. + + **Note: the callbacks do not take an error as their first argument.** +__Example__ + +```js +async.some(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then at least one of the files exists +}); +``` + +__Related__ + +* someLimit(arr, limit, iterator, callback) + +--------------------------------------- + + +### every(arr, iterator, [callback]) + +__Alias:__ `all` + +Returns `true` if every element in `arr` satisfies an async test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. + +__Arguments__ + +* `arr` - An array to iterate over. 
+* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. The iterator is passed a `callback(truthValue)` which must be + called with a boolean argument once it has completed. +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `false`, or after all the iterator functions have finished. Result will be + either `true` or `false` depending on the values of the async tests. + + **Note: the callbacks do not take an error as their first argument.** + +__Example__ + +```js +async.every(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then every file exists +}); +``` + +__Related__ + +* everyLimit(arr, limit, iterator, callback) + +--------------------------------------- + + +### concat(arr, iterator, [callback]) + +Applies `iterator` to each item in `arr`, concatenating the results. Returns the +concatenated list. The `iterator`s are called in parallel, and the results are +concatenated as they return. There is no guarantee that the results array will +be returned in the original order of `arr` passed to the `iterator` function. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, results)` which must be called once it + has completed with an error (which can be `null`) and an array of results. +* `callback(err, results)` - *Optional* A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is an array containing + the concatenated results of the `iterator` function. + +__Example__ + +```js +async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){ + // files is now a list of filenames that exist in the 3 directories +}); +``` + +__Related__ + +* concatSeries(arr, iterator, [callback]) + + +## Control Flow + + +### series(tasks, [callback]) + +Run the functions in the `tasks` array in series, each one running once the previous +function has completed. If any functions in the series pass an error to its +callback, no more functions are run, and `callback` is immediately called with the value of the error. +Otherwise, `callback` receives an array of results when `tasks` have completed. + +It is also possible to use an object instead of an array. Each property will be +run as a function, and the results will be passed to the final `callback` as an object +instead of an array. This can be a more readable way of handling results from +[`series`](#series). + +**Note** that while many implementations preserve the order of object properties, the +[ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) +explicitly states that + +> The mechanics and order of enumerating the properties is not specified. + +So if you rely on the order in which your series of functions are executed, and want +this to work on all platforms, consider using an array. + +__Arguments__ + +* `tasks` - An array or object containing functions to run, each function is passed + a `callback(err, result)` it must call on completion with an error `err` (which can + be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the `task` callbacks. + +__Example__ + +```js +async.series([ + function(callback){ + // do some stuff ... 
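+        // (any asynchronous work can happen here; the value handed to the
+        // callback below ends up as results[0] in the final callback)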
+ callback(null, 'one'); + }, + function(callback){ + // do some more stuff ... + callback(null, 'two'); + } +], +// optional callback +function(err, results){ + // results is now equal to ['one', 'two'] +}); + + +// an example using an object instead of an array +async.series({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equal to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its +callback, the main `callback` is immediately called with the value of the error. +Once the `tasks` have completed, the results are passed to the final `callback` as an +array. + +**Note:** `parallel` is about kicking-off I/O tasks in parallel, not about parallel execution of code. If your tasks do not use any timers or perform any I/O, they will actually be executed in series. Any synchronous setup sections for each task will happen one after the other. JavaScript remains single-threaded. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final `callback` as an object +instead of an array. This can be a more readable way of handling results from +[`parallel`](#parallel). + + +__Arguments__ + +* `tasks` - An array or object containing functions to run. Each function is passed + a `callback(err, result)` which it must call on completion with an error `err` + (which can be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed successfully. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. + +__Example__ + +```js +async.parallel([ + function(callback){ + setTimeout(function(){ + callback(null, 'one'); + }, 200); + }, + function(callback){ + setTimeout(function(){ + callback(null, 'two'); + }, 100); + } +], +// optional callback +function(err, results){ + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}); + + +// an example using an object instead of an array +async.parallel({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equals to: {one: 1, two: 2} +}); +``` + +__Related__ + +* parallelLimit(tasks, limit, [callback]) + +--------------------------------------- + + +### whilst(test, fn, callback) + +Repeatedly call `fn`, while `test` returns `true`. Calls `callback` when stopped, +or an error occurs. + +__Arguments__ + +* `test()` - synchronous truth test to perform before each execution of `fn`. +* `fn(callback)` - A function which is called each time `test` passes. The function is + passed a `callback(err)`, which must be called once it has completed with an + optional `err` argument. +* `callback(err, [results])` - A callback which is called after the test + function has failed and repeated execution of `fn` has stopped. `callback` + will be passed an error and any arguments passed to the final `fn`'s callback. 
+ +__Example__ + +```js +var count = 0; + +async.whilst( + function () { return count < 5; }, + function (callback) { + count++; + setTimeout(function () { + callback(null, count); + }, 1000); + }, + function (err, n) { + // 5 seconds have passed, n = 5 + } +); +``` + +--------------------------------------- + + +### doWhilst(fn, test, callback) + +The post-check version of [`whilst`](#whilst). To reflect the difference in +the order of operations, the arguments `test` and `fn` are switched. + +`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + +--------------------------------------- + + +### until(test, fn, callback) + +Repeatedly call `fn` until `test` returns `true`. Calls `callback` when stopped, +or an error occurs. `callback` will be passed an error and any arguments passed +to the final `fn`'s callback. + +The inverse of [`whilst`](#whilst). + +--------------------------------------- + + +### doUntil(fn, test, callback) + +Like [`doWhilst`](#doWhilst), except the `test` is inverted. Note the argument ordering differs from `until`. + +--------------------------------------- + + +### during(test, fn, callback) + +Like [`whilst`](#whilst), except the `test` is an asynchronous function that is passed a callback in the form of `function (err, truth)`. If error is passed to `test` or `fn`, the main callback is immediately called with the value of the error. + +__Example__ + +```js +var count = 0; + +async.during( + function (callback) { + return callback(null, count < 5); + }, + function (callback) { + count++; + setTimeout(callback, 1000); + }, + function (err) { + // 5 seconds have passed + } +); +``` + +--------------------------------------- + + +### doDuring(fn, test, callback) + +The post-check version of [`during`](#during). To reflect the difference in +the order of operations, the arguments `test` and `fn` are switched. + +Also a version of [`doWhilst`](#doWhilst) with asynchronous `test` function. + +--------------------------------------- + + +### forever(fn, [errback]) + +Calls the asynchronous function `fn` with a callback parameter that allows it to +call itself again, in series, indefinitely. + +If an error is passed to the callback then `errback` is called with the +error, and execution stops, otherwise it will never be called. + +```js +async.forever( + function(next) { + // next is suitable for passing to things that need a callback(err [, whatever]); + // it will result in this function being called again. + }, + function(err) { + // if next is called with a value in its first parameter, it will appear + // in here as 'err', and execution will stop. + } +); +``` + +--------------------------------------- + + +### waterfall(tasks, [callback]) + +Runs the `tasks` array of functions in series, each passing their results to the next in +the array. However, if any of the `tasks` pass an error to their own callback, the +next function is not executed, and the main `callback` is immediately called with +the error. + +__Arguments__ + +* `tasks` - An array of functions to run, each function is passed a + `callback(err, result1, result2, ...)` it must call on completion. The first + argument is an error (which can be `null`) and any further arguments will be + passed as arguments in order to the next task. +* `callback(err, [results])` - An optional callback to run once all the functions + have completed. This will be passed the results of the last task's callback. 
+ + + +__Example__ + +```js +async.waterfall([ + function(callback) { + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); + }, + function(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); + } +], function (err, result) { + // result now equals 'done' +}); +``` +Or, with named functions: + +```js +async.waterfall([ + myFirstFunction, + mySecondFunction, + myLastFunction, +], function (err, result) { + // result now equals 'done' +}); +function myFirstFunction(callback) { + callback(null, 'one', 'two'); +} +function mySecondFunction(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); +} +function myLastFunction(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); +} +``` + +Or, if you need to pass any argument to the first function: + +```js +async.waterfall([ + async.apply(myFirstFunction, 'zero'), + mySecondFunction, + myLastFunction, +], function (err, result) { + // result now equals 'done' +}); +function myFirstFunction(arg1, callback) { + // arg1 now equals 'zero' + callback(null, 'one', 'two'); +} +function mySecondFunction(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); +} +function myLastFunction(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); +} +``` + +--------------------------------------- + +### compose(fn1, fn2...) + +Creates a function which is a composition of the passed asynchronous +functions. Each function consumes the return value of the function that +follows. Composing functions `f()`, `g()`, and `h()` would produce the result of +`f(g(h()))`, only this version uses callbacks to obtain the return values. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +function add1(n, callback) { + setTimeout(function () { + callback(null, n + 1); + }, 10); +} + +function mul3(n, callback) { + setTimeout(function () { + callback(null, n * 3); + }, 10); +} + +var add1mul3 = async.compose(mul3, add1); + +add1mul3(4, function (err, result) { + // result now equals 15 +}); +``` + +--------------------------------------- + +### seq(fn1, fn2...) + +Version of the compose function that is more natural to read. +Each function consumes the return value of the previous function. +It is the equivalent of [`compose`](#compose) with the arguments reversed. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +// Requires lodash (or underscore), express3 and dresende's orm2. +// Part of an app, that fetches cats of the logged user. +// This example uses `seq` function to avoid overnesting and error +// handling clutter. 
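+// The composed function that async.seq returns below is invoked as
+// (user_id, finalCallback); each step's result is fed to the next step.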
+app.get('/cats', function(request, response) { + var User = request.models.User; + async.seq( + _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + function(user, fn) { + user.getCats(fn); // 'getCats' has signature (callback(err, data)) + } + )(req.session.user_id, function (err, cats) { + if (err) { + console.error(err); + response.json({ status: 'error', message: err.message }); + } else { + response.json({ status: 'ok', message: 'Cats found', data: cats }); + } + }); +}); +``` + +--------------------------------------- + +### applyEach(fns, args..., callback) + +Applies the provided arguments to each function in the array, calling +`callback` after all functions have completed. If you only provide the first +argument, then it will return a function which lets you pass in the +arguments as if it were a single function call. + +__Arguments__ + +* `fns` - the asynchronous functions to all call with the same arguments +* `args...` - any number of separate arguments to pass to the function +* `callback` - the final argument should be the callback, called when all + functions have completed processing + + +__Example__ + +```js +async.applyEach([enableSearch, updateSchema], 'bucket', callback); + +// partial application example: +async.each( + buckets, + async.applyEach([enableSearch, updateSchema]), + callback +); +``` + +__Related__ + +* applyEachSeries(tasks, args..., [callback]) + +--------------------------------------- + + +### queue(worker, [concurrency]) + +Creates a `queue` object with the specified `concurrency`. Tasks added to the +`queue` are processed in parallel (up to the `concurrency` limit). If all +`worker`s are in progress, the task is queued until one becomes available. +Once a `worker` completes a `task`, that `task`'s callback is called. + +__Arguments__ + +* `worker(task, callback)` - An asynchronous function for processing a queued + task, which must call its `callback(err)` argument when finished, with an + optional `error` as an argument. If you want to handle errors from an individual task, pass a callback to `q.push()`. +* `concurrency` - An `integer` for determining how many `worker` functions should be + run in parallel. If omitted, the concurrency defaults to `1`. If the concurrency is `0`, an error is thrown. + +__Queue objects__ + +The `queue` object returned by this function has the following properties and +methods: + +* `length()` - a function returning the number of items waiting to be processed. +* `started` - a function returning whether or not any items have been pushed and processed by the queue +* `running()` - a function returning the number of items currently being processed. +* `workersList()` - a function returning the array of items currently being processed. +* `idle()` - a function returning false if there are items waiting or being processed, or true if not. +* `concurrency` - an integer for determining how many `worker` functions should be + run in parallel. This property can be changed after a `queue` is created to + alter the concurrency on-the-fly. +* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once + the `worker` has finished processing the task. Instead of a single task, a `tasks` array + can be submitted. The respective callback is used for every task in the list. +* `unshift(task, [callback])` - add a new task to the front of the `queue`. +* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit, + and further tasks will be queued. 
+* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`. +* `paused` - a boolean for determining whether the queue is in a paused state +* `pause()` - a function that pauses the processing of tasks until `resume()` is called. +* `resume()` - a function that resumes the processing of queued tasks when the queue is paused. +* `kill()` - a function that removes the `drain` callback and empties remaining tasks from the queue forcing it to go idle. + +__Example__ + +```js +// create a queue object with concurrency 2 + +var q = async.queue(function (task, callback) { + console.log('hello ' + task.name); + callback(); +}, 2); + + +// assign a callback +q.drain = function() { + console.log('all items have been processed'); +} + +// add some items to the queue + +q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); + +// add some items to the queue (batch-wise) + +q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) { + console.log('finished processing item'); +}); + +// add some items to the front of the queue + +q.unshift({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +``` + + +--------------------------------------- + + +### priorityQueue(worker, concurrency) + +The same as [`queue`](#queue) only tasks are assigned a priority and completed in ascending priority order. There are two differences between `queue` and `priorityQueue` objects: + +* `push(task, priority, [callback])` - `priority` should be a number. If an array of + `tasks` is given, all tasks will be assigned the same priority. +* The `unshift` method was removed. + +--------------------------------------- + + +### cargo(worker, [payload]) + +Creates a `cargo` object with the specified payload. Tasks added to the +cargo will be processed altogether (up to the `payload` limit). If the +`worker` is in progress, the task is queued until it becomes available. Once +the `worker` has completed some tasks, each callback of those tasks is called. +Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) for how `cargo` and `queue` work. + +While [queue](#queue) passes only one task to one of a group of workers +at a time, cargo passes an array of tasks to a single worker, repeating +when the worker is finished. + +__Arguments__ + +* `worker(tasks, callback)` - An asynchronous function for processing an array of + queued tasks, which must call its `callback(err)` argument when finished, with + an optional `err` argument. +* `payload` - An optional `integer` for determining how many tasks should be + processed per round; if omitted, the default is unlimited. + +__Cargo objects__ + +The `cargo` object returned by this function has the following properties and +methods: + +* `length()` - A function returning the number of items waiting to be processed. 
+* `payload` - An `integer` for determining how many tasks should be + process per round. This property can be changed after a `cargo` is created to + alter the payload on-the-fly. +* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called + once the `worker` has finished processing the task. Instead of a single task, an array of `tasks` + can be submitted. The respective callback is used for every task in the list. +* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued. +* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`. +* `idle()`, `pause()`, `resume()`, `kill()` - cargo inherits all of the same methods and event calbacks as [`queue`](#queue) + +__Example__ + +```js +// create a cargo object with payload 2 + +var cargo = async.cargo(function (tasks, callback) { + for(var i=0; i +### auto(tasks, [concurrency], [callback]) + +Determines the best order for running the functions in `tasks`, based on their requirements. Each function can optionally depend on other functions being completed first, and each function is run as soon as its requirements are satisfied. + +If any of the functions pass an error to their callback, the `auto` sequence will stop. Further tasks will not execute (so any other functions depending on it will not run), and the main `callback` is immediately called with the error. Functions also receive an object containing the results of functions which have completed so far. + +Note, all functions are called with a `results` object as a second argument, +so it is unsafe to pass functions in the `tasks` object which cannot handle the +extra argument. + +For example, this snippet of code: + +```js +async.auto({ + readData: async.apply(fs.readFile, 'data.txt', 'utf-8') +}, callback); +``` + +will have the effect of calling `readFile` with the results object as the last +argument, which will fail: + +```js +fs.readFile('data.txt', 'utf-8', cb, {}); +``` + +Instead, wrap the call to `readFile` in a function which does not forward the +`results` object: + +```js +async.auto({ + readData: function(cb, results){ + fs.readFile('data.txt', 'utf-8', cb); + } +}, callback); +``` + +__Arguments__ + +* `tasks` - An object. Each of its properties is either a function or an array of + requirements, with the function itself the last item in the array. The object's key + of a property serves as the name of the task defined by that property, + i.e. can be used when specifying requirements for other tasks. + The function receives two arguments: (1) a `callback(err, result)` which must be + called when finished, passing an `error` (which can be `null`) and the result of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions. +* `concurrency` - An optional `integer` for determining the maximum number of tasks that can be run in parallel. By default, as many as possible. +* `callback(err, results)` - An optional callback which is called when all the + tasks have been completed. It receives the `err` argument if any `tasks` + pass an error to their callback. Results are always returned; however, if + an error occurs, no further `tasks` will be performed, and the results + object will only contain partial results. 
+ + +__Example__ + +```js +async.auto({ + get_data: function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + make_folder: function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + }, + write_file: ['get_data', 'make_folder', function(callback, results){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + callback(null, 'filename'); + }], + email_link: ['write_file', function(callback, results){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + // results.write_file contains the filename returned by write_file. + callback(null, {'file':results.write_file, 'email':'user@example.com'}); + }] +}, function(err, results) { + console.log('err = ', err); + console.log('results = ', results); +}); +``` + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + +```js +async.parallel([ + function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + } +], +function(err, results){ + async.series([ + function(callback){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + results.push('filename'); + callback(null); + }, + function(callback){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + callback(null, {'file':results.pop(), 'email':'user@example.com'}); + } + ]); +}); +``` + +For a complicated series of `async` tasks, using the [`auto`](#auto) function makes adding +new tasks much easier (and the code more readable). + + +--------------------------------------- + + +### retry([opts = {times: 5, interval: 0}| 5], task, [callback]) + +Attempts to get a successful response from `task` no more than `times` times before +returning an error. If the task is successful, the `callback` will be passed the result +of the successful task. If all attempts fail, the callback will be passed the error and +result (if any) of the final attempt. + +__Arguments__ + +* `opts` - Can be either an object with `times` and `interval` or a number. + * `times` - The number of attempts to make before giving up. The default is `5`. + * `interval` - The time to wait between retries, in milliseconds. The default is `0`. + * If `opts` is a number, the number specifies the number of times to retry, with the default interval of `0`. +* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)` + which must be called when finished, passing `err` (which can be `null`) and the `result` of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions (if nested inside another control flow). +* `callback(err, results)` - An optional callback which is called when the + task has succeeded, or after the final failed attempt. 
It receives the `err` and `result` arguments of the last attempt at completing the `task`. + +The [`retry`](#retry) function can be used as a stand-alone control flow by passing a callback, as shown below: + +```js +// try calling apiMethod 3 times +async.retry(3, apiMethod, function(err, result) { + // do something with the result +}); +``` + +```js +// try calling apiMethod 3 times, waiting 200 ms between each retry +async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + // do something with the result +}); +``` + +```js +// try calling apiMethod the default 5 times no delay between each retry +async.retry(apiMethod, function(err, result) { + // do something with the result +}); +``` + +It can also be embedded within other control flow functions to retry individual methods +that are not as reliable, like this: + +```js +async.auto({ + users: api.getUsers.bind(api), + payments: async.retry(3, api.getPayments.bind(api)) +}, function(err, results) { + // do something with the results +}); +``` + + +--------------------------------------- + + +### iterator(tasks) + +Creates an iterator function which calls the next function in the `tasks` array, +returning a continuation to call the next one after that. It's also possible to +“peek” at the next iterator with `iterator.next()`. + +This function is used internally by the `async` module, but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* `tasks` - An array of functions to run. + +__Example__ + +```js +var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } +]); + +node> var iterator2 = iterator(); +'one' +node> var iterator3 = iterator2(); +'two' +node> iterator3(); +'three' +node> var nextfn = iterator2.next(); +node> nextfn(); +'three' +``` + +--------------------------------------- + + +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied. + +Useful as a shorthand when combined with other control flow functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to automatically apply when the + continuation is called. + +__Example__ + +```js +// using apply + +async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), +]); + + +// the same process without using apply + +async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + } +]); +``` + +It's possible to pass any number of additional arguments when calling the +continuation: + +```js +node> var fn = async.apply(sys.puts, 'one'); +node> fn('two', 'three'); +one +two +three +``` + +--------------------------------------- + + +### nextTick(callback), setImmediate(callback) + +Calls `callback` on a later loop around the event loop. In Node.js this just +calls `process.nextTick`; in the browser it falls back to `setImmediate(callback)` +if available, otherwise `setTimeout(callback, 0)`, which means other higher priority +events may precede the execution of `callback`. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* `callback` - The function to call on a later loop around the event loop. 
+ +__Example__ + +```js +var call_order = []; +async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two'] +}); +call_order.push('one') +``` + + +### times(n, iterator, [callback]) + +Calls the `iterator` function `n` times, and accumulates results in the same manner +you would use with [`map`](#map). + +__Arguments__ + +* `n` - The number of times to run the function. +* `iterator` - The function to call `n` times. +* `callback` - see [`map`](#map) + +__Example__ + +```js +// Pretend this is some complicated async factory +var createUser = function(id, callback) { + callback(null, { + id: 'user' + id + }) +} +// generate 5 users +async.times(5, function(n, next){ + createUser(n, function(err, user) { + next(err, user) + }) +}, function(err, users) { + // we should now have 5 users +}); +``` + +__Related__ + +* timesSeries(n, iterator, [callback]) +* timesLimit(n, limit, iterator, [callback]) + + +## Utils + + +### memoize(fn, [hasher]) + +Caches the results of an `async` function. When creating a hash to store function +results against, the callback is omitted from the hash and an optional hash +function can be used. + +If no hash function is specified, the first argument is used as a hash key, which may work reasonably if it is a string or a data type that converts to a distinct string. Note that objects and arrays will not behave reasonably. Neither will cases where the other arguments are significant. In such cases, specify your own hash function. + +The cache of results is exposed as the `memo` property of the function returned +by `memoize`. + +__Arguments__ + +* `fn` - The function to proxy and cache results from. +* `hasher` - An optional function for generating a custom hash for storing + results. It has all the arguments applied to it apart from the callback, and + must be synchronous. + +__Example__ + +```js +var slow_fn = function (name, callback) { + // do something + callback(null, result); +}; +var fn = async.memoize(slow_fn); + +// fn can now be used as if it were slow_fn +fn('some name', function () { + // callback +}); +``` + + +### unmemoize(fn) + +Undoes a [`memoize`](#memoize)d function, reverting it to the original, unmemoized +form. Handy for testing. + +__Arguments__ + +* `fn` - the memoized function + +--------------------------------------- + + +### ensureAsync(fn) + +Wrap an async function and ensure it calls its callback on a later tick of the event loop. If the function already calls its callback on a next tick, no extra deferral is added. This is useful for preventing stack overflows (`RangeError: Maximum call stack size exceeded`) and generally keeping [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) contained. + +__Arguments__ + +* `fn` - an async function, one that expects a node-style callback as its last argument + +Returns a wrapped function with the exact same call signature as the function passed in. + +__Example__ + +```js +function sometimesAsync(arg, callback) { + if (cache[arg]) { + return callback(null, cache[arg]); // this would be synchronous!! + } else { + doSomeIO(arg, callback); // this IO would be asynchronous + } +} + +// this has a risk of stack overflows if many results are cached in a row +async.mapSeries(args, sometimesAsync, done); + +// this will defer sometimesAsync's callback if necessary, +// preventing stack overflows +async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + +``` + +--------------------------------------- + + +### constant(values...) 
+
+Returns a function that when called, calls back with the values provided. Useful as the first function in a `waterfall`, or for plugging values into `auto`.
+
+__Example__
+
+```js
+async.waterfall([
+    async.constant(42),
+    function (value, next) {
+        // value === 42
+    },
+    //...
+], callback);
+
+async.waterfall([
+    async.constant(filename, "utf8"),
+    fs.readFile,
+    function (fileData, next) {
+        //...
+    }
+    //...
+], callback);
+
+async.auto({
+    hostname: async.constant("https://server.net/"),
+    port: findFreePort,
+    launchServer: ["hostname", "port", function (cb, options) {
+        startServer(options, cb);
+    }],
+    //...
+}, callback);
+
+```
+
+---------------------------------------
+
+
+
+### asyncify(func)
+
+__Alias:__ `wrapSync`
+
+Take a sync function and make it async, passing its return value to a callback. This is useful for plugging sync functions into a waterfall, series, or other async functions. Any arguments passed to the generated function will be passed to the wrapped function (except for the final callback argument). Errors thrown will be passed to the callback.
+
+__Example__
+
+```js
+async.waterfall([
+    async.apply(fs.readFile, filename, "utf8"),
+    async.asyncify(JSON.parse),
+    function (data, next) {
+        // data is the result of parsing the text.
+        // If there was a parsing error, it would have been caught.
+    }
+], callback)
+```
+
+If the function passed to `asyncify` returns a Promise, that promise's resolved/rejected state will be used to call the callback, rather than simply the synchronous return value. Example:
+
+```js
+async.waterfall([
+    async.apply(fs.readFile, filename, "utf8"),
+    async.asyncify(function (contents) {
+        return db.model.create(contents);
+    }),
+    function (model, next) {
+        // `model` is the instantiated model object.
+        // If there was an error, this function would be skipped.
+    }
+], callback)
+```
+
+This also means you can asyncify ES2017 `async` functions.
+
+```js
+var q = async.queue(async.asyncify(async function (file) {
+    var intermediateStep = await processFile(file);
+    return await somePromise(intermediateStep)
+}));
+
+q.push(files);
+```
+
+---------------------------------------
+
+
+### log(function, arguments)
+
+Logs the result of an `async` function to the `console`. Only works in Node.js or
+in browsers that support `console.log` and `console.error` (such as FF and Chrome).
+If multiple arguments are returned from the async function, `console.log` is
+called on each argument in order.
+
+__Arguments__
+
+* `function` - The function you want to eventually apply all arguments to.
+* `arguments...` - Any number of arguments to apply to the function.
+
+__Example__
+
+```js
+var hello = function(name, callback){
+    setTimeout(function(){
+        callback(null, 'hello ' + name);
+    }, 1000);
+};
+```
+```js
+node> async.log(hello, 'world');
+'hello world'
+```
+
+---------------------------------------
+
+
+### dir(function, arguments)
+
+Logs the result of an `async` function to the `console` using `console.dir` to
+display the properties of the resulting object. Only works in Node.js or
+in browsers that support `console.dir` and `console.error` (such as FF and Chrome).
+If multiple arguments are returned from the async function, `console.dir` is
+called on each argument in order.
+
+__Arguments__
+
+* `function` - The function you want to eventually apply all arguments to.
+* `arguments...` - Any number of arguments to apply to the function.
+ +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, {hello: name}); + }, 1000); +}; +``` +```js +node> async.dir(hello, 'world'); +{hello: 'world'} +``` + +--------------------------------------- + + +### noConflict() + +Changes the value of `async` back to its original value, returning a reference to the +`async` object. diff --git a/node_modules/async/dist/async.js b/node_modules/async/dist/async.js new file mode 100644 index 0000000..31e7620 --- /dev/null +++ b/node_modules/async/dist/async.js @@ -0,0 +1,1265 @@ +/*! + * async + * https://github.com/caolan/async + * + * Copyright 2010-2014 Caolan McMahon + * Released under the MIT license + */ +(function () { + + var async = {}; + function noop() {} + function identity(v) { + return v; + } + function toBool(v) { + return !!v; + } + function notId(v) { + return !v; + } + + // global on the server, window in the browser + var previous_async; + + // Establish the root object, `window` (`self`) in the browser, `global` + // on the server, or `this` in some virtual machines. We use `self` + // instead of `window` for `WebWorker` support. + var root = typeof self === 'object' && self.self === self && self || + typeof global === 'object' && global.global === global && global || + this; + + if (root != null) { + previous_async = root.async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + function only_once(fn) { + return function() { + if (fn === null) throw new Error("Callback was already called."); + fn.apply(this, arguments); + fn = null; + }; + } + + function _once(fn) { + return function() { + if (fn === null) return; + fn.apply(this, arguments); + fn = null; + }; + } + + //// cross-browser compatiblity functions //// + + var _toString = Object.prototype.toString; + + var _isArray = Array.isArray || function (obj) { + return _toString.call(obj) === '[object Array]'; + }; + + // Ported from underscore.js isObject + var _isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + function _isArrayLike(arr) { + return _isArray(arr) || ( + // has a positive integer length property + typeof arr.length === "number" && + arr.length >= 0 && + arr.length % 1 === 0 + ); + } + + function _arrayEach(arr, iterator) { + var index = -1, + length = arr.length; + + while (++index < length) { + iterator(arr[index], index, arr); + } + } + + function _map(arr, iterator) { + var index = -1, + length = arr.length, + result = Array(length); + + while (++index < length) { + result[index] = iterator(arr[index], index, arr); + } + return result; + } + + function _range(count) { + return _map(Array(count), function (v, i) { return i; }); + } + + function _reduce(arr, iterator, memo) { + _arrayEach(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + } + + function _forEachOf(object, iterator) { + _arrayEach(_keys(object), function (key) { + iterator(object[key], key); + }); + } + + function _indexOf(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] === item) return i; + } + return -1; + } + + var _keys = Object.keys || function (obj) { + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + function _keyIterator(coll) { + var i = -1; + var len; + var keys; + if (_isArrayLike(coll)) { + len = coll.length; + return function next() { + i++; + return i < len ? 
i : null; + }; + } else { + keys = _keys(coll); + len = keys.length; + return function next() { + i++; + return i < len ? keys[i] : null; + }; + } + } + + // Similar to ES6's rest param (http://ariya.ofilabs.com/2013/03/es6-and-rest-parameter.html) + // This accumulates the arguments passed into an array, after a given index. + // From underscore.js (https://github.com/jashkenas/underscore/pull/2140). + function _restParam(func, startIndex) { + startIndex = startIndex == null ? func.length - 1 : +startIndex; + return function() { + var length = Math.max(arguments.length - startIndex, 0); + var rest = Array(length); + for (var index = 0; index < length; index++) { + rest[index] = arguments[index + startIndex]; + } + switch (startIndex) { + case 0: return func.call(this, rest); + case 1: return func.call(this, arguments[0], rest); + } + // Currently unused but handle cases outside of the switch statement: + // var args = Array(startIndex + 1); + // for (index = 0; index < startIndex; index++) { + // args[index] = arguments[index]; + // } + // args[startIndex] = rest; + // return func.apply(this, args); + }; + } + + function _withoutIndex(iterator) { + return function (value, index, callback) { + return iterator(value, callback); + }; + } + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + + // capture the global reference to guard against fakeTimer mocks + var _setImmediate = typeof setImmediate === 'function' && setImmediate; + + var _delay = _setImmediate ? function(fn) { + // not a direct alias for IE10 compatibility + _setImmediate(fn); + } : function(fn) { + setTimeout(fn, 0); + }; + + if (typeof process === 'object' && typeof process.nextTick === 'function') { + async.nextTick = process.nextTick; + } else { + async.nextTick = _delay; + } + async.setImmediate = _setImmediate ? _delay : async.nextTick; + + + async.forEach = + async.each = function (arr, iterator, callback) { + return async.eachOf(arr, _withoutIndex(iterator), callback); + }; + + async.forEachSeries = + async.eachSeries = function (arr, iterator, callback) { + return async.eachOfSeries(arr, _withoutIndex(iterator), callback); + }; + + + async.forEachLimit = + async.eachLimit = function (arr, limit, iterator, callback) { + return _eachOfLimit(limit)(arr, _withoutIndex(iterator), callback); + }; + + async.forEachOf = + async.eachOf = function (object, iterator, callback) { + callback = _once(callback || noop); + object = object || []; + + var iter = _keyIterator(object); + var key, completed = 0; + + while ((key = iter()) != null) { + completed += 1; + iterator(object[key], key, only_once(done)); + } + + if (completed === 0) callback(null); + + function done(err) { + completed--; + if (err) { + callback(err); + } + // Check key is null in case iterator isn't exhausted + // and done resolved synchronously. 
+ else if (key === null && completed <= 0) { + callback(null); + } + } + }; + + async.forEachOfSeries = + async.eachOfSeries = function (obj, iterator, callback) { + callback = _once(callback || noop); + obj = obj || []; + var nextKey = _keyIterator(obj); + var key = nextKey(); + function iterate() { + var sync = true; + if (key === null) { + return callback(null); + } + iterator(obj[key], key, only_once(function (err) { + if (err) { + callback(err); + } + else { + key = nextKey(); + if (key === null) { + return callback(null); + } else { + if (sync) { + async.setImmediate(iterate); + } else { + iterate(); + } + } + } + })); + sync = false; + } + iterate(); + }; + + + + async.forEachOfLimit = + async.eachOfLimit = function (obj, limit, iterator, callback) { + _eachOfLimit(limit)(obj, iterator, callback); + }; + + function _eachOfLimit(limit) { + + return function (obj, iterator, callback) { + callback = _once(callback || noop); + obj = obj || []; + var nextKey = _keyIterator(obj); + if (limit <= 0) { + return callback(null); + } + var done = false; + var running = 0; + var errored = false; + + (function replenish () { + if (done && running <= 0) { + return callback(null); + } + + while (running < limit && !errored) { + var key = nextKey(); + if (key === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iterator(obj[key], key, only_once(function (err) { + running -= 1; + if (err) { + callback(err); + errored = true; + } + else { + replenish(); + } + })); + } + })(); + }; + } + + + function doParallel(fn) { + return function (obj, iterator, callback) { + return fn(async.eachOf, obj, iterator, callback); + }; + } + function doParallelLimit(fn) { + return function (obj, limit, iterator, callback) { + return fn(_eachOfLimit(limit), obj, iterator, callback); + }; + } + function doSeries(fn) { + return function (obj, iterator, callback) { + return fn(async.eachOfSeries, obj, iterator, callback); + }; + } + + function _asyncMap(eachfn, arr, iterator, callback) { + callback = _once(callback || noop); + arr = arr || []; + var results = _isArrayLike(arr) ? [] : {}; + eachfn(arr, function (value, index, callback) { + iterator(value, function (err, v) { + results[index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + async.mapLimit = doParallelLimit(_asyncMap); + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. + async.inject = + async.foldl = + async.reduce = function (arr, memo, iterator, callback) { + async.eachOfSeries(arr, function (x, i, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + + async.foldr = + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, identity).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + + async.transform = function (arr, memo, iterator, callback) { + if (arguments.length === 3) { + callback = iterator; + iterator = memo; + memo = _isArray(arr) ? 
[] : {}; + } + + async.eachOf(arr, function(v, k, cb) { + iterator(memo, v, k, cb); + }, function(err) { + callback(err, memo); + }); + }; + + function _filter(eachfn, arr, iterator, callback) { + var results = []; + eachfn(arr, function (x, index, callback) { + iterator(x, function (v) { + if (v) { + results.push({index: index, value: x}); + } + callback(); + }); + }, function () { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + } + + async.select = + async.filter = doParallel(_filter); + + async.selectLimit = + async.filterLimit = doParallelLimit(_filter); + + async.selectSeries = + async.filterSeries = doSeries(_filter); + + function _reject(eachfn, arr, iterator, callback) { + _filter(eachfn, arr, function(value, cb) { + iterator(value, function(v) { + cb(!v); + }); + }, callback); + } + async.reject = doParallel(_reject); + async.rejectLimit = doParallelLimit(_reject); + async.rejectSeries = doSeries(_reject); + + function _createTester(eachfn, check, getResult) { + return function(arr, limit, iterator, cb) { + function done() { + if (cb) cb(getResult(false, void 0)); + } + function iteratee(x, _, callback) { + if (!cb) return callback(); + iterator(x, function (v) { + if (cb && check(v)) { + cb(getResult(true, x)); + cb = iterator = false; + } + callback(); + }); + } + if (arguments.length > 3) { + eachfn(arr, limit, iteratee, done); + } else { + cb = iterator; + iterator = limit; + eachfn(arr, iteratee, done); + } + }; + } + + async.any = + async.some = _createTester(async.eachOf, toBool, identity); + + async.someLimit = _createTester(async.eachOfLimit, toBool, identity); + + async.all = + async.every = _createTester(async.eachOf, notId, notId); + + async.everyLimit = _createTester(async.eachOfLimit, notId, notId); + + function _findGetResult(v, x) { + return x; + } + async.detect = _createTester(async.eachOf, identity, _findGetResult); + async.detectSeries = _createTester(async.eachOfSeries, identity, _findGetResult); + async.detectLimit = _createTester(async.eachOfLimit, identity, _findGetResult); + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + callback(null, _map(results.sort(comparator), function (x) { + return x.value; + })); + } + + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } + }; + + async.auto = function (tasks, concurrency, callback) { + if (typeof arguments[1] === 'function') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = _once(callback || noop); + var keys = _keys(tasks); + var remainingTasks = keys.length; + if (!remainingTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = remainingTasks; + } + + var results = {}; + var runningTasks = 0; + + var hasError = false; + + var listeners = []; + function addListener(fn) { + listeners.unshift(fn); + } + function removeListener(fn) { + var idx = _indexOf(listeners, fn); + if (idx >= 0) listeners.splice(idx, 1); + } + function taskComplete() { + remainingTasks--; + _arrayEach(listeners.slice(0), function (fn) { + fn(); + }); + } + + addListener(function () { + if (!remainingTasks) { + callback(null, results); + } + }); + + _arrayEach(keys, function (k) { + if (hasError) return; + var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]]; + var taskCallback = _restParam(function(err, args) { + runningTasks--; + if (args.length <= 1) { + args = args[0]; + } + if (err) { + var safeResults = {}; + _forEachOf(results, function(val, rkey) { + safeResults[rkey] = val; + }); + safeResults[k] = args; + hasError = true; + + callback(err, safeResults); + } + else { + results[k] = args; + async.setImmediate(taskComplete); + } + }); + var requires = task.slice(0, task.length - 1); + // prevent dead-locks + var len = requires.length; + var dep; + while (len--) { + if (!(dep = tasks[requires[len]])) { + throw new Error('Has nonexistent dependency in ' + requires.join(', ')); + } + if (_isArray(dep) && _indexOf(dep, k) >= 0) { + throw new Error('Has cyclic dependencies'); + } + } + function ready() { + return runningTasks < concurrency && _reduce(requires, function (a, x) { + return (a && results.hasOwnProperty(x)); + }, true) && !results.hasOwnProperty(k); + } + if (ready()) { + runningTasks++; + task[task.length - 1](taskCallback, results); + } + else { + addListener(listener); + } + function listener() { + if (ready()) { + runningTasks++; + removeListener(listener); + task[task.length - 1](taskCallback, results); + } + } + }); + }; + + + + async.retry = function(times, task, callback) { + var DEFAULT_TIMES = 5; + var DEFAULT_INTERVAL = 0; + + var attempts = []; + + var opts = { + times: DEFAULT_TIMES, + interval: DEFAULT_INTERVAL + }; + + function parseTimes(acc, t){ + if(typeof t === 'number'){ + acc.times = parseInt(t, 10) || DEFAULT_TIMES; + } else if(typeof t === 'object'){ + acc.times = parseInt(t.times, 10) || DEFAULT_TIMES; + acc.interval = parseInt(t.interval, 10) || DEFAULT_INTERVAL; + } else { + throw new Error('Unsupported argument type for \'times\': ' + typeof t); + } + } + + var length = arguments.length; + if (length < 1 || length > 3) { + throw new Error('Invalid arguments - must be either (task), (task, callback), (times, task) or (times, task, callback)'); + } else if (length <= 2 && typeof times === 'function') { + callback = task; + task = times; + } + if (typeof times !== 'function') { + parseTimes(opts, times); + } + opts.callback = callback; + opts.task = task; + + function wrappedTask(wrappedCallback, wrappedResults) { + function retryAttempt(task, finalAttempt) { + return function(seriesCallback) { + task(function(err, result){ + seriesCallback(!err || finalAttempt, {err: err, result: result}); + }, wrappedResults); + }; + } + + function retryInterval(interval){ + return function(seriesCallback){ + setTimeout(function(){ + seriesCallback(null); + }, interval); + }; + } + + while (opts.times) { + + var finalAttempt = !(opts.times-=1); + 
attempts.push(retryAttempt(opts.task, finalAttempt)); + if(!finalAttempt && opts.interval > 0){ + attempts.push(retryInterval(opts.interval)); + } + } + + async.series(attempts, function(done, data){ + data = data[data.length - 1]; + (wrappedCallback || opts.callback)(data.err, data.result); + }); + } + + // If a callback is passed, run this as a controll flow + return opts.callback ? wrappedTask() : wrappedTask; + }; + + async.waterfall = function (tasks, callback) { + callback = _once(callback || noop); + if (!_isArray(tasks)) { + var err = new Error('First argument to waterfall must be an array of functions'); + return callback(err); + } + if (!tasks.length) { + return callback(); + } + function wrapIterator(iterator) { + return _restParam(function (err, args) { + if (err) { + callback.apply(null, [err].concat(args)); + } + else { + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + ensureAsync(iterator).apply(null, args); + } + }); + } + wrapIterator(async.iterator(tasks))(); + }; + + function _parallel(eachfn, tasks, callback) { + callback = callback || noop; + var results = _isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, function (task, key, callback) { + task(_restParam(function (err, args) { + if (args.length <= 1) { + args = args[0]; + } + results[key] = args; + callback(err); + })); + }, function (err) { + callback(err, results); + }); + } + + async.parallel = function (tasks, callback) { + _parallel(async.eachOf, tasks, callback); + }; + + async.parallelLimit = function(tasks, limit, callback) { + _parallel(_eachOfLimit(limit), tasks, callback); + }; + + async.series = function(tasks, callback) { + _parallel(async.eachOfSeries, tasks, callback); + }; + + async.iterator = function (tasks) { + function makeCallback(index) { + function fn() { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + } + fn.next = function () { + return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; + }; + return fn; + } + return makeCallback(0); + }; + + async.apply = _restParam(function (fn, args) { + return _restParam(function (callArgs) { + return fn.apply( + null, args.concat(callArgs) + ); + }); + }); + + function _concat(eachfn, arr, fn, callback) { + var result = []; + eachfn(arr, function (x, index, cb) { + fn(x, function (err, y) { + result = result.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, result); + }); + } + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + callback = callback || noop; + if (test()) { + var next = _restParam(function(err, args) { + if (err) { + callback(err); + } else if (test.apply(this, args)) { + iterator(next); + } else { + callback.apply(null, [null].concat(args)); + } + }); + iterator(next); + } else { + callback(null); + } + }; + + async.doWhilst = function (iterator, test, callback) { + var calls = 0; + return async.whilst(function() { + return ++calls <= 1 || test.apply(this, arguments); + }, iterator, callback); + }; + + async.until = function (test, iterator, callback) { + return async.whilst(function() { + return !test.apply(this, arguments); + }, iterator, callback); + }; + + async.doUntil = function (iterator, test, callback) { + return async.doWhilst(iterator, function() { + return !test.apply(this, arguments); + }, callback); + }; + + async.during = function (test, iterator, callback) { + callback = callback || noop; + + var next = _restParam(function(err, args) { + if (err) { + callback(err); + } else { + args.push(check); + test.apply(this, args); + } + }); + + var check = function(err, truth) { + if (err) { + callback(err); + } else if (truth) { + iterator(next); + } else { + callback(null); + } + }; + + test(check); + }; + + async.doDuring = function (iterator, test, callback) { + var calls = 0; + async.during(function(next) { + if (calls++ < 1) { + next(null, true); + } else { + test.apply(this, arguments); + } + }, iterator, callback); + }; + + function _queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new Error('Concurrency must not be zero'); + } + function _insert(q, data, pos, callback) { + if (callback != null && typeof callback !== "function") { + throw new Error("task callback must be a function"); + } + q.started = true; + if (!_isArray(data)) { + data = [data]; + } + if(data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + q.drain(); + }); + } + _arrayEach(data, function(task) { + var item = { + data: task, + callback: callback || noop + }; + + if (pos) { + q.tasks.unshift(item); + } else { + q.tasks.push(item); + } + + if (q.tasks.length === q.concurrency) { + q.saturated(); + } + }); + async.setImmediate(q.process); + } + function _next(q, tasks) { + return function(){ + workers -= 1; + + var removed = false; + var args = arguments; + _arrayEach(tasks, function (task) { + _arrayEach(workersList, function (worker, index) { + if (worker === task && !removed) { + workersList.splice(index, 1); + removed = true; + } + }); + + task.callback.apply(task, args); + }); + if (q.tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + } + + var workers = 0; + var workersList = []; + var q = { + tasks: [], + concurrency: concurrency, + payload: payload, + saturated: noop, + empty: noop, + drain: noop, + started: false, + 
paused: false, + push: function (data, callback) { + _insert(q, data, false, callback); + }, + kill: function () { + q.drain = noop; + q.tasks = []; + }, + unshift: function (data, callback) { + _insert(q, data, true, callback); + }, + process: function () { + while(!q.paused && workers < q.concurrency && q.tasks.length){ + + var tasks = q.payload ? + q.tasks.splice(0, q.payload) : + q.tasks.splice(0, q.tasks.length); + + var data = _map(tasks, function (task) { + return task.data; + }); + + if (q.tasks.length === 0) { + q.empty(); + } + workers += 1; + workersList.push(tasks[0]); + var cb = only_once(_next(q, tasks)); + worker(data, cb); + } + }, + length: function () { + return q.tasks.length; + }, + running: function () { + return workers; + }, + workersList: function () { + return workersList; + }, + idle: function() { + return q.tasks.length + workers === 0; + }, + pause: function () { + q.paused = true; + }, + resume: function () { + if (q.paused === false) { return; } + q.paused = false; + var resumeCount = Math.min(q.concurrency, q.tasks.length); + // Need to call q.process once per concurrent + // worker to preserve full concurrency after pause + for (var w = 1; w <= resumeCount; w++) { + async.setImmediate(q.process); + } + } + }; + return q; + } + + async.queue = function (worker, concurrency) { + var q = _queue(function (items, cb) { + worker(items[0], cb); + }, concurrency, 1); + + return q; + }; + + async.priorityQueue = function (worker, concurrency) { + + function _compareTasks(a, b){ + return a.priority - b.priority; + } + + function _binarySearch(sequence, item, compare) { + var beg = -1, + end = sequence.length - 1; + while (beg < end) { + var mid = beg + ((end - beg + 1) >>> 1); + if (compare(item, sequence[mid]) >= 0) { + beg = mid; + } else { + end = mid - 1; + } + } + return beg; + } + + function _insert(q, data, priority, callback) { + if (callback != null && typeof callback !== "function") { + throw new Error("task callback must be a function"); + } + q.started = true; + if (!_isArray(data)) { + data = [data]; + } + if(data.length === 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + q.drain(); + }); + } + _arrayEach(data, function(task) { + var item = { + data: task, + priority: priority, + callback: typeof callback === 'function' ? 
callback : noop + }; + + q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item); + + if (q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + // Start with a normal queue + var q = async.queue(worker, concurrency); + + // Override push to accept second parameter representing priority + q.push = function (data, priority, callback) { + _insert(q, data, priority, callback); + }; + + // Remove unshift function + delete q.unshift; + + return q; + }; + + async.cargo = function (worker, payload) { + return _queue(worker, 1, payload); + }; + + function _console_fn(name) { + return _restParam(function (fn, args) { + fn.apply(null, args.concat([_restParam(function (err, args) { + if (typeof console === 'object') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _arrayEach(args, function (x) { + console[name](x); + }); + } + } + })])); + }); + } + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + var queues = {}; + var has = Object.prototype.hasOwnProperty; + hasher = hasher || identity; + var memoized = _restParam(function memoized(args) { + var callback = args.pop(); + var key = hasher.apply(null, args); + if (has.call(memo, key)) { + async.setImmediate(function () { + callback.apply(null, memo[key]); + }); + } + else if (has.call(queues, key)) { + queues[key].push(callback); + } + else { + queues[key] = [callback]; + fn.apply(null, args.concat([_restParam(function (args) { + memo[key] = args; + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i].apply(null, args); + } + })])); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + }; + + async.unmemoize = function (fn) { + return function () { + return (fn.unmemoized || fn).apply(null, arguments); + }; + }; + + function _times(mapper) { + return function (count, iterator, callback) { + mapper(_range(count), iterator, callback); + }; + } + + async.times = _times(async.map); + async.timesSeries = _times(async.mapSeries); + async.timesLimit = function (count, limit, iterator, callback) { + return async.mapLimit(_range(count), limit, iterator, callback); + }; + + async.seq = function (/* functions... */) { + var fns = arguments; + return _restParam(function (args) { + var that = this; + + var callback = args[args.length - 1]; + if (typeof callback == 'function') { + args.pop(); + } else { + callback = noop; + } + + async.reduce(fns, args, function (newargs, fn, cb) { + fn.apply(that, newargs.concat([_restParam(function (err, nextargs) { + cb(err, nextargs); + })])); + }, + function (err, results) { + callback.apply(that, [err].concat(results)); + }); + }); + }; + + async.compose = function (/* functions... 
*/) { + return async.seq.apply(null, Array.prototype.reverse.call(arguments)); + }; + + + function _applyEach(eachfn) { + return _restParam(function(fns, args) { + var go = _restParam(function(args) { + var that = this; + var callback = args.pop(); + return eachfn(fns, function (fn, _, cb) { + fn.apply(that, args.concat([cb])); + }, + callback); + }); + if (args.length) { + return go.apply(this, args); + } + else { + return go; + } + }); + } + + async.applyEach = _applyEach(async.eachOf); + async.applyEachSeries = _applyEach(async.eachOfSeries); + + + async.forever = function (fn, callback) { + var done = only_once(callback || noop); + var task = ensureAsync(fn); + function next(err) { + if (err) { + return done(err); + } + task(next); + } + next(); + }; + + function ensureAsync(fn) { + return _restParam(function (args) { + var callback = args.pop(); + args.push(function () { + var innerArgs = arguments; + if (sync) { + async.setImmediate(function () { + callback.apply(null, innerArgs); + }); + } else { + callback.apply(null, innerArgs); + } + }); + var sync = true; + fn.apply(this, args); + sync = false; + }); + } + + async.ensureAsync = ensureAsync; + + async.constant = _restParam(function(values) { + var args = [null].concat(values); + return function (callback) { + return callback.apply(this, args); + }; + }); + + async.wrapSync = + async.asyncify = function asyncify(func) { + return _restParam(function (args) { + var callback = args.pop(); + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (_isObject(result) && typeof result.then === "function") { + result.then(function(value) { + callback(null, value); + })["catch"](function(err) { + callback(err.message ? err : new Error(err)); + }); + } else { + callback(null, result); + } + }); + }; + + // Node.js + if (typeof module === 'object' && module.exports) { + module.exports = async; + } + // AMD / RequireJS + else if (typeof define === 'function' && define.amd) { + define([], function () { + return async; + }); + } + // included directly via +``` + +## Usage + +``` +var emojis = require('emojis-list'); +console.log(emojis[0]); +// => 🀄 +``` + +## Related + +* [emojis-unicode](https://github.com/Kikobeats/emojis-unicode) – Complete list of standard Unicode codes that represent emojis. +* [emojis-keywords](https://github.com/Kikobeats/emojis-keywords) – Complete list of am emoji shortcuts. +* [is-emoji-keyword](https://github.com/Kikobeats/is-emoji-keyword) – Check if a word is a emoji shortcut. +* [is-standard-emoji](https://github.com/kikobeats/is-standard-emoji) – Simply way to check if a emoji is a standard emoji. +* [trim-emoji](https://github.com/Kikobeats/trim-emoji) – Deletes ':' from the begin and the end of an emoji shortcut. 
+ +## License + +MIT © [Kiko Beats](http://www.kikobeats.com) diff --git a/node_modules/emojis-list/index.js b/node_modules/emojis-list/index.js new file mode 100644 index 0000000..d59beab --- /dev/null +++ b/node_modules/emojis-list/index.js @@ -0,0 +1,2479 @@ +module.exports = [ + "🀄", + "🃏", + "🅰", + "🅱", + "🅾", + "🅿", + "🆎", + "🆑", + "🆒", + "🆓", + "🆔", + "🆕", + "🆖", + "🆗", + "🆘", + "🆙", + "🆚", + "🇦🇨", + "🇦🇩", + "🇦🇪", + "🇦🇫", + "🇦🇬", + "🇦🇮", + "🇦🇱", + "🇦🇲", + "🇦🇴", + "🇦🇶", + "🇦🇷", + "🇦🇸", + "🇦🇹", + "🇦🇺", + "🇦🇼", + "🇦🇽", + "🇦🇿", + "🇦", + "🇧🇦", + "🇧🇧", + "🇧🇩", + "🇧🇪", + "🇧🇫", + "🇧🇬", + "🇧🇭", + "🇧🇮", + "🇧🇯", + "🇧🇱", + "🇧🇲", + "🇧🇳", + "🇧🇴", + "🇧🇶", + "🇧🇷", + "🇧🇸", + "🇧🇹", + "🇧🇻", + "🇧🇼", + "🇧🇾", + "🇧🇿", + "🇧", + "🇨🇦", + "🇨🇨", + "🇨🇩", + "🇨🇫", + "🇨🇬", + "🇨🇭", + "🇨🇮", + "🇨🇰", + "🇨🇱", + "🇨🇲", + "🇨🇳", + "🇨🇴", + "🇨🇵", + "🇨🇷", + "🇨🇺", + "🇨🇻", + "🇨🇼", + "🇨🇽", + "🇨🇾", + "🇨🇿", + "🇨", + "🇩🇪", + "🇩🇬", + "🇩🇯", + "🇩🇰", + "🇩🇲", + "🇩🇴", + "🇩🇿", + "🇩", + "🇪🇦", + "🇪🇨", + "🇪🇪", + "🇪🇬", + "🇪🇭", + "🇪🇷", + "🇪🇸", + "🇪🇹", + "🇪🇺", + "🇪", + "🇫🇮", + "🇫🇯", + "🇫🇰", + "🇫🇲", + "🇫🇴", + "🇫🇷", + "🇫", + "🇬🇦", + "🇬🇧", + "🇬🇩", + "🇬🇪", + "🇬🇫", + "🇬🇬", + "🇬🇭", + "🇬🇮", + "🇬🇱", + "🇬🇲", + "🇬🇳", + "🇬🇵", + "🇬🇶", + "🇬🇷", + "🇬🇸", + "🇬🇹", + "🇬🇺", + "🇬🇼", + "🇬🇾", + "🇬", + "🇭🇰", + "🇭🇲", + "🇭🇳", + "🇭🇷", + "🇭🇹", + "🇭🇺", + "🇭", + "🇮🇨", + "🇮🇩", + "🇮🇪", + "🇮🇱", + "🇮🇲", + "🇮🇳", + "🇮🇴", + "🇮🇶", + "🇮🇷", + "🇮🇸", + "🇮🇹", + "🇮", + "🇯🇪", + "🇯🇲", + "🇯🇴", + "🇯🇵", + "🇯", + "🇰🇪", + "🇰🇬", + "🇰🇭", + "🇰🇮", + "🇰🇲", + "🇰🇳", + "🇰🇵", + "🇰🇷", + "🇰🇼", + "🇰🇾", + "🇰🇿", + "🇰", + "🇱🇦", + "🇱🇧", + "🇱🇨", + "🇱🇮", + "🇱🇰", + "🇱🇷", + "🇱🇸", + "🇱🇹", + "🇱🇺", + "🇱🇻", + "🇱🇾", + "🇱", + "🇲🇦", + "🇲🇨", + "🇲🇩", + "🇲🇪", + "🇲🇫", + "🇲🇬", + "🇲🇭", + "🇲🇰", + "🇲🇱", + "🇲🇲", + "🇲🇳", + "🇲🇴", + "🇲🇵", + "🇲🇶", + "🇲🇷", + "🇲🇸", + "🇲🇹", + "🇲🇺", + "🇲🇻", + "🇲🇼", + "🇲🇽", + "🇲🇾", + "🇲🇿", + "🇲", + "🇳🇦", + "🇳🇨", + "🇳🇪", + "🇳🇫", + "🇳🇬", + "🇳🇮", + "🇳🇱", + "🇳🇴", + "🇳🇵", + "🇳🇷", + "🇳🇺", + "🇳🇿", + "🇳", + "🇴🇲", + "🇴", + "🇵🇦", + "🇵🇪", + "🇵🇫", + "🇵🇬", + "🇵🇭", + "🇵🇰", + "🇵🇱", + "🇵🇲", + "🇵🇳", + "🇵🇷", + "🇵🇸", + "🇵🇹", + "🇵🇼", + "🇵🇾", + "🇵", + "🇶🇦", + "🇶", + "🇷🇪", + "🇷🇴", + "🇷🇸", + "🇷🇺", + "🇷🇼", + "🇷", + "🇸🇦", + "🇸🇧", + "🇸🇨", + "🇸🇩", + "🇸🇪", + "🇸🇬", + "🇸🇭", + "🇸🇮", + "🇸🇯", + "🇸🇰", + "🇸🇱", + "🇸🇲", + "🇸🇳", + "🇸🇴", + "🇸🇷", + "🇸🇸", + "🇸🇹", + "🇸🇻", + "🇸🇽", + "🇸🇾", + "🇸🇿", + "🇸", + "🇹🇦", + "🇹🇨", + "🇹🇩", + "🇹🇫", + "🇹🇬", + "🇹🇭", + "🇹🇯", + "🇹🇰", + "🇹🇱", + "🇹🇲", + "🇹🇳", + "🇹🇴", + "🇹🇷", + "🇹🇹", + "🇹🇻", + "🇹🇼", + "🇹🇿", + "🇹", + "🇺🇦", + "🇺🇬", + "🇺🇲", + "🇺🇳", + "🇺🇸", + "🇺🇾", + "🇺🇿", + "🇺", + "🇻🇦", + "🇻🇨", + "🇻🇪", + "🇻🇬", + "🇻🇮", + "🇻🇳", + "🇻🇺", + "🇻", + "🇼🇫", + "🇼🇸", + "🇼", + "🇽🇰", + "🇽", + "🇾🇪", + "🇾🇹", + "🇾", + "🇿🇦", + "🇿🇲", + "🇿🇼", + "🇿", + "🈁", + "🈂", + "🈚", + "🈯", + "🈲", + "🈳", + "🈴", + "🈵", + "🈶", + "🈷", + "🈸", + "🈹", + "🈺", + "🉐", + "🉑", + "🌀", + "🌁", + "🌂", + "🌃", + "🌄", + "🌅", + "🌆", + "🌇", + "🌈", + "🌉", + "🌊", + "🌋", + "🌌", + "🌍", + "🌎", + "🌏", + "🌐", + "🌑", + "🌒", + "🌓", + "🌔", + "🌕", + "🌖", + "🌗", + "🌘", + "🌙", + "🌚", + "🌛", + "🌜", + "🌝", + "🌞", + "🌟", + "🌠", + "🌡", + "🌤", + "🌥", + "🌦", + "🌧", + "🌨", + "🌩", + "🌪", + "🌫", + "🌬", + "🌭", + "🌮", + "🌯", + "🌰", + "🌱", + "🌲", + "🌳", + "🌴", + "🌵", + "🌶", + "🌷", + "🌸", + "🌹", + "🌺", + "🌻", + "🌼", + "🌽", + "🌾", + "🌿", + "🍀", + "🍁", + "🍂", + "🍃", + "🍄", + "🍅", + "🍆", + "🍇", + "🍈", + "🍉", + "🍊", + "🍋", + "🍌", + "🍍", + "🍎", + "🍏", + "🍐", + "🍑", + "🍒", + "🍓", + "🍔", + "🍕", + "🍖", + "🍗", + "🍘", + "🍙", + "🍚", + "🍛", + "🍜", + "🍝", + "🍞", + "🍟", + "🍠", + "🍡", + "🍢", + "🍣", + "🍤", + "🍥", + "🍦", + "🍧", + "🍨", + "🍩", + "🍪", + "🍫", + "🍬", + "🍭", + "🍮", + "🍯", + "🍰", + "🍱", + "🍲", + "🍳", + 
"🍴", + "🍵", + "🍶", + "🍷", + "🍸", + "🍹", + "🍺", + "🍻", + "🍼", + "🍽", + "🍾", + "🍿", + "🎀", + "🎁", + "🎂", + "🎃", + "🎄", + "🎅🏻", + "🎅🏼", + "🎅🏽", + "🎅🏾", + "🎅🏿", + "🎅", + "🎆", + "🎇", + "🎈", + "🎉", + "🎊", + "🎋", + "🎌", + "🎍", + "🎎", + "🎏", + "🎐", + "🎑", + "🎒", + "🎓", + "🎖", + "🎗", + "🎙", + "🎚", + "🎛", + "🎞", + "🎟", + "🎠", + "🎡", + "🎢", + "🎣", + "🎤", + "🎥", + "🎦", + "🎧", + "🎨", + "🎩", + "🎪", + "🎫", + "🎬", + "🎭", + "🎮", + "🎯", + "🎰", + "🎱", + "🎲", + "🎳", + "🎴", + "🎵", + "🎶", + "🎷", + "🎸", + "🎹", + "🎺", + "🎻", + "🎼", + "🎽", + "🎾", + "🎿", + "🏀", + "🏁", + "🏂🏻", + "🏂🏼", + "🏂🏽", + "🏂🏾", + "🏂🏿", + "🏂", + "🏃🏻‍♀️", + "🏃🏻‍♂️", + "🏃🏻", + "🏃🏼‍♀️", + "🏃🏼‍♂️", + "🏃🏼", + "🏃🏽‍♀️", + "🏃🏽‍♂️", + "🏃🏽", + "🏃🏾‍♀️", + "🏃🏾‍♂️", + "🏃🏾", + "🏃🏿‍♀️", + "🏃🏿‍♂️", + "🏃🏿", + "🏃‍♀️", + "🏃‍♂️", + "🏃", + "🏄🏻‍♀️", + "🏄🏻‍♂️", + "🏄🏻", + "🏄🏼‍♀️", + "🏄🏼‍♂️", + "🏄🏼", + "🏄🏽‍♀️", + "🏄🏽‍♂️", + "🏄🏽", + "🏄🏾‍♀️", + "🏄🏾‍♂️", + "🏄🏾", + "🏄🏿‍♀️", + "🏄🏿‍♂️", + "🏄🏿", + "🏄‍♀️", + "🏄‍♂️", + "🏄", + "🏅", + "🏆", + "🏇🏻", + "🏇🏼", + "🏇🏽", + "🏇🏾", + "🏇🏿", + "🏇", + "🏈", + "🏉", + "🏊🏻‍♀️", + "🏊🏻‍♂️", + "🏊🏻", + "🏊🏼‍♀️", + "🏊🏼‍♂️", + "🏊🏼", + "🏊🏽‍♀️", + "🏊🏽‍♂️", + "🏊🏽", + "🏊🏾‍♀️", + "🏊🏾‍♂️", + "🏊🏾", + "🏊🏿‍♀️", + "🏊🏿‍♂️", + "🏊🏿", + "🏊‍♀️", + "🏊‍♂️", + "🏊", + "🏋🏻‍♀️", + "🏋🏻‍♂️", + "🏋🏻", + "🏋🏼‍♀️", + "🏋🏼‍♂️", + "🏋🏼", + "🏋🏽‍♀️", + "🏋🏽‍♂️", + "🏋🏽", + "🏋🏾‍♀️", + "🏋🏾‍♂️", + "🏋🏾", + "🏋🏿‍♀️", + "🏋🏿‍♂️", + "🏋🏿", + "🏋️‍♀️", + "🏋️‍♂️", + "🏋", + "🏌🏻‍♀️", + "🏌🏻‍♂️", + "🏌🏻", + "🏌🏼‍♀️", + "🏌🏼‍♂️", + "🏌🏼", + "🏌🏽‍♀️", + "🏌🏽‍♂️", + "🏌🏽", + "🏌🏾‍♀️", + "🏌🏾‍♂️", + "🏌🏾", + "🏌🏿‍♀️", + "🏌🏿‍♂️", + "🏌🏿", + "🏌️‍♀️", + "🏌️‍♂️", + "🏌", + "🏍", + "🏎", + "🏏", + "🏐", + "🏑", + "🏒", + "🏓", + "🏔", + "🏕", + "🏖", + "🏗", + "🏘", + "🏙", + "🏚", + "🏛", + "🏜", + "🏝", + "🏞", + "🏟", + "🏠", + "🏡", + "🏢", + "🏣", + "🏤", + "🏥", + "🏦", + "🏧", + "🏨", + "🏩", + "🏪", + "🏫", + "🏬", + "🏭", + "🏮", + "🏯", + "🏰", + "🏳️‍🌈", + "🏳", + "🏴‍☠️", + "🏴", + "🏵", + "🏷", + "🏸", + "🏹", + "🏺", + "🏻", + "🏼", + "🏽", + "🏾", + "🏿", + "🐀", + "🐁", + "🐂", + "🐃", + "🐄", + "🐅", + "🐆", + "🐇", + "🐈", + "🐉", + "🐊", + "🐋", + "🐌", + "🐍", + "🐎", + "🐏", + "🐐", + "🐑", + "🐒", + "🐓", + "🐔", + "🐕", + "🐖", + "🐗", + "🐘", + "🐙", + "🐚", + "🐛", + "🐜", + "🐝", + "🐞", + "🐟", + "🐠", + "🐡", + "🐢", + "🐣", + "🐤", + "🐥", + "🐦", + "🐧", + "🐨", + "🐩", + "🐪", + "🐫", + "🐬", + "🐭", + "🐮", + "🐯", + "🐰", + "🐱", + "🐲", + "🐳", + "🐴", + "🐵", + "🐶", + "🐷", + "🐸", + "🐹", + "🐺", + "🐻", + "🐼", + "🐽", + "🐾", + "🐿", + "👀", + "👁‍🗨", + "👁", + "👂🏻", + "👂🏼", + "👂🏽", + "👂🏾", + "👂🏿", + "👂", + "👃🏻", + "👃🏼", + "👃🏽", + "👃🏾", + "👃🏿", + "👃", + "👄", + "👅", + "👆🏻", + "👆🏼", + "👆🏽", + "👆🏾", + "👆🏿", + "👆", + "👇🏻", + "👇🏼", + "👇🏽", + "👇🏾", + "👇🏿", + "👇", + "👈🏻", + "👈🏼", + "👈🏽", + "👈🏾", + "👈🏿", + "👈", + "👉🏻", + "👉🏼", + "👉🏽", + "👉🏾", + "👉🏿", + "👉", + "👊🏻", + "👊🏼", + "👊🏽", + "👊🏾", + "👊🏿", + "👊", + "👋🏻", + "👋🏼", + "👋🏽", + "👋🏾", + "👋🏿", + "👋", + "👌🏻", + "👌🏼", + "👌🏽", + "👌🏾", + "👌🏿", + "👌", + "👍🏻", + "👍🏼", + "👍🏽", + "👍🏾", + "👍🏿", + "👍", + "👎🏻", + "👎🏼", + "👎🏽", + "👎🏾", + "👎🏿", + "👎", + "👏🏻", + "👏🏼", + "👏🏽", + "👏🏾", + "👏🏿", + "👏", + "👐🏻", + "👐🏼", + "👐🏽", + "👐🏾", + "👐🏿", + "👐", + "👑", + "👒", + "👓", + "👔", + "👕", + "👖", + "👗", + "👘", + "👙", + "👚", + "👛", + "👜", + "👝", + "👞", + "👟", + "👠", + "👡", + "👢", + "👣", + "👤", + "👥", + "👦🏻", + "👦🏼", + "👦🏽", + "👦🏾", + "👦🏿", + "👦", + "👧🏻", + "👧🏼", + "👧🏽", + "👧🏾", + "👧🏿", + "👧", + "👨🏻‍🌾", + "👨🏻‍🍳", + "👨🏻‍🎓", + "👨🏻‍🎤", + "👨🏻‍🎨", + "👨🏻‍🏫", + "👨🏻‍🏭", + "👨🏻‍💻", + "👨🏻‍💼", + "👨🏻‍🔧", + "👨🏻‍🔬", + "👨🏻‍🚀", + "👨🏻‍🚒", + "👨🏻‍⚕️", + "👨🏻‍⚖️", + "👨🏻‍✈️", + "👨🏻", + "👨🏼‍🌾", + "👨🏼‍🍳", + "👨🏼‍🎓", + "👨🏼‍🎤", + "👨🏼‍🎨", + "👨🏼‍🏫", + "👨🏼‍🏭", + "👨🏼‍💻", + "👨🏼‍💼", + "👨🏼‍🔧", + "👨🏼‍🔬", + "👨🏼‍🚀", + "👨🏼‍🚒", 
+ "👨🏼‍⚕️", + "👨🏼‍⚖️", + "👨🏼‍✈️", + "👨🏼", + "👨🏽‍🌾", + "👨🏽‍🍳", + "👨🏽‍🎓", + "👨🏽‍🎤", + "👨🏽‍🎨", + "👨🏽‍🏫", + "👨🏽‍🏭", + "👨🏽‍💻", + "👨🏽‍💼", + "👨🏽‍🔧", + "👨🏽‍🔬", + "👨🏽‍🚀", + "👨🏽‍🚒", + "👨🏽‍⚕️", + "👨🏽‍⚖️", + "👨🏽‍✈️", + "👨🏽", + "👨🏾‍🌾", + "👨🏾‍🍳", + "👨🏾‍🎓", + "👨🏾‍🎤", + "👨🏾‍🎨", + "👨🏾‍🏫", + "👨🏾‍🏭", + "👨🏾‍💻", + "👨🏾‍💼", + "👨🏾‍🔧", + "👨🏾‍🔬", + "👨🏾‍🚀", + "👨🏾‍🚒", + "👨🏾‍⚕️", + "👨🏾‍⚖️", + "👨🏾‍✈️", + "👨🏾", + "👨🏿‍🌾", + "👨🏿‍🍳", + "👨🏿‍🎓", + "👨🏿‍🎤", + "👨🏿‍🎨", + "👨🏿‍🏫", + "👨🏿‍🏭", + "👨🏿‍💻", + "👨🏿‍💼", + "👨🏿‍🔧", + "👨🏿‍🔬", + "👨🏿‍🚀", + "👨🏿‍🚒", + "👨🏿‍⚕️", + "👨🏿‍⚖️", + "👨🏿‍✈️", + "👨🏿", + "👨‍🌾", + "👨‍🍳", + "👨‍🎓", + "👨‍🎤", + "👨‍🎨", + "👨‍🏫", + "👨‍🏭", + "👨‍👦‍👦", + "👨‍👦", + "👨‍👧‍👦", + "👨‍👧‍👧", + "👨‍👧", + "👨‍👨‍👦‍👦", + "👨‍👨‍👦", + "👨‍👨‍👧‍👦", + "👨‍👨‍👧‍👧", + "👨‍👨‍👧", + "👨‍👩‍👦‍👦", + "👨‍👩‍👦", + "👨‍👩‍👧‍👦", + "👨‍👩‍👧‍👧", + "👨‍👩‍👧", + "👨‍💻", + "👨‍💼", + "👨‍🔧", + "👨‍🔬", + "👨‍🚀", + "👨‍🚒", + "👨‍⚕️", + "👨‍⚖️", + "👨‍✈️", + "👨‍❤️‍👨", + "👨‍❤️‍💋‍👨", + "👨", + "👩🏻‍🌾", + "👩🏻‍🍳", + "👩🏻‍🎓", + "👩🏻‍🎤", + "👩🏻‍🎨", + "👩🏻‍🏫", + "👩🏻‍🏭", + "👩🏻‍💻", + "👩🏻‍💼", + "👩🏻‍🔧", + "👩🏻‍🔬", + "👩🏻‍🚀", + "👩🏻‍🚒", + "👩🏻‍⚕️", + "👩🏻‍⚖️", + "👩🏻‍✈️", + "👩🏻", + "👩🏼‍🌾", + "👩🏼‍🍳", + "👩🏼‍🎓", + "👩🏼‍🎤", + "👩🏼‍🎨", + "👩🏼‍🏫", + "👩🏼‍🏭", + "👩🏼‍💻", + "👩🏼‍💼", + "👩🏼‍🔧", + "👩🏼‍🔬", + "👩🏼‍🚀", + "👩🏼‍🚒", + "👩🏼‍⚕️", + "👩🏼‍⚖️", + "👩🏼‍✈️", + "👩🏼", + "👩🏽‍🌾", + "👩🏽‍🍳", + "👩🏽‍🎓", + "👩🏽‍🎤", + "👩🏽‍🎨", + "👩🏽‍🏫", + "👩🏽‍🏭", + "👩🏽‍💻", + "👩🏽‍💼", + "👩🏽‍🔧", + "👩🏽‍🔬", + "👩🏽‍🚀", + "👩🏽‍🚒", + "👩🏽‍⚕️", + "👩🏽‍⚖️", + "👩🏽‍✈️", + "👩🏽", + "👩🏾‍🌾", + "👩🏾‍🍳", + "👩🏾‍🎓", + "👩🏾‍🎤", + "👩🏾‍🎨", + "👩🏾‍🏫", + "👩🏾‍🏭", + "👩🏾‍💻", + "👩🏾‍💼", + "👩🏾‍🔧", + "👩🏾‍🔬", + "👩🏾‍🚀", + "👩🏾‍🚒", + "👩🏾‍⚕️", + "👩🏾‍⚖️", + "👩🏾‍✈️", + "👩🏾", + "👩🏿‍🌾", + "👩🏿‍🍳", + "👩🏿‍🎓", + "👩🏿‍🎤", + "👩🏿‍🎨", + "👩🏿‍🏫", + "👩🏿‍🏭", + "👩🏿‍💻", + "👩🏿‍💼", + "👩🏿‍🔧", + "👩🏿‍🔬", + "👩🏿‍🚀", + "👩🏿‍🚒", + "👩🏿‍⚕️", + "👩🏿‍⚖️", + "👩🏿‍✈️", + "👩🏿", + "👩‍🌾", + "👩‍🍳", + "👩‍🎓", + "👩‍🎤", + "👩‍🎨", + "👩‍🏫", + "👩‍🏭", + "👩‍👦‍👦", + "👩‍👦", + "👩‍👧‍👦", + "👩‍👧‍👧", + "👩‍👧", + "👩‍👩‍👦‍👦", + "👩‍👩‍👦", + "👩‍👩‍👧‍👦", + "👩‍👩‍👧‍👧", + "👩‍👩‍👧", + "👩‍💻", + "👩‍💼", + "👩‍🔧", + "👩‍🔬", + "👩‍🚀", + "👩‍🚒", + "👩‍⚕️", + "👩‍⚖️", + "👩‍✈️", + "👩‍❤️‍👨", + "👩‍❤️‍👩", + "👩‍❤️‍💋‍👨", + "👩‍❤️‍💋‍👩", + "👩", + "👪🏻", + "👪🏼", + "👪🏽", + "👪🏾", + "👪🏿", + "👪", + "👫🏻", + "👫🏼", + "👫🏽", + "👫🏾", + "👫🏿", + "👫", + "👬🏻", + "👬🏼", + "👬🏽", + "👬🏾", + "👬🏿", + "👬", + "👭🏻", + "👭🏼", + "👭🏽", + "👭🏾", + "👭🏿", + "👭", + "👮🏻‍♀️", + "👮🏻‍♂️", + "👮🏻", + "👮🏼‍♀️", + "👮🏼‍♂️", + "👮🏼", + "👮🏽‍♀️", + "👮🏽‍♂️", + "👮🏽", + "👮🏾‍♀️", + "👮🏾‍♂️", + "👮🏾", + "👮🏿‍♀️", + "👮🏿‍♂️", + "👮🏿", + "👮‍♀️", + "👮‍♂️", + "👮", + "👯🏻‍♀️", + "👯🏻‍♂️", + "👯🏻", + "👯🏼‍♀️", + "👯🏼‍♂️", + "👯🏼", + "👯🏽‍♀️", + "👯🏽‍♂️", + "👯🏽", + "👯🏾‍♀️", + "👯🏾‍♂️", + "👯🏾", + "👯🏿‍♀️", + "👯🏿‍♂️", + "👯🏿", + "👯‍♀️", + "👯‍♂️", + "👯", + "👰🏻", + "👰🏼", + "👰🏽", + "👰🏾", + "👰🏿", + "👰", + "👱🏻‍♀️", + "👱🏻‍♂️", + "👱🏻", + "👱🏼‍♀️", + "👱🏼‍♂️", + "👱🏼", + "👱🏽‍♀️", + "👱🏽‍♂️", + "👱🏽", + "👱🏾‍♀️", + "👱🏾‍♂️", + "👱🏾", + "👱🏿‍♀️", + "👱🏿‍♂️", + "👱🏿", + "👱‍♀️", + "👱‍♂️", + "👱", + "👲🏻", + "👲🏼", + "👲🏽", + "👲🏾", + "👲🏿", + "👲", + "👳🏻‍♀️", + "👳🏻‍♂️", + "👳🏻", + "👳🏼‍♀️", + "👳🏼‍♂️", + "👳🏼", + "👳🏽‍♀️", + "👳🏽‍♂️", + "👳🏽", + "👳🏾‍♀️", + "👳🏾‍♂️", + "👳🏾", + "👳🏿‍♀️", + "👳🏿‍♂️", + "👳🏿", + "👳‍♀️", + "👳‍♂️", + "👳", + "👴🏻", + "👴🏼", + "👴🏽", + "👴🏾", + "👴🏿", + "👴", + "👵🏻", + "👵🏼", + "👵🏽", + "👵🏾", + "👵🏿", + "👵", + "👶🏻", + "👶🏼", + "👶🏽", + "👶🏾", + "👶🏿", + "👶", + "👷🏻‍♀️", + "👷🏻‍♂️", + "👷🏻", + "👷🏼‍♀️", + "👷🏼‍♂️", + "👷🏼", + "👷🏽‍♀️", + "👷🏽‍♂️", + "👷🏽", + "👷🏾‍♀️", + "👷🏾‍♂️", + "👷🏾", + "👷🏿‍♀️", + "👷🏿‍♂️", + "👷🏿", + "👷‍♀️", + "👷‍♂️", + "👷", + "👸🏻", + "👸🏼", + "👸🏽", + "👸🏾", + "👸🏿", + "👸", + "👹", + "👺", + "👻", + "👼🏻", + "👼🏼", + "👼🏽", + "👼🏾", + "👼🏿", + "👼", + "👽", + "👾", + "👿", + "💀", + "💁🏻‍♀️", + "💁🏻‍♂️", + 
"💁🏻", + "💁🏼‍♀️", + "💁🏼‍♂️", + "💁🏼", + "💁🏽‍♀️", + "💁🏽‍♂️", + "💁🏽", + "💁🏾‍♀️", + "💁🏾‍♂️", + "💁🏾", + "💁🏿‍♀️", + "💁🏿‍♂️", + "💁🏿", + "💁‍♀️", + "💁‍♂️", + "💁", + "💂🏻‍♀️", + "💂🏻‍♂️", + "💂🏻", + "💂🏼‍♀️", + "💂🏼‍♂️", + "💂🏼", + "💂🏽‍♀️", + "💂🏽‍♂️", + "💂🏽", + "💂🏾‍♀️", + "💂🏾‍♂️", + "💂🏾", + "💂🏿‍♀️", + "💂🏿‍♂️", + "💂🏿", + "💂‍♀️", + "💂‍♂️", + "💂", + "💃🏻", + "💃🏼", + "💃🏽", + "💃🏾", + "💃🏿", + "💃", + "💄", + "💅🏻", + "💅🏼", + "💅🏽", + "💅🏾", + "💅🏿", + "💅", + "💆🏻‍♀️", + "💆🏻‍♂️", + "💆🏻", + "💆🏼‍♀️", + "💆🏼‍♂️", + "💆🏼", + "💆🏽‍♀️", + "💆🏽‍♂️", + "💆🏽", + "💆🏾‍♀️", + "💆🏾‍♂️", + "💆🏾", + "💆🏿‍♀️", + "💆🏿‍♂️", + "💆🏿", + "💆‍♀️", + "💆‍♂️", + "💆", + "💇🏻‍♀️", + "💇🏻‍♂️", + "💇🏻", + "💇🏼‍♀️", + "💇🏼‍♂️", + "💇🏼", + "💇🏽‍♀️", + "💇🏽‍♂️", + "💇🏽", + "💇🏾‍♀️", + "💇🏾‍♂️", + "💇🏾", + "💇🏿‍♀️", + "💇🏿‍♂️", + "💇🏿", + "💇‍♀️", + "💇‍♂️", + "💇", + "💈", + "💉", + "💊", + "💋", + "💌", + "💍", + "💎", + "💏", + "💐", + "💑", + "💒", + "💓", + "💔", + "💕", + "💖", + "💗", + "💘", + "💙", + "💚", + "💛", + "💜", + "💝", + "💞", + "💟", + "💠", + "💡", + "💢", + "💣", + "💤", + "💥", + "💦", + "💧", + "💨", + "💩", + "💪🏻", + "💪🏼", + "💪🏽", + "💪🏾", + "💪🏿", + "💪", + "💫", + "💬", + "💭", + "💮", + "💯", + "💰", + "💱", + "💲", + "💳", + "💴", + "💵", + "💶", + "💷", + "💸", + "💹", + "💺", + "💻", + "💼", + "💽", + "💾", + "💿", + "📀", + "📁", + "📂", + "📃", + "📄", + "📅", + "📆", + "📇", + "📈", + "📉", + "📊", + "📋", + "📌", + "📍", + "📎", + "📏", + "📐", + "📑", + "📒", + "📓", + "📔", + "📕", + "📖", + "📗", + "📘", + "📙", + "📚", + "📛", + "📜", + "📝", + "📞", + "📟", + "📠", + "📡", + "📢", + "📣", + "📤", + "📥", + "📦", + "📧", + "📨", + "📩", + "📪", + "📫", + "📬", + "📭", + "📮", + "📯", + "📰", + "📱", + "📲", + "📳", + "📴", + "📵", + "📶", + "📷", + "📸", + "📹", + "📺", + "📻", + "📼", + "📽", + "📿", + "🔀", + "🔁", + "🔂", + "🔃", + "🔄", + "🔅", + "🔆", + "🔇", + "🔈", + "🔉", + "🔊", + "🔋", + "🔌", + "🔍", + "🔎", + "🔏", + "🔐", + "🔑", + "🔒", + "🔓", + "🔔", + "🔕", + "🔖", + "🔗", + "🔘", + "🔙", + "🔚", + "🔛", + "🔜", + "🔝", + "🔞", + "🔟", + "🔠", + "🔡", + "🔢", + "🔣", + "🔤", + "🔥", + "🔦", + "🔧", + "🔨", + "🔩", + "🔪", + "🔫", + "🔬", + "🔭", + "🔮", + "🔯", + "🔰", + "🔱", + "🔲", + "🔳", + "🔴", + "🔵", + "🔶", + "🔷", + "🔸", + "🔹", + "🔺", + "🔻", + "🔼", + "🔽", + "🕉", + "🕊", + "🕋", + "🕌", + "🕍", + "🕎", + "🕐", + "🕑", + "🕒", + "🕓", + "🕔", + "🕕", + "🕖", + "🕗", + "🕘", + "🕙", + "🕚", + "🕛", + "🕜", + "🕝", + "🕞", + "🕟", + "🕠", + "🕡", + "🕢", + "🕣", + "🕤", + "🕥", + "🕦", + "🕧", + "🕯", + "🕰", + "🕳", + "🕴🏻", + "🕴🏼", + "🕴🏽", + "🕴🏾", + "🕴🏿", + "🕴", + "🕵🏻‍♀️", + "🕵🏻‍♂️", + "🕵🏻", + "🕵🏼‍♀️", + "🕵🏼‍♂️", + "🕵🏼", + "🕵🏽‍♀️", + "🕵🏽‍♂️", + "🕵🏽", + "🕵🏾‍♀️", + "🕵🏾‍♂️", + "🕵🏾", + "🕵🏿‍♀️", + "🕵🏿‍♂️", + "🕵🏿", + "🕵️‍♀️", + "🕵️‍♂️", + "🕵", + "🕶", + "🕷", + "🕸", + "🕹", + "🕺🏻", + "🕺🏼", + "🕺🏽", + "🕺🏾", + "🕺🏿", + "🕺", + "🖇", + "🖊", + "🖋", + "🖌", + "🖍", + "🖐🏻", + "🖐🏼", + "🖐🏽", + "🖐🏾", + "🖐🏿", + "🖐", + "🖕🏻", + "🖕🏼", + "🖕🏽", + "🖕🏾", + "🖕🏿", + "🖕", + "🖖🏻", + "🖖🏼", + "🖖🏽", + "🖖🏾", + "🖖🏿", + "🖖", + "🖤", + "🖥", + "🖨", + "🖱", + "🖲", + "🖼", + "🗂", + "🗃", + "🗄", + "🗑", + "🗒", + "🗓", + "🗜", + "🗝", + "🗞", + "🗡", + "🗣", + "🗨", + "🗯", + "🗳", + "🗺", + "🗻", + "🗼", + "🗽", + "🗾", + "🗿", + "😀", + "😁", + "😂", + "😃", + "😄", + "😅", + "😆", + "😇", + "😈", + "😉", + "😊", + "😋", + "😌", + "😍", + "😎", + "😏", + "😐", + "😑", + "😒", + "😓", + "😔", + "😕", + "😖", + "😗", + "😘", + "😙", + "😚", + "😛", + "😜", + "😝", + "😞", + "😟", + "😠", + "😡", + "😢", + "😣", + "😤", + "😥", + "😦", + "😧", + "😨", + "😩", + "😪", + "😫", + "😬", + "😭", + "😮", + "😯", + "😰", + "😱", + "😲", + "😳", + "😴", + "😵", + "😶", + "😷", + "😸", + "😹", + "😺", + "😻", + "😼", + "😽", + "😾", + "😿", + "🙀", + "🙁", + "🙂", + "🙃", + "🙄", + "🙅🏻‍♀️", + "🙅🏻‍♂️", + "🙅🏻", + "🙅🏼‍♀️", + "🙅🏼‍♂️", + "🙅🏼", + "🙅🏽‍♀️", + "🙅🏽‍♂️", + "🙅🏽", 
+ "🙅🏾‍♀️", + "🙅🏾‍♂️", + "🙅🏾", + "🙅🏿‍♀️", + "🙅🏿‍♂️", + "🙅🏿", + "🙅‍♀️", + "🙅‍♂️", + "🙅", + "🙆🏻‍♀️", + "🙆🏻‍♂️", + "🙆🏻", + "🙆🏼‍♀️", + "🙆🏼‍♂️", + "🙆🏼", + "🙆🏽‍♀️", + "🙆🏽‍♂️", + "🙆🏽", + "🙆🏾‍♀️", + "🙆🏾‍♂️", + "🙆🏾", + "🙆🏿‍♀️", + "🙆🏿‍♂️", + "🙆🏿", + "🙆‍♀️", + "🙆‍♂️", + "🙆", + "🙇🏻‍♀️", + "🙇🏻‍♂️", + "🙇🏻", + "🙇🏼‍♀️", + "🙇🏼‍♂️", + "🙇🏼", + "🙇🏽‍♀️", + "🙇🏽‍♂️", + "🙇🏽", + "🙇🏾‍♀️", + "🙇🏾‍♂️", + "🙇🏾", + "🙇🏿‍♀️", + "🙇🏿‍♂️", + "🙇🏿", + "🙇‍♀️", + "🙇‍♂️", + "🙇", + "🙈", + "🙉", + "🙊", + "🙋🏻‍♀️", + "🙋🏻‍♂️", + "🙋🏻", + "🙋🏼‍♀️", + "🙋🏼‍♂️", + "🙋🏼", + "🙋🏽‍♀️", + "🙋🏽‍♂️", + "🙋🏽", + "🙋🏾‍♀️", + "🙋🏾‍♂️", + "🙋🏾", + "🙋🏿‍♀️", + "🙋🏿‍♂️", + "🙋🏿", + "🙋‍♀️", + "🙋‍♂️", + "🙋", + "🙌🏻", + "🙌🏼", + "🙌🏽", + "🙌🏾", + "🙌🏿", + "🙌", + "🙍🏻‍♀️", + "🙍🏻‍♂️", + "🙍🏻", + "🙍🏼‍♀️", + "🙍🏼‍♂️", + "🙍🏼", + "🙍🏽‍♀️", + "🙍🏽‍♂️", + "🙍🏽", + "🙍🏾‍♀️", + "🙍🏾‍♂️", + "🙍🏾", + "🙍🏿‍♀️", + "🙍🏿‍♂️", + "🙍🏿", + "🙍‍♀️", + "🙍‍♂️", + "🙍", + "🙎🏻‍♀️", + "🙎🏻‍♂️", + "🙎🏻", + "🙎🏼‍♀️", + "🙎🏼‍♂️", + "🙎🏼", + "🙎🏽‍♀️", + "🙎🏽‍♂️", + "🙎🏽", + "🙎🏾‍♀️", + "🙎🏾‍♂️", + "🙎🏾", + "🙎🏿‍♀️", + "🙎🏿‍♂️", + "🙎🏿", + "🙎‍♀️", + "🙎‍♂️", + "🙎", + "🙏🏻", + "🙏🏼", + "🙏🏽", + "🙏🏾", + "🙏🏿", + "🙏", + "🚀", + "🚁", + "🚂", + "🚃", + "🚄", + "🚅", + "🚆", + "🚇", + "🚈", + "🚉", + "🚊", + "🚋", + "🚌", + "🚍", + "🚎", + "🚏", + "🚐", + "🚑", + "🚒", + "🚓", + "🚔", + "🚕", + "🚖", + "🚗", + "🚘", + "🚙", + "🚚", + "🚛", + "🚜", + "🚝", + "🚞", + "🚟", + "🚠", + "🚡", + "🚢", + "🚣🏻‍♀️", + "🚣🏻‍♂️", + "🚣🏻", + "🚣🏼‍♀️", + "🚣🏼‍♂️", + "🚣🏼", + "🚣🏽‍♀️", + "🚣🏽‍♂️", + "🚣🏽", + "🚣🏾‍♀️", + "🚣🏾‍♂️", + "🚣🏾", + "🚣🏿‍♀️", + "🚣🏿‍♂️", + "🚣🏿", + "🚣‍♀️", + "🚣‍♂️", + "🚣", + "🚤", + "🚥", + "🚦", + "🚧", + "🚨", + "🚩", + "🚪", + "🚫", + "🚬", + "🚭", + "🚮", + "🚯", + "🚰", + "🚱", + "🚲", + "🚳", + "🚴🏻‍♀️", + "🚴🏻‍♂️", + "🚴🏻", + "🚴🏼‍♀️", + "🚴🏼‍♂️", + "🚴🏼", + "🚴🏽‍♀️", + "🚴🏽‍♂️", + "🚴🏽", + "🚴🏾‍♀️", + "🚴🏾‍♂️", + "🚴🏾", + "🚴🏿‍♀️", + "🚴🏿‍♂️", + "🚴🏿", + "🚴‍♀️", + "🚴‍♂️", + "🚴", + "🚵🏻‍♀️", + "🚵🏻‍♂️", + "🚵🏻", + "🚵🏼‍♀️", + "🚵🏼‍♂️", + "🚵🏼", + "🚵🏽‍♀️", + "🚵🏽‍♂️", + "🚵🏽", + "🚵🏾‍♀️", + "🚵🏾‍♂️", + "🚵🏾", + "🚵🏿‍♀️", + "🚵🏿‍♂️", + "🚵🏿", + "🚵‍♀️", + "🚵‍♂️", + "🚵", + "🚶🏻‍♀️", + "🚶🏻‍♂️", + "🚶🏻", + "🚶🏼‍♀️", + "🚶🏼‍♂️", + "🚶🏼", + "🚶🏽‍♀️", + "🚶🏽‍♂️", + "🚶🏽", + "🚶🏾‍♀️", + "🚶🏾‍♂️", + "🚶🏾", + "🚶🏿‍♀️", + "🚶🏿‍♂️", + "🚶🏿", + "🚶‍♀️", + "🚶‍♂️", + "🚶", + "🚷", + "🚸", + "🚹", + "🚺", + "🚻", + "🚼", + "🚽", + "🚾", + "🚿", + "🛀🏻", + "🛀🏼", + "🛀🏽", + "🛀🏾", + "🛀🏿", + "🛀", + "🛁", + "🛂", + "🛃", + "🛄", + "🛅", + "🛋", + "🛌🏻", + "🛌🏼", + "🛌🏽", + "🛌🏾", + "🛌🏿", + "🛌", + "🛍", + "🛎", + "🛏", + "🛐", + "🛑", + "🛒", + "🛠", + "🛡", + "🛢", + "🛣", + "🛤", + "🛥", + "🛩", + "🛫", + "🛬", + "🛰", + "🛳", + "🛴", + "🛵", + "🛶", + "🤐", + "🤑", + "🤒", + "🤓", + "🤔", + "🤕", + "🤖", + "🤗", + "🤘🏻", + "🤘🏼", + "🤘🏽", + "🤘🏾", + "🤘🏿", + "🤘", + "🤙🏻", + "🤙🏼", + "🤙🏽", + "🤙🏾", + "🤙🏿", + "🤙", + "🤚🏻", + "🤚🏼", + "🤚🏽", + "🤚🏾", + "🤚🏿", + "🤚", + "🤛🏻", + "🤛🏼", + "🤛🏽", + "🤛🏾", + "🤛🏿", + "🤛", + "🤜🏻", + "🤜🏼", + "🤜🏽", + "🤜🏾", + "🤜🏿", + "🤜", + "🤝🏻", + "🤝🏼", + "🤝🏽", + "🤝🏾", + "🤝🏿", + "🤝", + "🤞🏻", + "🤞🏼", + "🤞🏽", + "🤞🏾", + "🤞🏿", + "🤞", + "🤠", + "🤡", + "🤢", + "🤣", + "🤤", + "🤥", + "🤦🏻‍♀️", + "🤦🏻‍♂️", + "🤦🏻", + "🤦🏼‍♀️", + "🤦🏼‍♂️", + "🤦🏼", + "🤦🏽‍♀️", + "🤦🏽‍♂️", + "🤦🏽", + "🤦🏾‍♀️", + "🤦🏾‍♂️", + "🤦🏾", + "🤦🏿‍♀️", + "🤦🏿‍♂️", + "🤦🏿", + "🤦‍♀️", + "🤦‍♂️", + "🤦", + "🤧", + "🤰🏻", + "🤰🏼", + "🤰🏽", + "🤰🏾", + "🤰🏿", + "🤰", + "🤳🏻", + "🤳🏼", + "🤳🏽", + "🤳🏾", + "🤳🏿", + "🤳", + "🤴🏻", + "🤴🏼", + "🤴🏽", + "🤴🏾", + "🤴🏿", + "🤴", + "🤵🏻", + "🤵🏼", + "🤵🏽", + "🤵🏾", + "🤵🏿", + "🤵", + "🤶🏻", + "🤶🏼", + "🤶🏽", + "🤶🏾", + "🤶🏿", + "🤶", + "🤷🏻‍♀️", + "🤷🏻‍♂️", + "🤷🏻", + "🤷🏼‍♀️", + "🤷🏼‍♂️", + "🤷🏼", + "🤷🏽‍♀️", + "🤷🏽‍♂️", + "🤷🏽", + "🤷🏾‍♀️", + "🤷🏾‍♂️", + "🤷🏾", + "🤷🏿‍♀️", + "🤷🏿‍♂️", + "🤷🏿", + "🤷‍♀️", + "🤷‍♂️", + "🤷", + "🤸🏻‍♀️", + "🤸🏻‍♂️", + "🤸🏻", + "🤸🏼‍♀️", 
+ "🤸🏼‍♂️", + "🤸🏼", + "🤸🏽‍♀️", + "🤸🏽‍♂️", + "🤸🏽", + "🤸🏾‍♀️", + "🤸🏾‍♂️", + "🤸🏾", + "🤸🏿‍♀️", + "🤸🏿‍♂️", + "🤸🏿", + "🤸‍♀️", + "🤸‍♂️", + "🤸", + "🤹🏻‍♀️", + "🤹🏻‍♂️", + "🤹🏻", + "🤹🏼‍♀️", + "🤹🏼‍♂️", + "🤹🏼", + "🤹🏽‍♀️", + "🤹🏽‍♂️", + "🤹🏽", + "🤹🏾‍♀️", + "🤹🏾‍♂️", + "🤹🏾", + "🤹🏿‍♀️", + "🤹🏿‍♂️", + "🤹🏿", + "🤹‍♀️", + "🤹‍♂️", + "🤹", + "🤺", + "🤼🏻‍♀️", + "🤼🏻‍♂️", + "🤼🏻", + "🤼🏼‍♀️", + "🤼🏼‍♂️", + "🤼🏼", + "🤼🏽‍♀️", + "🤼🏽‍♂️", + "🤼🏽", + "🤼🏾‍♀️", + "🤼🏾‍♂️", + "🤼🏾", + "🤼🏿‍♀️", + "🤼🏿‍♂️", + "🤼🏿", + "🤼‍♀️", + "🤼‍♂️", + "🤼", + "🤽🏻‍♀️", + "🤽🏻‍♂️", + "🤽🏻", + "🤽🏼‍♀️", + "🤽🏼‍♂️", + "🤽🏼", + "🤽🏽‍♀️", + "🤽🏽‍♂️", + "🤽🏽", + "🤽🏾‍♀️", + "🤽🏾‍♂️", + "🤽🏾", + "🤽🏿‍♀️", + "🤽🏿‍♂️", + "🤽🏿", + "🤽‍♀️", + "🤽‍♂️", + "🤽", + "🤾🏻‍♀️", + "🤾🏻‍♂️", + "🤾🏻", + "🤾🏼‍♀️", + "🤾🏼‍♂️", + "🤾🏼", + "🤾🏽‍♀️", + "🤾🏽‍♂️", + "🤾🏽", + "🤾🏾‍♀️", + "🤾🏾‍♂️", + "🤾🏾", + "🤾🏿‍♀️", + "🤾🏿‍♂️", + "🤾🏿", + "🤾‍♀️", + "🤾‍♂️", + "🤾", + "🥀", + "🥁", + "🥂", + "🥃", + "🥄", + "🥅", + "🥇", + "🥈", + "🥉", + "🥊", + "🥋", + "🥐", + "🥑", + "🥒", + "🥓", + "🥔", + "🥕", + "🥖", + "🥗", + "🥘", + "🥙", + "🥚", + "🥛", + "🥜", + "🥝", + "🥞", + "🦀", + "🦁", + "🦂", + "🦃", + "🦄", + "🦅", + "🦆", + "🦇", + "🦈", + "🦉", + "🦊", + "🦋", + "🦌", + "🦍", + "🦎", + "🦏", + "🦐", + "🦑", + "🧀", + "‼", + "⁉", + "™", + "ℹ", + "↔", + "↕", + "↖", + "↗", + "↘", + "↙", + "↩", + "↪", + "#⃣", + "⌚", + "⌛", + "⌨", + "⏏", + "⏩", + "⏪", + "⏫", + "⏬", + "⏭", + "⏮", + "⏯", + "⏰", + "⏱", + "⏲", + "⏳", + "⏸", + "⏹", + "⏺", + "Ⓜ", + "▪", + "▫", + "▶", + "◀", + "◻", + "◼", + "◽", + "◾", + "☀", + "☁", + "☂", + "☃", + "☄", + "☎", + "☑", + "☔", + "☕", + "☘", + "☝🏻", + "☝🏼", + "☝🏽", + "☝🏾", + "☝🏿", + "☝", + "☠", + "☢", + "☣", + "☦", + "☪", + "☮", + "☯", + "☸", + "☹", + "☺", + "♀", + "♂", + "♈", + "♉", + "♊", + "♋", + "♌", + "♍", + "♎", + "♏", + "♐", + "♑", + "♒", + "♓", + "♠", + "♣", + "♥", + "♦", + "♨", + "♻", + "♿", + "⚒", + "⚓", + "⚔", + "⚕", + "⚖", + "⚗", + "⚙", + "⚛", + "⚜", + "⚠", + "⚡", + "⚪", + "⚫", + "⚰", + "⚱", + "⚽", + "⚾", + "⛄", + "⛅", + "⛈", + "⛎", + "⛏", + "⛑", + "⛓", + "⛔", + "⛩", + "⛪", + "⛰", + "⛱", + "⛲", + "⛳", + "⛴", + "⛵", + "⛷🏻", + "⛷🏼", + "⛷🏽", + "⛷🏾", + "⛷🏿", + "⛷", + "⛸", + "⛹🏻‍♀️", + "⛹🏻‍♂️", + "⛹🏻", + "⛹🏼‍♀️", + "⛹🏼‍♂️", + "⛹🏼", + "⛹🏽‍♀️", + "⛹🏽‍♂️", + "⛹🏽", + "⛹🏾‍♀️", + "⛹🏾‍♂️", + "⛹🏾", + "⛹🏿‍♀️", + "⛹🏿‍♂️", + "⛹🏿", + "⛹️‍♀️", + "⛹️‍♂️", + "⛹", + "⛺", + "⛽", + "✂", + "✅", + "✈", + "✉", + "✊🏻", + "✊🏼", + "✊🏽", + "✊🏾", + "✊🏿", + "✊", + "✋🏻", + "✋🏼", + "✋🏽", + "✋🏾", + "✋🏿", + "✋", + "✌🏻", + "✌🏼", + "✌🏽", + "✌🏾", + "✌🏿", + "✌", + "✍🏻", + "✍🏼", + "✍🏽", + "✍🏾", + "✍🏿", + "✍", + "✏", + "✒", + "✔", + "✖", + "✝", + "✡", + "✨", + "✳", + "✴", + "❄", + "❇", + "❌", + "❎", + "❓", + "❔", + "❕", + "❗", + "❣", + "❤", + "➕", + "➖", + "➗", + "➡", + "➰", + "➿", + "⤴", + "⤵", + "*⃣", + "⬅", + "⬆", + "⬇", + "⬛", + "⬜", + "⭐", + "⭕", + "0⃣", + "〰", + "〽", + "1⃣", + "2⃣", + "㊗", + "㊙", + "3⃣", + "4⃣", + "5⃣", + "6⃣", + "7⃣", + "8⃣", + "9⃣", + "©", + "®", + "" +] \ No newline at end of file diff --git a/node_modules/emojis-list/package.json b/node_modules/emojis-list/package.json new file mode 100644 index 0000000..328a1db --- /dev/null +++ b/node_modules/emojis-list/package.json @@ -0,0 +1,51 @@ +{ + "name": "emojis-list", + "description": "Complete list of standard emojis.", + "homepage": "https://github.com/Kikobeats/emojis-list", + "version": "2.1.0", + "main": "./index.js", + "author": { + "email": "josefrancisco.verdu@gmail.com", + "name": "Kiko Beats", + "url": "https://github.com/Kikobeats" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/kikobeats/emojis-list.git" + }, + "bugs": { + "url": 
"https://github.com/Kikobeats/emojis-list/issues" + }, + "keywords": [ + "archive", + "complete", + "emoji", + "list", + "standard" + ], + "devDependencies": { + "acho": "latest", + "browserify": "latest", + "cheerio": "latest", + "got": ">=5 <6", + "gulp": "latest", + "gulp-header": "latest", + "gulp-uglify": "latest", + "gulp-util": "latest", + "standard": "latest", + "vinyl-buffer": "latest", + "vinyl-source-stream": "latest" + }, + "engines": { + "node": ">= 0.10" + }, + "files": [ + "index.js" + ], + "scripts": { + "pretest": "standard update.js", + "test": "echo 'YOLO'", + "update": "node update" + }, + "license": "MIT" +} diff --git a/node_modules/encodeurl/HISTORY.md b/node_modules/encodeurl/HISTORY.md new file mode 100644 index 0000000..06d34a5 --- /dev/null +++ b/node_modules/encodeurl/HISTORY.md @@ -0,0 +1,9 @@ +1.0.1 / 2016-06-09 +================== + + * Fix encoding unpaired surrogates at start/end of string + +1.0.0 / 2016-06-08 +================== + + * Initial release diff --git a/node_modules/encodeurl/LICENSE b/node_modules/encodeurl/LICENSE new file mode 100644 index 0000000..8812229 --- /dev/null +++ b/node_modules/encodeurl/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/encodeurl/README.md b/node_modules/encodeurl/README.md new file mode 100644 index 0000000..b086133 --- /dev/null +++ b/node_modules/encodeurl/README.md @@ -0,0 +1,124 @@ +# encodeurl + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Encode a URL to a percent-encoded form, excluding already-encoded sequences + +## Installation + +```sh +$ npm install encodeurl +``` + +## API + +```js +var encodeUrl = require('encodeurl') +``` + +### encodeUrl(url) + +Encode a URL to a percent-encoded form, excluding already-encoded sequences. + +This function will take an already-encoded URL and encode all the non-URL +code points (as UTF-8 byte sequences). This function will not encode the +"%" character unless it is not part of a valid sequence (`%20` will be +left as-is, but `%foo` will be encoded as `%25foo`). + +This encode is meant to be "safe" and does not throw errors. 
It will try as +hard as it can to properly encode the given URL, including replacing any raw, +unpaired surrogate pairs with the Unicode replacement character prior to +encoding. + +This function is _similar_ to the intrinsic function `encodeURI`, except it +will not encode the `%` character if that is part of a valid sequence, will +not encode `[` and `]` (for IPv6 hostnames) and will replace raw, unpaired +surrogate pairs with the Unicode replacement character (instead of throwing). + +## Examples + +### Encode a URL containing user-controled data + +```js +var encodeUrl = require('encodeurl') +var escapeHtml = require('escape-html') + +http.createServer(function onRequest (req, res) { + // get encoded form of inbound url + var url = encodeUrl(req.url) + + // create html message + var body = '

<p>Location ' + escapeHtml(url) + ' not found</p>
' + + // send a 404 + res.statusCode = 404 + res.setHeader('Content-Type', 'text/html; charset=UTF-8') + res.setHeader('Content-Length', String(Buffer.byteLength(body, 'utf-8'))) + res.end(body, 'utf-8') +}) +``` + +### Encode a URL for use in a header field + +```js +var encodeUrl = require('encodeurl') +var escapeHtml = require('escape-html') +var url = require('url') + +http.createServer(function onRequest (req, res) { + // parse inbound url + var href = url.parse(req) + + // set new host for redirect + href.host = 'localhost' + href.protocol = 'https:' + href.slashes = true + + // create location header + var location = encodeUrl(url.format(href)) + + // create html message + var body = '

<p>Redirecting to new site: ' + escapeHtml(location) + '</p>
' + + // send a 301 + res.statusCode = 301 + res.setHeader('Content-Type', 'text/html; charset=UTF-8') + res.setHeader('Content-Length', String(Buffer.byteLength(body, 'utf-8'))) + res.setHeader('Location', location) + res.end(body, 'utf-8') +}) +``` + +## Testing + +```sh +$ npm test +$ npm run lint +``` + +## References + +- [RFC 3986: Uniform Resource Identifier (URI): Generic Syntax][rfc-3986] +- [WHATWG URL Living Standard][whatwg-url] + +[rfc-3986]: https://tools.ietf.org/html/rfc3986 +[whatwg-url]: https://url.spec.whatwg.org/ + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/encodeurl.svg +[npm-url]: https://npmjs.org/package/encodeurl +[node-version-image]: https://img.shields.io/node/v/encodeurl.svg +[node-version-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/pillarjs/encodeurl.svg +[travis-url]: https://travis-ci.org/pillarjs/encodeurl +[coveralls-image]: https://img.shields.io/coveralls/pillarjs/encodeurl.svg +[coveralls-url]: https://coveralls.io/r/pillarjs/encodeurl?branch=master +[downloads-image]: https://img.shields.io/npm/dm/encodeurl.svg +[downloads-url]: https://npmjs.org/package/encodeurl diff --git a/node_modules/encodeurl/index.js b/node_modules/encodeurl/index.js new file mode 100644 index 0000000..ae77cc9 --- /dev/null +++ b/node_modules/encodeurl/index.js @@ -0,0 +1,60 @@ +/*! + * encodeurl + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = encodeUrl + +/** + * RegExp to match non-URL code points, *after* encoding (i.e. not including "%") + * and including invalid escape sequences. + * @private + */ + +var ENCODE_CHARS_REGEXP = /(?:[^\x21\x25\x26-\x3B\x3D\x3F-\x5B\x5D\x5F\x61-\x7A\x7E]|%(?:[^0-9A-Fa-f]|[0-9A-Fa-f][^0-9A-Fa-f]))+/g + +/** + * RegExp to match unmatched surrogate pair. + * @private + */ + +var UNMATCHED_SURROGATE_PAIR_REGEXP = /(^|[^\uD800-\uDBFF])[\uDC00-\uDFFF]|[\uD800-\uDBFF]([^\uDC00-\uDFFF]|$)/g + +/** + * String to replace unmatched surrogate pair with. + * @private + */ + +var UNMATCHED_SURROGATE_PAIR_REPLACE = '$1\uFFFD$2' + +/** + * Encode a URL to a percent-encoded form, excluding already-encoded sequences. + * + * This function will take an already-encoded URL and encode all the non-URL + * code points. This function will not encode the "%" character unless it is + * not part of a valid sequence (`%20` will be left as-is, but `%foo` will + * be encoded as `%25foo`). + * + * This encode is meant to be "safe" and does not throw errors. It will try as + * hard as it can to properly encode the given URL, including replacing any raw, + * unpaired surrogate pairs with the Unicode replacement character prior to + * encoding. 
+ * + * @param {string} url + * @return {string} + * @public + */ + +function encodeUrl (url) { + return String(url) + .replace(UNMATCHED_SURROGATE_PAIR_REGEXP, UNMATCHED_SURROGATE_PAIR_REPLACE) + .replace(ENCODE_CHARS_REGEXP, encodeURI) +} diff --git a/node_modules/encodeurl/package.json b/node_modules/encodeurl/package.json new file mode 100644 index 0000000..df06c96 --- /dev/null +++ b/node_modules/encodeurl/package.json @@ -0,0 +1,38 @@ +{ + "name": "encodeurl", + "description": "Encode a URL to a percent-encoded form, excluding already-encoded sequences", + "version": "1.0.1", + "contributors": [ + "Douglas Christopher Wilson " + ], + "license": "MIT", + "keywords": [ + "encode", + "encodeurl", + "url" + ], + "repository": "pillarjs/encodeurl", + "devDependencies": { + "eslint": "2.11.1", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.3.2", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "2.5.3" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/enhanced-resolve/README.md b/node_modules/enhanced-resolve/README.md new file mode 100644 index 0000000..d3f7931 --- /dev/null +++ b/node_modules/enhanced-resolve/README.md @@ -0,0 +1,28 @@ +# enhanced-resolve + +Offers a async require.resolve function. It's highly configurable. + +[documentation](https://github.com/webpack/docs/wiki) + + +## Features + +* plugin system +* provide a custom filesystem +* sync and async node.js filesystems included + + +## Tests + +``` javascript +npm test +``` + +[![Build Status](https://secure.travis-ci.org/webpack/enhanced-resolve.png?branch=master)](http://travis-ci.org/webpack/enhanced-resolve) + + +## License + +Copyright (c) 2012-2013 Tobias Koppers + +MIT (http://www.opensource.org/licenses/mit-license.php) \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/CachedInputFileSystem.js b/node_modules/enhanced-resolve/lib/CachedInputFileSystem.js new file mode 100644 index 0000000..132f965 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/CachedInputFileSystem.js @@ -0,0 +1,152 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function Storage(duration) { + this.duration = duration; + this.running = {}; + this.data = {}; + this.levels = []; + if(duration > 0) { + this.levels.push([], [], [], [], [], [], [], [], []); + for(var i = 8000; i < duration; i+=500) + this.levels.push([]); + } + this.count = 0; + this.interval = null; + this.needTickCheck = false; + this.nextTick = null; + this.passive = true; +} + +Storage.prototype.ensureTick = function() { + if(!this.interval && this.duration > 0 && !this.nextTick) + this.interval = setInterval(this.tick.bind(this), Math.floor(this.duration / this.levels.length)); +}; + +Storage.prototype.finished = function(name) { + var args = Array.prototype.slice.call(arguments, 1); + var callbacks = this.running[name]; + delete this.running[name]; + if(this.duration > 0) { + this.count++; + this.data[name] = args; + this.levels[0].push(name); + this.ensureTick(); + } + for(var i = 0; i < callbacks.length; i++) { + 
callbacks[i].apply(null, args); + } +}; + +Storage.prototype.provide = function(name, provider, callback) { + var running = this.running[name]; + if(running) { + running.push(callback); + return; + } + if(this.duration > 0) { + this.checkTicks(); + var data = this.data[name]; + if(data) { + return callback.apply(null, data); + } + } + this.running[name] = running = [callback]; + provider(name, this.finished.bind(this, name)); +}; + +Storage.prototype.tick = function() { + var decay = this.levels.pop(); + for(var i = decay.length - 1; i >= 0; i--) { + delete this.data[decay[i]]; + } + this.count -= decay.length; + decay.length = 0; + this.levels.unshift(decay); + if(this.count == 0) { + clearInterval(this.interval); + this.interval = null; + this.nextTick = null; + return true; + } else if(this.nextTick) { + this.nextTick += Math.floor(this.duration / this.levels.length); + var time = new Date().getTime(); + if(this.nextTick > time) { + this.nextTick = null; + this.interval = setInterval(this.tick.bind(this), Math.floor(this.duration / this.levels.length)); + return true; + } + } else if(this.passive) { + clearInterval(this.interval); + this.interval = null; + this.nextTick = new Date().getTime() + Math.floor(this.duration / this.levels.length); + } else { + this.passive = true; + } +}; + +Storage.prototype.checkTicks = function() { + this.passive = false; + if(this.nextTick) { + while(!this.tick()); + } +}; + +Storage.prototype.purge = function(what) { + if(!what) { + this.count = 0; + clearInterval(this.interval); + this.nextTick = null; + this.data = {}; + this.levels.forEach(function(level) { + level.length = 0; + }); + } else if(typeof what === "string") { + Object.keys(this.data).forEach(function(key) { + if(key.indexOf(what) === 0) + delete this.data[key]; + }, this); + } else { + for(var i = what.length - 1; i >= 0; i--) { + this.purge(what[i]); + } + } +}; + + +function CachedInputFileSystem(fileSystem, duration) { + this.fileSystem = fileSystem; + this._statStorage = new Storage(duration); + this._readdirStorage = new Storage(duration); + this._readFileStorage = new Storage(duration); + this._readlinkStorage = new Storage(duration); +} +module.exports = CachedInputFileSystem; + +CachedInputFileSystem.prototype.isSync = function() { + return this.fileSystem.isSync(); +}; + +CachedInputFileSystem.prototype.stat = function(path, callback) { + this._statStorage.provide(path, this.fileSystem.stat.bind(this.fileSystem), callback); +}; + +CachedInputFileSystem.prototype.readdir = function(path, callback) { + this._readdirStorage.provide(path, this.fileSystem.readdir.bind(this.fileSystem), callback); +}; + +CachedInputFileSystem.prototype.readFile = function(path, callback) { + this._readFileStorage.provide(path, this.fileSystem.readFile.bind(this.fileSystem), callback); +}; + +CachedInputFileSystem.prototype.readlink = function(path, callback) { + this._readlinkStorage.provide(path, this.fileSystem.readlink.bind(this.fileSystem), callback); +}; + +CachedInputFileSystem.prototype.purge = function(what) { + this._statStorage.purge(what); + this._readdirStorage.purge(what); + this._readFileStorage.purge(what); + this._readlinkStorage.purge(what); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/DirectoryDefaultFilePlugin.js b/node_modules/enhanced-resolve/lib/DirectoryDefaultFilePlugin.js new file mode 100644 index 0000000..f4514ff --- /dev/null +++ b/node_modules/enhanced-resolve/lib/DirectoryDefaultFilePlugin.js @@ -0,0 +1,42 @@ +/* + MIT License 
http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function DirectoryDefaultFilePlugin(files) { + this.files = files; +} +module.exports = DirectoryDefaultFilePlugin; + +DirectoryDefaultFilePlugin.prototype.apply = function(resolver) { + var files = this.files; + resolver.plugin("directory", function(request, callback) { + var fs = this.fileSystem; + var topLevelCallback = callback; + var directory = this.join(request.path, request.request); + fs.stat(directory, function(err, stat) { + if(err || !stat) { + if(callback.log) callback.log(directory + " doesn't exist (directory default file)"); + return callback(); + } + if(!stat.isDirectory()) { + if(callback.log) callback.log(directory + " is not a directory (directory default file)"); + return callback(); + } + this.forEachBail(files, function(file, callback) { + this.doResolve("file", { + path: directory, + query: request.query, + request: file + }, createInnerCallback(function(err, result) { + if(!err && result) return callback(result); + return callback(); + }, topLevelCallback, "directory default file " + file)); + }.bind(this), function(result) { + if(!result) return callback(); + return callback(null, result); + }); + }.bind(this)); + }); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/DirectoryDescriptionFileFieldAliasPlugin.js b/node_modules/enhanced-resolve/lib/DirectoryDescriptionFileFieldAliasPlugin.js new file mode 100644 index 0000000..e9a44f2 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/DirectoryDescriptionFileFieldAliasPlugin.js @@ -0,0 +1,126 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function DirectoryDescriptionFileFieldAliasPlugin(filename, field) { + this.filename = filename; + this.field = field; +} +module.exports = DirectoryDescriptionFileFieldAliasPlugin; + +function findDescriptionFileField(resolver, directory, filename, field, callback) { + (function findDescriptionFile() { + var descriptionFilePath = resolver.join(directory, filename); + resolver.fileSystem.readFile(descriptionFilePath, function(err, content) { + if(err) { + directory = cdUp(directory); + if(!directory) { + return callback(); + } else { + return findDescriptionFile(); + } + } + try { + content = JSON.parse(content); + } catch(e) { + if(callback.log) + callback.log(descriptionFilePath + " (directory description file): " + e); + else + e.message = descriptionFilePath + " (directory description file): " + e; + return callback(e); + } + var fieldData; + if(Array.isArray(field)) { + var current = content; + for(var j = 0; j < field.length; j++) { + if(current === null || typeof current !== "object") { + current = null; + break; + } + current = current[field[j]]; + } + if(typeof current === "object") { + fieldData = current; + } + } else { + if(typeof content[field] === "object") { + fieldData = content[field]; + } + } + if(!fieldData) return callback(); + callback(null, fieldData, directory); + }); + }()); +} + +function cdUp(directory) { + if(directory === "/") return null; + var i = directory.lastIndexOf("/"), + j = directory.lastIndexOf("\\"); + var p = i < 0 ? j : j < 0 ? i : i < j ? 
j : i; + if(p < 0) return null; + return directory.substr(0, p || 1); +} + +DirectoryDescriptionFileFieldAliasPlugin.prototype.apply = function(resolver) { + var filename = this.filename; + var field = this.field; + resolver.plugin("module", function(request, callback) { + var directory = request.path; + var moduleName = request.request; + findDescriptionFileField(this, directory, filename, field, function(err, fieldData, directory) { + if(err) return callback(err); + if(!fieldData) return callback(); + var data = fieldData[moduleName]; + if(data === moduleName) return callback(); + if(data === false) return callback(null, { + path: false, + resolved: true + }); + if(!data) return callback(); + var newRequest = this.parse(data); + var obj = { + path: directory, + request: newRequest.path, + query: newRequest.query, + directory: newRequest.directory + }; + var newCallback = createInnerCallback(callback, callback, "aliased from directory description file " + this.join(directory, filename) + " with mapping " + JSON.stringify(moduleName)); + if(newRequest.module) return this.doResolve("module", obj, newCallback); + if(newRequest.directory) return this.doResolve("directory", obj, newCallback); + return this.doResolve(["file", "directory"], obj, newCallback); + }.bind(this)); + }); + resolver.plugin("result", function(request, callback) { + var directory = cdUp(request.path); + var requestPath = request.path; + findDescriptionFileField(this, directory, filename, field, function(err, fieldData, directory) { + if(err) return callback(err); + if(!fieldData) return callback(); + var relative = requestPath.substr(directory.length+1).replace(/\\/g, "/"); + if(typeof fieldData[relative] !== "undefined") + var data = fieldData[relative]; + else if(typeof fieldData["./" + relative] !== "undefined") + var data = fieldData["./" + relative]; + if(data === relative || data === "./" + relative) return callback(); + if(data === false) return callback(null, { + path: false, + resolved: true + }); + if(!data) return callback(); + var newRequest = this.parse(data); + var obj = { + path: directory, + request: newRequest.path, + query: newRequest.query, + directory: newRequest.directory + }; + var newCallback = createInnerCallback(callback, callback, "aliased from directory description file " + this.join(directory, filename) + " with mapping " + JSON.stringify(relative)); + if(newRequest.module) return this.doResolve("module", obj, newCallback); + if(newRequest.directory) return this.doResolve("directory", obj, newCallback); + return this.doResolve(["file", "directory"], obj, newCallback); + }.bind(this)); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/DirectoryDescriptionFilePlugin.js b/node_modules/enhanced-resolve/lib/DirectoryDescriptionFilePlugin.js new file mode 100644 index 0000000..2cbc8c6 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/DirectoryDescriptionFilePlugin.js @@ -0,0 +1,74 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function DirectoryDescriptionFilePlugin(filename, fields) { + this.filename = filename; + this.fields = fields; +} +module.exports = DirectoryDescriptionFilePlugin; + +DirectoryDescriptionFilePlugin.prototype.apply = function(resolver) { + var filename = this.filename; + var fields = this.fields; + resolver.plugin("directory", function(request, callback) { + var fs = this.fileSystem; + var directory = this.join(request.path, request.request); + 
var descriptionFilePath = this.join(directory, filename); + fs.readFile(descriptionFilePath, function(err, content) { + if(err) { + if(callback.log) + callback.log(descriptionFilePath + " doesn't exist (directory description file)"); + return callback(); + } + content = content.toString("utf-8"); + try { + content = JSON.parse(content); + } catch(e) { + if(callback.log) + callback.log(descriptionFilePath + " (directory description file): " + e); + else + e.message = descriptionFilePath + " (directory description file): " + e; + return callback(e); + } + var mainModules = []; + for(var i = 0; i < fields.length; i++) { + if(Array.isArray(fields[i])) { + var current = content; + for(var j = 0; j < fields[i].length; j++) { + if(current === null || typeof current !== "object") { + current = null; + break; + } + var field = fields[i][j]; + current = current[field]; + } + if(typeof current === "string") { + mainModules.push(current); + continue; + } + } else { + var field = fields[i]; + if(typeof content[field] === "string") { + mainModules.push(content[field]); + continue; + } + } + } + (function next() { + if(mainModules.length == 0) return callback(); + var mainModule = mainModules.shift(); + return this.doResolve(["file", "directory"], { + path: directory, + query: request.query, + request: mainModule + }, createInnerCallback(function(err, result) { + if(!err && result) return callback(null, result); + return next.call(this); + }.bind(this), callback, "use " + mainModule + " from " + filename)); + }.call(this)) + }.bind(this)); + }); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/DirectoryResultPlugin.js b/node_modules/enhanced-resolve/lib/DirectoryResultPlugin.js new file mode 100644 index 0000000..7ad5730 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/DirectoryResultPlugin.js @@ -0,0 +1,31 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function DirectoryResultPlugin(files) { + this.files = files; +} +module.exports = DirectoryResultPlugin; + +DirectoryResultPlugin.prototype.apply = function(resolver) { + var files = this.files; + resolver.plugin("directory", function(request, callback) { + var fs = this.fileSystem; + var directory = this.join(request.path, request.request); + fs.stat(directory, function(err, stat) { + if(!err && stat && stat.isDirectory()) { + return this.doResolve("result", { + path: directory, + query: request.query, + directory: true, + resolved: true + }, callback); + } + if(callback.log) { + if(err) callback.log(directory + " doesn't exist"); + else callback.log(directory + " is not a directory"); + } + return callback(); + }.bind(this)); + }); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/FileAppendPlugin.js b/node_modules/enhanced-resolve/lib/FileAppendPlugin.js new file mode 100644 index 0000000..1be582a --- /dev/null +++ b/node_modules/enhanced-resolve/lib/FileAppendPlugin.js @@ -0,0 +1,40 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function FileAppendPlugin(appendings) { + this.appendings = appendings; +} +module.exports = FileAppendPlugin; + +FileAppendPlugin.prototype.apply = function(resolver) { + var appendings = this.appendings; + resolver.plugin("file", function(request, callback) { + var fs = this.fileSystem; + var addr = this.join(request.path, request.request); + var addrs = appendings.map(function(a) { return addr + a }); + var log = callback.log; + var missing 
= callback.missing; + this.forEachBail(addrs, function(addr, callback) { + fs.stat(addr, function(err, stat) { + if(!err && stat && stat.isFile()) + return callback(addr); + if(missing && err) + missing.push(addr); + if(log) { + if(err) log(addr + " doesn't exist"); + else log(addr + " is not a file"); + } + return callback(); + }); + }, function(validAddr) { + if(!validAddr) return callback(); + return this.doResolve("result", { + path: validAddr, + query: request.query, + file: true, + resolved: true + }, callback); + }.bind(this)); + }); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/ModuleAliasPlugin.js b/node_modules/enhanced-resolve/lib/ModuleAliasPlugin.js new file mode 100644 index 0000000..eaa6926 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModuleAliasPlugin.js @@ -0,0 +1,44 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function ModuleAliasPlugin(aliasMap) { + this.aliasMap = aliasMap; +} +module.exports = ModuleAliasPlugin; + +ModuleAliasPlugin.prototype.apply = function(resolver) { + var aliasMap = this.aliasMap; + resolver.plugin("module", function(request, callback) { + var fs = this.fileSystem; + var keys = Object.keys(aliasMap); + var i = 0; + (function next() { + for(;i < keys.length; i++) { + var aliasName = keys[i]; + var onlyModule = /\$$/.test(aliasName); + if(onlyModule) aliasName = aliasName.substr(0, aliasName.length-1); + if((!onlyModule && request.request.indexOf(aliasName + "/") === 0) || request.request === aliasName) { + var aliasValue = aliasMap[keys[i]]; + if(request.request.indexOf(aliasValue + "/") !== 0 && request.request != aliasValue) { + var newRequestStr = aliasValue + request.request.substr(aliasName.length); + var newRequest = this.parse(newRequestStr); + var obj = { + path: request.path, + request: newRequest.path, + query: newRequest.query, + directory: newRequest.directory + }; + var newCallback = createInnerCallback(callback, callback, "aliased with mapping " + JSON.stringify(aliasName) + ": " + JSON.stringify(aliasValue) + " to " + newRequestStr); + if(newRequest.module) return this.doResolve("module", obj, newCallback); + if(newRequest.directory) return this.doResolve("directory", obj, newCallback); + return this.doResolve(["file", "directory"], obj, newCallback); + } + } + } + return callback(); + }.call(this)); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/ModuleAsDirectoryPlugin.js b/node_modules/enhanced-resolve/lib/ModuleAsDirectoryPlugin.js new file mode 100644 index 0000000..e058fcf --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModuleAsDirectoryPlugin.js @@ -0,0 +1,43 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function ModuleAsDirectoryPlugin(moduleType) { + this.moduleType = moduleType; +} +module.exports = ModuleAsDirectoryPlugin; + +ModuleAsDirectoryPlugin.prototype.apply = function(resolver) { + resolver.plugin("module-" + this.moduleType, function(request, callback) { + var fs = this.fileSystem; + var i = request.request.indexOf("/"), + j = request.request.indexOf("\\"); + var p = i < 0 ? j : j < 0 ? i : i < j ? 
i : j; + var moduleName, remainingRequest; + if(p < 0) { + moduleName = request.request; + remainingRequest = ""; + } else { + moduleName = request.request.substr(0, p); + remainingRequest = request.request.substr(p+1); + } + var modulePath = this.join(request.path, moduleName); + fs.stat(modulePath, function(err, stat) { + if(err || !stat) { + if(callback.missing) + callback.missing.push(modulePath); + if(callback.log) callback.log(modulePath + " doesn't exist (module as directory)"); + return callback(); + } + if(stat.isDirectory()) { + return this.doResolve(request.directory ? "directory" : ["file", "directory"], { + path: modulePath, + request: remainingRequest, + query: request.query + }, callback, true); + } + if(callback.log) callback.log(modulePath + " is not a directory (module as directory)"); + return callback(); + }.bind(this)); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/ModuleAsFilePlugin.js b/node_modules/enhanced-resolve/lib/ModuleAsFilePlugin.js new file mode 100644 index 0000000..7d6fadb --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModuleAsFilePlugin.js @@ -0,0 +1,18 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function ModuleAsFilePlugin(moduleType) { + this.moduleType = moduleType; +} +module.exports = ModuleAsFilePlugin; + +ModuleAsFilePlugin.prototype.apply = function(resolver) { + resolver.plugin("module-" + this.moduleType, function(request, callback) { + var fs = this.fileSystem; + var i = request.request.indexOf("/"), + j = request.request.indexOf("\\"); + if(i >= 0 || j >= 0 || request.directory) return callback(); + return this.doResolve("file", request, callback, true); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/ModuleTemplatesPlugin.js b/node_modules/enhanced-resolve/lib/ModuleTemplatesPlugin.js new file mode 100644 index 0000000..2962ee2 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModuleTemplatesPlugin.js @@ -0,0 +1,45 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function ModuleTemplatesPlugin(moduleType, templates, targetModuleType) { + this.moduleType = moduleType; + this.targetModuleType = targetModuleType; + this.templates = templates; +} +module.exports = ModuleTemplatesPlugin; + +ModuleTemplatesPlugin.prototype.apply = function(resolver) { + var templates = this.templates; + var targetModuleType = this.targetModuleType; + resolver.plugin("module-" + this.moduleType, function(request, callback) { + var fs = this.fileSystem; + var topLevelCallback = callback; + var i = request.request.indexOf("/"), + j = request.request.indexOf("\\"); + var p = i < 0 ? j : j < 0 ? i : i < j ? 
i : j; + var moduleName, remainingRequest; + if(p < 0) { + moduleName = request.request; + remainingRequest = ""; + } else { + moduleName = request.request.substr(0, p); + remainingRequest = request.request.substr(p); + } + this.forEachBail(templates, function(template, callback) { + var moduleFinalName = template.replace(/\*/g, moduleName); + this.applyPluginsParallelBailResult("module-" + targetModuleType, { + path: request.path, + request: moduleFinalName + remainingRequest, + query: request.query, + directory: request.directory + }, createInnerCallback(function(err, result) { + if(err) return callback(err); + if(!result) return callback(); + return callback(null, result); + }, topLevelCallback, "module variation " + moduleFinalName)); + }.bind(this), callback); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/ModulesInDirectoriesPlugin.js b/node_modules/enhanced-resolve/lib/ModulesInDirectoriesPlugin.js new file mode 100644 index 0000000..3b801c0 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModulesInDirectoriesPlugin.js @@ -0,0 +1,58 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); +var popPathSeqment = require("./popPathSeqment"); + +function ModulesInDirectoriesPlugin(moduleType, directories) { + this.moduleType = moduleType; + this.directories = directories; +} +module.exports = ModulesInDirectoriesPlugin; + +ModulesInDirectoriesPlugin.prototype.apply = function(resolver) { + var moduleType = this.moduleType; + var directories = this.directories; + resolver.plugin("module", function(request, callback) { + var fs = this.fileSystem; + var paths = [request.path]; + var addr = [request.path]; + var pathSeqment = popPathSeqment(addr); + var topLevelCallback = callback; + while(pathSeqment) { + paths.push(addr[0]); + pathSeqment = popPathSeqment(addr); + } + var addrs = paths.map(function(p) { + return directories.map(function(d) { + return this.join(p, d); + }, this); + }, this).reduce(function(array, p) { + array.push.apply(array, p); + return array; + }, []); + this.forEachBail(addrs, function(addr, callback) { + fs.stat(addr, function(err, stat) { + if(!err && stat && stat.isDirectory()) { + this.applyPluginsParallelBailResult("module-" + moduleType, { + path: addr, + request: request.request, + query: request.query, + directory: request.directory + }, createInnerCallback(function(err, result) { + if(err) return callback(err); + if(!result) return callback(); + return callback(null, result); + }, topLevelCallback, "looking for modules in " + addr)); + return; + } + return callback(); + }.bind(this)); + }.bind(this), function(err, result) { + if(err) return callback(err); + if(!result) return callback(); + return callback(null, result); + }); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/ModulesInRootPlugin.js b/node_modules/enhanced-resolve/lib/ModulesInRootPlugin.js new file mode 100644 index 0000000..2b2ab6a --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ModulesInRootPlugin.js @@ -0,0 +1,28 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var createInnerCallback = require("./createInnerCallback"); + +function ModulesInRootPlugin(moduleType, path) { + this.moduleType = moduleType; + this.path = path; +} +module.exports = ModulesInRootPlugin; + +ModulesInRootPlugin.prototype.apply = function(resolver) { + var moduleType = this.moduleType; + var path = this.path; + 
resolver.plugin("module", function(request, callback) { + this.applyPluginsParallelBailResult("module-" + moduleType, { + path: path, + request: request.request, + query: request.query, + directory: request.directory + }, createInnerCallback(function innerCallback(err, result) { + if(err) return callback(err); + if(!result) return callback(); + return callback(null, result); + }, callback, "looking for modules in " + path)); + }); +}; diff --git a/node_modules/enhanced-resolve/lib/NodeJsInputFileSystem.js b/node_modules/enhanced-resolve/lib/NodeJsInputFileSystem.js new file mode 100644 index 0000000..588095d --- /dev/null +++ b/node_modules/enhanced-resolve/lib/NodeJsInputFileSystem.js @@ -0,0 +1,23 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var fs = require("graceful-fs"); + +function NodeJsInputFileSystem() {} +module.exports = NodeJsInputFileSystem; + +NodeJsInputFileSystem.prototype.isSync = function() { + return false; +}; + +NodeJsInputFileSystem.prototype.stat = fs.stat.bind(fs); +NodeJsInputFileSystem.prototype.readdir = function readdir(path, callback) { + fs.readdir(path, function (err, files) { + callback(err, files && files.map(function (file) { + return file.normalize ? file.normalize("NFC") : file; + })); + }); +}; +NodeJsInputFileSystem.prototype.readFile = fs.readFile.bind(fs); +NodeJsInputFileSystem.prototype.readlink = fs.readlink.bind(fs); \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/Resolver.js b/node_modules/enhanced-resolve/lib/Resolver.js new file mode 100644 index 0000000..62e57d9 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/Resolver.js @@ -0,0 +1,200 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var Tapable = require("tapable"); +var createInnerCallback = require("./createInnerCallback"); + +function Resolver(fileSystem) { + Tapable.call(this); + this.fileSystem = fileSystem; +} +module.exports = Resolver; + +Resolver.prototype = Object.create(Tapable.prototype); + +Resolver.prototype.resolveSync = function resolveSync(context, request) { + var err, result, sync = false; + this.resolve(context, request, function(e, r) { + err = e; + result = r; + sync = true; + }); + if(!sync) throw new Error("Cannot 'resolveSync' because the fileSystem is not sync. Use 'resolve'!"); + if(err) throw err; + return result; +}; + +Resolver.prototype.resolve = function resolve(context, request, callback) { + if(typeof request === "string") request = this.parse(request); + this.applyPlugins("resolve", context, request); + var obj = { + path: context, + request: request.path, + query: request.query, + directory: request.directory + }; + function onResolved(err, result) { + if(err) return callback(err); + return callback(null, result.path === false ? false : result.path + (result.query || "")); + } + onResolved.log = callback.log; + onResolved.missing = callback.missing; + if(request.module) return this.doResolve("module", obj, onResolved); + if(request.directory) return this.doResolve("directory", obj, onResolved); + return this.doResolve(["file", "directory"], obj, onResolved); +}; + +Resolver.prototype.doResolve = function doResolve(types, request, callback, noError) { + if(!Array.isArray(types)) types = [types]; + var stackLine = types.join(" or ") + ": (" + request.path + ") " + + (request.request || "") + (request.query || "") + + (request.directory ? " directory" : "") + + (request.module ? 
" module" : ""); + var newStack = [stackLine]; + if(callback.stack) { + newStack = callback.stack.concat(newStack); + if(callback.stack.indexOf(stackLine) >= 0) { + // Prevent recursion + var recursionError = new Error("Recursion in resolving\nStack:\n " + newStack.join("\n ")); + recursionError.recursion = true; + if(callback.log) callback.log("abort resolving because of recursion"); + return callback(recursionError); + } + } + this.applyPlugins("resolve-step", types, request); + var localMissing = []; + var missing = callback.missing ? { + push: function(item) { + callback.missing.push(item); + localMissing.push(item); + } + } : localMissing; + var log = []; + function writeLog(msg) { + log.push(msg); + } + function logAsString() { + return log.join("\n"); + } + var currentRequestString = request.request ? request.request + " in " + request.path : request.path; + if(types.length == 1 && !noError) { + // If only one type, we can pass the error. + return this.applyPluginsParallelBailResult(types[0], request, createInnerCallback(function innerCallback(err, result) { + if(callback.log) { + for(var i = 0; i < log.length; i++) + callback.log(log[i]); + } + if(err) return callback(err); + if(result) return callback(null, result); + if(types[0] === "result") return callback(null, request); + var error = new Error("Cannot resolve " + types[0] + " '" + request.request + "' in " + request.path); + error.details = logAsString(); + error.missing = localMissing; + return callback(error); + }, { + log: writeLog, + missing: missing, + stack: newStack + }, "resolve " + types[0] + " " + currentRequestString)); + } + // For multiple type we list the errors in the details although some of them are not important + this.forEachBail(types, function(type, callback) { + this.applyPluginsParallelBailResult(type, request, createInnerCallback(function(err, result) { + if(!err && result) return callback(result); + if (err) { + (err.message || "").split("\n").forEach(function(line) { + log.push(" " + line); + }); + } + callback(); + }, { + log: writeLog, + missing: missing, + stack: newStack + }, "resolve " + type)); + }.bind(this), function(result) { + if(callback.log) { + callback.log("resolve '" + types.join("' or '") + "' " + currentRequestString); + for(var i = 0; i < log.length; i++) + callback.log(" " + log[i]); + } + if(noError && !result) return callback(); + if(result) return callback(null, result); + var error = new Error("Cannot resolve '" + types.join("' or '") + "' " + currentRequestString); + error.details = logAsString(); + error.missing = localMissing; + return callback(error); + }); +}; + +Resolver.prototype.parse = function parse(identifier) { + if(identifier === "") return null; + var part = { + path: null, + query: null, + module: false, + directory: false, + file: false + }; + var idxQuery = identifier.indexOf("?"); + if(idxQuery == 0) { + part.query = identifier; + } else if(idxQuery > 0) { + part.path = identifier.slice(0, idxQuery); + part.query = identifier.slice(idxQuery); + } else { + part.path = identifier; + } + if(part.path) { + part.module = this.isModule(part.path); + if(part.directory = this.isDirectory(part.path)) { + part.path = part.path.substr(0, part.path.length - 1); + } + } + return part; +}; + +var notModuleRegExp = /^\.$|^\.[\\\/]|^\.\.$|^\.\.[\/\\]|^\/|^[A-Z]:[\\\/]/i; +Resolver.prototype.isModule = function isModule(path) { + return !notModuleRegExp.test(path); +}; + +var directoryRegExp = /[\/\\]$/i; +Resolver.prototype.isDirectory = function isDirectory(path) { + return 
directoryRegExp.test(path); +}; + +Resolver.prototype.join = require("memory-fs/lib/join"); + +Resolver.prototype.normalize = require("memory-fs/lib/normalize"); + +Resolver.prototype.forEachBail = function(array, iterator, callback) { + if(array.length == 0) return callback(); + var currentPos = array.length; + var currentError, currentResult; + var done = []; + for(var i = 0; i < array.length; i++) { + var itCb = (function(i) { + return function() { + if(i >= currentPos) return; // ignore + var args = Array.prototype.slice.call(arguments); + done.push(i); + if(args.length > 0) { + currentPos = i + 1; + done = done.filter(function(item) { + return item <= i; + }); + currentResult = args; + } + if(done.length == currentPos) { + callback.apply(null, currentResult); + currentPos = 0; + } + }; + }(i)); + iterator(array[i], itCb); + if(currentPos == 0) break; + } +}; + diff --git a/node_modules/enhanced-resolve/lib/ResultSymlinkPlugin.js b/node_modules/enhanced-resolve/lib/ResultSymlinkPlugin.js new file mode 100644 index 0000000..9d4fac2 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/ResultSymlinkPlugin.js @@ -0,0 +1,49 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var popPathSeqment = require("./popPathSeqment"); + +function ResultSymlinkPlugin(appendings) { +} +module.exports = ResultSymlinkPlugin; + +ResultSymlinkPlugin.prototype.apply = function(resolver) { + resolver.plugin("result", function pluginMethod(request, callback) { + var fs = this.fileSystem; + var paths = [request.path]; + var pathSeqments = []; + var addr = [request.path]; + var pathSeqment = popPathSeqment(addr); + while(pathSeqment) { + pathSeqments.push(pathSeqment); + paths.push(addr[0]); + pathSeqment = popPathSeqment(addr); + } + pathSeqments.push(paths[paths.length-1]); + var log = callback.log; + var missing = callback.missing; + var containsSymlink = false; + this.forEachBail(paths.map(function(_, i) { return i; }), function(idx, callback) { + fs.readlink(paths[idx], function(err, result) { + if(!err && result) { + pathSeqments[idx] = result; + containsSymlink = true; + // Shortcut when absolute symlink found + if(/^(\/|[a-zA-z]:($|\\))/.test(result)) + return callback(null, idx); + } + callback(); + }); + }, function(err, idx) { + if(!containsSymlink) return callback(); + var resultSeqments = typeof idx === "number" ? 
pathSeqments.slice(0, idx+1) : pathSeqments.slice(); + var result = resultSeqments.reverse().reduce(function(a, b) { + return this.join(a, b); + }.bind(this)); + log("resolved symlink to " + result); + request.path = result; + pluginMethod.call(this, request, callback); + }.bind(this)); + }); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/SyncNodeJsInputFileSystem.js b/node_modules/enhanced-resolve/lib/SyncNodeJsInputFileSystem.js new file mode 100644 index 0000000..9fc036a --- /dev/null +++ b/node_modules/enhanced-resolve/lib/SyncNodeJsInputFileSystem.js @@ -0,0 +1,34 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var fs = require("graceful-fs"); + +function SyncNodeJsInputFileSystem() {} +module.exports = SyncNodeJsInputFileSystem; + +SyncNodeJsInputFileSystem.prototype.isSync = function() { + return true; +}; + +function asAsync(fn, context) { + return function() { + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + try { + callback(null, fn.apply(context, args)); + } catch(e) { + callback(e); + } + }; +} + +SyncNodeJsInputFileSystem.prototype.stat = asAsync(fs.statSync, fs); +SyncNodeJsInputFileSystem.prototype.readdir = asAsync(function readdirSync(path) { + var files = fs.readdirSync(path); + return files && files.map(function (file) { + return file.normalize ? file.normalize("NFC") : file; + }); +}, fs); +SyncNodeJsInputFileSystem.prototype.readFile = asAsync(fs.readFileSync, fs); +SyncNodeJsInputFileSystem.prototype.readlink = asAsync(fs.readlinkSync, fs); \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/UnsafeCachePlugin.js b/node_modules/enhanced-resolve/lib/UnsafeCachePlugin.js new file mode 100644 index 0000000..8806b1c --- /dev/null +++ b/node_modules/enhanced-resolve/lib/UnsafeCachePlugin.js @@ -0,0 +1,32 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function UnsafeCachePlugin(regExps, cache) { + this.regExps = regExps || [/./]; + if(this.regExps === true) this.regExps = [/./]; + else if(!Array.isArray(this.regExps)) this.regExps = [this.regExps]; + this.cache = cache || {}; +} +module.exports = UnsafeCachePlugin; + +UnsafeCachePlugin.prototype.apply = function(resolver) { + var oldResolve = resolver.resolve; + var regExps = this.regExps; + var cache = this.cache; + resolver.resolve = function resolve(context, request, callback) { + var id = context + "->" + request; + if(cache[id]) { + // From cache + return callback(null, cache[id]); + } + oldResolve.call(resolver, context, request, function(err, result) { + if(err) return callback(err); + var doCache = regExps.some(function(regExp) { + return regExp.test(result.path); + }); + if(!doCache) return callback(null, result); + callback(null, cache[id] = result); + }); + }; +}; diff --git a/node_modules/enhanced-resolve/lib/createInnerCallback.js b/node_modules/enhanced-resolve/lib/createInnerCallback.js new file mode 100644 index 0000000..66fc772 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/createInnerCallback.js @@ -0,0 +1,30 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +module.exports = function createInnerCallback(callback, options, message) { + var log = options.log; + if(!log) { + if(options.stack !== callback.stack) { + function callbackWrapper() { + return callback.apply(this, arguments); + } + callbackWrapper.stack = options.stack; + 
callbackWrapper.missing = options.missing; + } + return callback; + } + function loggingCallbackWrapper() { + log(message); + for(var i = 0; i < theLog.length; i++) + log(" " + theLog[i]); + return callback.apply(this, arguments); + } + var theLog = []; + loggingCallbackWrapper.log = function writeLog(msg) { + theLog.push(msg); + }; + loggingCallbackWrapper.stack = options.stack; + loggingCallbackWrapper.missing = options.missing; + return loggingCallbackWrapper; +} \ No newline at end of file diff --git a/node_modules/enhanced-resolve/lib/node.js b/node_modules/enhanced-resolve/lib/node.js new file mode 100644 index 0000000..22c6590 --- /dev/null +++ b/node_modules/enhanced-resolve/lib/node.js @@ -0,0 +1,103 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var Resolver = require("./Resolver"); +var NodeJsInputFileSystem = require("./NodeJsInputFileSystem"); +var SyncNodeJsInputFileSystem = require("./SyncNodeJsInputFileSystem"); +var CachedInputFileSystem = require("./CachedInputFileSystem"); +var ModulesInDirectoriesPlugin = require("./ModulesInDirectoriesPlugin"); +var ModuleTemplatesPlugin = require("./ModuleTemplatesPlugin"); +var ModuleAsFilePlugin = require("./ModuleAsFilePlugin"); +var ModuleAsDirectoryPlugin = require("./ModuleAsDirectoryPlugin"); +var DirectoryDefaultFilePlugin = require("./DirectoryDefaultFilePlugin"); +var DirectoryDescriptionFilePlugin = require("./DirectoryDescriptionFilePlugin"); +var FileAppendPlugin = require("./FileAppendPlugin"); +var ResultSymlinkPlugin = require("./ResultSymlinkPlugin"); +var DirectoryResultPlugin = require("./DirectoryResultPlugin"); + +var commonPlugins = [ + new ModulesInDirectoriesPlugin("node", ["node_modules"]), + new ModuleAsFilePlugin("node"), + new ModuleAsDirectoryPlugin("node"), + new DirectoryDescriptionFilePlugin("package.json", ["main"]), + new DirectoryDefaultFilePlugin(["index"]), + new FileAppendPlugin(["", ".js", ".node"]), + new ResultSymlinkPlugin() +]; + +var commonContextPlugins = [ + new ModulesInDirectoriesPlugin("node", ["node_modules"]), + new ModuleAsFilePlugin("node"), + new ModuleAsDirectoryPlugin("node"), + new DirectoryResultPlugin(), + new ResultSymlinkPlugin() +]; + +var commonLoaderPlugins = [ + new ModulesInDirectoriesPlugin("loader-module", ["node_loaders", "node_modules"]), + new ModuleTemplatesPlugin("loader-module", ["*-loader", "*"], "node"), + new ModuleAsFilePlugin("node"), + new ModuleAsDirectoryPlugin("node"), + new DirectoryDescriptionFilePlugin("package.json", ["loader", "main"]), + new DirectoryDefaultFilePlugin(["index"]), + new FileAppendPlugin([".loader.js", "", ".js"]), + new ResultSymlinkPlugin() +]; + +var asyncFileSystem = new CachedInputFileSystem(new NodeJsInputFileSystem(), 4000); +var syncFileSystem = new CachedInputFileSystem(new SyncNodeJsInputFileSystem(), 4000); + + +var asyncResolver = new Resolver(asyncFileSystem); +asyncResolver.apply.apply(asyncResolver, commonPlugins); +module.exports = function resolve(context, request, callback) { + asyncResolver.resolve(context, request, callback); +}; + +var syncResolver = new Resolver(syncFileSystem); +syncResolver.apply.apply(syncResolver, commonPlugins); +module.exports.sync = function resolveSync(context, request) { + return syncResolver.resolveSync(context, request); +}; + + +var asyncContextResolver = new Resolver(asyncFileSystem); +asyncContextResolver.apply.apply(asyncContextResolver, commonContextPlugins); +module.exports.context = function resolveContext(context, 
request, callback) { + asyncContextResolver.resolve(context, request, callback); +}; + +var syncContextResolver = new Resolver(syncFileSystem); +syncContextResolver.apply.apply(syncContextResolver, commonContextPlugins); +module.exports.context.sync = function resolveSync(context, request) { + return syncContextResolver.resolveSync(context, request); +}; + + +var asyncLoaderResolver = new Resolver(asyncFileSystem); +asyncLoaderResolver.apply.apply(asyncLoaderResolver, commonLoaderPlugins); +module.exports.loader = function resolveContext(context, request, callback) { + asyncLoaderResolver.resolve(context, request, callback); +}; + +var syncLoaderResolver = new Resolver(syncFileSystem); +syncLoaderResolver.apply.apply(syncLoaderResolver, commonLoaderPlugins); +module.exports.loader.sync = function resolveSync(context, request) { + return syncLoaderResolver.resolveSync(context, request); +}; + +// Export Resolver, FileSystems and Plugins +module.exports.Resolver = Resolver; +module.exports.NodeJsInputFileSystem = NodeJsInputFileSystem; +module.exports.SyncNodeJsInputFileSystem = SyncNodeJsInputFileSystem; +module.exports.CachedInputFileSystem = CachedInputFileSystem; +module.exports.ModulesInDirectoriesPlugin = ModulesInDirectoriesPlugin; +module.exports.ModuleAsDirectoryPlugin = ModuleAsDirectoryPlugin; +module.exports.DirectoryDefaultFilePlugin = DirectoryDefaultFilePlugin; +module.exports.DirectoryDescriptionFilePlugin = DirectoryDescriptionFilePlugin; +module.exports.FileAppendPlugin = FileAppendPlugin; +module.exports.DirectoryResultPlugin = DirectoryResultPlugin; +module.exports.ResultSymlinkPlugin = ResultSymlinkPlugin; +module.exports.ModuleAsFilePlugin = ModuleAsFilePlugin; +module.exports.ModuleTemplatesPlugin = ModuleTemplatesPlugin; diff --git a/node_modules/enhanced-resolve/lib/popPathSeqment.js b/node_modules/enhanced-resolve/lib/popPathSeqment.js new file mode 100644 index 0000000..7eea05c --- /dev/null +++ b/node_modules/enhanced-resolve/lib/popPathSeqment.js @@ -0,0 +1,13 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +module.exports = function popPathSeqment(pathInArray) { + var i = pathInArray[0].lastIndexOf("/"), + j = pathInArray[0].lastIndexOf("\\"); + var p = i < 0 ? j : j < 0 ? i : i < j ? 
j : i; + if(p < 0) return null; + var s = pathInArray[0].substr(p+1); + pathInArray[0] = pathInArray[0].substr(0, p || 1); + return s; +}; diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/.gitattributes b/node_modules/enhanced-resolve/node_modules/memory-fs/.gitattributes new file mode 100644 index 0000000..412eeda --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/.gitattributes @@ -0,0 +1,22 @@ +# Auto detect text files and perform LF normalization +* text=auto + +# Custom for Visual Studio +*.cs diff=csharp +*.sln merge=union +*.csproj merge=union +*.vbproj merge=union +*.fsproj merge=union +*.dbproj merge=union + +# Standard to msysgit +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/.npmignore b/node_modules/enhanced-resolve/node_modules/memory-fs/.npmignore new file mode 100644 index 0000000..d2b3860 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/.npmignore @@ -0,0 +1,65 @@ +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# Deployed apps should consider commenting this line out: +# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git +node_modules + +# ========================= +# Operating System Files +# ========================= + +# OSX +# ========================= + +.DS_Store +.AppleDouble +.LSOverride + +# Icon must ends with two \r. +Icon + +# Thumbnails +._* + +# Files that might appear on external disk +.Spotlight-V100 +.Trashes + +# Windows +# ========================= + +# Windows image file caches +Thumbs.db +ehthumbs.db + +# Folder config file +Desktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msm +*.msp diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/.travis.yml b/node_modules/enhanced-resolve/node_modules/memory-fs/.travis.yml new file mode 100644 index 0000000..1416d60 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.10" + - "0.11" \ No newline at end of file diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/README.md b/node_modules/enhanced-resolve/node_modules/memory-fs/README.md new file mode 100644 index 0000000..450ef48 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/README.md @@ -0,0 +1,29 @@ +# memory-fs + +A simple in-memory filesystem. Holds data in a javascript object. + +``` javascript +var MemoryFileSystem = require("memory-fs"); +var fs = new MemoryFileSystem(); // Optionally pass a javascript object + +fs.mkdirpSync("/a/test/dir"); +fs.writeFileSync("/a/test/dir/file.txt", "Hello World"); +fs.readFileSync("/a/test/dir/file.txt"); // returns Buffer("Hello World") + +// Async variantes too +fs.unlink("/a/test/dir/file.txt", function(err) { + // ... 
+}); + +fs.readdirSync("/a/test"); // returns ["dir"] +fs.statSync("/a/test/dir").isDirectory(); // returns true +fs.rmdirSync("/a/test/dir"); + +fs.mkdirpSync("C:\\use\\windows\\style\\paths"); +``` + +## License + +Copyright (c) 2012-2014 Tobias Koppers + +MIT (http://www.opensource.org/licenses/mit-license.php) diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/lib/MemoryFileSystem.js b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/MemoryFileSystem.js new file mode 100644 index 0000000..118b3d2 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/MemoryFileSystem.js @@ -0,0 +1,225 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + +var normalize = require("./normalize"); + +function MemoryFileSystem(data) { + this.data = data || {}; +} +module.exports = MemoryFileSystem; + +function isDir(item) { + if(typeof item !== "object") return false; + return item[""] === true; +} + +function isFile(item) { + if(typeof item !== "object") return false; + return !item[""]; +} + +function pathToArray(path) { + path = normalize(path); + var nix = /^\//.test(path); + if(!nix) { + if(!/^[A-Za-z]:/.test(path)) throw new Error("Invalid path '" + path + "'"); + path = path.replace(/[\\\/]+/g, "\\"); // multi slashs + path = path.split(/[\\\/]/); + path[0] = path[0].toUpperCase(); + } else { + path = path.replace(/\/+/g, "/"); // multi slashs + path = path.substr(1).split("/"); + } + if(!path[path.length-1]) path.pop(); + return path; +} + +function trueFn() { return true; } +function falseFn() { return false; } + +MemoryFileSystem.prototype.statSync = function(_path) { + var path = pathToArray(_path); + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(_path === "/" || isDir(current[path[i]])) { + return { + isFile: falseFn, + isDirectory: trueFn, + isBlockDevice: falseFn, + isCharacterDevice: falseFn, + isSymbolicLink: falseFn, + isFIFO: falseFn, + isSocket: falseFn + }; + } else if(isFile(current[path[i]])) { + return { + isFile: trueFn, + isDirectory: falseFn, + isBlockDevice: falseFn, + isCharacterDevice: falseFn, + isSymbolicLink: falseFn, + isFIFO: falseFn, + isSocket: falseFn + }; + } else + throw new Error("Path doesn't exist '" + _path + "'"); +}; + +MemoryFileSystem.prototype.readFileSync = function(_path, encoding) { + var path = pathToArray(_path); + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(!isFile(current[path[i]])) { + if(isDir(current[path[i]])) + throw new Error("Cannot readFile on directory '" + _path + "'"); + else + throw new Error("Path doesn't exist '" + _path + "'"); + } + current = current[path[i]]; + return encoding ? 
current.toString(encoding) : current; +}; + +MemoryFileSystem.prototype.readdirSync = function(_path) { + if(_path === "/") return Object.keys(this.data).filter(Boolean); + var path = pathToArray(_path); + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(!isDir(current[path[i]])) { + if(isFile(current[path[i]])) + throw new Error("Cannot readdir on file '" + _path + "'"); + else + throw new Error("Path doesn't exist '" + _path + "'"); + } + return Object.keys(current[path[i]]).filter(Boolean); +}; + +MemoryFileSystem.prototype.mkdirpSync = function(_path) { + var path = pathToArray(_path); + if(path.length === 0) return; + var current = this.data; + for(var i = 0; i < path.length; i++) { + if(isFile(current[path[i]])) + throw new Error("Path is a file '" + _path + "'"); + else if(!isDir(current[path[i]])) + current[path[i]] = {"":true}; + current = current[path[i]]; + } + return; +}; + +MemoryFileSystem.prototype.mkdirSync = function(_path) { + var path = pathToArray(_path); + if(path.length === 0) return; + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(isDir(current[path[i]])) + throw new new Error("Directory already exist '" + _path + "'"); + else if(isFile(current[path[i]])) + throw new Error("Cannot mkdir on file '" + _path + "'"); + current[path[i]] = {"":true}; + return; +}; + +MemoryFileSystem.prototype._remove = function(_path, name, testFn) { + var path = pathToArray(_path); + if(path.length === 0) throw new Error("Path cannot be removed '" + _path + "'"); + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(!testFn(current[path[i]])) + throw new Error("'" + name + "' doesn't exist '" + _path + "'"); + delete current[path[i]]; + return; +}; + +MemoryFileSystem.prototype.rmdirSync = function(_path) { + return this._remove(_path, "Directory", isDir); +}; + +MemoryFileSystem.prototype.unlinkSync = function(_path) { + return this._remove(_path, "File", isFile); +}; + +MemoryFileSystem.prototype.readlinkSync = function(_path) { + throw new Error("Path is not a link '" + _path + "'"); +}; + +MemoryFileSystem.prototype.writeFileSync = function(_path, content, encoding) { + if(!content && !encoding) throw new Error("No content"); + var path = pathToArray(_path); + if(path.length === 0) throw new Error("Path is not a file '" + _path + "'"); + var current = this.data; + for(var i = 0; i < path.length - 1; i++) { + if(!isDir(current[path[i]])) + throw new Error("Path doesn't exist '" + _path + "'"); + current = current[path[i]]; + } + if(isDir(current[path[i]])) + throw new Error("Cannot writeFile on directory '" + _path + "'"); + current[path[i]] = encoding || typeof content === "string" ? 
new Buffer(content, encoding) : content; + return; +}; + +MemoryFileSystem.prototype.join = require("./join"); + +MemoryFileSystem.prototype.normalize = normalize; + +// async functions + +["stat", "readdir", "mkdirp", "mkdir", "rmdir", "unlink", "readlink"].forEach(function(fn) { + MemoryFileSystem.prototype[fn] = function(path, callback) { + try { + var result = this[fn + "Sync"](path); + } catch(e) { + return callback(e); + } + return callback(null, result); + }; +}); + +MemoryFileSystem.prototype.readFile = function(path, optArg, callback) { + if(!callback) { + callback = optArg; + optArg = undefined; + } + try { + var result = this.readFileSync(path, optArg); + } catch(e) { + return callback(e); + } + return callback(null, result); +}; + +MemoryFileSystem.prototype.writeFile = function (path, content, encoding, callback) { + if(!callback) { + callback = encoding; + encoding = undefined; + } + try { + this.writeFileSync(path, content, encoding); + } catch(e) { + return callback(e); + } + return callback(); +}; diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/lib/join.js b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/join.js new file mode 100644 index 0000000..255e175 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/join.js @@ -0,0 +1,14 @@ +var normalize = require("./normalize"); + +var absoluteWinRegExp = /^[A-Z]:([\\\/]|$)/i; +var absoluteNixRegExp = /^\//i; + +module.exports = function join(path, request) { + if(request == "") return normalize(path); + if(absoluteWinRegExp.test(request)) return normalize(request.replace(/\//g, "\\")); + if(absoluteNixRegExp.test(request)) return normalize(request); + if(path == "/") return normalize(path + request); + if(absoluteWinRegExp.test(path)) return normalize(path + "\\" + request.replace(/\//g, "\\")); + if(absoluteNixRegExp.test(path)) return normalize(path + "/" + request); + return normalize(path + "/" + request); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/lib/normalize.js b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/normalize.js new file mode 100644 index 0000000..04726d0 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/lib/normalize.js @@ -0,0 +1,38 @@ +var doubleSlashWinRegExp = /\\+/g; +var doubleSlashNixRegExp = /\/+/g; +var currentDirectoryWinMiddleRegExp = /\\(\.\\)+/; +var currentDirectoryWinEndRegExp = /\\\.$/; +var parentDirectoryWinMiddleRegExp = /\\+[^\\]+\\+\.\.\\/; +var parentDirectoryWinEndRegExp1 = /([A-Z]:\\)\\*[^\\]+\\+\.\.$/i; +var parentDirectoryWinEndRegExp2 = /\\+[^\\]+\\+\.\.$/; +var currentDirectoryNixMiddleRegExp = /\/+(\.\/)+/; +var currentDirectoryNixEndRegExp1 = /^\/+\.$/; +var currentDirectoryNixEndRegExp2 = /\/+\.$/; +var parentDirectoryNixMiddleRegExp = /(^|\/[^\/]+)\/+\.\.\/+/; +var parentDirectoryNixEndRegExp1 = /^\/[^\/]+\/+\.\.$/; +var parentDirectoryNixEndRegExp2 = /\/+[^\/]+\/+\.\.$/; +var parentDirectoryNixEndRegExp3 = /^\/+\.\.$/; + +// RegExp magic :) + +module.exports = function normalize(path) { + while(currentDirectoryWinMiddleRegExp.test(path)) + path = path.replace(currentDirectoryWinMiddleRegExp, "\\"); + path = path.replace(currentDirectoryWinEndRegExp, ""); + while(parentDirectoryWinMiddleRegExp.test(path)) + path = path.replace(parentDirectoryWinMiddleRegExp, "\\"); + path = path.replace(parentDirectoryWinEndRegExp1, "$1"); + path = path.replace(parentDirectoryWinEndRegExp2, ""); + + while(currentDirectoryNixMiddleRegExp.test(path)) + path = 
path.replace(currentDirectoryNixMiddleRegExp, "/"); + path = path.replace(currentDirectoryNixEndRegExp1, "/"); + path = path.replace(currentDirectoryNixEndRegExp2, ""); + while(parentDirectoryNixMiddleRegExp.test(path)) + path = path.replace(parentDirectoryNixMiddleRegExp, "/"); + path = path.replace(parentDirectoryNixEndRegExp1, "/"); + path = path.replace(parentDirectoryNixEndRegExp2, ""); + path = path.replace(parentDirectoryNixEndRegExp3, "/"); + + return path.replace(doubleSlashWinRegExp, "\\").replace(doubleSlashNixRegExp, "/"); +}; \ No newline at end of file diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/package.json b/node_modules/enhanced-resolve/node_modules/memory-fs/package.json new file mode 100644 index 0000000..0cb8d09 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/package.json @@ -0,0 +1,32 @@ +{ + "name": "memory-fs", + "version": "0.2.0", + "description": "A simple in-memory filesystem. Holds data in a javascript object.", + "main": "lib/MemoryFileSystem.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "mocha -R spec", + "cover": "istanbul cover node_modules/mocha/bin/_mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "https://github.com/webpack/memory-fs.git" + }, + "keywords": [ + "fs", + "memory" + ], + "author": "Tobias Koppers @sokra", + "license": "MIT", + "bugs": { + "url": "https://github.com/webpack/memory-fs/issues" + }, + "homepage": "https://github.com/webpack/memory-fs", + "devDependencies": { + "istanbul": "^0.2.13", + "mocha": "^1.20.1", + "should": "^4.0.4" + } +} diff --git a/node_modules/enhanced-resolve/node_modules/memory-fs/test/MemoryFileSystem.js b/node_modules/enhanced-resolve/node_modules/memory-fs/test/MemoryFileSystem.js new file mode 100644 index 0000000..4944618 --- /dev/null +++ b/node_modules/enhanced-resolve/node_modules/memory-fs/test/MemoryFileSystem.js @@ -0,0 +1,362 @@ +var should = require("should"); +var MemoryFileSystem = require("../lib/MemoryFileSystem"); + +describe("directory", function() { + it("should have a empty root directory as startup", function(done) { + var fs = new MemoryFileSystem(); + fs.readdirSync("/").should.be.eql([]); + var stat = fs.statSync("/"); + stat.isFile().should.be.eql(false); + stat.isDirectory().should.be.eql(true); + fs.readdir("/", function(err, files) { + if(err) throw err; + files.should.be.eql([]); + done(); + }); + }); + it("should make and remove directories (linux style)", function() { + var fs = new MemoryFileSystem(); + fs.mkdirSync("/test"); + fs.mkdirSync("/test//sub/"); + fs.mkdirpSync("/test/sub2"); + fs.mkdirSync("/root\\dir"); + fs.mkdirpSync("/"); + fs.mkdirSync("/"); + fs.readdirSync("/").should.be.eql(["test", "root\\dir"]); + fs.readdirSync("/test/").should.be.eql(["sub", "sub2"]); + fs.rmdirSync("/test/sub//"); + fs.readdirSync("//test").should.be.eql(["sub2"]); + fs.rmdirSync("/test/sub2"); + fs.rmdirSync("/test"); + (function() { + fs.readdirSync("/test"); + }).should.throw(); + fs.readdirSync("/").should.be.eql(["root\\dir"]); + fs.mkdirpSync("/a/depth/sub/dir"); + var stat = fs.statSync("/a/depth/sub"); + stat.isFile().should.be.eql(false); + stat.isDirectory().should.be.eql(true); + }); + it("should make and remove directories (windows style)", function() { + var fs = new MemoryFileSystem(); + fs.mkdirSync("C:\\"); + fs.mkdirSync("C:\\test"); + fs.mkdirSync("C:\\test\\\\sub/"); + fs.mkdirpSync("c:\\test/sub2"); + fs.mkdirSync("C:\\root-dir"); + fs.readdirSync("C:").should.be.eql(["test", 
"root-dir"]); + fs.readdirSync("C:/test/").should.be.eql(["sub", "sub2"]); + fs.rmdirSync("C:/test\\sub\\\\"); + fs.readdirSync("C:\\\\test").should.be.eql(["sub2"]); + fs.rmdirSync("C:\\test\\sub2"); + fs.rmdirSync("C:\\test"); + (function() { + fs.readdirSync("C:\\test"); + }).should.throw(); + fs.readdirSync("C:").should.be.eql(["root-dir"]); + fs.mkdirpSync("D:\\a\\depth\\sub\\dir"); + var stat = fs.statSync("D:\\a\\depth\\sub"); + stat.isFile().should.be.eql(false); + stat.isDirectory().should.be.eql(true); + fs.readdirSync("D:\\//a/depth/\\sub").should.be.eql(["dir"]); + }); +}); +describe("files", function() { + it("should make and remove files", function() { + var fs = new MemoryFileSystem(); + fs.mkdirSync("/test"); + var buf = new Buffer("Hello World", "utf-8"); + fs.writeFileSync("/test/hello-world.txt", buf); + fs.readFileSync("/test/hello-world.txt").should.be.eql(buf); + fs.readFileSync("/test/hello-world.txt", "utf-8").should.be.eql("Hello World"); + (function() { + fs.readFileSync("/test/other-file"); + }).should.throw(); + (function() { + fs.readFileSync("/test/other-file", "utf-8"); + }).should.throw(); + fs.writeFileSync("/a", "Test", "utf-8"); + fs.readFileSync("/a", "utf-8").should.be.eql("Test"); + var stat = fs.statSync("/a"); + stat.isFile().should.be.eql(true); + stat.isDirectory().should.be.eql(false); + }); +}); +describe("errors", function() { + it("should fail on invalid paths", function() { + var fs = new MemoryFileSystem(); + fs.mkdirpSync("/test/a/b/c"); + fs.mkdirpSync("/test/a/bc"); + fs.mkdirpSync("/test/abc"); + (function() { + fs.mkdirpSync("xyz"); + }).should.throw(); + (function() { + fs.readdirSync("/test/abc/a/b/c"); + }).should.throw(); + (function() { + fs.readdirSync("/abc"); + }).should.throw(); + (function() { + fs.statSync("/abc"); + }).should.throw(); + (function() { + fs.mkdirSync("/test/a/d/b/c"); + }).should.throw(); + (function() { + fs.writeFileSync("/test/a/d/b/c", "Hello"); + }).should.throw(); + (function() { + fs.readFileSync("/test/a/d/b/c"); + }).should.throw(); + (function() { + fs.readFileSync("/test/abcd"); + }).should.throw(); + (function() { + fs.mkdirSync("/test/abcd/dir"); + }).should.throw(); + (function() { + fs.unlinkSync("/test/abcd"); + }).should.throw(); + (function() { + fs.unlinkSync("/test/abcd/file"); + }).should.throw(); + (function() { + fs.statSync("/test/a/d/b/c"); + }).should.throw(); + (function() { + fs.statSync("/test/abcd"); + }).should.throw(); + fs.mkdir("/test/a/d/b/c", function(err) { + err.should.be.instanceof(Error); + }); + }); + it("should fail incorrect arguments", function() { + var fs = new MemoryFileSystem(); + (function() { + fs.writeFileSync("/test"); + }).should.throw(); + }); + it("should fail on wrong type", function() { + var fs = new MemoryFileSystem(); + fs.mkdirpSync("/test/dir"); + fs.mkdirpSync("/test/dir"); + fs.writeFileSync("/test/file", "Hello"); + (function() { + fs.writeFileSync("/test/dir", "Hello"); + }).should.throw(); + (function() { + fs.readFileSync("/test/dir"); + }).should.throw(); + (function() { + fs.writeFileSync("/", "Hello"); + }).should.throw(); + (function() { + fs.rmdirSync("/"); + }).should.throw(); + (function() { + fs.unlinkSync("/"); + }).should.throw(); + (function() { + fs.mkdirSync("/test/dir"); + }).should.throw(); + (function() { + fs.mkdirSync("/test/file"); + }).should.throw(); + (function() { + fs.mkdirpSync("/test/file"); + }).should.throw(); + (function() { + fs.readdirSync("/test/file"); + }).should.throw(); + 
fs.readdirSync("/test/").should.be.eql(["dir", "file"]); + }); + it("should throw on readlink", function() { + var fs = new MemoryFileSystem(); + fs.mkdirpSync("/test/dir"); + (function() { + fs.readlinkSync("/"); + }).should.throw(); + (function() { + fs.readlinkSync("/link"); + }).should.throw(); + (function() { + fs.readlinkSync("/test"); + }).should.throw(); + (function() { + fs.readlinkSync("/test/dir"); + }).should.throw(); + (function() { + fs.readlinkSync("/test/dir/link"); + }).should.throw(); + }); +}); +describe("async", function() { + it("should be able to use the async versions", function(done) { + var fs = new MemoryFileSystem(); + fs.mkdirp("/test/dir", function(err) { + if(err) throw err; + fs.writeFile("/test/dir/a", "Hello", function(err) { + if(err) throw err; + fs.writeFile("/test/dir/b", "World", "utf-8", function(err) { + if(err) throw err; + fs.readFile("/test/dir/a", "utf-8", function(err, content) { + if(err) throw err; + content.should.be.eql("Hello"); + fs.readFile("/test/dir/b", function(err, content) { + if(err) throw err; + content.should.be.eql(new Buffer("World")); + done(); + }); + }); + }); + }); + }); + }); + it("should return errors", function(done) { + var fs = new MemoryFileSystem(); + fs.readFile("/fail/file", function(err, content) { + err.should.be.instanceof(Error); + fs.writeFile("/fail/file", "", function(err) { + err.should.be.instanceof(Error); + done(); + }); + }); + }); +}); +describe("normalize", function() { + it("should normalize paths", function() { + var fs = new MemoryFileSystem(); + fs.normalize("/a/b/c").should.be.eql("/a/b/c"); + fs.normalize("/a//b/c").should.be.eql("/a/b/c"); + fs.normalize("/a//b//c").should.be.eql("/a/b/c"); + fs.normalize("//a//b//c").should.be.eql("/a/b/c"); + fs.normalize("/a/////b/c").should.be.eql("/a/b/c"); + fs.normalize("/./a/d///..////b/c").should.be.eql("/a/b/c"); + fs.normalize("/..").should.be.eql("/"); + fs.normalize("/.").should.be.eql("/"); + fs.normalize("/.git").should.be.eql("/.git"); + fs.normalize("/a/b/c/.git").should.be.eql("/a/b/c/.git"); + fs.normalize("/a/b/c/..git").should.be.eql("/a/b/c/..git"); + fs.normalize("/a/b/c/..").should.be.eql("/a/b"); + fs.normalize("/a/b/c/../..").should.be.eql("/a"); + fs.normalize("/a/b/c/../../..").should.be.eql("/"); + fs.normalize("C:\\a\\..").should.be.eql("C:\\"); + fs.normalize("C:\\a\\b\\..").should.be.eql("C:\\a"); + fs.normalize("C:\\a\\b\\\c\\..\\..").should.be.eql("C:\\a"); + fs.normalize("C:\\a\\b\\d\\..\\c\\..\\..").should.be.eql("C:\\a"); + fs.normalize("C:\\a\\b\\d\\\\.\\\\.\\c\\.\\..").should.be.eql("C:\\a\\b\\d"); + }); +}); +describe("join", function() { + it("should join paths", function() { + var fs = new MemoryFileSystem(); + fs.join("/", "a/b/c").should.be.eql("/a/b/c"); + fs.join("/a", "b/c").should.be.eql("/a/b/c"); + fs.join("/a/b", "c").should.be.eql("/a/b/c"); + fs.join("/a/", "b/c").should.be.eql("/a/b/c"); + fs.join("/a//", "b/c").should.be.eql("/a/b/c"); + fs.join("a", "b/c").should.be.eql("a/b/c"); + fs.join("a/b", "c").should.be.eql("a/b/c"); + fs.join("C:", "a/b").should.be.eql("C:\\a\\b"); + fs.join("C:\\", "a/b").should.be.eql("C:\\a\\b"); + fs.join("C:\\", "a\\b").should.be.eql("C:\\a\\b"); + }); + it("should join paths (weird cases)", function() { + var fs = new MemoryFileSystem(); + fs.join("/", "").should.be.eql("/"); + fs.join("/a/b/", "").should.be.eql("/a/b/"); + fs.join("/a/b/c", "").should.be.eql("/a/b/c"); + fs.join("C:", "").should.be.eql("C:"); + fs.join("C:\\a\\b", "").should.be.eql("C:\\a\\b"); + }); + 
it("should join paths (absolute request)", function() { + var fs = new MemoryFileSystem(); + fs.join("/a/b/c", "/d/e/f").should.be.eql("/d/e/f"); + fs.join("C:\\a\\b\\c", "/d/e/f").should.be.eql("/d/e/f"); + fs.join("/a/b/c", "C:\\d\\e\\f").should.be.eql("C:\\d\\e\\f"); + fs.join("C:\\a\\b\\c", "C:\\d\\e\\f").should.be.eql("C:\\d\\e\\f"); + }); +}); +describe("os", function() { + var fileSystem; + + beforeEach(function() { + fileSystem = new MemoryFileSystem({ + "": true, + a: { + "": true, + index: new Buffer("1"), // /a/index + dir: { + "": true, + index: new Buffer("2") // /a/dir/index + } + }, + "C:": { + "": true, + a: { + "": true, + index: new Buffer("3"), // C:\files\index + dir: { + "": true, + index: new Buffer("4") // C:\files\a\index + } + } + } + }); + }); + + describe("unix", function() { + it("should stat stuff", function() { + fileSystem.statSync("/a").isDirectory().should.be.eql(true); + fileSystem.statSync("/a").isFile().should.be.eql(false); + fileSystem.statSync("/a/index").isDirectory().should.be.eql(false); + fileSystem.statSync("/a/index").isFile().should.be.eql(true); + fileSystem.statSync("/a/dir").isDirectory().should.be.eql(true); + fileSystem.statSync("/a/dir").isFile().should.be.eql(false); + fileSystem.statSync("/a/dir/index").isDirectory().should.be.eql(false); + fileSystem.statSync("/a/dir/index").isFile().should.be.eql(true); + }); + it("should readdir directories", function() { + fileSystem.readdirSync("/a").should.be.eql(["index", "dir"]); + fileSystem.readdirSync("/a/dir").should.be.eql(["index"]); + }); + it("should readdir directories", function() { + fileSystem.readFileSync("/a/index", "utf-8").should.be.eql("1"); + fileSystem.readFileSync("/a/dir/index", "utf-8").should.be.eql("2"); + }); + it("should also accept multi slashs", function() { + fileSystem.statSync("/a///dir//index").isFile().should.be.eql(true); + }); + }); + + describe("windows", function() { + it("should stat stuff", function() { + fileSystem.statSync("C:\\a").isDirectory().should.be.eql(true); + fileSystem.statSync("C:\\a").isFile().should.be.eql(false); + fileSystem.statSync("C:\\a\\index").isDirectory().should.be.eql(false); + fileSystem.statSync("C:\\a\\index").isFile().should.be.eql(true); + fileSystem.statSync("C:\\a\\dir").isDirectory().should.be.eql(true); + fileSystem.statSync("C:\\a\\dir").isFile().should.be.eql(false); + fileSystem.statSync("C:\\a\\dir\\index").isDirectory().should.be.eql(false); + fileSystem.statSync("C:\\a\\dir\\index").isFile().should.be.eql(true); + }); + it("should readdir directories", function() { + fileSystem.readdirSync("C:\\a").should.be.eql(["index", "dir"]); + fileSystem.readdirSync("C:\\a\\dir").should.be.eql(["index"]); + }); + it("should readdir directories", function() { + fileSystem.readFileSync("C:\\a\\index", "utf-8").should.be.eql("3"); + fileSystem.readFileSync("C:\\a\\dir\\index", "utf-8").should.be.eql("4"); + }); + it("should also accept multi slashs", function() { + fileSystem.statSync("C:\\\\a\\\\\\dir\\\\index").isFile().should.be.eql(true); + }); + it("should also accept a normal slash", function() { + fileSystem.statSync("C:\\a\\dir/index").isFile().should.be.eql(true); + fileSystem.statSync("C:\\a\\dir\\index").isFile().should.be.eql(true); + fileSystem.statSync("C:\\a/dir/index").isFile().should.be.eql(true); + fileSystem.statSync("C:\\a/dir\\index").isFile().should.be.eql(true); + }); + }); +}); \ No newline at end of file diff --git a/node_modules/enhanced-resolve/package.json b/node_modules/enhanced-resolve/package.json 
new file mode 100644 index 0000000..f603231 --- /dev/null +++ b/node_modules/enhanced-resolve/package.json @@ -0,0 +1,38 @@ +{ + "name": "enhanced-resolve", + "version": "0.9.1", + "author": "Tobias Koppers @sokra", + "description": "Offers a async require.resolve function. It's highly configurable.", + "files": [ + "lib" + ], + "dependencies": { + "tapable": "^0.1.8", + "memory-fs": "^0.2.0", + "graceful-fs": "^4.1.2" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://www.opensource.org/licenses/mit-license.php" + } + ], + "devDependencies": { + "istanbul": "^0.3.5", + "mocha": "^2.1.0", + "should": "^4.6.0" + }, + "engines": { + "node": ">=0.6" + }, + "main": "lib/node.js", + "homepage": "http://github.com/webpack/enhanced-resolve", + "scripts": { + "test": "mocha --reporter spec", + "cover": "istanbul cover node_modules/mocha/bin/_mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/webpack/enhanced-resolve.git" + } +} diff --git a/node_modules/errno/.jshintrc b/node_modules/errno/.jshintrc new file mode 100644 index 0000000..c8ef3ca --- /dev/null +++ b/node_modules/errno/.jshintrc @@ -0,0 +1,59 @@ +{ + "predef": [ ] + , "bitwise": false + , "camelcase": false + , "curly": false + , "eqeqeq": false + , "forin": false + , "immed": false + , "latedef": false + , "noarg": true + , "noempty": true + , "nonew": true + , "plusplus": false + , "quotmark": true + , "regexp": false + , "undef": true + , "unused": true + , "strict": false + , "trailing": true + , "maxlen": 120 + , "asi": true + , "boss": true + , "debug": true + , "eqnull": true + , "esnext": true + , "evil": true + , "expr": true + , "funcscope": false + , "globalstrict": false + , "iterator": false + , "lastsemic": true + , "laxbreak": true + , "laxcomma": true + , "loopfunc": true + , "multistr": false + , "onecase": false + , "proto": false + , "regexdash": false + , "scripturl": true + , "smarttabs": false + , "shadow": false + , "sub": true + , "supernew": false + , "validthis": true + , "browser": true + , "couch": false + , "devel": false + , "dojo": false + , "mootools": false + , "node": true + , "nonstandard": true + , "prototypejs": false + , "rhino": false + , "worker": true + , "wsh": false + , "nomen": false + , "onevar": false + , "passfail": false +} \ No newline at end of file diff --git a/node_modules/errno/.npmignore b/node_modules/errno/.npmignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/node_modules/errno/.npmignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/node_modules/errno/README.md b/node_modules/errno/README.md new file mode 100644 index 0000000..2c1f8a5 --- /dev/null +++ b/node_modules/errno/README.md @@ -0,0 +1,141 @@ +# node-errno + +Better [libuv](https://github.com/libuv/libuv)/[Node.js](https://nodejs.org)/[io.js](https://iojs.org) error handling & reporting. Available in npm as *errno*. + +* [errno exposed](#errnoexposed) +* [Custom errors](#customerrors) + +
+## errno exposed + +Ever find yourself needing more details about Node.js errors? Me too, so *node-errno* contains the errno mappings direct from libuv so you can use them in your code. + +**By errno:** + +```js +require('errno').errno[3] +// → { +// "errno": 3, +// "code": "EACCES", +// "description": "permission denied" +// } +``` + +**By code:** + +```js +require('errno').code.ENOTEMPTY +// → { +// "errno": 53, +// "code": "ENOTEMPTY", +// "description": "directory not empty" +// } +``` + +**Make your errors more descriptive:** + +```js +var errno = require('errno') + +function errmsg(err) { + var str = 'Error: ' + // if it's a libuv error then get the description from errno + if (errno.errno[err.errno]) + str += errno.errno[err.errno].description + else + str += err.message + + // if it's a `fs` error then it'll have a 'path' property + if (err.path) + str += ' [' + err.path + ']' + + return str +} + +var fs = require('fs') + +fs.readFile('thisisnotarealfile.txt', function (err, data) { + if (err) + console.log(errmsg(err)) +}) +``` + +**Use as a command line tool:** + +``` +~ $ errno 53 +{ + "errno": 53, + "code": "ENOTEMPTY", + "description": "directory not empty" +} +~ $ errno EROFS +{ + "errno": 56, + "code": "EROFS", + "description": "read-only file system" +} +~ $ errno foo +No such errno/code: "foo" +``` + +Supply no arguments for the full list. Error codes are processed case-insensitive. + +You will need to install with `npm install errno -g` if you want the `errno` command to be available without supplying a full path to the node_modules installation. + + +## Custom errors + +Use `errno.custom.createError()` to create custom `Error` objects to throw around in your Node.js library. Create error heirachies so `instanceof` becomes a useful tool in tracking errors. Call-stack is correctly captured at the time you create an instance of the error object, plus a `cause` property will make available the original error object if you pass one in to the constructor. + +```js +var create = require('errno').custom.createError +var MyError = create('MyError') // inherits from Error +var SpecificError = create('SpecificError', MyError) // inherits from MyError +var OtherError = create('OtherError', MyError) + +// use them! +if (condition) throw new SpecificError('Eeek! Something bad happened') + +if (err) return callback(new OtherError(err)) +``` + +Also available is a `errno.custom.FilesystemError` with in-built access to errno properties: + +```js +fs.readFile('foo', function (err, data) { + if (err) return callback(new errno.custom.FilesystemError(err)) + // do something else +}) +``` + +The resulting error object passed through the callback will have the following properties: `code`, `errno`, `path` and `message` will contain a descriptive human-readable message. 
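As a supplement to the section above, here is a minimal sketch (not taken from the package's own docs) showing how the `cause` property and the `instanceof` hierarchy work together; the error names and messages are illustrative:

```js
var errno = require('errno')

var MyError = errno.custom.createError('MyError')                       // inherits from Error
var SpecificError = errno.custom.createError('SpecificError', MyError) // inherits from MyError

try {
  // pass the original error as the second argument to record it as `cause`
  throw new SpecificError('lookup failed', new Error('underlying failure'))
} catch (err) {
  console.log(err instanceof SpecificError) // true
  console.log(err instanceof MyError)       // true, via the error hierarchy
  console.log(err.message)                  // 'lookup failed'
  console.log(err.cause.message)            // 'underlying failure'
}
```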
+ +## Contributors + +* [bahamas10](https://github.com/bahamas10) (Dave Eddy) - Added CLI +* [ralphtheninja](https://github.com/ralphtheninja) (Lars-Magnus Skog) + +## Copyright & Licence + +*Copyright (c) 2012-2015 [Rod Vagg](https://github.com/rvagg) ([@rvagg](https://twitter.com/rvagg))* + +Made available under the MIT licence: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is furnished +to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/node_modules/errno/build.js b/node_modules/errno/build.js new file mode 100755 index 0000000..fce8926 --- /dev/null +++ b/node_modules/errno/build.js @@ -0,0 +1,43 @@ +#!/usr/bin/env node + +var request = require('request') + , fs = require('fs') + + , uvheadloc = 'https://raw.github.com/joyent/libuv/master/include/uv.h' + , defreg = /^\s*XX\(\s*([\-\d]+),\s*([A-Z]+),\s*"([^"]*)"\s*\)\s*\\?$/ + + +request(uvheadloc, function (err, response) { + if (err) + throw err + + var data, out + + data = response.body + .split('\n') + .map(function (line) { return line.match(defreg) }) + .filter(function (match) { return match }) + .map(function (match) { return { + errno: parseInt(match[1], 10) + , code: match[2] + , description: match[3] + }}) + + out = 'var all = module.exports.all = ' + JSON.stringify(data, 0, 1) + '\n\n' + + out += '\nmodule.exports.errno = {\n ' + + data.map(function (e, i) { + return '\'' + e.errno + '\': all[' + i + ']' + }).join('\n , ') + + '\n}\n\n' + + out += '\nmodule.exports.code = {\n ' + + data.map(function (e, i) { + return '\'' + e.code + '\': all[' + i + ']' + }).join('\n , ') + + '\n}\n\n' + + out += '\nmodule.exports.custom = require("./custom")(module.exports)\n' + + fs.writeFile('errno.js', out) +}) \ No newline at end of file diff --git a/node_modules/errno/cli.js b/node_modules/errno/cli.js new file mode 100755 index 0000000..f841771 --- /dev/null +++ b/node_modules/errno/cli.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node + +var errno = require('./') + , arg = process.argv[2] + , data, code + +if (arg === undefined) + return console.log(JSON.stringify(errno.code, null, 2)) + +if ((code = +arg) == arg) + data = errno.errno[code] +else + data = errno.code[arg] || errno.code[arg.toUpperCase()] + +if (data) + console.log(JSON.stringify(data, null, 2)) +else { + console.error('No such errno/code: "' + arg + '"') + process.exit(1) +} \ No newline at end of file diff --git a/node_modules/errno/custom.js b/node_modules/errno/custom.js new file mode 100644 index 0000000..7be16c1 --- /dev/null +++ b/node_modules/errno/custom.js @@ -0,0 +1,55 @@ +var prr = require('prr') + +function init 
(type, message, cause) { + prr(this, { + type : type + , name : type + // can be passed just a 'cause' + , cause : typeof message != 'string' ? message : cause + , message : !!message && typeof message != 'string' ? message.message : message + + }, 'ewr') +} + +// generic prototype, not intended to be actually used - helpful for `instanceof` +function CustomError (message, cause) { + Error.call(this) + if (Error.captureStackTrace) + Error.captureStackTrace(this, arguments.callee) + init.call(this, 'CustomError', message, cause) +} + +CustomError.prototype = new Error() + +function createError (errno, type, proto) { + var err = function (message, cause) { + init.call(this, type, message, cause) + //TODO: the specificity here is stupid, errno should be available everywhere + if (type == 'FilesystemError') { + this.code = this.cause.code + this.path = this.cause.path + this.errno = this.cause.errno + this.message = + (errno.errno[this.cause.errno] + ? errno.errno[this.cause.errno].description + : this.cause.message) + + (this.cause.path ? ' [' + this.cause.path + ']' : '') + } + Error.call(this) + if (Error.captureStackTrace) + Error.captureStackTrace(this, arguments.callee) + } + err.prototype = !!proto ? new proto() : new CustomError() + return err +} + +module.exports = function (errno) { + var ce = function (type, proto) { + return createError(errno, type, proto) + } + return { + CustomError : CustomError + , FilesystemError : ce('FilesystemError') + , createError : ce + } +} diff --git a/node_modules/errno/errno.js b/node_modules/errno/errno.js new file mode 100644 index 0000000..efb79d4 --- /dev/null +++ b/node_modules/errno/errno.js @@ -0,0 +1,313 @@ +var all = module.exports.all = [ + { + errno: -2, + code: 'ENOENT', + description: 'no such file or directory' + }, + { + errno: -1, + code: 'UNKNOWN', + description: 'unknown error' + }, + { + errno: 0, + code: 'OK', + description: 'success' + }, + { + errno: 1, + code: 'EOF', + description: 'end of file' + }, + { + errno: 2, + code: 'EADDRINFO', + description: 'getaddrinfo error' + }, + { + errno: 3, + code: 'EACCES', + description: 'permission denied' + }, + { + errno: 4, + code: 'EAGAIN', + description: 'resource temporarily unavailable' + }, + { + errno: 5, + code: 'EADDRINUSE', + description: 'address already in use' + }, + { + errno: 6, + code: 'EADDRNOTAVAIL', + description: 'address not available' + }, + { + errno: 7, + code: 'EAFNOSUPPORT', + description: 'address family not supported' + }, + { + errno: 8, + code: 'EALREADY', + description: 'connection already in progress' + }, + { + errno: 9, + code: 'EBADF', + description: 'bad file descriptor' + }, + { + errno: 10, + code: 'EBUSY', + description: 'resource busy or locked' + }, + { + errno: 11, + code: 'ECONNABORTED', + description: 'software caused connection abort' + }, + { + errno: 12, + code: 'ECONNREFUSED', + description: 'connection refused' + }, + { + errno: 13, + code: 'ECONNRESET', + description: 'connection reset by peer' + }, + { + errno: 14, + code: 'EDESTADDRREQ', + description: 'destination address required' + }, + { + errno: 15, + code: 'EFAULT', + description: 'bad address in system call argument' + }, + { + errno: 16, + code: 'EHOSTUNREACH', + description: 'host is unreachable' + }, + { + errno: 17, + code: 'EINTR', + description: 'interrupted system call' + }, + { + errno: 18, + code: 'EINVAL', + description: 'invalid argument' + }, + { + errno: 19, + code: 'EISCONN', + description: 'socket is already connected' + }, + { + errno: 20, + code: 'EMFILE', + 
description: 'too many open files' + }, + { + errno: 21, + code: 'EMSGSIZE', + description: 'message too long' + }, + { + errno: 22, + code: 'ENETDOWN', + description: 'network is down' + }, + { + errno: 23, + code: 'ENETUNREACH', + description: 'network is unreachable' + }, + { + errno: 24, + code: 'ENFILE', + description: 'file table overflow' + }, + { + errno: 25, + code: 'ENOBUFS', + description: 'no buffer space available' + }, + { + errno: 26, + code: 'ENOMEM', + description: 'not enough memory' + }, + { + errno: 27, + code: 'ENOTDIR', + description: 'not a directory' + }, + { + errno: 28, + code: 'EISDIR', + description: 'illegal operation on a directory' + }, + { + errno: 29, + code: 'ENONET', + description: 'machine is not on the network' + }, + { + errno: 31, + code: 'ENOTCONN', + description: 'socket is not connected' + }, + { + errno: 32, + code: 'ENOTSOCK', + description: 'socket operation on non-socket' + }, + { + errno: 33, + code: 'ENOTSUP', + description: 'operation not supported on socket' + }, + { + errno: 34, + code: 'ENOENT', + description: 'no such file or directory' + }, + { + errno: 35, + code: 'ENOSYS', + description: 'function not implemented' + }, + { + errno: 36, + code: 'EPIPE', + description: 'broken pipe' + }, + { + errno: 37, + code: 'EPROTO', + description: 'protocol error' + }, + { + errno: 38, + code: 'EPROTONOSUPPORT', + description: 'protocol not supported' + }, + { + errno: 39, + code: 'EPROTOTYPE', + description: 'protocol wrong type for socket' + }, + { + errno: 40, + code: 'ETIMEDOUT', + description: 'connection timed out' + }, + { + errno: 41, + code: 'ECHARSET', + description: 'invalid Unicode character' + }, + { + errno: 42, + code: 'EAIFAMNOSUPPORT', + description: 'address family for hostname not supported' + }, + { + errno: 44, + code: 'EAISERVICE', + description: 'servname not supported for ai_socktype' + }, + { + errno: 45, + code: 'EAISOCKTYPE', + description: 'ai_socktype not supported' + }, + { + errno: 46, + code: 'ESHUTDOWN', + description: 'cannot send after transport endpoint shutdown' + }, + { + errno: 47, + code: 'EEXIST', + description: 'file already exists' + }, + { + errno: 48, + code: 'ESRCH', + description: 'no such process' + }, + { + errno: 49, + code: 'ENAMETOOLONG', + description: 'name too long' + }, + { + errno: 50, + code: 'EPERM', + description: 'operation not permitted' + }, + { + errno: 51, + code: 'ELOOP', + description: 'too many symbolic links encountered' + }, + { + errno: 52, + code: 'EXDEV', + description: 'cross-device link not permitted' + }, + { + errno: 53, + code: 'ENOTEMPTY', + description: 'directory not empty' + }, + { + errno: 54, + code: 'ENOSPC', + description: 'no space left on device' + }, + { + errno: 55, + code: 'EIO', + description: 'i/o error' + }, + { + errno: 56, + code: 'EROFS', + description: 'read-only file system' + }, + { + errno: 57, + code: 'ENODEV', + description: 'no such device' + }, + { + errno: 58, + code: 'ESPIPE', + description: 'invalid seek' + }, + { + errno: 59, + code: 'ECANCELED', + description: 'operation canceled' + } +] + +module.exports.errno = {} +module.exports.code = {} + +all.forEach(function (error) { + module.exports.errno[error.errno] = error + module.exports.code[error.code] = error +}) + +module.exports.custom = require('./custom')(module.exports) +module.exports.create = module.exports.custom.createError diff --git a/node_modules/errno/package.json b/node_modules/errno/package.json new file mode 100644 index 0000000..fe877cb --- /dev/null +++ 
b/node_modules/errno/package.json @@ -0,0 +1,31 @@ +{ + "name": "errno", + "authors": [ + "Rod Vagg @rvagg (https://github.com/rvagg)" + ], + "description": "libuv errno details exposed", + "keywords": [ + "errors", + "errno", + "libuv" + ], + "version": "0.1.4", + "main": "errno.js", + "dependencies": { + "prr": "~0.0.0" + }, + "bin": { + "errno": "./cli.js" + }, + "devDependencies": { + "tape": "~3.5.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/rvagg/node-errno.git" + }, + "license": "MIT", + "scripts": { + "test": "tape test.js" + } +} diff --git a/node_modules/errno/test.js b/node_modules/errno/test.js new file mode 100755 index 0000000..6b76a85 --- /dev/null +++ b/node_modules/errno/test.js @@ -0,0 +1,31 @@ +#!/usr/bin/env node + +var test = require('tape') + , errno = require('./') + +test('sanity checks', function (t) { + t.ok(errno.all, 'errno.all not found') + t.ok(errno.errno, 'errno.errno not found') + t.ok(errno.code, 'errno.code not found') + + t.equal(errno.all.length, 59, 'found ' + errno.all.length + ', expected 59') + t.equal(errno.errno['-1'], errno.all[0], 'errno -1 not first element') + + t.equal(errno.code['UNKNOWN'], errno.all[0], 'code UNKNOWN not first element') + + t.equal(errno.errno[1], errno.all[2], 'errno 1 not third element') + + t.equal(errno.code['EOF'], errno.all[2], 'code EOF not third element') + t.end() +}) + +test('custom errors', function (t) { + var Cust = errno.create('FooNotBarError') + var cust = new Cust('foo is not bar') + + t.equal(cust.name, 'FooNotBarError', 'correct custom name') + t.equal(cust.type, 'FooNotBarError', 'correct custom type') + t.equal(cust.message, 'foo is not bar', 'correct custom message') + t.notOk(cust.cause, 'no cause') + t.end() +}) diff --git a/node_modules/escape-html/LICENSE b/node_modules/escape-html/LICENSE new file mode 100644 index 0000000..2e70de9 --- /dev/null +++ b/node_modules/escape-html/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2013 TJ Holowaychuk +Copyright (c) 2015 Andreas Lubbe +Copyright (c) 2015 Tiancheng "Timothy" Gu + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
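Before moving on to the escape-html hunks below, a short, hedged usage sketch for the errno tables added above; the path is illustrative and does not exist:

```js
var errno = require('errno')
var fs = require('fs')

fs.stat('/no/such/path', function (err) {
  if (!err) return
  // look the error up by its string code, e.g. errno.code.ENOENT
  var info = errno.code[err.code]
  console.log(info ? info.description : err.message) // 'no such file or directory'
})
```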
diff --git a/node_modules/escape-html/Readme.md b/node_modules/escape-html/Readme.md new file mode 100644 index 0000000..653d9ea --- /dev/null +++ b/node_modules/escape-html/Readme.md @@ -0,0 +1,43 @@ + +# escape-html + + Escape string for use in HTML + +## Example + +```js +var escape = require('escape-html'); +var html = escape('foo & bar'); +// -> foo & bar +``` + +## Benchmark + +``` +$ npm run-script bench + +> escape-html@1.0.3 bench nodejs-escape-html +> node benchmark/index.js + + + http_parser@1.0 + node@0.10.33 + v8@3.14.5.9 + ares@1.9.0-DEV + uv@0.10.29 + zlib@1.2.3 + modules@11 + openssl@1.0.1j + + 1 test completed. + 2 tests completed. + 3 tests completed. + + no special characters x 19,435,271 ops/sec ±0.85% (187 runs sampled) + single special character x 6,132,421 ops/sec ±0.67% (194 runs sampled) + many special characters x 3,175,826 ops/sec ±0.65% (193 runs sampled) +``` + +## License + + MIT \ No newline at end of file diff --git a/node_modules/escape-html/index.js b/node_modules/escape-html/index.js new file mode 100644 index 0000000..bf9e226 --- /dev/null +++ b/node_modules/escape-html/index.js @@ -0,0 +1,78 @@ +/*! + * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */ + +'use strict'; + +/** + * Module variables. + * @private + */ + +var matchHtmlRegExp = /["'&<>]/; + +/** + * Module exports. + * @public + */ + +module.exports = escapeHtml; + +/** + * Escape special characters in the given string of html. + * + * @param {string} string The string to escape for inserting into HTML + * @return {string} + * @public + */ + +function escapeHtml(string) { + var str = '' + string; + var match = matchHtmlRegExp.exec(str); + + if (!match) { + return str; + } + + var escape; + var html = ''; + var index = 0; + var lastIndex = 0; + + for (index = match.index; index < str.length; index++) { + switch (str.charCodeAt(index)) { + case 34: // " + escape = '"'; + break; + case 38: // & + escape = '&'; + break; + case 39: // ' + escape = '''; + break; + case 60: // < + escape = '<'; + break; + case 62: // > + escape = '>'; + break; + default: + continue; + } + + if (lastIndex !== index) { + html += str.substring(lastIndex, index); + } + + lastIndex = index + 1; + html += escape; + } + + return lastIndex !== index + ? 
html + str.substring(lastIndex, index) + : html; +} diff --git a/node_modules/escape-html/package.json b/node_modules/escape-html/package.json new file mode 100644 index 0000000..57ec7bd --- /dev/null +++ b/node_modules/escape-html/package.json @@ -0,0 +1,24 @@ +{ + "name": "escape-html", + "description": "Escape string for use in HTML", + "version": "1.0.3", + "license": "MIT", + "keywords": [ + "escape", + "html", + "utility" + ], + "repository": "component/escape-html", + "devDependencies": { + "benchmark": "1.0.0", + "beautify-benchmark": "0.2.4" + }, + "files": [ + "LICENSE", + "Readme.md", + "index.js" + ], + "scripts": { + "bench": "node benchmark/index.js" + } +} diff --git a/node_modules/escape-string-regexp/index.js b/node_modules/escape-string-regexp/index.js new file mode 100644 index 0000000..7834bf9 --- /dev/null +++ b/node_modules/escape-string-regexp/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; + +module.exports = function (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a string'); + } + + return str.replace(matchOperatorsRe, '\\$&'); +}; diff --git a/node_modules/escape-string-regexp/license b/node_modules/escape-string-regexp/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/escape-string-regexp/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
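A brief, hedged illustration of the escape-html module added above: the exported function replaces the five characters matched by `matchHtmlRegExp` with their HTML entities, so untrusted text can be interpolated into markup. The sample input is made up for illustration:

```js
var escape = require('escape-html');

var userInput = '<script>alert("hi") & \'bye\'</script>';
var html = '<p title="' + escape(userInput) + '">' + escape(userInput) + '</p>';
// " & ' < > are replaced with &quot; &amp; &#39; &lt; &gt; respectively
```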
diff --git a/node_modules/escape-string-regexp/package.json b/node_modules/escape-string-regexp/package.json new file mode 100644 index 0000000..f307df3 --- /dev/null +++ b/node_modules/escape-string-regexp/package.json @@ -0,0 +1,41 @@ +{ + "name": "escape-string-regexp", + "version": "1.0.5", + "description": "Escape RegExp special characters", + "license": "MIT", + "repository": "sindresorhus/escape-string-regexp", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Boy Nicolai Appelman (jbna.nl)" + ], + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "escape", + "regex", + "regexp", + "re", + "regular", + "expression", + "string", + "str", + "special", + "characters" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/escape-string-regexp/readme.md b/node_modules/escape-string-regexp/readme.md new file mode 100644 index 0000000..87ac82d --- /dev/null +++ b/node_modules/escape-string-regexp/readme.md @@ -0,0 +1,27 @@ +# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp) + +> Escape RegExp special characters + + +## Install + +``` +$ npm install --save escape-string-regexp +``` + + +## Usage + +```js +const escapeStringRegexp = require('escape-string-regexp'); + +const escapedString = escapeStringRegexp('how much $ for a unicorn?'); +//=> 'how much \$ for a unicorn\?' + +new RegExp(escapedString); +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/esutils/LICENSE.BSD b/node_modules/esutils/LICENSE.BSD new file mode 100644 index 0000000..3e580c3 --- /dev/null +++ b/node_modules/esutils/LICENSE.BSD @@ -0,0 +1,19 @@ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
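As a small, hedged addition to the escape-string-regexp readme above, the escaped string can be embedded in a larger pattern to match a user-supplied substring literally; the helper name and sample strings below are invented for illustration:

```js
var escapeStringRegexp = require('escape-string-regexp');

// match a user-supplied string literally, ignoring case
function containsLiteral(haystack, needle) {
  return new RegExp(escapeStringRegexp(needle), 'i').test(haystack);
}

containsLiteral('Price: $4.99 (today only)', '$4.99 (TODAY'); // true
containsLiteral('Price: $4.99 (today only)', '$X.99');        // false
```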
diff --git a/node_modules/esutils/README.md b/node_modules/esutils/README.md new file mode 100644 index 0000000..610d8bd --- /dev/null +++ b/node_modules/esutils/README.md @@ -0,0 +1,169 @@ +### esutils [![Build Status](https://secure.travis-ci.org/estools/esutils.svg)](http://travis-ci.org/estools/esutils) +esutils ([esutils](http://github.com/estools/esutils)) is +utility box for ECMAScript language tools. + +### API + +### ast + +#### ast.isExpression(node) + +Returns true if `node` is an Expression as defined in ECMA262 edition 5.1 section +[11](https://es5.github.io/#x11). + +#### ast.isStatement(node) + +Returns true if `node` is a Statement as defined in ECMA262 edition 5.1 section +[12](https://es5.github.io/#x12). + +#### ast.isIterationStatement(node) + +Returns true if `node` is an IterationStatement as defined in ECMA262 edition +5.1 section [12.6](https://es5.github.io/#x12.6). + +#### ast.isSourceElement(node) + +Returns true if `node` is a SourceElement as defined in ECMA262 edition 5.1 +section [14](https://es5.github.io/#x14). + +#### ast.trailingStatement(node) + +Returns `Statement?` if `node` has trailing `Statement`. +```js +if (cond) + consequent; +``` +When taking this `IfStatement`, returns `consequent;` statement. + +#### ast.isProblematicIfStatement(node) + +Returns true if `node` is a problematic IfStatement. If `node` is a problematic `IfStatement`, `node` cannot be represented as an one on one JavaScript code. +```js +{ + type: 'IfStatement', + consequent: { + type: 'WithStatement', + body: { + type: 'IfStatement', + consequent: {type: 'EmptyStatement'} + } + }, + alternate: {type: 'EmptyStatement'} +} +``` +The above node cannot be represented as a JavaScript code, since the top level `else` alternate belongs to an inner `IfStatement`. + + +### code + +#### code.isDecimalDigit(code) + +Return true if provided code is decimal digit. + +#### code.isHexDigit(code) + +Return true if provided code is hexadecimal digit. + +#### code.isOctalDigit(code) + +Return true if provided code is octal digit. + +#### code.isWhiteSpace(code) + +Return true if provided code is white space. White space characters are formally defined in ECMA262. + +#### code.isLineTerminator(code) + +Return true if provided code is line terminator. Line terminator characters are formally defined in ECMA262. + +#### code.isIdentifierStart(code) + +Return true if provided code can be the first character of ECMA262 Identifier. They are formally defined in ECMA262. + +#### code.isIdentifierPart(code) + +Return true if provided code can be the trailing character of ECMA262 Identifier. They are formally defined in ECMA262. + +### keyword + +#### keyword.isKeywordES5(id, strict) + +Returns `true` if provided identifier string is a Keyword or Future Reserved Word +in ECMA262 edition 5.1. They are formally defined in ECMA262 sections +[7.6.1.1](http://es5.github.io/#x7.6.1.1) and [7.6.1.2](http://es5.github.io/#x7.6.1.2), +respectively. If the `strict` flag is truthy, this function additionally checks whether +`id` is a Keyword or Future Reserved Word under strict mode. + +#### keyword.isKeywordES6(id, strict) + +Returns `true` if provided identifier string is a Keyword or Future Reserved Word +in ECMA262 edition 6. They are formally defined in ECMA262 sections +[11.6.2.1](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-keywords) and +[11.6.2.2](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-future-reserved-words), +respectively. 
If the `strict` flag is truthy, this function additionally checks whether +`id` is a Keyword or Future Reserved Word under strict mode. + +#### keyword.isReservedWordES5(id, strict) + +Returns `true` if provided identifier string is a Reserved Word in ECMA262 edition 5.1. +They are formally defined in ECMA262 section [7.6.1](http://es5.github.io/#x7.6.1). +If the `strict` flag is truthy, this function additionally checks whether `id` +is a Reserved Word under strict mode. + +#### keyword.isReservedWordES6(id, strict) + +Returns `true` if provided identifier string is a Reserved Word in ECMA262 edition 6. +They are formally defined in ECMA262 section [11.6.2](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-reserved-words). +If the `strict` flag is truthy, this function additionally checks whether `id` +is a Reserved Word under strict mode. + +#### keyword.isRestrictedWord(id) + +Returns `true` if provided identifier string is one of `eval` or `arguments`. +They are restricted in strict mode code throughout ECMA262 edition 5.1 and +in ECMA262 edition 6 section [12.1.1](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-identifiers-static-semantics-early-errors). + +#### keyword.isIdentifierName(id) + +Return true if provided identifier string is an IdentifierName as specified in +ECMA262 edition 5.1 section [7.6](https://es5.github.io/#x7.6). + +#### keyword.isIdentifierES5(id, strict) + +Return true if provided identifier string is an Identifier as specified in +ECMA262 edition 5.1 section [7.6](https://es5.github.io/#x7.6). If the `strict` +flag is truthy, this function additionally checks whether `id` is an Identifier +under strict mode. + +#### keyword.isIdentifierES6(id, strict) + +Return true if provided identifier string is an Identifier as specified in +ECMA262 edition 6 section [12.1](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-identifiers). +If the `strict` flag is truthy, this function additionally checks whether `id` +is an Identifier under strict mode. + +### License + +Copyright (C) 2013 [Yusuke Suzuki](http://github.com/Constellation) + (twitter: [@Constellation](http://twitter.com/Constellation)) and other contributors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
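The API documented in the esutils README above maps directly onto the `ast`, `code`, and `keyword` modules whose sources follow in this diff. A short usage sketch, with return values as implied by that implementation (the sample inputs are illustrative only):

```js
var esutils = require('esutils');

// keyword: 'yield' is a keyword in ES6 but not in non-strict ES5
esutils.keyword.isKeywordES5('yield', false); //=> false
esutils.keyword.isKeywordES6('yield', false); //=> true
esutils.keyword.isRestrictedWord('eval');     //=> true

// code: these predicates take character codes, not strings
esutils.code.isDecimalDigit('7'.charCodeAt(0)); //=> true
esutils.code.isWhiteSpace(0x20);                //=> true

// ast: simple node-type predicates over ESTree-style nodes
esutils.ast.isExpression({ type: 'Identifier', name: 'x' }); //=> true
esutils.ast.isStatement({ type: 'ReturnStatement' });        //=> true
```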
diff --git a/node_modules/esutils/lib/ast.js b/node_modules/esutils/lib/ast.js new file mode 100644 index 0000000..8faadae --- /dev/null +++ b/node_modules/esutils/lib/ast.js @@ -0,0 +1,144 @@ +/* + Copyright (C) 2013 Yusuke Suzuki + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +(function () { + 'use strict'; + + function isExpression(node) { + if (node == null) { return false; } + switch (node.type) { + case 'ArrayExpression': + case 'AssignmentExpression': + case 'BinaryExpression': + case 'CallExpression': + case 'ConditionalExpression': + case 'FunctionExpression': + case 'Identifier': + case 'Literal': + case 'LogicalExpression': + case 'MemberExpression': + case 'NewExpression': + case 'ObjectExpression': + case 'SequenceExpression': + case 'ThisExpression': + case 'UnaryExpression': + case 'UpdateExpression': + return true; + } + return false; + } + + function isIterationStatement(node) { + if (node == null) { return false; } + switch (node.type) { + case 'DoWhileStatement': + case 'ForInStatement': + case 'ForStatement': + case 'WhileStatement': + return true; + } + return false; + } + + function isStatement(node) { + if (node == null) { return false; } + switch (node.type) { + case 'BlockStatement': + case 'BreakStatement': + case 'ContinueStatement': + case 'DebuggerStatement': + case 'DoWhileStatement': + case 'EmptyStatement': + case 'ExpressionStatement': + case 'ForInStatement': + case 'ForStatement': + case 'IfStatement': + case 'LabeledStatement': + case 'ReturnStatement': + case 'SwitchStatement': + case 'ThrowStatement': + case 'TryStatement': + case 'VariableDeclaration': + case 'WhileStatement': + case 'WithStatement': + return true; + } + return false; + } + + function isSourceElement(node) { + return isStatement(node) || node != null && node.type === 'FunctionDeclaration'; + } + + function trailingStatement(node) { + switch (node.type) { + case 'IfStatement': + if (node.alternate != null) { + return node.alternate; + } + return node.consequent; + + case 'LabeledStatement': + case 'ForStatement': + case 'ForInStatement': + case 'WhileStatement': + case 'WithStatement': + return node.body; + } + return null; + } + + function isProblematicIfStatement(node) { + var current; + + if (node.type !== 'IfStatement') { + return false; + } + if (node.alternate == null) { + return false; + } + current = 
node.consequent; + do { + if (current.type === 'IfStatement') { + if (current.alternate == null) { + return true; + } + } + current = trailingStatement(current); + } while (current); + + return false; + } + + module.exports = { + isExpression: isExpression, + isStatement: isStatement, + isIterationStatement: isIterationStatement, + isSourceElement: isSourceElement, + isProblematicIfStatement: isProblematicIfStatement, + + trailingStatement: trailingStatement + }; +}()); +/* vim: set sw=4 ts=4 et tw=80 : */ diff --git a/node_modules/esutils/lib/code.js b/node_modules/esutils/lib/code.js new file mode 100644 index 0000000..2a7c19d --- /dev/null +++ b/node_modules/esutils/lib/code.js @@ -0,0 +1,135 @@ +/* + Copyright (C) 2013-2014 Yusuke Suzuki + Copyright (C) 2014 Ivan Nikulin + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +(function () { + 'use strict'; + + var ES6Regex, ES5Regex, NON_ASCII_WHITESPACES, IDENTIFIER_START, IDENTIFIER_PART, ch; + + // See `tools/generate-identifier-regex.js`. 
+ ES5Regex = { + // ECMAScript 5.1/Unicode v7.0.0 NonAsciiIdentifierStart: + NonAsciiIdentifierStart: /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0-\u08B2\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191E\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA69D\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA7AD\uA7B0\uA7B1\uA7F7-\uA801\uA
803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uA9E0-\uA9E4\uA9E6-\uA9EF\uA9FA-\uA9FE\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA7E-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB5F\uAB64\uAB65\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/, + // ECMAScript 5.1/Unicode v7.0.0 NonAsciiIdentifierPart: + NonAsciiIdentifierPart: /[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B2\u08E4-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58\u0C59\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D60-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\
u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19D9\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u2E2F\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099\u309A\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA69D\uA69F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA7AD\uA7B0\uA7B1\uA7F7-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uA9E0-\uA9FE\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB5F\uAB64\uAB65\uABC0-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE2D\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]/ + }; + + ES6Regex = { + // ECMAScript 6/Unicode v7.0.0 NonAsciiIdentifierStart: + NonAsciiIdentifierStart: 
/[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0-\u08B2\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191E\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309B-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA69D\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA7AD\uA7B0\uA7B1\uA7F7-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\
uA984-\uA9B2\uA9CF\uA9E0-\uA9E4\uA9E6-\uA9EF\uA9FA-\uA9FE\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA7E-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB5F\uAB64\uAB65\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDE80-\uDE9C\uDEA0-\uDED0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF75\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00\uDE10-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE4\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48]|\uD804[\uDC03-\uDC37\uDC83-\uDCAF\uDCD0-\uDCE8\uDD03-\uDD26\uDD50-\uDD72\uDD76\uDD83-\uDDB2\uDDC1-\uDDC4\uDDDA\uDE00-\uDE11\uDE13-\uDE2B\uDEB0-\uDEDE\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3D\uDF5D-\uDF61]|\uD805[\uDC80-\uDCAF\uDCC4\uDCC5\uDCC7\uDD80-\uDDAE\uDE00-\uDE2F\uDE44\uDE80-\uDEAA]|\uD806[\uDCA0-\uDCDF\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF98]|\uD809[\uDC00-\uDC6E]|[\uD80C\uD840-\uD868\uD86A-\uD86C][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDED0-\uDEED\uDF00-\uDF2F\uDF40-\uDF43\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50\uDF93-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDFA8\uDFAA-\uDFC2\uDFC4-\uDFCB]|\uD83A[\uDC00-\uDCC4]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D]|\uD87E[\uDC00-\uDE1D]/, + // ECMAScript 6/Unicode v7.0.0 NonAsciiIdentifierPart: + NonAsciiIdentifierPart: 
/[\xAA\xB5\xB7\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B2\u08E4-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58\u0C59\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D60-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1369-\u1371\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19DA\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\
u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA69D\uA69F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA7AD\uA7B0\uA7B1\uA7F7-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uA9E0-\uA9FE\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB5F\uAB64\uAB65\uABC0-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE2D\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDDFD\uDE80-\uDE9C\uDEA0-\uDED0\uDEE0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF7A\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDCA0-\uDCA9\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00-\uDE03\uDE05\uDE06\uDE0C-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE38-\uDE3A\uDE3F\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE6\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48]|\uD804[\uDC00-\uDC46\uDC66-\uDC6F\uDC7F-\uDCBA\uDCD0-\uDCE8\uDCF0-\uDCF9\uDD00-\uDD34\uDD36-\uDD3F\uDD50-\uDD73\uDD76\uDD80-\uDDC4\uDDD0-\uDDDA\uDE00-\uDE11\uDE13-\uDE37\uDEB0-\uDEEA\uDEF0-\uDEF9\uDF01-\uDF03\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3C-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF5D-\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDC80-\uDCC5\uDCC7\uDCD0-\uDCD9\uDD80-\uDDB5\uDDB8-\uDDC0\uDE00-\uDE40\uDE44\uDE50-\uDE59\uDE80-\uDEB7\uDEC0-\uDEC9]|\uD806[\uDCA0-\uDCE9\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF98]|\uD809[\uDC00-\uDC6E]|[\uD80C\uD840-\uD868\uD86A-\uD86C][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDE60-\uDE69\uDED0-\uDEED\uDEF0-\uDEF4\uDF00-\uDF36\uDF40-\uDF43\uDF50-\uDF59\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50-\uDF7E\uDF8F-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDF
A8\uDFAA-\uDFC2\uDFC4-\uDFCB\uDFCE-\uDFFF]|\uD83A[\uDC00-\uDCC4\uDCD0-\uDCD6]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D]|\uD87E[\uDC00-\uDE1D]|\uDB40[\uDD00-\uDDEF]/ + }; + + function isDecimalDigit(ch) { + return 0x30 <= ch && ch <= 0x39; // 0..9 + } + + function isHexDigit(ch) { + return 0x30 <= ch && ch <= 0x39 || // 0..9 + 0x61 <= ch && ch <= 0x66 || // a..f + 0x41 <= ch && ch <= 0x46; // A..F + } + + function isOctalDigit(ch) { + return ch >= 0x30 && ch <= 0x37; // 0..7 + } + + // 7.2 White Space + + NON_ASCII_WHITESPACES = [ + 0x1680, 0x180E, + 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A, + 0x202F, 0x205F, + 0x3000, + 0xFEFF + ]; + + function isWhiteSpace(ch) { + return ch === 0x20 || ch === 0x09 || ch === 0x0B || ch === 0x0C || ch === 0xA0 || + ch >= 0x1680 && NON_ASCII_WHITESPACES.indexOf(ch) >= 0; + } + + // 7.3 Line Terminators + + function isLineTerminator(ch) { + return ch === 0x0A || ch === 0x0D || ch === 0x2028 || ch === 0x2029; + } + + // 7.6 Identifier Names and Identifiers + + function fromCodePoint(cp) { + if (cp <= 0xFFFF) { return String.fromCharCode(cp); } + var cu1 = String.fromCharCode(Math.floor((cp - 0x10000) / 0x400) + 0xD800); + var cu2 = String.fromCharCode(((cp - 0x10000) % 0x400) + 0xDC00); + return cu1 + cu2; + } + + IDENTIFIER_START = new Array(0x80); + for(ch = 0; ch < 0x80; ++ch) { + IDENTIFIER_START[ch] = + ch >= 0x61 && ch <= 0x7A || // a..z + ch >= 0x41 && ch <= 0x5A || // A..Z + ch === 0x24 || ch === 0x5F; // $ (dollar) and _ (underscore) + } + + IDENTIFIER_PART = new Array(0x80); + for(ch = 0; ch < 0x80; ++ch) { + IDENTIFIER_PART[ch] = + ch >= 0x61 && ch <= 0x7A || // a..z + ch >= 0x41 && ch <= 0x5A || // A..Z + ch >= 0x30 && ch <= 0x39 || // 0..9 + ch === 0x24 || ch === 0x5F; // $ (dollar) and _ (underscore) + } + + function isIdentifierStartES5(ch) { + return ch < 0x80 ? IDENTIFIER_START[ch] : ES5Regex.NonAsciiIdentifierStart.test(fromCodePoint(ch)); + } + + function isIdentifierPartES5(ch) { + return ch < 0x80 ? IDENTIFIER_PART[ch] : ES5Regex.NonAsciiIdentifierPart.test(fromCodePoint(ch)); + } + + function isIdentifierStartES6(ch) { + return ch < 0x80 ? IDENTIFIER_START[ch] : ES6Regex.NonAsciiIdentifierStart.test(fromCodePoint(ch)); + } + + function isIdentifierPartES6(ch) { + return ch < 0x80 ? 
IDENTIFIER_PART[ch] : ES6Regex.NonAsciiIdentifierPart.test(fromCodePoint(ch)); + } + + module.exports = { + isDecimalDigit: isDecimalDigit, + isHexDigit: isHexDigit, + isOctalDigit: isOctalDigit, + isWhiteSpace: isWhiteSpace, + isLineTerminator: isLineTerminator, + isIdentifierStartES5: isIdentifierStartES5, + isIdentifierPartES5: isIdentifierPartES5, + isIdentifierStartES6: isIdentifierStartES6, + isIdentifierPartES6: isIdentifierPartES6 + }; +}()); +/* vim: set sw=4 ts=4 et tw=80 : */ diff --git a/node_modules/esutils/lib/keyword.js b/node_modules/esutils/lib/keyword.js new file mode 100644 index 0000000..13c8c6a --- /dev/null +++ b/node_modules/esutils/lib/keyword.js @@ -0,0 +1,165 @@ +/* + Copyright (C) 2013 Yusuke Suzuki + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +(function () { + 'use strict'; + + var code = require('./code'); + + function isStrictModeReservedWordES6(id) { + switch (id) { + case 'implements': + case 'interface': + case 'package': + case 'private': + case 'protected': + case 'public': + case 'static': + case 'let': + return true; + default: + return false; + } + } + + function isKeywordES5(id, strict) { + // yield should not be treated as keyword under non-strict mode. 
+ if (!strict && id === 'yield') { + return false; + } + return isKeywordES6(id, strict); + } + + function isKeywordES6(id, strict) { + if (strict && isStrictModeReservedWordES6(id)) { + return true; + } + + switch (id.length) { + case 2: + return (id === 'if') || (id === 'in') || (id === 'do'); + case 3: + return (id === 'var') || (id === 'for') || (id === 'new') || (id === 'try'); + case 4: + return (id === 'this') || (id === 'else') || (id === 'case') || + (id === 'void') || (id === 'with') || (id === 'enum'); + case 5: + return (id === 'while') || (id === 'break') || (id === 'catch') || + (id === 'throw') || (id === 'const') || (id === 'yield') || + (id === 'class') || (id === 'super'); + case 6: + return (id === 'return') || (id === 'typeof') || (id === 'delete') || + (id === 'switch') || (id === 'export') || (id === 'import'); + case 7: + return (id === 'default') || (id === 'finally') || (id === 'extends'); + case 8: + return (id === 'function') || (id === 'continue') || (id === 'debugger'); + case 10: + return (id === 'instanceof'); + default: + return false; + } + } + + function isReservedWordES5(id, strict) { + return id === 'null' || id === 'true' || id === 'false' || isKeywordES5(id, strict); + } + + function isReservedWordES6(id, strict) { + return id === 'null' || id === 'true' || id === 'false' || isKeywordES6(id, strict); + } + + function isRestrictedWord(id) { + return id === 'eval' || id === 'arguments'; + } + + function isIdentifierNameES5(id) { + var i, iz, ch; + + if (id.length === 0) { return false; } + + ch = id.charCodeAt(0); + if (!code.isIdentifierStartES5(ch)) { + return false; + } + + for (i = 1, iz = id.length; i < iz; ++i) { + ch = id.charCodeAt(i); + if (!code.isIdentifierPartES5(ch)) { + return false; + } + } + return true; + } + + function decodeUtf16(lead, trail) { + return (lead - 0xD800) * 0x400 + (trail - 0xDC00) + 0x10000; + } + + function isIdentifierNameES6(id) { + var i, iz, ch, lowCh, check; + + if (id.length === 0) { return false; } + + check = code.isIdentifierStartES6; + for (i = 0, iz = id.length; i < iz; ++i) { + ch = id.charCodeAt(i); + if (0xD800 <= ch && ch <= 0xDBFF) { + ++i; + if (i >= iz) { return false; } + lowCh = id.charCodeAt(i); + if (!(0xDC00 <= lowCh && lowCh <= 0xDFFF)) { + return false; + } + ch = decodeUtf16(ch, lowCh); + } + if (!check(ch)) { + return false; + } + check = code.isIdentifierPartES6; + } + return true; + } + + function isIdentifierES5(id, strict) { + return isIdentifierNameES5(id) && !isReservedWordES5(id, strict); + } + + function isIdentifierES6(id, strict) { + return isIdentifierNameES6(id) && !isReservedWordES6(id, strict); + } + + module.exports = { + isKeywordES5: isKeywordES5, + isKeywordES6: isKeywordES6, + isReservedWordES5: isReservedWordES5, + isReservedWordES6: isReservedWordES6, + isRestrictedWord: isRestrictedWord, + isIdentifierNameES5: isIdentifierNameES5, + isIdentifierNameES6: isIdentifierNameES6, + isIdentifierES5: isIdentifierES5, + isIdentifierES6: isIdentifierES6 + }; +}()); +/* vim: set sw=4 ts=4 et tw=80 : */ diff --git a/node_modules/esutils/lib/utils.js b/node_modules/esutils/lib/utils.js new file mode 100644 index 0000000..ce18faa --- /dev/null +++ b/node_modules/esutils/lib/utils.js @@ -0,0 +1,33 @@ +/* + Copyright (C) 2013 Yusuke Suzuki + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions 
and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY + DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + + +(function () { + 'use strict'; + + exports.ast = require('./ast'); + exports.code = require('./code'); + exports.keyword = require('./keyword'); +}()); +/* vim: set sw=4 ts=4 et tw=80 : */ diff --git a/node_modules/esutils/package.json b/node_modules/esutils/package.json new file mode 100644 index 0000000..ddce20b --- /dev/null +++ b/node_modules/esutils/package.json @@ -0,0 +1,49 @@ +{ + "name": "esutils", + "description": "utility box for ECMAScript language tools", + "homepage": "https://github.com/estools/esutils", + "main": "lib/utils.js", + "version": "2.0.2", + "engines": { + "node": ">=0.10.0" + }, + "directories": { + "lib": "./lib" + }, + "files": [ + "LICENSE.BSD", + "README.md", + "lib" + ], + "maintainers": [ + { + "name": "Yusuke Suzuki", + "email": "utatane.tea@gmail.com", + "web": "http://github.com/Constellation" + } + ], + "repository": { + "type": "git", + "url": "http://github.com/estools/esutils.git" + }, + "devDependencies": { + "chai": "~1.7.2", + "coffee-script": "~1.6.3", + "jshint": "2.6.3", + "mocha": "~2.2.1", + "regenerate": "~1.2.1", + "unicode-7.0.0": "^0.1.5" + }, + "licenses": [ + { + "type": "BSD", + "url": "http://github.com/estools/esutils/raw/master/LICENSE.BSD" + } + ], + "scripts": { + "test": "npm run-script lint && npm run-script unit-test", + "lint": "jshint lib/*.js", + "unit-test": "mocha --compilers coffee:coffee-script -R spec", + "generate-regex": "node tools/generate-identifier-regex.js" + } +} diff --git a/node_modules/etag/HISTORY.md b/node_modules/etag/HISTORY.md new file mode 100644 index 0000000..bd0f26d --- /dev/null +++ b/node_modules/etag/HISTORY.md @@ -0,0 +1,71 @@ +1.7.0 / 2015-06-08 +================== + + * Always include entity length in ETags for hash length extensions + * Generate non-Stats ETags using MD5 only (no longer CRC32) + * Improve stat performance by removing hashing + * Remove base64 padding in ETags to shorten + * Use MD5 instead of MD4 in weak ETags over 1KB + +1.6.0 / 2015-05-10 +================== + + * Improve support for JXcore + * Remove requirement of `atime` in the stats object + * Support "fake" stats objects in environments without `fs` + +1.5.1 / 2014-11-19 +================== + + * deps: crc@3.2.1 + - Minor fixes + +1.5.0 / 2014-10-14 +================== + + * Improve string performance + * Slightly improve speed for weak ETags over 1KB + +1.4.0 / 2014-09-21 +================== + + * Support "fake" stats objects + * Support Node.js 0.6 + +1.3.1 / 2014-09-14 +================== + + * Use the (new and improved) `crc` for crc32 + 
+1.3.0 / 2014-08-29 +================== + + * Default strings to strong ETags + * Improve speed for weak ETags over 1KB + +1.2.1 / 2014-08-29 +================== + + * Use the (much faster) `buffer-crc32` for crc32 + +1.2.0 / 2014-08-24 +================== + + * Add support for file stat objects + +1.1.0 / 2014-08-24 +================== + + * Add fast-path for empty entity + * Add weak ETag generation + * Shrink size of generated ETags + +1.0.1 / 2014-08-24 +================== + + * Fix behavior of string containing Unicode + +1.0.0 / 2014-05-18 +================== + + * Initial release diff --git a/node_modules/etag/LICENSE b/node_modules/etag/LICENSE new file mode 100644 index 0000000..142ede3 --- /dev/null +++ b/node_modules/etag/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/etag/README.md b/node_modules/etag/README.md new file mode 100644 index 0000000..8da9e05 --- /dev/null +++ b/node_modules/etag/README.md @@ -0,0 +1,165 @@ +# etag + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Create simple ETags + +## Installation + +```sh +$ npm install etag +``` + +## API + +```js +var etag = require('etag') +``` + +### etag(entity, [options]) + +Generate a strong ETag for the given entity. This should be the complete +body of the entity. Strings, `Buffer`s, and `fs.Stats` are accepted. By +default, a strong ETag is generated except for `fs.Stats`, which will +generate a weak ETag (this can be overwritten by `options.weak`). + +```js +res.setHeader('ETag', etag(body)) +``` + +#### Options + +`etag` accepts these properties in the options object. + +##### weak + +Specifies if the generated ETag will include the weak validator mark (that +is, the leading `W/`). The actual entity tag is the same. The default value +is `false`, unless the `entity` is `fs.Stats`, in which case it is `true`. + +## Testing + +```sh +$ npm test +``` + +## Benchmark + +```bash +$ npm run-script bench + +> etag@1.6.0 bench nodejs-etag +> node benchmark/index.js + + http_parser@1.0 + node@0.10.33 + v8@3.14.5.9 + ares@1.9.0-DEV + uv@0.10.29 + zlib@1.2.3 + modules@11 + openssl@1.0.1j + +> node benchmark/body0-100b.js + + 100B body + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. 
+ +* buffer - strong x 289,198 ops/sec ±1.09% (190 runs sampled) +* buffer - weak x 287,838 ops/sec ±0.91% (189 runs sampled) +* string - strong x 284,586 ops/sec ±1.05% (192 runs sampled) +* string - weak x 287,439 ops/sec ±0.82% (192 runs sampled) + +> node benchmark/body1-1kb.js + + 1KB body + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. + +* buffer - strong x 212,423 ops/sec ±0.75% (193 runs sampled) +* buffer - weak x 211,871 ops/sec ±0.74% (194 runs sampled) + string - strong x 205,291 ops/sec ±0.86% (194 runs sampled) + string - weak x 208,463 ops/sec ±0.79% (192 runs sampled) + +> node benchmark/body2-5kb.js + + 5KB body + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. + +* buffer - strong x 92,901 ops/sec ±0.58% (195 runs sampled) +* buffer - weak x 93,045 ops/sec ±0.65% (192 runs sampled) + string - strong x 89,621 ops/sec ±0.68% (194 runs sampled) + string - weak x 90,070 ops/sec ±0.70% (196 runs sampled) + +> node benchmark/body3-10kb.js + + 10KB body + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. + +* buffer - strong x 54,220 ops/sec ±0.85% (192 runs sampled) +* buffer - weak x 54,069 ops/sec ±0.83% (191 runs sampled) + string - strong x 53,078 ops/sec ±0.53% (194 runs sampled) + string - weak x 53,849 ops/sec ±0.47% (197 runs sampled) + +> node benchmark/body4-100kb.js + + 100KB body + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. + +* buffer - strong x 6,673 ops/sec ±0.15% (197 runs sampled) +* buffer - weak x 6,716 ops/sec ±0.12% (198 runs sampled) + string - strong x 6,357 ops/sec ±0.14% (197 runs sampled) + string - weak x 6,344 ops/sec ±0.21% (197 runs sampled) + +> node benchmark/stats.js + + stats + + 1 test completed. + 2 tests completed. + 3 tests completed. + 4 tests completed. + +* real - strong x 1,671,989 ops/sec ±0.13% (197 runs sampled) +* real - weak x 1,681,297 ops/sec ±0.12% (198 runs sampled) + fake - strong x 927,063 ops/sec ±0.14% (198 runs sampled) + fake - weak x 914,461 ops/sec ±0.41% (191 runs sampled) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/etag.svg +[npm-url]: https://npmjs.org/package/etag +[node-version-image]: https://img.shields.io/node/v/etag.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/etag/master.svg +[travis-url]: https://travis-ci.org/jshttp/etag +[coveralls-image]: https://img.shields.io/coveralls/jshttp/etag/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/etag?branch=master +[downloads-image]: https://img.shields.io/npm/dm/etag.svg +[downloads-url]: https://npmjs.org/package/etag diff --git a/node_modules/etag/index.js b/node_modules/etag/index.js new file mode 100644 index 0000000..b582c84 --- /dev/null +++ b/node_modules/etag/index.js @@ -0,0 +1,132 @@ +/*! + * etag + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = etag + +/** + * Module dependencies. + * @private + */ + +var crypto = require('crypto') +var Stats = require('fs').Stats + +/** + * Module variables. + * @private + */ + +var base64PadCharRegExp = /=+$/ +var toString = Object.prototype.toString + +/** + * Generate an entity tag. 
+ * + * @param {Buffer|string} entity + * @return {string} + * @private + */ + +function entitytag(entity) { + if (entity.length === 0) { + // fast-path empty + return '"0-1B2M2Y8AsgTpgAmY7PhCfg"' + } + + // compute hash of entity + var hash = crypto + .createHash('md5') + .update(entity, 'utf8') + .digest('base64') + .replace(base64PadCharRegExp, '') + + // compute length of entity + var len = typeof entity === 'string' + ? Buffer.byteLength(entity, 'utf8') + : entity.length + + return '"' + len.toString(16) + '-' + hash + '"' +} + +/** + * Create a simple ETag. + * + * @param {string|Buffer|Stats} entity + * @param {object} [options] + * @param {boolean} [options.weak] + * @return {String} + * @public + */ + +function etag(entity, options) { + if (entity == null) { + throw new TypeError('argument entity is required') + } + + // support fs.Stats object + var isStats = isstats(entity) + var weak = options && typeof options.weak === 'boolean' + ? options.weak + : isStats + + // validate argument + if (!isStats && typeof entity !== 'string' && !Buffer.isBuffer(entity)) { + throw new TypeError('argument entity must be string, Buffer, or fs.Stats') + } + + // generate entity tag + var tag = isStats + ? stattag(entity) + : entitytag(entity) + + return weak + ? 'W/' + tag + : tag +} + +/** + * Determine if object is a Stats object. + * + * @param {object} obj + * @return {boolean} + * @api private + */ + +function isstats(obj) { + // genuine fs.Stats + if (typeof Stats === 'function' && obj instanceof Stats) { + return true + } + + // quack quack + return obj && typeof obj === 'object' + && 'ctime' in obj && toString.call(obj.ctime) === '[object Date]' + && 'mtime' in obj && toString.call(obj.mtime) === '[object Date]' + && 'ino' in obj && typeof obj.ino === 'number' + && 'size' in obj && typeof obj.size === 'number' +} + +/** + * Generate a tag for a stat. 
+ * + * @param {object} stat + * @return {string} + * @private + */ + +function stattag(stat) { + var mtime = stat.mtime.getTime().toString(16) + var size = stat.size.toString(16) + + return '"' + size + '-' + mtime + '"' +} diff --git a/node_modules/etag/package.json b/node_modules/etag/package.json new file mode 100644 index 0000000..50bf248 --- /dev/null +++ b/node_modules/etag/package.json @@ -0,0 +1,38 @@ +{ + "name": "etag", + "description": "Create simple ETags", + "version": "1.7.0", + "contributors": [ + "Douglas Christopher Wilson ", + "David Björklund " + ], + "license": "MIT", + "keywords": [ + "etag", + "http", + "res" + ], + "repository": "jshttp/etag", + "devDependencies": { + "benchmark": "1.0.0", + "beautify-benchmark": "0.2.4", + "istanbul": "0.3.14", + "mocha": "~1.21.4", + "seedrandom": "2.3.11" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "bench": "node benchmark/index.js", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/events/.npmignore b/node_modules/events/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/events/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/events/.travis.yml b/node_modules/events/.travis.yml new file mode 100644 index 0000000..002b042 --- /dev/null +++ b/node_modules/events/.travis.yml @@ -0,0 +1,8 @@ +sudo: false +language: node_js +node_js: +- '0.10' +env: + global: + - secure: XcBiD8yReflut9q7leKsigDZ0mI3qTKH+QrNVY8DaqlomJOZw8aOrVuX9Jz12l86ZJ41nbxmKnRNkFzcVr9mbP9YaeTb3DpeOBWmvaoSfud9Wnc16VfXtc1FCcwDhSVcSiM3UtnrmFU5cH+Dw1LPh5PbfylYOS/nJxUvG0FFLqI= + - secure: jNWtEbqhUdQ0xXDHvCYfUbKYeJCi6a7B4LsrcxYCyWWn4NIgncE5x2YbB+FSUUFVYfz0dsn5RKP1oHB99f0laUEo18HBNkrAS/rtyOdVzcpJjbQ6kgSILGjnJD/Ty1B57Rcz3iyev5Y7bLZ6Y1FbDnk/i9/l0faOGz8vTC3Vdkc= diff --git a/node_modules/events/.zuul.yml b/node_modules/events/.zuul.yml new file mode 100644 index 0000000..a8e35af --- /dev/null +++ b/node_modules/events/.zuul.yml @@ -0,0 +1,12 @@ +ui: mocha-qunit +browsers: + - name: chrome + version: latest + - name: firefox + version: latest + - name: safari + version: 5..latest + - name: iphone + version: latest + - name: ie + version: 8..latest diff --git a/node_modules/events/History.md b/node_modules/events/History.md new file mode 100644 index 0000000..b345858 --- /dev/null +++ b/node_modules/events/History.md @@ -0,0 +1,42 @@ +# 1.1.1 (2016-06-22) + + - add more context to errors if they are not instanceof Error + +# 1.1.0 (2015-09-29) + + - add Emitter#listerCount (to match node v4 api) + +# 1.0.2 (2014-08-28) + + - remove un-reachable code + - update devDeps + +## 1.0.1 / 2014-05-11 + + - check for console.trace before using it + +## 1.0.0 / 2013-12-10 + + - Update to latest events code from node.js 0.10 + - copy tests from node.js + +## 0.4.0 / 2011-07-03 ## + + - Switching to graphquire@0.8.0 + +## 0.3.0 / 2011-07-03 ## + + - Switching to URL based module require. + +## 0.2.0 / 2011-06-10 ## + + - Simplified package structure. + - Graphquire for dependency management. 
+ +## 0.1.1 / 2011-05-16 ## + + - Unhandled errors are logged via console.error + +## 0.1.0 / 2011-04-22 ## + + - Initial release diff --git a/node_modules/events/LICENSE b/node_modules/events/LICENSE new file mode 100644 index 0000000..52ed3b0 --- /dev/null +++ b/node_modules/events/LICENSE @@ -0,0 +1,22 @@ +MIT + +Copyright Joyent, Inc. and other Node contributors. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/events/Readme.md b/node_modules/events/Readme.md new file mode 100644 index 0000000..02694ef --- /dev/null +++ b/node_modules/events/Readme.md @@ -0,0 +1,19 @@ +# events [![Build Status](https://travis-ci.org/Gozala/events.png?branch=master)](https://travis-ci.org/Gozala/events) + +Node's event emitter for all engines. + +## Install ## + +``` +npm install events +``` + +## Require ## + +```javascript +var EventEmitter = require('events').EventEmitter +``` + +## Usage ## + +See the [node.js event emitter docs](http://nodejs.org/api/events.html) diff --git a/node_modules/events/events.js b/node_modules/events/events.js new file mode 100644 index 0000000..1619a62 --- /dev/null +++ b/node_modules/events/events.js @@ -0,0 +1,302 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
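+// Usage sketch: a minimal example of the emitter API implemented below
+// (on/once/emit/listenerCount), shown here only as an illustration.
+//
+//   var EventEmitter = require('events').EventEmitter;
+//   var ee = new EventEmitter();
+//
+//   ee.on('message', function (text) { console.log('got:', text); });
+//   ee.once('ready', function () { console.log('fires at most once'); });
+//
+//   ee.emit('ready');             // logs "fires at most once"
+//   ee.emit('ready');             // the once-listener was removed, nothing logs
+//   ee.emit('message', 'hello');  // logs "got: hello"
+//   ee.listenerCount('message');  // 1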
+ +function EventEmitter() { + this._events = this._events || {}; + this._maxListeners = this._maxListeners || undefined; +} +module.exports = EventEmitter; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +EventEmitter.defaultMaxListeners = 10; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function(n) { + if (!isNumber(n) || n < 0 || isNaN(n)) + throw TypeError('n must be a positive number'); + this._maxListeners = n; + return this; +}; + +EventEmitter.prototype.emit = function(type) { + var er, handler, len, args, i, listeners; + + if (!this._events) + this._events = {}; + + // If there is no 'error' event listener then throw. + if (type === 'error') { + if (!this._events.error || + (isObject(this._events.error) && !this._events.error.length)) { + er = arguments[1]; + if (er instanceof Error) { + throw er; // Unhandled 'error' event + } else { + // At least give some kind of context to the user + var err = new Error('Uncaught, unspecified "error" event. (' + er + ')'); + err.context = er; + throw err; + } + } + } + + handler = this._events[type]; + + if (isUndefined(handler)) + return false; + + if (isFunction(handler)) { + switch (arguments.length) { + // fast cases + case 1: + handler.call(this); + break; + case 2: + handler.call(this, arguments[1]); + break; + case 3: + handler.call(this, arguments[1], arguments[2]); + break; + // slower + default: + args = Array.prototype.slice.call(arguments, 1); + handler.apply(this, args); + } + } else if (isObject(handler)) { + args = Array.prototype.slice.call(arguments, 1); + listeners = handler.slice(); + len = listeners.length; + for (i = 0; i < len; i++) + listeners[i].apply(this, args); + } + + return true; +}; + +EventEmitter.prototype.addListener = function(type, listener) { + var m; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events) + this._events = {}; + + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (this._events.newListener) + this.emit('newListener', type, + isFunction(listener.listener) ? + listener.listener : listener); + + if (!this._events[type]) + // Optimize the case of one listener. Don't need the extra array object. + this._events[type] = listener; + else if (isObject(this._events[type])) + // If we've already got an array, just append. + this._events[type].push(listener); + else + // Adding the second element, need to change to array. + this._events[type] = [this._events[type], listener]; + + // Check for listener leak + if (isObject(this._events[type]) && !this._events[type].warned) { + if (!isUndefined(this._maxListeners)) { + m = this._maxListeners; + } else { + m = EventEmitter.defaultMaxListeners; + } + + if (m && m > 0 && this._events[type].length > m) { + this._events[type].warned = true; + console.error('(node) warning: possible EventEmitter memory ' + + 'leak detected. %d listeners added. 
' + + 'Use emitter.setMaxListeners() to increase limit.', + this._events[type].length); + if (typeof console.trace === 'function') { + // not supported in IE 10 + console.trace(); + } + } + } + + return this; +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.once = function(type, listener) { + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + var fired = false; + + function g() { + this.removeListener(type, g); + + if (!fired) { + fired = true; + listener.apply(this, arguments); + } + } + + g.listener = listener; + this.on(type, g); + + return this; +}; + +// emits a 'removeListener' event iff the listener was removed +EventEmitter.prototype.removeListener = function(type, listener) { + var list, position, length, i; + + if (!isFunction(listener)) + throw TypeError('listener must be a function'); + + if (!this._events || !this._events[type]) + return this; + + list = this._events[type]; + length = list.length; + position = -1; + + if (list === listener || + (isFunction(list.listener) && list.listener === listener)) { + delete this._events[type]; + if (this._events.removeListener) + this.emit('removeListener', type, listener); + + } else if (isObject(list)) { + for (i = length; i-- > 0;) { + if (list[i] === listener || + (list[i].listener && list[i].listener === listener)) { + position = i; + break; + } + } + + if (position < 0) + return this; + + if (list.length === 1) { + list.length = 0; + delete this._events[type]; + } else { + list.splice(position, 1); + } + + if (this._events.removeListener) + this.emit('removeListener', type, listener); + } + + return this; +}; + +EventEmitter.prototype.removeAllListeners = function(type) { + var key, listeners; + + if (!this._events) + return this; + + // not listening for removeListener, no need to emit + if (!this._events.removeListener) { + if (arguments.length === 0) + this._events = {}; + else if (this._events[type]) + delete this._events[type]; + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + for (key in this._events) { + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + this.removeAllListeners('removeListener'); + this._events = {}; + return this; + } + + listeners = this._events[type]; + + if (isFunction(listeners)) { + this.removeListener(type, listeners); + } else if (listeners) { + // LIFO order + while (listeners.length) + this.removeListener(type, listeners[listeners.length - 1]); + } + delete this._events[type]; + + return this; +}; + +EventEmitter.prototype.listeners = function(type) { + var ret; + if (!this._events || !this._events[type]) + ret = []; + else if (isFunction(this._events[type])) + ret = [this._events[type]]; + else + ret = this._events[type].slice(); + return ret; +}; + +EventEmitter.prototype.listenerCount = function(type) { + if (this._events) { + var evlistener = this._events[type]; + + if (isFunction(evlistener)) + return 1; + else if (evlistener) + return evlistener.length; + } + return 0; +}; + +EventEmitter.listenerCount = function(emitter, type) { + return emitter.listenerCount(type); +}; + +function isFunction(arg) { + return typeof arg === 'function'; +} + +function isNumber(arg) { + return typeof arg === 'number'; +} + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} + +function isUndefined(arg) { + return arg === void 0; +} diff --git a/node_modules/events/package.json b/node_modules/events/package.json new file 
mode 100644 index 0000000..5d7e797 --- /dev/null +++ b/node_modules/events/package.json @@ -0,0 +1,33 @@ +{ + "name": "events", + "id": "events", + "version": "1.1.1", + "description": "Node's event emitter for all engines.", + "keywords": [ + "events", + "eventEmitter", + "eventDispatcher", + "listeners" + ], + "author": "Irakli Gozalishvili (http://jeditoolkit.com)", + "repository": { + "type": "git", + "url": "git://github.com/Gozala/events.git", + "web": "https://github.com/Gozala/events" + }, + "bugs": { + "url": "http://github.com/Gozala/events/issues/" + }, + "main": "./events.js", + "engines": { + "node": ">=0.4.x" + }, + "devDependencies": { + "mocha": "~1.21.4", + "zuul": "~1.10.2" + }, + "scripts": { + "test": "mocha --ui qunit -- tests/index.js && zuul -- tests/index.js" + }, + "license": "MIT" +} \ No newline at end of file diff --git a/node_modules/events/tests/add-listeners.js b/node_modules/events/tests/add-listeners.js new file mode 100644 index 0000000..5ab874c --- /dev/null +++ b/node_modules/events/tests/add-listeners.js @@ -0,0 +1,63 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var assert = require('assert'); +var events = require('../'); + +var e = new events.EventEmitter(); + +var events_new_listener_emited = []; +var listeners_new_listener_emited = []; +var times_hello_emited = 0; + +// sanity check +assert.equal(e.addListener, e.on); + +e.on('newListener', function(event, listener) { + console.log('newListener: ' + event); + events_new_listener_emited.push(event); + listeners_new_listener_emited.push(listener); +}); + +function hello(a, b) { + console.log('hello'); + times_hello_emited += 1; + assert.equal('a', a); + assert.equal('b', b); +} +e.on('hello', hello); + +var foo = function() {}; +e.once('foo', foo); + +console.log('start'); + +e.emit('hello', 'a', 'b'); + + +// just make sure that this doesn't throw: +var f = new events.EventEmitter(); +f.setMaxListeners(0); + +assert.deepEqual(['hello', 'foo'], events_new_listener_emited); +assert.deepEqual([hello, foo], listeners_new_listener_emited); +assert.equal(1, times_hello_emited); + diff --git a/node_modules/events/tests/check-listener-leaks.js b/node_modules/events/tests/check-listener-leaks.js new file mode 100644 index 0000000..e07866a --- /dev/null +++ b/node_modules/events/tests/check-listener-leaks.js @@ -0,0 +1,86 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var assert = require('assert'); +var events = require('../'); + +var e = new events.EventEmitter(); + +// default +for (var i = 0; i < 10; i++) { + e.on('default', function() {}); +} +assert.ok(!e._events['default'].hasOwnProperty('warned')); +e.on('default', function() {}); +assert.ok(e._events['default'].warned); + +// specific +e.setMaxListeners(5); +for (var i = 0; i < 5; i++) { + e.on('specific', function() {}); +} +assert.ok(!e._events['specific'].hasOwnProperty('warned')); +e.on('specific', function() {}); +assert.ok(e._events['specific'].warned); + +// only one +e.setMaxListeners(1); +e.on('only one', function() {}); +assert.ok(!e._events['only one'].hasOwnProperty('warned')); +e.on('only one', function() {}); +assert.ok(e._events['only one'].hasOwnProperty('warned')); + +// unlimited +e.setMaxListeners(0); +for (var i = 0; i < 1000; i++) { + e.on('unlimited', function() {}); +} +assert.ok(!e._events['unlimited'].hasOwnProperty('warned')); + +// process-wide +events.EventEmitter.defaultMaxListeners = 42; +e = new events.EventEmitter(); + +for (var i = 0; i < 42; ++i) { + e.on('fortytwo', function() {}); +} +assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); +e.on('fortytwo', function() {}); +assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); +delete e._events['fortytwo'].warned; + +events.EventEmitter.defaultMaxListeners = 44; +e.on('fortytwo', function() {}); +assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); +e.on('fortytwo', function() {}); +assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); + +// but _maxListeners still has precedence over defaultMaxListeners +events.EventEmitter.defaultMaxListeners = 42; +e = new events.EventEmitter(); +e.setMaxListeners(1); +e.on('uno', function() {}); +assert.ok(!e._events['uno'].hasOwnProperty('warned')); +e.on('uno', function() {}); +assert.ok(e._events['uno'].hasOwnProperty('warned')); + +// chainable +assert.strictEqual(e, e.setMaxListeners(1)); diff --git a/node_modules/events/tests/common.js b/node_modules/events/tests/common.js new file mode 100644 index 0000000..66f70a3 --- /dev/null +++ b/node_modules/events/tests/common.js @@ -0,0 +1,42 @@ +var assert = require('assert'); + +var mustCallChecks = []; + +function runCallChecks() { + var failed_count = 0; + for (var i=0 ; i< mustCallChecks.length; ++i) { + var context = mustCallChecks[i]; + if (context.actual === context.expected) { + continue; + } + + failed_count++; + 
console.log('Mismatched %s function calls. Expected %d, actual %d.', + context.name, + context.expected, + context.actual); + console.log(context.stack.split('\n').slice(2).join('\n')); + } + + assert(failed_count === 0); +} + +after(runCallChecks); + +exports.mustCall = function(fn, expected) { + if (typeof expected !== 'number') expected = 1; + + var context = { + expected: expected, + actual: 0, + stack: (new Error).stack, + name: fn.name || '' + }; + + mustCallChecks.push(context); + + return function() { + context.actual++; + return fn.apply(this, arguments); + }; +}; diff --git a/node_modules/events/tests/index.js b/node_modules/events/tests/index.js new file mode 100644 index 0000000..f144530 --- /dev/null +++ b/node_modules/events/tests/index.js @@ -0,0 +1,25 @@ + +require('./legacy-compat'); + +// we do this to easily wrap each file in a mocha test +// and also have browserify be able to statically analyze this file +var orig_require = require; +var require = function(file) { + test(file, function() { + orig_require(file); + }); +} + +require('./add-listeners.js'); +require('./check-listener-leaks.js'); +require('./listener-count.js'); +require('./listeners-side-effects.js'); +require('./listeners.js'); +require('./max-listeners.js'); +require('./modify-in-emit.js'); +require('./num-args.js'); +require('./once.js'); +require('./set-max-listeners-side-effects.js'); +require('./subclass.js'); +require('./remove-all-listeners.js'); +require('./remove-listeners.js'); diff --git a/node_modules/events/tests/legacy-compat.js b/node_modules/events/tests/legacy-compat.js new file mode 100644 index 0000000..afbc0ab --- /dev/null +++ b/node_modules/events/tests/legacy-compat.js @@ -0,0 +1,18 @@ +// sigh... life is hard +if (!global.console) { + console = {} +} + +var fns = ['log', 'error', 'trace']; +for (var i=0 ; ifoo should not be emitted', '!'); +}; + +e.once('foo', remove); +e.removeListener('foo', remove); +e.emit('foo'); + +var times_recurse_emitted = 0; + +e.once('e', function() { + e.emit('e'); + times_recurse_emitted++; +}); + +e.once('e', function() { + times_recurse_emitted++; +}); + +e.emit('e'); + +assert.equal(1, times_hello_emited); +assert.equal(2, times_recurse_emitted); diff --git a/node_modules/events/tests/remove-all-listeners.js b/node_modules/events/tests/remove-all-listeners.js new file mode 100644 index 0000000..b3dc886 --- /dev/null +++ b/node_modules/events/tests/remove-all-listeners.js @@ -0,0 +1,80 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('./common'); +var assert = require('assert'); +var events = require('../'); + +var after_checks = []; +after(function() { + for (var i=0 ; i Expand POSIX bracket expressions (character classes) in glob patterns. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install expand-brackets --save +``` + +## Usage + +```js +var brackets = require('expand-brackets'); + +brackets('[![:lower:]]'); +//=> '[^a-z]' +``` + +## .isMatch + +Return true if the given string matches the bracket expression: + +```js +brackets.isMatch('A', '[![:lower:]]'); +//=> true + +brackets.isMatch('a', '[![:lower:]]'); +//=> false +``` + +## .makeRe + +Make a regular expression from a bracket expression: + +```js +brackets.makeRe('[![:lower:]]'); +//=> /[^a-z]/ +``` + +The following named POSIX bracket expressions are supported: + +* `[:alnum:]`: Alphanumeric characters (`a-zA-Z0-9]`) +* `[:alpha:]`: Alphabetic characters (`a-zA-Z]`) +* `[:blank:]`: Space and tab (`[ t]`) +* `[:digit:]`: Digits (`[0-9]`) +* `[:lower:]`: Lowercase letters (`[a-z]`) +* `[:punct:]`: Punctuation and symbols. (`[!"#$%&'()*+, -./:;<=>?@ [\]^_``{|}~]`) +* `[:upper:]`: Uppercase letters (`[A-Z]`) +* `[:word:]`: Word characters (letters, numbers and underscores) (`[A-Za-z0-9_]`) +* `[:xdigit:]`: Hexadecimal digits (`[A-Fa-f0-9]`) + +Collating sequences are not supported. + +## Related projects + +You might also be interested in these projects: + +* [extglob](https://www.npmjs.com/package/extglob): Convert extended globs to regex-compatible strings. Add (almost) the expressive power of regular expressions to… [more](https://www.npmjs.com/package/extglob) | [homepage](https://github.com/jonschlinkert/extglob) +* [is-extglob](https://www.npmjs.com/package/is-extglob): Returns true if a string has an extglob. | [homepage](https://github.com/jonschlinkert/is-extglob) +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern.… [more](https://www.npmjs.com/package/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob) +* [is-posix-bracket](https://www.npmjs.com/package/is-posix-bracket): Returns true if the given string is a POSIX bracket expression (POSIX character class). | [homepage](https://github.com/jonschlinkert/is-posix-bracket) +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just… [more](https://www.npmjs.com/package/micromatch) | [homepage](https://github.com/jonschlinkert/micromatch) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/expand-brackets/issues/new). 
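+ +A quick sketch of how the pieces documented above fit together, using only the `brackets()` and `brackets.makeRe()` calls from this readme: a named class expands to a plain character class, and the generated `RegExp` can be used to test a handful of strings.
+
+```js
+var brackets = require('expand-brackets');
+
+brackets('[[:alnum:]]');
+//=> '[a-zA-Z0-9]'
+
+var re = brackets.makeRe('[[:digit:]]');
+['abc', 'a1', '42'].filter(function (str) {
+  return re.test(str);
+});
+//=> ['a1', '42']
+```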
+ +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +verb © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/expand-brackets/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on April 01, 2016._ \ No newline at end of file diff --git a/node_modules/expand-brackets/index.js b/node_modules/expand-brackets/index.js new file mode 100644 index 0000000..b843cc2 --- /dev/null +++ b/node_modules/expand-brackets/index.js @@ -0,0 +1,163 @@ +/*! + * expand-brackets + * + * Copyright (c) 2015 Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +var isPosixBracket = require('is-posix-bracket'); + +/** + * POSIX character classes + */ + +var POSIX = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E', + punct: '-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9', +}; + +/** + * Expose `brackets` + */ + +module.exports = brackets; + +function brackets(str) { + if (!isPosixBracket(str)) { + return str; + } + + var negated = false; + if (str.indexOf('[^') !== -1) { + negated = true; + str = str.split('[^').join('['); + } + if (str.indexOf('[!') !== -1) { + negated = true; + str = str.split('[!').join('['); + } + + var a = str.split('['); + var b = str.split(']'); + var imbalanced = a.length !== b.length; + + var parts = str.split(/(?::\]\[:|\[?\[:|:\]\]?)/); + var len = parts.length, i = 0; + var end = '', beg = ''; + var res = []; + + // start at the end (innermost) first + while (len--) { + var inner = parts[i++]; + if (inner === '^[!' || inner === '[!') { + inner = ''; + negated = true; + } + + var prefix = negated ? 
'^' : ''; + var ch = POSIX[inner]; + + if (ch) { + res.push('[' + prefix + ch + ']'); + } else if (inner) { + if (/^\[?\w-\w\]?$/.test(inner)) { + if (i === parts.length) { + res.push('[' + prefix + inner); + } else if (i === 1) { + res.push(prefix + inner + ']'); + } else { + res.push(prefix + inner); + } + } else { + if (i === 1) { + beg += inner; + } else if (i === parts.length) { + end += inner; + } else { + res.push('[' + prefix + inner + ']'); + } + } + } + } + + var result = res.join('|'); + var rlen = res.length || 1; + if (rlen > 1) { + result = '(?:' + result + ')'; + rlen = 1; + } + if (beg) { + rlen++; + if (beg.charAt(0) === '[') { + if (imbalanced) { + beg = '\\[' + beg.slice(1); + } else { + beg += ']'; + } + } + result = beg + result; + } + if (end) { + rlen++; + if (end.slice(-1) === ']') { + if (imbalanced) { + end = end.slice(0, end.length - 1) + '\\]'; + } else { + end = '[' + end; + } + } + result += end; + } + + if (rlen > 1) { + result = result.split('][').join(']|['); + if (result.indexOf('|') !== -1 && !/\(\?/.test(result)) { + result = '(?:' + result + ')'; + } + } + + result = result.replace(/\[+=|=\]+/g, '\\b'); + return result; +} + +brackets.makeRe = function(pattern) { + try { + return new RegExp(brackets(pattern)); + } catch (err) {} +}; + +brackets.isMatch = function(str, pattern) { + try { + return brackets.makeRe(pattern).test(str); + } catch (err) { + return false; + } +}; + +brackets.match = function(arr, pattern) { + var len = arr.length, i = 0; + var res = arr.slice(); + + var re = brackets.makeRe(pattern); + while (i < len) { + var ele = arr[i++]; + if (!re.test(ele)) { + continue; + } + res.splice(i, 1); + } + return res; +}; diff --git a/node_modules/expand-brackets/package.json b/node_modules/expand-brackets/package.json new file mode 100644 index 0000000..99dddaf --- /dev/null +++ b/node_modules/expand-brackets/package.json @@ -0,0 +1,62 @@ +{ + "name": "expand-brackets", + "description": "Expand POSIX bracket expressions (character classes) in glob patterns.", + "version": "0.1.5", + "homepage": "https://github.com/jonschlinkert/expand-brackets", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/expand-brackets", + "bugs": { + "url": "https://github.com/jonschlinkert/expand-brackets/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-posix-bracket": "^0.1.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.2.5", + "should": "^7.0.2" + }, + "keywords": [ + "bracket", + "character class", + "expression", + "posix" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "extglob", + "is-extglob", + "is-glob", + "is-posix-bracket", + "micromatch" + ] + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/expand-range/LICENSE b/node_modules/expand-range/LICENSE new file mode 100644 index 0000000..b1e51ff --- /dev/null +++ b/node_modules/expand-range/LICENSE @@ -0,0 +1,24 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/expand-range/README.md b/node_modules/expand-range/README.md new file mode 100644 index 0000000..72c7e8c --- /dev/null +++ b/node_modules/expand-range/README.md @@ -0,0 +1,145 @@ +# expand-range [![NPM version](https://img.shields.io/npm/v/expand-range.svg?style=flat)](https://www.npmjs.com/package/expand-range) [![NPM downloads](https://img.shields.io/npm/dm/expand-range.svg?style=flat)](https://npmjs.org/package/expand-range) [![Build Status](https://img.shields.io/travis/jonschlinkert/expand-range.svg?style=flat)](https://travis-ci.org/jonschlinkert/expand-range) + +Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See the benchmarks. Used by micromatch. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install expand-range --save +``` + +Wraps [fill-range] to do range expansion using `..` separated strings. See [fill-range] for the full list of options and features. + +## Example usage + +```js +var expand = require('expand-range'); +``` + +**Params** + +```js +expand(start, stop, increment); +``` + +* `start`: the number or letter to start with +* `end`: the number or letter to end with +* `increment`: optionally pass the increment to use. 
works for letters or numbers + +**Examples** + +```js +expand('a..e') +//=> ['a', 'b', 'c', 'd', 'e'] + +expand('a..e..2') +//=> ['a', 'c', 'e'] + +expand('A..E..2') +//=> ['A', 'C', 'E'] + +expand('1..3') +//=> ['1', '2', '3'] + +expand('0..-5') +//=> [ '0', '-1', '-2', '-3', '-4', '-5' ] + +expand('-9..9..3') +//=> [ '-9', '-6', '-3', '0', '3', '6', '9' ]) + +expand('-1..-10..-2') +//=> [ '-1', '-3', '-5', '-7', '-9' ] + +expand('1..10..2') +//=> [ '1', '3', '5', '7', '9' ] +``` + +### Custom function + +Optionally pass a custom function as the second argument: + +```js +expand('a..e', function (val, isNumber, pad, i) { + if (!isNumber) { + return String.fromCharCode(val) + i; + } + return val; +}); +//=> ['a0', 'b1', 'c2', 'd3', 'e4'] +``` + +## Benchmarks + +```sh +# benchmark/fixtures/alpha-lower.js (29 bytes) + brace-expansion x 145,653 ops/sec ±0.89% (87 runs sampled) + expand-range x 453,213 ops/sec ±1.66% (85 runs sampled) + minimatch x 152,193 ops/sec ±1.17% (86 runs sampled) + +# benchmark/fixtures/alpha-upper.js (29 bytes) + brace-expansion x 149,975 ops/sec ±1.10% (88 runs sampled) + expand-range x 459,390 ops/sec ±1.27% (84 runs sampled) + minimatch x 155,253 ops/sec ±1.25% (88 runs sampled) + +# benchmark/fixtures/padded.js (33 bytes) + brace-expansion x 14,694 ops/sec ±1.37% (85 runs sampled) + expand-range x 169,393 ops/sec ±1.76% (80 runs sampled) + minimatch x 15,052 ops/sec ±1.15% (88 runs sampled) + +# benchmark/fixtures/range.js (29 bytes) + brace-expansion x 142,968 ops/sec ±1.35% (86 runs sampled) + expand-range x 465,579 ops/sec ±1.43% (86 runs sampled) + minimatch x 126,872 ops/sec ±1.18% (90 runs sampled) +``` + +## Related projects + +You might also be interested in these projects: + +* [braces](https://www.npmjs.com/package/braces): Fastest brace expansion for node.js, with the most complete support for the Bash 4.3 braces… [more](https://www.npmjs.com/package/braces) | [homepage](https://github.com/jonschlinkert/braces) +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or multiplier to… [more](https://www.npmjs.com/package/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range) +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/expand-range/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/expand-range/blob/master/LICENSE). 
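+ +Two more small examples in the same vein, sticking to the string-based `expand()` call documented above; the error text shown is the one thrown by this module's `index.js` when the argument is not a string.
+
+```js
+var expand = require('expand-range');
+
+expand('1..5..2');
+//=> ['1', '3', '5']
+
+try {
+  expand(5);
+} catch (err) {
+  console.log(err.message);
+  //=> 'expand-range expects a string.'
+}
+```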
+ +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on May 05, 2016._ \ No newline at end of file diff --git a/node_modules/expand-range/index.js b/node_modules/expand-range/index.js new file mode 100644 index 0000000..369962a --- /dev/null +++ b/node_modules/expand-range/index.js @@ -0,0 +1,43 @@ +/*! + * expand-range + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +var fill = require('fill-range'); + +module.exports = function expandRange(str, options, fn) { + if (typeof str !== 'string') { + throw new TypeError('expand-range expects a string.'); + } + + if (typeof options === 'function') { + fn = options; + options = {}; + } + + if (typeof options === 'boolean') { + options = {}; + options.makeRe = true; + } + + // create arguments to pass to fill-range + var opts = options || {}; + var args = str.split('..'); + var len = args.length; + if (len > 3) { return str; } + + // if only one argument, it can't expand so return it + if (len === 1) { return args; } + + // if `true`, tell fill-range to regexify the string + if (typeof fn === 'boolean' && fn === true) { + opts.makeRe = true; + } + + args.push(opts); + return fill.apply(null, args.concat(fn)); +}; diff --git a/node_modules/expand-range/package.json b/node_modules/expand-range/package.json new file mode 100644 index 0000000..01df494 --- /dev/null +++ b/node_modules/expand-range/package.json @@ -0,0 +1,73 @@ +{ + "name": "expand-range", + "description": "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See the benchmarks. Used by micromatch.", + "version": "1.8.2", + "homepage": "https://github.com/jonschlinkert/expand-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/expand-range", + "bugs": { + "url": "https://github.com/jonschlinkert/expand-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "fill-range": "^2.1.0" + }, + "devDependencies": { + "benchmarked": "^0.2.4", + "brace-expansion": "^1.1.4", + "glob": "^7.0.3", + "gulp-format-md": "^0.1.9", + "minimatch": "^3.0.0", + "mocha": "^2.4.5" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "expand", + "expansion", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "sh" + ], + "verb": { + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb" + ], + "toc": false, + "layout": "default", + "lint": { + "reflinks": true + }, + "tasks": [ + "readme" + ], + "related": { + "list": [ + "micromatch", + "fill-range", + "braces" + ] + } + } +} diff --git a/node_modules/express/History.md b/node_modules/express/History.md new file mode 100644 index 0000000..40a5ed7 --- /dev/null +++ b/node_modules/express/History.md @@ -0,0 +1,3142 @@ +4.14.0 / 2016-06-16 +=================== + + * Add `acceptRanges` option to `res.sendFile`/`res.sendfile` + * Add `cacheControl` option to `res.sendFile`/`res.sendfile` + * Add `options` argument to `req.range` + - Includes the `combine` option + * Encode URL in `res.location`/`res.redirect` if not already encoded + * Fix some redirect handling in `res.sendFile`/`res.sendfile` + * Fix Windows absolute path check using forward slashes + * Improve error with invalid arguments to `req.get()` + * Improve performance for `res.json`/`res.jsonp` in most cases + 
* Improve `Range` header handling in `res.sendFile`/`res.sendfile` + * deps: accepts@~1.3.3 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Many performance improvments + - deps: mime-types@~2.1.11 + - deps: negotiator@0.6.1 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: cookie@0.3.1 + - Add `sameSite` option + - Fix cookie `Max-Age` to never be a floating point number + - Improve error message when `encode` is not a function + - Improve error message when `expires` is not a `Date` + - Throw better error for invalid argument to parse + - Throw on invalid values provided to `serialize` + - perf: enable strict mode + - perf: hoist regular expression + - perf: use for loop in parse + - perf: use string concatination for serialization + * deps: finalhandler@0.5.0 + - Change invalid or non-numeric status code to 500 + - Overwrite status message to match set status code + - Prefer `err.statusCode` if `err.status` is invalid + - Set response headers from `err.headers` object + - Use `statuses` instead of `http` module for status messages + * deps: proxy-addr@~1.1.2 + - Fix accepting various invalid netmasks + - Fix IPv6-mapped IPv4 validation edge cases + - IPv4 netmasks must be contingous + - IPv6 addresses cannot be used as a netmask + - deps: ipaddr.js@1.1.1 + * deps: qs@6.2.0 + - Add `decoder` option in `parse` function + * deps: range-parser@~1.2.0 + - Add `combine` option to combine overlapping ranges + - Fix incorrectly returning -1 when there is at least one valid range + - perf: remove internal function + * deps: send@0.14.1 + - Add `acceptRanges` option + - Add `cacheControl` option + - Attempt to combine multiple ranges into single range + - Correctly inherit from `Stream` class + - Fix `Content-Range` header in 416 responses when using `start`/`end` options + - Fix `Content-Range` header missing from default 416 responses + - Fix redirect error when `path` contains raw non-URL characters + - Fix redirect when `path` starts with multiple forward slashes + - Ignore non-byte `Range` headers + - deps: http-errors@~1.5.0 + - deps: range-parser@~1.2.0 + - deps: statuses@~1.3.0 + - perf: remove argument reassignment + * deps: serve-static@~1.11.1 + - Add `acceptRanges` option + - Add `cacheControl` option + - Attempt to combine multiple ranges into single range + - Fix redirect error when `req.url` contains raw non-URL characters + - Ignore non-byte `Range` headers + - Use status code 301 for redirects + - deps: send@0.14.1 + * deps: type-is@~1.6.13 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.11 + * deps: vary@~1.1.0 + - Only accept valid field names in the `field` argument + * perf: use strict equality when possible + +4.13.4 / 2016-01-21 +=================== + + * deps: content-disposition@0.5.1 + - perf: enable strict mode + * deps: cookie@0.1.5 + - Throw on invalid values provided to `serialize` + * deps: depd@~1.1.0 + - Support web browser loading + - perf: enable strict mode + * deps: escape-html@~1.0.3 + - perf: enable strict mode + - perf: optimize string replacement + - perf: use faster string coercion + * deps: finalhandler@0.4.1 + - deps: escape-html@~1.0.3 + * deps: merge-descriptors@1.0.1 + - perf: enable strict mode + * deps: methods@~1.1.2 + - perf: enable strict mode + * deps: parseurl@~1.3.1 + - perf: enable strict mode + * deps: proxy-addr@~1.0.10 + - deps: ipaddr.js@1.0.5 + 
- perf: enable strict mode + * deps: range-parser@~1.0.3 + - perf: enable strict mode + * deps: send@0.13.1 + - deps: depd@~1.1.0 + - deps: destroy@~1.0.4 + - deps: escape-html@~1.0.3 + - deps: range-parser@~1.0.3 + * deps: serve-static@~1.10.2 + - deps: escape-html@~1.0.3 + - deps: parseurl@~1.3.0 + - deps: send@0.13.1 + +4.13.3 / 2015-08-02 +=================== + + * Fix infinite loop condition using `mergeParams: true` + * Fix inner numeric indices incorrectly altering parent `req.params` + +4.13.2 / 2015-07-31 +=================== + + * deps: accepts@~1.2.12 + - deps: mime-types@~2.1.4 + * deps: array-flatten@1.1.1 + - perf: enable strict mode + * deps: path-to-regexp@0.1.7 + - Fix regression with escaped round brackets and matching groups + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +4.13.1 / 2015-07-05 +=================== + + * deps: accepts@~1.2.10 + - deps: mime-types@~2.1.2 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument reassignment + +4.13.0 / 2015-06-20 +=================== + + * Add settings to debug output + * Fix `res.format` error when only `default` provided + * Fix issue where `next('route')` in `app.param` would incorrectly skip values + * Fix hiding platform issues with `decodeURIComponent` + - Only `URIError`s are a 400 + * Fix using `*` before params in routes + * Fix using capture groups before params in routes + * Simplify `res.cookie` to call `res.append` + * Use `array-flatten` module for flattening arrays + * deps: accepts@~1.2.9 + - deps: mime-types@~2.1.1 + - perf: avoid argument reassignment & argument slice + - perf: avoid negotiator recursive construction + - perf: enable strict mode + - perf: remove unnecessary bitwise operator + * deps: cookie@0.1.3 + - perf: deduce the scope of try-catch deopt + - perf: remove argument reassignments + * deps: escape-html@1.0.2 + * deps: etag@~1.7.0 + - Always include entity length in ETags for hash length extensions + - Generate non-Stats ETags using MD5 only (no longer CRC32) + - Improve stat performance by removing hashing + - Improve support for JXcore + - Remove base64 padding in ETags to shorten + - Support "fake" stats objects in environments without fs + - Use MD5 instead of MD4 in weak ETags over 1KB + * deps: finalhandler@0.4.0 + - Fix a false-positive when unpiping in Node.js 0.8 + - Support `statusCode` property on `Error` objects + - Use `unpipe` module for unpiping requests + - deps: escape-html@1.0.2 + - deps: on-finished@~2.3.0 + - perf: enable strict mode + - perf: remove argument reassignment + * deps: fresh@0.3.0 + - Add weak `ETag` matching support + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: path-to-regexp@0.1.6 + * deps: send@0.13.0 + - Allow Node.js HTTP server to set `Date` response header + - Fix incorrectly removing `Content-Location` on 304 response + - Improve the default redirect response headers + - Send appropriate headers on default error response + - Use `http-errors` for standard emitted errors + - Use `statuses` instead of `http` module for status messages + - deps: escape-html@1.0.2 + - deps: etag@~1.7.0 + - deps: fresh@0.3.0 + - deps: on-finished@~2.3.0 + - perf: enable strict mode + - perf: remove unnecessary array allocations + * deps: serve-static@~1.10.0 + - Add `fallthrough` option + - 
Fix reading options from options prototype + - Improve the default redirect response headers + - Malformed URLs now `next()` instead of 400 + - deps: escape-html@1.0.2 + - deps: send@0.13.0 + - perf: enable strict mode + - perf: remove argument reassignment + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: isolate `app.render` try block + * perf: remove argument reassignments in application + * perf: remove argument reassignments in request prototype + * perf: remove argument reassignments in response prototype + * perf: remove argument reassignments in routing + * perf: remove argument reassignments in `View` + * perf: skip attempting to decode zero length string + * perf: use saved reference to `http.STATUS_CODES` + +4.12.4 / 2015-05-17 +=================== + + * deps: accepts@~1.2.7 + - deps: mime-types@~2.0.11 + - deps: negotiator@0.5.3 + * deps: debug@~2.2.0 + - deps: ms@0.7.1 + * deps: depd@~1.0.1 + * deps: etag@~1.6.0 + - Improve support for JXcore + - Support "fake" stats objects in environments without `fs` + * deps: finalhandler@0.3.6 + - deps: debug@~2.2.0 + - deps: on-finished@~2.2.1 + * deps: on-finished@~2.2.1 + - Fix `isFinished(req)` when data buffered + * deps: proxy-addr@~1.0.8 + - deps: ipaddr.js@1.0.1 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: send@0.12.3 + - deps: debug@~2.2.0 + - deps: depd@~1.0.1 + - deps: etag@~1.6.0 + - deps: ms@0.7.1 + - deps: on-finished@~2.2.1 + * deps: serve-static@~1.9.3 + - deps: send@0.12.3 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +4.12.3 / 2015-03-17 +=================== + + * deps: accepts@~1.2.5 + - deps: mime-types@~2.0.10 + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: finalhandler@0.3.4 + - deps: debug@~2.1.3 + * deps: proxy-addr@~1.0.7 + - deps: ipaddr.js@0.1.9 + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + * deps: send@0.12.2 + - Throw errors early for invalid `extensions` or `index` options + - deps: debug@~2.1.3 + * deps: serve-static@~1.9.2 + - deps: send@0.12.2 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +4.12.2 / 2015-03-02 +=================== + + * Fix regression where `"Request aborted"` is logged using `res.sendFile` + +4.12.1 / 2015-03-01 +=================== + + * Fix constructing application with non-configurable prototype properties + * Fix `ECONNRESET` errors from `res.sendFile` usage + * Fix `req.host` when using "trust proxy" hops count + * Fix `req.protocol`/`req.secure` when using "trust proxy" hops count + * Fix wrong `code` on aborted connections from `res.sendFile` + * deps: merge-descriptors@1.0.0 + +4.12.0 / 2015-02-23 +=================== + + * Fix `"trust proxy"` setting to inherit when app is mounted + * Generate `ETag`s for all request responses + - No longer restricted to only responses for `GET` and `HEAD` requests + * Use `content-type` to parse `Content-Type` headers + * deps: accepts@~1.2.4 + - Fix preference sorting to be stable for long acceptable lists + - deps: mime-types@~2.0.9 + - deps: negotiator@0.5.1 + * deps: cookie-signature@1.0.6 + * deps: send@0.12.1 + - Always read the stat size from the file + - Fix mutating passed-in `options` + - deps: mime@1.3.4 + * deps: serve-static@~1.9.1 + - deps: send@0.12.1 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard 
for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +4.11.2 / 2015-02-01 +=================== + + * Fix `res.redirect` double-calling `res.end` for `HEAD` requests + * deps: accepts@~1.2.3 + - deps: mime-types@~2.0.8 + * deps: proxy-addr@~1.0.6 + - deps: ipaddr.js@0.1.8 + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +4.11.1 / 2015-01-20 +=================== + + * deps: send@0.11.1 + - Fix root path disclosure + * deps: serve-static@~1.8.1 + - Fix redirect loop in Node.js 0.11.14 + - Fix root path disclosure + - deps: send@0.11.1 + +4.11.0 / 2015-01-13 +=================== + + * Add `res.append(field, val)` to append headers + * Deprecate leading `:` in `name` for `app.param(name, fn)` + * Deprecate `req.param()` -- use `req.params`, `req.body`, or `req.query` instead + * Deprecate `app.param(fn)` + * Fix `OPTIONS` responses to include the `HEAD` method properly + * Fix `res.sendFile` not always detecting aborted connection + * Match routes iteratively to prevent stack overflows + * deps: accepts@~1.2.2 + - deps: mime-types@~2.0.7 + - deps: negotiator@0.5.0 + * deps: send@0.11.0 + - deps: debug@~2.1.1 + - deps: etag@~1.5.1 + - deps: ms@0.7.0 + - deps: on-finished@~2.2.0 + * deps: serve-static@~1.8.0 + - deps: send@0.11.0 + +4.10.8 / 2015-01-13 +=================== + + * Fix crash from error within `OPTIONS` response handler + * deps: proxy-addr@~1.0.5 + - deps: ipaddr.js@0.1.6 + +4.10.7 / 2015-01-04 +=================== + + * Fix `Allow` header for `OPTIONS` to not contain duplicate methods + * Fix incorrect "Request aborted" for `res.sendFile` when `HEAD` or 304 + * deps: debug@~2.1.1 + * deps: finalhandler@0.3.3 + - deps: debug@~2.1.1 + - deps: on-finished@~2.2.0 + * deps: methods@~1.1.1 + * deps: on-finished@~2.2.0 + * deps: serve-static@~1.7.2 + - Fix potential open redirect when mounted at root + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +4.10.6 / 2014-12-12 +=================== + + * Fix exception in `req.fresh`/`req.stale` without response headers + +4.10.5 / 2014-12-10 +=================== + + * Fix `res.send` double-calling `res.end` for `HEAD` requests + * deps: accepts@~1.1.4 + - deps: mime-types@~2.0.4 + * deps: type-is@~1.5.4 + - deps: mime-types@~2.0.4 + +4.10.4 / 2014-11-24 +=================== + + * Fix `res.sendfile` logging standard write errors + +4.10.3 / 2014-11-23 +=================== + + * Fix `res.sendFile` logging standard write errors + * deps: etag@~1.5.1 + * deps: proxy-addr@~1.0.4 + - deps: ipaddr.js@0.1.5 + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + +4.10.2 / 2014-11-09 +=================== + + * Correctly invoke async router callback asynchronously + * deps: accepts@~1.1.3 + - deps: mime-types@~2.0.3 + * deps: type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +4.10.1 / 2014-10-28 +=================== + + * Fix handling of URLs containing `://` in the path + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +4.10.0 / 2014-10-23 +=================== + + * Add support for `app.set('views', array)` + - Views are looked up in sequence in array of directories + * Fix `res.send(status)` to mention `res.sendStatus(status)` + * Fix handling of invalid empty URLs + * Use `content-disposition` module for `res.attachment`/`res.download` + - Sends standards-compliant `Content-Disposition` header + - Full Unicode support + * Use `path.resolve` in view lookup + * deps: debug@~2.1.0 + - Implement `DEBUG_FD` env variable support + * deps: depd@~1.0.0 + * deps: etag@~1.5.0 + - Improve string performance + - Slightly improve speed for 
weak ETags over 1KB + * deps: finalhandler@0.3.2 + - Terminate in progress response only on error + - Use `on-finished` to determine request status + - deps: debug@~2.1.0 + - deps: on-finished@~2.1.1 + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: send@0.10.1 + - deps: debug@~2.1.0 + - deps: depd@~1.0.0 + - deps: etag@~1.5.0 + - deps: on-finished@~2.1.1 + * deps: serve-static@~1.7.1 + - deps: send@0.10.1 + +4.9.8 / 2014-10-17 +================== + + * Fix `res.redirect` body when redirect status specified + * deps: accepts@~1.1.2 + - Fix error when media type has invalid parameter + - deps: negotiator@0.4.9 + +4.9.7 / 2014-10-10 +================== + + * Fix using same param name in array of paths + +4.9.6 / 2014-10-08 +================== + + * deps: accepts@~1.1.1 + - deps: mime-types@~2.0.2 + - deps: negotiator@0.4.8 + * deps: serve-static@~1.6.4 + - Fix redirect loop when index file serving disabled + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +4.9.5 / 2014-09-24 +================== + + * deps: etag@~1.4.0 + * deps: proxy-addr@~1.0.3 + - Use `forwarded` npm module + * deps: send@0.9.3 + - deps: etag@~1.4.0 + * deps: serve-static@~1.6.3 + - deps: send@0.9.3 + +4.9.4 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +4.9.3 / 2014-09-18 +================== + + * deps: proxy-addr@~1.0.2 + - Fix a global leak when multiple subnets are trusted + - deps: ipaddr.js@0.1.3 + +4.9.2 / 2014-09-17 +================== + + * Fix regression for empty string `path` in `app.use` + * Fix `router.use` to accept array of middleware without path + * Improve error message for bad `app.use` arguments + +4.9.1 / 2014-09-16 +================== + + * Fix `app.use` to accept array of middleware without path + * deps: depd@0.4.5 + * deps: etag@~1.3.1 + * deps: send@0.9.2 + - deps: depd@0.4.5 + - deps: etag@~1.3.1 + - deps: range-parser@~1.0.2 + * deps: serve-static@~1.6.2 + - deps: send@0.9.2 + +4.9.0 / 2014-09-08 +================== + + * Add `res.sendStatus` + * Invoke callback for sendfile when client aborts + - Applies to `res.sendFile`, `res.sendfile`, and `res.download` + - `err` will be populated with request aborted error + * Support IP address host in `req.subdomains` + * Use `etag` to generate `ETag` headers + * deps: accepts@~1.1.0 + - update `mime-types` + * deps: cookie-signature@1.0.5 + * deps: debug@~2.0.0 + * deps: finalhandler@0.2.0 + - Set `X-Content-Type-Options: nosniff` header + - deps: debug@~2.0.0 + * deps: fresh@0.2.4 + * deps: media-typer@0.3.0 + - Throw error when parameter format invalid on parse + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: range-parser@~1.0.2 + * deps: send@0.9.1 + - Add `lastModified` option + - Use `etag` to generate `ETag` header + - deps: debug@~2.0.0 + - deps: fresh@0.2.4 + * deps: serve-static@~1.6.1 + - Add `lastModified` option + - deps: send@0.9.1 + * deps: type-is@~1.5.1 + - fix `hasbody` to be true for `content-length: 0` + - deps: media-typer@0.3.0 + - deps: mime-types@~2.0.1 + * deps: vary@~1.0.0 + - Accept valid `Vary` header string as `field` + +4.8.8 / 2014-09-04 +================== + + * deps: send@0.8.5 + - Fix a path traversal issue when using `root` + - Fix malicious path detection for empty string path + * deps: serve-static@~1.5.4 + - deps: send@0.8.5 + +4.8.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove 
unnecessary cloning + +4.8.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +4.8.5 / 2014-08-18 +================== + + * deps: send@0.8.3 + - deps: destroy@1.0.3 + - deps: on-finished@2.1.0 + * deps: serve-static@~1.5.3 + - deps: send@0.8.3 + +4.8.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + * deps: send@0.8.2 + - Work around `fd` leak in Node.js 0.10 for `fs.ReadStream` + * deps: serve-static@~1.5.2 + - deps: send@0.8.2 + +4.8.3 / 2014-08-10 +================== + + * deps: parseurl@~1.3.0 + * deps: qs@1.2.1 + * deps: serve-static@~1.5.1 + - Fix parsing of weird `req.originalUrl` values + - deps: parseurl@~1.3.0 + - deps: utils-merge@1.0.0 + +4.8.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +4.8.1 / 2014-08-06 +================== + + * fix incorrect deprecation warnings on `res.download` + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +4.8.0 / 2014-08-05 +================== + + * add `res.sendFile` + - accepts a file system path instead of a URL + - requires an absolute path or `root` option specified + * deprecate `res.sendfile` -- use `res.sendFile` instead + * support mounted app as any argument to `app.use()` + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + * deps: send@0.8.1 + - Add `extensions` option + * deps: serve-static@~1.5.0 + - Add `extensions` option + - deps: send@0.8.1 + +4.7.4 / 2014-08-04 +================== + + * fix `res.sendfile` regression for serving directory index files + * deps: send@0.7.4 + - Fix incorrect 403 on Windows and Node.js 0.11 + - Fix serving index files without root dir + * deps: serve-static@~1.4.4 + - deps: send@0.7.4 + +4.7.3 / 2014-08-04 +================== + + * deps: send@0.7.3 + - Fix incorrect 403 on Windows and Node.js 0.11 + * deps: serve-static@~1.4.3 + - Fix incorrect 403 on Windows and Node.js 0.11 + - deps: send@0.7.3 + +4.7.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + * deps: send@0.7.2 + - deps: depd@0.4.4 + * deps: serve-static@~1.4.2 + +4.7.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + * deps: send@0.7.1 + - deps: depd@0.4.3 + * deps: serve-static@~1.4.1 + +4.7.0 / 2014-07-25 +================== + + * fix `req.protocol` for proxy-direct connections + * configurable query parser with `app.set('query parser', parser)` + - `app.set('query parser', 'extended')` parse with "qs" module + - `app.set('query parser', 'simple')` parse with "querystring" core module + - `app.set('query parser', false)` disable query string parsing + - `app.set('query parser', true)` enable simple parsing + * deprecate `res.json(status, obj)` -- use `res.status(status).json(obj)` instead + * deprecate `res.jsonp(status, obj)` -- use `res.status(status).jsonp(obj)` instead + * deprecate `res.send(status, body)` -- use `res.status(status).send(body)` instead + * deps: debug@1.0.4 + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: finalhandler@0.1.0 + - Respond after request fully read + - deps: debug@1.0.4 + * deps: parseurl@~1.2.0 + - Cache URLs based on original value + - Remove no-longer-needed URL 
mis-parse work-around + - Simplify the "fast-path" `RegExp` + * deps: send@0.7.0 + - Add `dotfiles` option + - Cap `maxAge` value to 1 year + - deps: debug@1.0.4 + - deps: depd@0.4.2 + * deps: serve-static@~1.4.0 + - deps: parseurl@~1.2.0 + - deps: send@0.7.0 + * perf: prevent multiple `Buffer` creation in `res.send` + +4.6.1 / 2014-07-12 +================== + + * fix `subapp.mountpath` regression for `app.use(subapp)` + +4.6.0 / 2014-07-11 +================== + + * accept multiple callbacks to `app.use()` + * add explicit "Rosetta Flash JSONP abuse" protection + - previous versions are not vulnerable; this is just explicit protection + * catch errors in multiple `req.param(name, fn)` handlers + * deprecate `res.redirect(url, status)` -- use `res.redirect(status, url)` instead + * fix `res.send(status, num)` to send `num` as json (not error) + * remove unnecessary escaping when `res.jsonp` returns JSON response + * support non-string `path` in `app.use(path, fn)` + - supports array of paths + - supports `RegExp` + * router: fix optimization on router exit + * router: refactor location of `try` blocks + * router: speed up standard `app.use(fn)` + * deps: debug@1.0.3 + - Add support for multiple wildcards in namespaces + * deps: finalhandler@0.0.3 + - deps: debug@1.0.3 + * deps: methods@1.1.0 + - add `CONNECT` + * deps: parseurl@~1.1.3 + - faster parsing of href-only URLs + * deps: path-to-regexp@0.1.3 + * deps: send@0.6.0 + - deps: debug@1.0.3 + * deps: serve-static@~1.3.2 + - deps: parseurl@~1.1.3 + - deps: send@0.6.0 + * perf: fix arguments reassign deopt in some `res` methods + +4.5.1 / 2014-07-06 +================== + + * fix routing regression when altering `req.method` + +4.5.0 / 2014-07-04 +================== + + * add deprecation message to non-plural `req.accepts*` + * add deprecation message to `res.send(body, status)` + * add deprecation message to `res.vary()` + * add `headers` option to `res.sendfile` + - use to set headers on successful file transfer + * add `mergeParams` option to `Router` + - merges `req.params` from parent routes + * add `req.hostname` -- correct name for what `req.host` returns + * deprecate things with `depd` module + * deprecate `req.host` -- use `req.hostname` instead + * fix behavior when handling request without routes + * fix handling when `route.all` is only route + * invoke `router.param()` only when route matches + * restore `req.params` after invoking router + * use `finalhandler` for final response handling + * use `media-typer` to alter content-type charset + * deps: accepts@~1.0.7 + * deps: send@0.5.0 + - Accept string for `maxage` (converted by `ms`) + - Include link in default redirect response + * deps: serve-static@~1.3.0 + - Accept string for `maxAge` (converted by `ms`) + - Add `setHeaders` option + - Include HTML link in redirect response + - deps: send@0.5.0 + * deps: type-is@~1.3.2 + +4.4.5 / 2014-06-26 +================== + + * deps: cookie-signature@1.0.4 + - fix for timing attacks + +4.4.4 / 2014-06-20 +================== + + * fix `res.attachment` Unicode filenames in Safari + * fix "trim prefix" debug message in `express:router` + * deps: accepts@~1.0.5 + * deps: buffer-crc32@0.2.3 + +4.4.3 / 2014-06-11 +================== + + * fix persistence of modified `req.params[name]` from `app.param()` + * deps: accepts@1.0.3 + - deps: negotiator@0.4.6 + * deps: debug@1.0.2 + * deps: send@0.4.3 + - Do not throw un-catchable error on file open race condition + - Use `escape-html` for HTML escaping + - deps: debug@1.0.2 + - deps: 
finished@1.2.2 + - deps: fresh@0.2.2 + * deps: serve-static@1.2.3 + - Do not throw un-catchable error on file open race condition + - deps: send@0.4.3 + +4.4.2 / 2014-06-09 +================== + + * fix catching errors from top-level handlers + * use `vary` module for `res.vary` + * deps: debug@1.0.1 + * deps: proxy-addr@1.0.1 + * deps: send@0.4.2 + - fix "event emitter leak" warnings + - deps: debug@1.0.1 + - deps: finished@1.2.1 + * deps: serve-static@1.2.2 + - fix "event emitter leak" warnings + - deps: send@0.4.2 + * deps: type-is@1.2.1 + +4.4.1 / 2014-06-02 +================== + + * deps: methods@1.0.1 + * deps: send@0.4.1 + - Send `max-age` in `Cache-Control` in correct format + * deps: serve-static@1.2.1 + - use `escape-html` for escaping + - deps: send@0.4.1 + +4.4.0 / 2014-05-30 +================== + + * custom etag control with `app.set('etag', val)` + - `app.set('etag', function(body, encoding){ return '"etag"' })` custom etag generation + - `app.set('etag', 'weak')` weak tag + - `app.set('etag', 'strong')` strong etag + - `app.set('etag', false)` turn off + - `app.set('etag', true)` standard etag + * mark `res.send` ETag as weak and reduce collisions + * update accepts to 1.0.2 + - Fix interpretation when header not in request + * update send to 0.4.0 + - Calculate ETag with md5 for reduced collisions + - Ignore stream errors after request ends + - deps: debug@0.8.1 + * update serve-static to 1.2.0 + - Calculate ETag with md5 for reduced collisions + - Ignore stream errors after request ends + - deps: send@0.4.0 + +4.3.2 / 2014-05-28 +================== + + * fix handling of errors from `router.param()` callbacks + +4.3.1 / 2014-05-23 +================== + + * revert "fix behavior of multiple `app.VERB` for the same path" + - this caused a regression in the order of route execution + +4.3.0 / 2014-05-21 +================== + + * add `req.baseUrl` to access the path stripped from `req.url` in routes + * fix behavior of multiple `app.VERB` for the same path + * fix issue routing requests among sub routers + * invoke `router.param()` only when necessary instead of every match + * proper proxy trust with `app.set('trust proxy', trust)` + - `app.set('trust proxy', 1)` trust first hop + - `app.set('trust proxy', 'loopback')` trust loopback addresses + - `app.set('trust proxy', '10.0.0.1')` trust single IP + - `app.set('trust proxy', '10.0.0.1/16')` trust subnet + - `app.set('trust proxy', '10.0.0.1, 10.0.0.2')` trust list + - `app.set('trust proxy', false)` turn off + - `app.set('trust proxy', true)` trust everything + * set proper `charset` in `Content-Type` for `res.send` + * update type-is to 1.2.0 + - support suffix matching + +4.2.0 / 2014-05-11 +================== + + * deprecate `app.del()` -- use `app.delete()` instead + * deprecate `res.json(obj, status)` -- use `res.json(status, obj)` instead + - the edge-case `res.json(status, num)` requires `res.status(status).json(num)` + * deprecate `res.jsonp(obj, status)` -- use `res.jsonp(status, obj)` instead + - the edge-case `res.jsonp(status, num)` requires `res.status(status).jsonp(num)` + * fix `req.next` when inside router instance + * include `ETag` header in `HEAD` requests + * keep previous `Content-Type` for `res.jsonp` + * support PURGE method + - add `app.purge` + - add `router.purge` + - include PURGE in `app.all` + * update debug to 0.8.0 + - add `enable()` method + - change from stderr to stdout + * update methods to 1.0.0 + - add PURGE + +4.1.2 / 2014-05-08 +================== + + * fix `req.host` for IPv6 literals + 
* fix `res.jsonp` error if callback param is object + +4.1.1 / 2014-04-27 +================== + + * fix package.json to reflect supported node version + +4.1.0 / 2014-04-24 +================== + + * pass options from `res.sendfile` to `send` + * preserve casing of headers in `res.header` and `res.set` + * support unicode file names in `res.attachment` and `res.download` + * update accepts to 1.0.1 + - deps: negotiator@0.4.0 + * update cookie to 0.1.2 + - Fix for maxAge == 0 + - made compat with expires field + * update send to 0.3.0 + - Accept API options in options object + - Coerce option types + - Control whether to generate etags + - Default directory access to 403 when index disabled + - Fix sending files with dots without root set + - Include file path in etag + - Make "Can't set headers after they are sent." catchable + - Send full entity-body for multi range requests + - Set etags to "weak" + - Support "If-Range" header + - Support multiple index paths + - deps: mime@1.2.11 + * update serve-static to 1.1.0 + - Accept options directly to `send` module + - Resolve relative paths at middleware setup + - Use parseurl to parse the URL from request + - deps: send@0.3.0 + * update type-is to 1.1.0 + - add non-array values support + - add `multipart` as a shorthand + +4.0.0 / 2014-04-09 +================== + + * remove: + - node 0.8 support + - connect and connect's patches except for charset handling + - express(1) - moved to [express-generator](https://github.com/expressjs/generator) + - `express.createServer()` - it has been deprecated for a long time. Use `express()` + - `app.configure` - use logic in your own app code + - `app.router` - is removed + - `req.auth` - use `basic-auth` instead + - `req.accepted*` - use `req.accepts*()` instead + - `res.location` - relative URL resolution is removed + - `res.charset` - include the charset in the content type when using `res.set()` + - all bundled middleware except `static` + * change: + - `app.route` -> `app.mountpath` when mounting an express app in another express app + - `json spaces` no longer enabled by default in development + - `req.accepts*` -> `req.accepts*s` - i.e. `req.acceptsEncoding` -> `req.acceptsEncodings` + - `req.params` is now an object instead of an array + - `res.locals` is no longer a function. It is a plain js object. Treat it as such. 
+ - `res.headerSent` -> `res.headersSent` to match node.js ServerResponse object + * refactor: + - `req.accepts*` with [accepts](https://github.com/expressjs/accepts) + - `req.is` with [type-is](https://github.com/expressjs/type-is) + - [path-to-regexp](https://github.com/component/path-to-regexp) + * add: + - `app.router()` - returns the app Router instance + - `app.route()` - Proxy to the app's `Router#route()` method to create a new route + - Router & Route - public API + +3.21.2 / 2015-07-31 +=================== + + * deps: connect@2.30.2 + - deps: body-parser@~1.13.3 + - deps: compression@~1.5.2 + - deps: errorhandler@~1.4.2 + - deps: method-override@~2.3.5 + - deps: serve-index@~1.7.2 + - deps: type-is@~1.6.6 + - deps: vhost@~3.0.1 + * deps: vary@~1.0.1 + - Fix setting empty header from empty `field` + - perf: enable strict mode + - perf: remove argument reassignments + +3.21.1 / 2015-07-05 +=================== + + * deps: basic-auth@~1.0.3 + * deps: connect@2.30.1 + - deps: body-parser@~1.13.2 + - deps: compression@~1.5.1 + - deps: errorhandler@~1.4.1 + - deps: morgan@~1.6.1 + - deps: pause@0.1.0 + - deps: qs@4.0.0 + - deps: serve-index@~1.7.1 + - deps: type-is@~1.6.4 + +3.21.0 / 2015-06-18 +=================== + + * deps: basic-auth@1.0.2 + - perf: enable strict mode + - perf: hoist regular expression + - perf: parse with regular expressions + - perf: remove argument reassignment + * deps: connect@2.30.0 + - deps: body-parser@~1.13.1 + - deps: bytes@2.1.0 + - deps: compression@~1.5.0 + - deps: cookie@0.1.3 + - deps: cookie-parser@~1.3.5 + - deps: csurf@~1.8.3 + - deps: errorhandler@~1.4.0 + - deps: express-session@~1.11.3 + - deps: finalhandler@0.4.0 + - deps: fresh@0.3.0 + - deps: morgan@~1.6.0 + - deps: serve-favicon@~2.3.0 + - deps: serve-index@~1.7.0 + - deps: serve-static@~1.10.0 + - deps: type-is@~1.6.3 + * deps: cookie@0.1.3 + - perf: deduce the scope of try-catch deopt + - perf: remove argument reassignments + * deps: escape-html@1.0.2 + * deps: etag@~1.7.0 + - Always include entity length in ETags for hash length extensions + - Generate non-Stats ETags using MD5 only (no longer CRC32) + - Improve stat performance by removing hashing + - Improve support for JXcore + - Remove base64 padding in ETags to shorten + - Support "fake" stats objects in environments without fs + - Use MD5 instead of MD4 in weak ETags over 1KB + * deps: fresh@0.3.0 + - Add weak `ETag` matching support + * deps: mkdirp@0.5.1 + - Work in global strict mode + * deps: send@0.13.0 + - Allow Node.js HTTP server to set `Date` response header + - Fix incorrectly removing `Content-Location` on 304 response + - Improve the default redirect response headers + - Send appropriate headers on default error response + - Use `http-errors` for standard emitted errors + - Use `statuses` instead of `http` module for status messages + - deps: escape-html@1.0.2 + - deps: etag@~1.7.0 + - deps: fresh@0.3.0 + - deps: on-finished@~2.3.0 + - perf: enable strict mode + - perf: remove unnecessary array allocations + +3.20.3 / 2015-05-17 +=================== + + * deps: connect@2.29.2 + - deps: body-parser@~1.12.4 + - deps: compression@~1.4.4 + - deps: connect-timeout@~1.6.2 + - deps: debug@~2.2.0 + - deps: depd@~1.0.1 + - deps: errorhandler@~1.3.6 + - deps: finalhandler@0.3.6 + - deps: method-override@~2.3.3 + - deps: morgan@~1.5.3 + - deps: qs@2.4.2 + - deps: response-time@~2.3.1 + - deps: serve-favicon@~2.2.1 + - deps: serve-index@~1.6.4 + - deps: serve-static@~1.9.3 + - deps: type-is@~1.6.2 + * deps: debug@~2.2.0 + - deps: 
ms@0.7.1 + * deps: depd@~1.0.1 + * deps: proxy-addr@~1.0.8 + - deps: ipaddr.js@1.0.1 + * deps: send@0.12.3 + - deps: debug@~2.2.0 + - deps: depd@~1.0.1 + - deps: etag@~1.6.0 + - deps: ms@0.7.1 + - deps: on-finished@~2.2.1 + +3.20.2 / 2015-03-16 +=================== + + * deps: connect@2.29.1 + - deps: body-parser@~1.12.2 + - deps: compression@~1.4.3 + - deps: connect-timeout@~1.6.1 + - deps: debug@~2.1.3 + - deps: errorhandler@~1.3.5 + - deps: express-session@~1.10.4 + - deps: finalhandler@0.3.4 + - deps: method-override@~2.3.2 + - deps: morgan@~1.5.2 + - deps: qs@2.4.1 + - deps: serve-index@~1.6.3 + - deps: serve-static@~1.9.2 + - deps: type-is@~1.6.1 + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: merge-descriptors@1.0.0 + * deps: proxy-addr@~1.0.7 + - deps: ipaddr.js@0.1.9 + * deps: send@0.12.2 + - Throw errors early for invalid `extensions` or `index` options + - deps: debug@~2.1.3 + +3.20.1 / 2015-02-28 +=================== + + * Fix `req.host` when using "trust proxy" hops count + * Fix `req.protocol`/`req.secure` when using "trust proxy" hops count + +3.20.0 / 2015-02-18 +=================== + + * Fix `"trust proxy"` setting to inherit when app is mounted + * Generate `ETag`s for all request responses + - No longer restricted to only responses for `GET` and `HEAD` requests + * Use `content-type` to parse `Content-Type` headers + * deps: connect@2.29.0 + - Use `content-type` to parse `Content-Type` headers + - deps: body-parser@~1.12.0 + - deps: compression@~1.4.1 + - deps: connect-timeout@~1.6.0 + - deps: cookie-parser@~1.3.4 + - deps: cookie-signature@1.0.6 + - deps: csurf@~1.7.0 + - deps: errorhandler@~1.3.4 + - deps: express-session@~1.10.3 + - deps: http-errors@~1.3.1 + - deps: response-time@~2.3.0 + - deps: serve-index@~1.6.2 + - deps: serve-static@~1.9.1 + - deps: type-is@~1.6.0 + * deps: cookie-signature@1.0.6 + * deps: send@0.12.1 + - Always read the stat size from the file + - Fix mutating passed-in `options` + - deps: mime@1.3.4 + +3.19.2 / 2015-02-01 +=================== + + * deps: connect@2.28.3 + - deps: compression@~1.3.1 + - deps: csurf@~1.6.6 + - deps: errorhandler@~1.3.3 + - deps: express-session@~1.10.2 + - deps: serve-index@~1.6.1 + - deps: type-is@~1.5.6 + * deps: proxy-addr@~1.0.6 + - deps: ipaddr.js@0.1.8 + +3.19.1 / 2015-01-20 +=================== + + * deps: connect@2.28.2 + - deps: body-parser@~1.10.2 + - deps: serve-static@~1.8.1 + * deps: send@0.11.1 + - Fix root path disclosure + +3.19.0 / 2015-01-09 +=================== + + * Fix `OPTIONS` responses to include the `HEAD` method properly + * Use `readline` for prompt in `express(1)` + * deps: commander@2.6.0 + * deps: connect@2.28.1 + - deps: body-parser@~1.10.1 + - deps: compression@~1.3.0 + - deps: connect-timeout@~1.5.0 + - deps: csurf@~1.6.4 + - deps: debug@~2.1.1 + - deps: errorhandler@~1.3.2 + - deps: express-session@~1.10.1 + - deps: finalhandler@0.3.3 + - deps: method-override@~2.3.1 + - deps: morgan@~1.5.1 + - deps: serve-favicon@~2.2.0 + - deps: serve-index@~1.6.0 + - deps: serve-static@~1.8.0 + - deps: type-is@~1.5.5 + * deps: debug@~2.1.1 + * deps: methods@~1.1.1 + * deps: proxy-addr@~1.0.5 + - deps: ipaddr.js@0.1.6 + * deps: send@0.11.0 + - deps: debug@~2.1.1 + - deps: etag@~1.5.1 + - deps: ms@0.7.0 + - deps: on-finished@~2.2.0 + +3.18.6 / 2014-12-12 +=================== + + * Fix exception in `req.fresh`/`req.stale` without response headers + +3.18.5 / 2014-12-11 +=================== + + * deps: connect@2.27.6 + - deps: compression@~1.2.2 
+ - deps: express-session@~1.9.3 + - deps: http-errors@~1.2.8 + - deps: serve-index@~1.5.3 + - deps: type-is@~1.5.4 + +3.18.4 / 2014-11-23 +=================== + + * deps: connect@2.27.4 + - deps: body-parser@~1.9.3 + - deps: compression@~1.2.1 + - deps: errorhandler@~1.2.3 + - deps: express-session@~1.9.2 + - deps: qs@2.3.3 + - deps: serve-favicon@~2.1.7 + - deps: serve-static@~1.5.1 + - deps: type-is@~1.5.3 + * deps: etag@~1.5.1 + * deps: proxy-addr@~1.0.4 + - deps: ipaddr.js@0.1.5 + +3.18.3 / 2014-11-09 +=================== + + * deps: connect@2.27.3 + - Correctly invoke async callback asynchronously + - deps: csurf@~1.6.3 + +3.18.2 / 2014-10-28 +=================== + + * deps: connect@2.27.2 + - Fix handling of URLs containing `://` in the path + - deps: body-parser@~1.9.2 + - deps: qs@2.3.2 + +3.18.1 / 2014-10-22 +=================== + + * Fix internal `utils.merge` deprecation warnings + * deps: connect@2.27.1 + - deps: body-parser@~1.9.1 + - deps: express-session@~1.9.1 + - deps: finalhandler@0.3.2 + - deps: morgan@~1.4.1 + - deps: qs@2.3.0 + - deps: serve-static@~1.7.1 + * deps: send@0.10.1 + - deps: on-finished@~2.1.1 + +3.18.0 / 2014-10-17 +=================== + + * Use `content-disposition` module for `res.attachment`/`res.download` + - Sends standards-compliant `Content-Disposition` header + - Full Unicode support + * Use `etag` module to generate `ETag` headers + * deps: connect@2.27.0 + - Use `http-errors` module for creating errors + - Use `utils-merge` module for merging objects + - deps: body-parser@~1.9.0 + - deps: compression@~1.2.0 + - deps: connect-timeout@~1.4.0 + - deps: debug@~2.1.0 + - deps: depd@~1.0.0 + - deps: express-session@~1.9.0 + - deps: finalhandler@0.3.1 + - deps: method-override@~2.3.0 + - deps: morgan@~1.4.0 + - deps: response-time@~2.2.0 + - deps: serve-favicon@~2.1.6 + - deps: serve-index@~1.5.0 + - deps: serve-static@~1.7.0 + * deps: debug@~2.1.0 + - Implement `DEBUG_FD` env variable support + * deps: depd@~1.0.0 + * deps: send@0.10.0 + - deps: debug@~2.1.0 + - deps: depd@~1.0.0 + - deps: etag@~1.5.0 + +3.17.8 / 2014-10-15 +=================== + + * deps: connect@2.26.6 + - deps: compression@~1.1.2 + - deps: csurf@~1.6.2 + - deps: errorhandler@~1.2.2 + +3.17.7 / 2014-10-08 +=================== + + * deps: connect@2.26.5 + - Fix accepting non-object arguments to `logger` + - deps: serve-static@~1.6.4 + +3.17.6 / 2014-10-02 +=================== + + * deps: connect@2.26.4 + - deps: morgan@~1.3.2 + - deps: type-is@~1.5.2 + +3.17.5 / 2014-09-24 +=================== + + * deps: connect@2.26.3 + - deps: body-parser@~1.8.4 + - deps: serve-favicon@~2.1.5 + - deps: serve-static@~1.6.3 + * deps: proxy-addr@~1.0.3 + - Use `forwarded` npm module + * deps: send@0.9.3 + - deps: etag@~1.4.0 + +3.17.4 / 2014-09-19 +=================== + + * deps: connect@2.26.2 + - deps: body-parser@~1.8.3 + - deps: qs@2.2.4 + +3.17.3 / 2014-09-18 +=================== + + * deps: proxy-addr@~1.0.2 + - Fix a global leak when multiple subnets are trusted + - deps: ipaddr.js@0.1.3 + +3.17.2 / 2014-09-15 +=================== + + * Use `crc` instead of `buffer-crc32` for speed + * deps: connect@2.26.1 + - deps: body-parser@~1.8.2 + - deps: depd@0.4.5 + - deps: express-session@~1.8.2 + - deps: morgan@~1.3.1 + - deps: serve-favicon@~2.1.3 + - deps: serve-static@~1.6.2 + * deps: depd@0.4.5 + * deps: send@0.9.2 + - deps: depd@0.4.5 + - deps: etag@~1.3.1 + - deps: range-parser@~1.0.2 + +3.17.1 / 2014-09-08 +=================== + + * Fix error in `req.subdomains` on empty host + +3.17.0 / 
2014-09-08 +=================== + + * Support `X-Forwarded-Host` in `req.subdomains` + * Support IP address host in `req.subdomains` + * deps: connect@2.26.0 + - deps: body-parser@~1.8.1 + - deps: compression@~1.1.0 + - deps: connect-timeout@~1.3.0 + - deps: cookie-parser@~1.3.3 + - deps: cookie-signature@1.0.5 + - deps: csurf@~1.6.1 + - deps: debug@~2.0.0 + - deps: errorhandler@~1.2.0 + - deps: express-session@~1.8.1 + - deps: finalhandler@0.2.0 + - deps: fresh@0.2.4 + - deps: media-typer@0.3.0 + - deps: method-override@~2.2.0 + - deps: morgan@~1.3.0 + - deps: qs@2.2.3 + - deps: serve-favicon@~2.1.3 + - deps: serve-index@~1.2.1 + - deps: serve-static@~1.6.1 + - deps: type-is@~1.5.1 + - deps: vhost@~3.0.0 + * deps: cookie-signature@1.0.5 + * deps: debug@~2.0.0 + * deps: fresh@0.2.4 + * deps: media-typer@0.3.0 + - Throw error when parameter format invalid on parse + * deps: range-parser@~1.0.2 + * deps: send@0.9.1 + - Add `lastModified` option + - Use `etag` to generate `ETag` header + - deps: debug@~2.0.0 + - deps: fresh@0.2.4 + * deps: vary@~1.0.0 + - Accept valid `Vary` header string as `field` + +3.16.10 / 2014-09-04 +==================== + + * deps: connect@2.25.10 + - deps: serve-static@~1.5.4 + * deps: send@0.8.5 + - Fix a path traversal issue when using `root` + - Fix malicious path detection for empty string path + +3.16.9 / 2014-08-29 +=================== + + * deps: connect@2.25.9 + - deps: body-parser@~1.6.7 + - deps: qs@2.2.2 + +3.16.8 / 2014-08-27 +=================== + + * deps: connect@2.25.8 + - deps: body-parser@~1.6.6 + - deps: csurf@~1.4.1 + - deps: qs@2.2.0 + +3.16.7 / 2014-08-18 +=================== + + * deps: connect@2.25.7 + - deps: body-parser@~1.6.5 + - deps: express-session@~1.7.6 + - deps: morgan@~1.2.3 + - deps: serve-static@~1.5.3 + * deps: send@0.8.3 + - deps: destroy@1.0.3 + - deps: on-finished@2.1.0 + +3.16.6 / 2014-08-14 +=================== + + * deps: connect@2.25.6 + - deps: body-parser@~1.6.4 + - deps: qs@1.2.2 + - deps: serve-static@~1.5.2 + * deps: send@0.8.2 + - Work around `fd` leak in Node.js 0.10 for `fs.ReadStream` + +3.16.5 / 2014-08-11 +=================== + + * deps: connect@2.25.5 + - Fix backwards compatibility in `logger` + +3.16.4 / 2014-08-10 +=================== + + * Fix original URL parsing in `res.location` + * deps: connect@2.25.4 + - Fix `query` middleware breaking with argument + - deps: body-parser@~1.6.3 + - deps: compression@~1.0.11 + - deps: connect-timeout@~1.2.2 + - deps: express-session@~1.7.5 + - deps: method-override@~2.1.3 + - deps: on-headers@~1.0.0 + - deps: parseurl@~1.3.0 + - deps: qs@1.2.1 + - deps: response-time@~2.0.1 + - deps: serve-index@~1.1.6 + - deps: serve-static@~1.5.1 + * deps: parseurl@~1.3.0 + +3.16.3 / 2014-08-07 +=================== + + * deps: connect@2.25.3 + - deps: multiparty@3.3.2 + +3.16.2 / 2014-08-07 +=================== + + * deps: connect@2.25.2 + - deps: body-parser@~1.6.2 + - deps: qs@1.2.0 + +3.16.1 / 2014-08-06 +=================== + + * deps: connect@2.25.1 + - deps: body-parser@~1.6.1 + - deps: qs@1.1.0 + +3.16.0 / 2014-08-05 +=================== + + * deps: connect@2.25.0 + - deps: body-parser@~1.6.0 + - deps: compression@~1.0.10 + - deps: csurf@~1.4.0 + - deps: express-session@~1.7.4 + - deps: qs@1.0.2 + - deps: serve-static@~1.5.0 + * deps: send@0.8.1 + - Add `extensions` option + +3.15.3 / 2014-08-04 +=================== + + * fix `res.sendfile` regression for serving directory index files + * deps: connect@2.24.3 + - deps: serve-index@~1.1.5 + - deps: serve-static@~1.4.4 + * deps: 
send@0.7.4 + - Fix incorrect 403 on Windows and Node.js 0.11 + - Fix serving index files without root dir + +3.15.2 / 2014-07-27 +=================== + + * deps: connect@2.24.2 + - deps: body-parser@~1.5.2 + - deps: depd@0.4.4 + - deps: express-session@~1.7.2 + - deps: morgan@~1.2.2 + - deps: serve-static@~1.4.2 + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + * deps: send@0.7.2 + - deps: depd@0.4.4 + +3.15.1 / 2014-07-26 +=================== + + * deps: connect@2.24.1 + - deps: body-parser@~1.5.1 + - deps: depd@0.4.3 + - deps: express-session@~1.7.1 + - deps: morgan@~1.2.1 + - deps: serve-index@~1.1.4 + - deps: serve-static@~1.4.1 + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + * deps: send@0.7.1 + - deps: depd@0.4.3 + +3.15.0 / 2014-07-22 +=================== + + * Fix `req.protocol` for proxy-direct connections + * Pass options from `res.sendfile` to `send` + * deps: connect@2.24.0 + - deps: body-parser@~1.5.0 + - deps: compression@~1.0.9 + - deps: connect-timeout@~1.2.1 + - deps: debug@1.0.4 + - deps: depd@0.4.2 + - deps: express-session@~1.7.0 + - deps: finalhandler@0.1.0 + - deps: method-override@~2.1.2 + - deps: morgan@~1.2.0 + - deps: multiparty@3.3.1 + - deps: parseurl@~1.2.0 + - deps: serve-static@~1.4.0 + * deps: debug@1.0.4 + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: parseurl@~1.2.0 + - Cache URLs based on original value + - Remove no-longer-needed URL mis-parse work-around + - Simplify the "fast-path" `RegExp` + * deps: send@0.7.0 + - Add `dotfiles` option + - Cap `maxAge` value to 1 year + - deps: debug@1.0.4 + - deps: depd@0.4.2 + +3.14.0 / 2014-07-11 +=================== + + * add explicit "Rosetta Flash JSONP abuse" protection + - previous versions are not vulnerable; this is just explicit protection + * deprecate `res.redirect(url, status)` -- use `res.redirect(status, url)` instead + * fix `res.send(status, num)` to send `num` as json (not error) + * remove unnecessary escaping when `res.jsonp` returns JSON response + * deps: basic-auth@1.0.0 + - support empty password + - support empty username + * deps: connect@2.23.0 + - deps: debug@1.0.3 + - deps: express-session@~1.6.4 + - deps: method-override@~2.1.0 + - deps: parseurl@~1.1.3 + - deps: serve-static@~1.3.1 + * deps: debug@1.0.3 + - Add support for multiple wildcards in namespaces + * deps: methods@1.1.0 + - add `CONNECT` + * deps: parseurl@~1.1.3 + - faster parsing of href-only URLs + +3.13.0 / 2014-07-03 +=================== + + * add deprecation message to `app.configure` + * add deprecation message to `req.auth` + * use `basic-auth` to parse `Authorization` header + * deps: connect@2.22.0 + - deps: csurf@~1.3.0 + - deps: express-session@~1.6.1 + - deps: multiparty@3.3.0 + - deps: serve-static@~1.3.0 + * deps: send@0.5.0 + - Accept string for `maxage` (converted by `ms`) + - Include link in default redirect response + +3.12.1 / 2014-06-26 +=================== + + * deps: connect@2.21.1 + - deps: cookie-parser@1.3.2 + - deps: cookie-signature@1.0.4 + - deps: express-session@~1.5.2 + - deps: type-is@~1.3.2 + * deps: cookie-signature@1.0.4 + - fix for timing attacks + +3.12.0 / 2014-06-21 +=================== + + * use `media-typer` to alter content-type charset + * deps: connect@2.21.0 + - deprecate `connect(middleware)` -- use `app.use(middleware)` instead + - deprecate 
`connect.createServer()` -- use `connect()` instead + - fix `res.setHeader()` patch to work with get -> append -> set pattern + - deps: compression@~1.0.8 + - deps: errorhandler@~1.1.1 + - deps: express-session@~1.5.0 + - deps: serve-index@~1.1.3 + +3.11.0 / 2014-06-19 +=================== + + * deprecate things with `depd` module + * deps: buffer-crc32@0.2.3 + * deps: connect@2.20.2 + - deprecate `verify` option to `json` -- use `body-parser` npm module instead + - deprecate `verify` option to `urlencoded` -- use `body-parser` npm module instead + - deprecate things with `depd` module + - use `finalhandler` for final response handling + - use `media-typer` to parse `content-type` for charset + - deps: body-parser@1.4.3 + - deps: connect-timeout@1.1.1 + - deps: cookie-parser@1.3.1 + - deps: csurf@1.2.2 + - deps: errorhandler@1.1.0 + - deps: express-session@1.4.0 + - deps: multiparty@3.2.9 + - deps: serve-index@1.1.2 + - deps: type-is@1.3.1 + - deps: vhost@2.0.0 + +3.10.5 / 2014-06-11 +=================== + + * deps: connect@2.19.6 + - deps: body-parser@1.3.1 + - deps: compression@1.0.7 + - deps: debug@1.0.2 + - deps: serve-index@1.1.1 + - deps: serve-static@1.2.3 + * deps: debug@1.0.2 + * deps: send@0.4.3 + - Do not throw un-catchable error on file open race condition + - Use `escape-html` for HTML escaping + - deps: debug@1.0.2 + - deps: finished@1.2.2 + - deps: fresh@0.2.2 + +3.10.4 / 2014-06-09 +=================== + + * deps: connect@2.19.5 + - fix "event emitter leak" warnings + - deps: csurf@1.2.1 + - deps: debug@1.0.1 + - deps: serve-static@1.2.2 + - deps: type-is@1.2.1 + * deps: debug@1.0.1 + * deps: send@0.4.2 + - fix "event emitter leak" warnings + - deps: finished@1.2.1 + - deps: debug@1.0.1 + +3.10.3 / 2014-06-05 +=================== + + * use `vary` module for `res.vary` + * deps: connect@2.19.4 + - deps: errorhandler@1.0.2 + - deps: method-override@2.0.2 + - deps: serve-favicon@2.0.1 + * deps: debug@1.0.0 + +3.10.2 / 2014-06-03 +=================== + + * deps: connect@2.19.3 + - deps: compression@1.0.6 + +3.10.1 / 2014-06-03 +=================== + + * deps: connect@2.19.2 + - deps: compression@1.0.4 + * deps: proxy-addr@1.0.1 + +3.10.0 / 2014-06-02 +=================== + + * deps: connect@2.19.1 + - deprecate `methodOverride()` -- use `method-override` npm module instead + - deps: body-parser@1.3.0 + - deps: method-override@2.0.1 + - deps: multiparty@3.2.8 + - deps: response-time@2.0.0 + - deps: serve-static@1.2.1 + * deps: methods@1.0.1 + * deps: send@0.4.1 + - Send `max-age` in `Cache-Control` in correct format + +3.9.0 / 2014-05-30 +================== + + * custom etag control with `app.set('etag', val)` + - `app.set('etag', function(body, encoding){ return '"etag"' })` custom etag generation + - `app.set('etag', 'weak')` weak tag + - `app.set('etag', 'strong')` strong etag + - `app.set('etag', false)` turn off + - `app.set('etag', true)` standard etag + * Include ETag in HEAD requests + * mark `res.send` ETag as weak and reduce collisions + * update connect to 2.18.0 + - deps: compression@1.0.3 + - deps: serve-index@1.1.0 + - deps: serve-static@1.2.0 + * update send to 0.4.0 + - Calculate ETag with md5 for reduced collisions + - Ignore stream errors after request ends + - deps: debug@0.8.1 + +3.8.1 / 2014-05-27 +================== + + * update connect to 2.17.3 + - deps: body-parser@1.2.2 + - deps: express-session@1.2.1 + - deps: method-override@1.0.2 + +3.8.0 / 2014-05-21 +================== + + * keep previous `Content-Type` for `res.jsonp` + * set proper `charset` 
in `Content-Type` for `res.send` + * update connect to 2.17.1 + - fix `res.charset` appending charset when `content-type` has one + - deps: express-session@1.2.0 + - deps: morgan@1.1.1 + - deps: serve-index@1.0.3 + +3.7.0 / 2014-05-18 +================== + + * proper proxy trust with `app.set('trust proxy', trust)` + - `app.set('trust proxy', 1)` trust first hop + - `app.set('trust proxy', 'loopback')` trust loopback addresses + - `app.set('trust proxy', '10.0.0.1')` trust single IP + - `app.set('trust proxy', '10.0.0.1/16')` trust subnet + - `app.set('trust proxy', '10.0.0.1, 10.0.0.2')` trust list + - `app.set('trust proxy', false)` turn off + - `app.set('trust proxy', true)` trust everything + * update connect to 2.16.2 + - deprecate `res.headerSent` -- use `res.headersSent` + - deprecate `res.on("header")` -- use on-headers module instead + - fix edge-case in `res.appendHeader` that would append in wrong order + - json: use body-parser + - urlencoded: use body-parser + - dep: bytes@1.0.0 + - dep: cookie-parser@1.1.0 + - dep: csurf@1.2.0 + - dep: express-session@1.1.0 + - dep: method-override@1.0.1 + +3.6.0 / 2014-05-09 +================== + + * deprecate `app.del()` -- use `app.delete()` instead + * deprecate `res.json(obj, status)` -- use `res.json(status, obj)` instead + - the edge-case `res.json(status, num)` requires `res.status(status).json(num)` + * deprecate `res.jsonp(obj, status)` -- use `res.jsonp(status, obj)` instead + - the edge-case `res.jsonp(status, num)` requires `res.status(status).jsonp(num)` + * support PURGE method + - add `app.purge` + - add `router.purge` + - include PURGE in `app.all` + * update connect to 2.15.0 + * Add `res.appendHeader` + * Call error stack even when response has been sent + * Patch `res.headerSent` to return Boolean + * Patch `res.headersSent` for node.js 0.8 + * Prevent default 404 handler after response sent + * dep: compression@1.0.2 + * dep: connect-timeout@1.1.0 + * dep: debug@^0.8.0 + * dep: errorhandler@1.0.1 + * dep: express-session@1.0.4 + * dep: morgan@1.0.1 + * dep: serve-favicon@2.0.0 + * dep: serve-index@1.0.2 + * update debug to 0.8.0 + * add `enable()` method + * change from stderr to stdout + * update methods to 1.0.0 + - add PURGE + * update mkdirp to 0.5.0 + +3.5.3 / 2014-05-08 +================== + + * fix `req.host` for IPv6 literals + * fix `res.jsonp` error if callback param is object + +3.5.2 / 2014-04-24 +================== + + * update connect to 2.14.5 + * update cookie to 0.1.2 + * update mkdirp to 0.4.0 + * update send to 0.3.0 + +3.5.1 / 2014-03-25 +================== + + * pin less-middleware in generated app + +3.5.0 / 2014-03-06 +================== + + * bump deps + +3.4.8 / 2014-01-13 +================== + + * prevent incorrect automatic OPTIONS responses #1868 @dpatti + * update binary and examples for jade 1.0 #1876 @yossi, #1877 @reqshark, #1892 @matheusazzi + * throw 400 in case of malformed paths @rlidwka + +3.4.7 / 2013-12-10 +================== + + * update connect + +3.4.6 / 2013-12-01 +================== + + * update connect (raw-body) + +3.4.5 / 2013-11-27 +================== + + * update connect + * res.location: remove leading ./ #1802 @kapouer + * res.redirect: fix `res.redirect('toString') #1829 @michaelficarra + * res.send: always send ETag when content-length > 0 + * router: add Router.all() method + +3.4.4 / 2013-10-29 +================== + + * update connect + * update supertest + * update methods + * express(1): replace bodyParser() with urlencoded() and json() #1795 @chirag04 + +3.4.3 / 
2013-10-23 +================== + + * update connect + +3.4.2 / 2013-10-18 +================== + + * update connect + * downgrade commander + +3.4.1 / 2013-10-15 +================== + + * update connect + * update commander + * jsonp: check if callback is a function + * router: wrap encodeURIComponent in a try/catch #1735 (@lxe) + * res.format: now includes charset @1747 (@sorribas) + * res.links: allow multiple calls @1746 (@sorribas) + +3.4.0 / 2013-09-07 +================== + + * add res.vary(). Closes #1682 + * update connect + +3.3.8 / 2013-09-02 +================== + + * update connect + +3.3.7 / 2013-08-28 +================== + + * update connect + +3.3.6 / 2013-08-27 +================== + + * Revert "remove charset from json responses. Closes #1631" (causes issues in some clients) + * add: req.accepts take an argument list + +3.3.4 / 2013-07-08 +================== + + * update send and connect + +3.3.3 / 2013-07-04 +================== + + * update connect + +3.3.2 / 2013-07-03 +================== + + * update connect + * update send + * remove .version export + +3.3.1 / 2013-06-27 +================== + + * update connect + +3.3.0 / 2013-06-26 +================== + + * update connect + * add support for multiple X-Forwarded-Proto values. Closes #1646 + * change: remove charset from json responses. Closes #1631 + * change: return actual booleans from req.accept* functions + * fix jsonp callback array throw + +3.2.6 / 2013-06-02 +================== + + * update connect + +3.2.5 / 2013-05-21 +================== + + * update connect + * update node-cookie + * add: throw a meaningful error when there is no default engine + * change generation of ETags with res.send() to GET requests only. Closes #1619 + +3.2.4 / 2013-05-09 +================== + + * fix `req.subdomains` when no Host is present + * fix `req.host` when no Host is present, return undefined + +3.2.3 / 2013-05-07 +================== + + * update connect / qs + +3.2.2 / 2013-05-03 +================== + + * update qs + +3.2.1 / 2013-04-29 +================== + + * add app.VERB() paths array deprecation warning + * update connect + * update qs and remove all ~ semver crap + * fix: accept number as value of Signed Cookie + +3.2.0 / 2013-04-15 +================== + + * add "view" constructor setting to override view behaviour + * add req.acceptsEncoding(name) + * add req.acceptedEncodings + * revert cookie signature change causing session race conditions + * fix sorting of Accept values of the same quality + +3.1.2 / 2013-04-12 +================== + + * add support for custom Accept parameters + * update cookie-signature + +3.1.1 / 2013-04-01 +================== + + * add X-Forwarded-Host support to `req.host` + * fix relative redirects + * update mkdirp + * update buffer-crc32 + * remove legacy app.configure() method from app template. + +3.1.0 / 2013-01-25 +================== + + * add support for leading "." in "view engine" setting + * add array support to `res.set()` + * add node 0.8.x to travis.yml + * add "subdomain offset" setting for tweaking `req.subdomains` + * add `res.location(url)` implementing `res.redirect()`-like setting of Location + * use app.get() for x-powered-by setting for inheritance + * fix colons in passwords for `req.auth` + +3.0.6 / 2013-01-04 +================== + + * add http verb methods to Router + * update connect + * fix mangling of the `res.cookie()` options object + * fix jsonp whitespace escape. 
Closes #1132 + +3.0.5 / 2012-12-19 +================== + + * add throwing when a non-function is passed to a route + * fix: explicitly remove Transfer-Encoding header from 204 and 304 responses + * revert "add 'etag' option" + +3.0.4 / 2012-12-05 +================== + + * add 'etag' option to disable `res.send()` Etags + * add escaping of urls in text/plain in `res.redirect()` + for old browsers interpreting as html + * change crc32 module for a more liberal license + * update connect + +3.0.3 / 2012-11-13 +================== + + * update connect + * update cookie module + * fix cookie max-age + +3.0.2 / 2012-11-08 +================== + + * add OPTIONS to cors example. Closes #1398 + * fix route chaining regression. Closes #1397 + +3.0.1 / 2012-11-01 +================== + + * update connect + +3.0.0 / 2012-10-23 +================== + + * add `make clean` + * add "Basic" check to req.auth + * add `req.auth` test coverage + * add cb && cb(payload) to `res.jsonp()`. Closes #1374 + * add backwards compat for `res.redirect()` status. Closes #1336 + * add support for `res.json()` to retain previously defined Content-Types. Closes #1349 + * update connect + * change `res.redirect()` to utilize a pathname-relative Location again. Closes #1382 + * remove non-primitive string support for `res.send()` + * fix view-locals example. Closes #1370 + * fix route-separation example + +3.0.0rc5 / 2012-09-18 +================== + + * update connect + * add redis search example + * add static-files example + * add "x-powered-by" setting (`app.disable('x-powered-by')`) + * add "application/octet-stream" redirect Accept test case. Closes #1317 + +3.0.0rc4 / 2012-08-30 +================== + + * add `res.jsonp()`. Closes #1307 + * add "verbose errors" option to error-pages example + * add another route example to express(1) so people are not so confused + * add redis online user activity tracking example + * update connect dep + * fix etag quoting. Closes #1310 + * fix error-pages 404 status + * fix jsonp callback char restrictions + * remove old OPTIONS default response + +3.0.0rc3 / 2012-08-13 +================== + + * update connect dep + * fix signed cookies to work with `connect.cookieParser()` ("s:" prefix was missing) [tnydwrds] + * fix `res.render()` clobbering of "locals" + +3.0.0rc2 / 2012-08-03 +================== + + * add CORS example + * update connect dep + * deprecate `.createServer()` & remove old stale examples + * fix: escape `res.redirect()` link + * fix vhost example + +3.0.0rc1 / 2012-07-24 +================== + + * add more examples to view-locals + * add scheme-relative redirects (`res.redirect("//foo.com")`) support + * update cookie dep + * update connect dep + * update send dep + * fix `express(1)` -h flag, use -H for hogan. Closes #1245 + * fix `res.sendfile()` socket error handling regression + +3.0.0beta7 / 2012-07-16 +================== + + * update connect dep for `send()` root normalization regression + +3.0.0beta6 / 2012-07-13 +================== + + * add `err.view` property for view errors. 
Closes #1226 + * add "jsonp callback name" setting + * add support for "/foo/:bar*" non-greedy matches + * change `res.sendfile()` to use `send()` module + * change `res.send` to use "response-send" module + * remove `app.locals.use` and `res.locals.use`, use regular middleware + +3.0.0beta5 / 2012-07-03 +================== + + * add "make check" support + * add route-map example + * add `res.json(obj, status)` support back for BC + * add "methods" dep, remove internal methods module + * update connect dep + * update auth example to utilize cores pbkdf2 + * updated tests to use "supertest" + +3.0.0beta4 / 2012-06-25 +================== + + * Added `req.auth` + * Added `req.range(size)` + * Added `res.links(obj)` + * Added `res.send(body, status)` support back for backwards compat + * Added `.default()` support to `res.format()` + * Added 2xx / 304 check to `req.fresh` + * Revert "Added + support to the router" + * Fixed `res.send()` freshness check, respect res.statusCode + +3.0.0beta3 / 2012-06-15 +================== + + * Added hogan `--hjs` to express(1) [nullfirm] + * Added another example to content-negotiation + * Added `fresh` dep + * Changed: `res.send()` always checks freshness + * Fixed: expose connects mime module. Closes #1165 + +3.0.0beta2 / 2012-06-06 +================== + + * Added `+` support to the router + * Added `req.host` + * Changed `req.param()` to check route first + * Update connect dep + +3.0.0beta1 / 2012-06-01 +================== + + * Added `res.format()` callback to override default 406 behaviour + * Fixed `res.redirect()` 406. Closes #1154 + +3.0.0alpha5 / 2012-05-30 +================== + + * Added `req.ip` + * Added `{ signed: true }` option to `res.cookie()` + * Removed `res.signedCookie()` + * Changed: dont reverse `req.ips` + * Fixed "trust proxy" setting check for `req.ips` + +3.0.0alpha4 / 2012-05-09 +================== + + * Added: allow `[]` in jsonp callback. Closes #1128 + * Added `PORT` env var support in generated template. Closes #1118 [benatkin] + * Updated: connect 2.2.2 + +3.0.0alpha3 / 2012-05-04 +================== + + * Added public `app.routes`. Closes #887 + * Added _view-locals_ example + * Added _mvc_ example + * Added `res.locals.use()`. Closes #1120 + * Added conditional-GET support to `res.send()` + * Added: coerce `res.set()` values to strings + * Changed: moved `static()` in generated apps below router + * Changed: `res.send()` only set ETag when not previously set + * Changed connect 2.2.1 dep + * Changed: `make test` now runs unit / acceptance tests + * Fixed req/res proto inheritance + +3.0.0alpha2 / 2012-04-26 +================== + + * Added `make benchmark` back + * Added `res.send()` support for `String` objects + * Added client-side data exposing example + * Added `res.header()` and `req.header()` aliases for BC + * Added `express.createServer()` for BC + * Perf: memoize parsed urls + * Perf: connect 2.2.0 dep + * Changed: make `expressInit()` middleware self-aware + * Fixed: use app.get() for all core settings + * Fixed redis session example + * Fixed session example. Closes #1105 + * Fixed generated express dep. 
Closes #1078 + +3.0.0alpha1 / 2012-04-15 +================== + + * Added `app.locals.use(callback)` + * Added `app.locals` object + * Added `app.locals(obj)` + * Added `res.locals` object + * Added `res.locals(obj)` + * Added `res.format()` for content-negotiation + * Added `app.engine()` + * Added `res.cookie()` JSON cookie support + * Added "trust proxy" setting + * Added `req.subdomains` + * Added `req.protocol` + * Added `req.secure` + * Added `req.path` + * Added `req.ips` + * Added `req.fresh` + * Added `req.stale` + * Added comma-delimited / array support for `req.accepts()` + * Added debug instrumentation + * Added `res.set(obj)` + * Added `res.set(field, value)` + * Added `res.get(field)` + * Added `app.get(setting)`. Closes #842 + * Added `req.acceptsLanguage()` + * Added `req.acceptsCharset()` + * Added `req.accepted` + * Added `req.acceptedLanguages` + * Added `req.acceptedCharsets` + * Added "json replacer" setting + * Added "json spaces" setting + * Added X-Forwarded-Proto support to `res.redirect()`. Closes #92 + * Added `--less` support to express(1) + * Added `express.response` prototype + * Added `express.request` prototype + * Added `express.application` prototype + * Added `app.path()` + * Added `app.render()` + * Added `res.type()` to replace `res.contentType()` + * Changed: `res.redirect()` to add relative support + * Changed: enable "jsonp callback" by default + * Changed: renamed "case sensitive routes" to "case sensitive routing" + * Rewrite of all tests with mocha + * Removed "root" setting + * Removed `res.redirect('home')` support + * Removed `req.notify()` + * Removed `app.register()` + * Removed `app.redirect()` + * Removed `app.is()` + * Removed `app.helpers()` + * Removed `app.dynamicHelpers()` + * Fixed `res.sendfile()` with non-GET. Closes #723 + * Fixed express(1) public dir for windows. Closes #866 + +2.5.9/ 2012-04-02 +================== + + * Added support for PURGE request method [pbuyle] + * Fixed `express(1)` generated app `app.address()` before `listening` [mmalecki] + +2.5.8 / 2012-02-08 +================== + + * Update mkdirp dep. Closes #991 + +2.5.7 / 2012-02-06 +================== + + * Fixed `app.all` duplicate DELETE requests [mscdex] + +2.5.6 / 2012-01-13 +================== + + * Updated hamljs dev dep. Closes #953 + +2.5.5 / 2012-01-08 +================== + + * Fixed: set `filename` on cached templates [matthewleon] + +2.5.4 / 2012-01-02 +================== + + * Fixed `express(1)` eol on 0.4.x. Closes #947 + +2.5.3 / 2011-12-30 +================== + + * Fixed `req.is()` when a charset is present + +2.5.2 / 2011-12-10 +================== + + * Fixed: express(1) LF -> CRLF for windows + +2.5.1 / 2011-11-17 +================== + + * Changed: updated connect to 1.8.x + * Removed sass.js support from express(1) + +2.5.0 / 2011-10-24 +================== + + * Added ./routes dir for generated app by default + * Added npm install reminder to express(1) app gen + * Added 0.5.x support + * Removed `make test-cov` since it wont work with node 0.5.x + * Fixed express(1) public dir for windows. Closes #866 + +2.4.7 / 2011-10-05 +================== + + * Added mkdirp to express(1). Closes #795 + * Added simple _json-config_ example + * Added shorthand for the parsed request's pathname via `req.path` + * Changed connect dep to 1.7.x to fix npm issue... + * Fixed `res.redirect()` __HEAD__ support. [reported by xerox] + * Fixed `req.flash()`, only escape args + * Fixed absolute path checking on windows. 
Closes #829 [reported by andrewpmckenzie] + +2.4.6 / 2011-08-22 +================== + + * Fixed multiple param callback regression. Closes #824 [reported by TroyGoode] + +2.4.5 / 2011-08-19 +================== + + * Added support for routes to handle errors. Closes #809 + * Added `app.routes.all()`. Closes #803 + * Added "basepath" setting to work in conjunction with reverse proxies etc. + * Refactored `Route` to use a single array of callbacks + * Added support for multiple callbacks for `app.param()`. Closes #801 +Closes #805 + * Changed: removed .call(self) for route callbacks + * Dependency: `qs >= 0.3.1` + * Fixed `res.redirect()` on windows due to `join()` usage. Closes #808 + +2.4.4 / 2011-08-05 +================== + + * Fixed `res.header()` intention of a set, even when `undefined` + * Fixed `*`, value no longer required + * Fixed `res.send(204)` support. Closes #771 + +2.4.3 / 2011-07-14 +================== + + * Added docs for `status` option special-case. Closes #739 + * Fixed `options.filename`, exposing the view path to template engines + +2.4.2. / 2011-07-06 +================== + + * Revert "removed jsonp stripping" for XSS + +2.4.1 / 2011-07-06 +================== + + * Added `res.json()` JSONP support. Closes #737 + * Added _extending-templates_ example. Closes #730 + * Added "strict routing" setting for trailing slashes + * Added support for multiple envs in `app.configure()` calls. Closes #735 + * Changed: `res.send()` using `res.json()` + * Changed: when cookie `path === null` don't default it + * Changed; default cookie path to "home" setting. Closes #731 + * Removed _pids/logs_ creation from express(1) + +2.4.0 / 2011-06-28 +================== + + * Added chainable `res.status(code)` + * Added `res.json()`, an explicit version of `res.send(obj)` + * Added simple web-service example + +2.3.12 / 2011-06-22 +================== + + * \#express is now on freenode! come join! + * Added `req.get(field, param)` + * Added links to Japanese documentation, thanks @hideyukisaito! + * Added; the `express(1)` generated app outputs the env + * Added `content-negotiation` example + * Dependency: connect >= 1.5.1 < 2.0.0 + * Fixed view layout bug. Closes #720 + * Fixed; ignore body on 304. Closes #701 + +2.3.11 / 2011-06-04 +================== + + * Added `npm test` + * Removed generation of dummy test file from `express(1)` + * Fixed; `express(1)` adds express as a dep + * Fixed; prune on `prepublish` + +2.3.10 / 2011-05-27 +================== + + * Added `req.route`, exposing the current route + * Added _package.json_ generation support to `express(1)` + * Fixed call to `app.param()` function for optional params. Closes #682 + +2.3.9 / 2011-05-25 +================== + + * Fixed bug-ish with `../' in `res.partial()` calls + +2.3.8 / 2011-05-24 +================== + + * Fixed `app.options()` + +2.3.7 / 2011-05-23 +================== + + * Added route `Collection`, ex: `app.get('/user/:id').remove();` + * Added support for `app.param(fn)` to define param logic + * Removed `app.param()` support for callback with return value + * Removed module.parent check from express(1) generated app. Closes #670 + * Refactored router. 
Closes #639 + +2.3.6 / 2011-05-20 +================== + + * Changed; using devDependencies instead of git submodules + * Fixed redis session example + * Fixed markdown example + * Fixed view caching, should not be enabled in development + +2.3.5 / 2011-05-20 +================== + + * Added export `.view` as alias for `.View` + +2.3.4 / 2011-05-08 +================== + + * Added `./examples/say` + * Fixed `res.sendfile()` bug preventing the transfer of files with spaces + +2.3.3 / 2011-05-03 +================== + + * Added "case sensitive routes" option. + * Changed; split methods supported per rfc [slaskis] + * Fixed route-specific middleware when using the same callback function several times + +2.3.2 / 2011-04-27 +================== + + * Fixed view hints + +2.3.1 / 2011-04-26 +================== + + * Added `app.match()` as `app.match.all()` + * Added `app.lookup()` as `app.lookup.all()` + * Added `app.remove()` for `app.remove.all()` + * Added `app.remove.VERB()` + * Fixed template caching collision issue. Closes #644 + * Moved router over from connect and started refactor + +2.3.0 / 2011-04-25 +================== + + * Added options support to `res.clearCookie()` + * Added `res.helpers()` as alias of `res.locals()` + * Added; json defaults to UTF-8 with `res.send()`. Closes #632. [Daniel Shaw] + * Dependency `connect >= 1.4.0` + * Changed; auto set Content-Type in res.attachment [Aaron Heckmann] + * Renamed "cache views" to "view cache". Closes #628 + * Fixed caching of views when using several apps. Closes #637 + * Fixed gotcha invoking `app.param()` callbacks once per route middleware. +Closes #638 + * Fixed partial lookup precedence. Closes #631 + +2.2.2 / 2011-04-12 +================== + + * Added second callback support for `res.download()` connection errors + * Fixed `filename` option passing to template engine + +2.2.1 / 2011-04-04 +================== + + * Added `layout(path)` helper to change the layout within a view. Closes #610 + * Fixed `partial()` collection object support. + Previously only anything with `.length` would work. + When `.length` is present one must still be aware of holes, + however now `{ collection: {foo: 'bar'}}` is valid, exposes + `keyInCollection` and `keysInCollection`. + + * Performance improved with better view caching + * Removed `request` and `response` locals + * Changed; errorHandler page title is now `Express` instead of `Connect` + +2.2.0 / 2011-03-30 +================== + + * Added `app.lookup.VERB()`, ex `app.lookup.put('/user/:id')`. Closes #606 + * Added `app.match.VERB()`, ex `app.match.put('/user/12')`. Closes #606 + * Added `app.VERB(path)` as alias of `app.lookup.VERB()`. + * Dependency `connect >= 1.2.0` + +2.1.1 / 2011-03-29 +================== + + * Added; expose `err.view` object when failing to locate a view + * Fixed `res.partial()` call `next(err)` when no callback is given [reported by aheckmann] + * Fixed; `res.send(undefined)` responds with 204 [aheckmann] + +2.1.0 / 2011-03-24 +================== + + * Added `/_?` partial lookup support. Closes #447 + * Added `request`, `response`, and `app` local variables + * Added `settings` local variable, containing the app's settings + * Added `req.flash()` exception if `req.session` is not available + * Added `res.send(bool)` support (json response) + * Fixed stylus example for latest version + * Fixed; wrap try/catch around `res.render()` + +2.0.0 / 2011-03-17 +================== + + * Fixed up index view path alternative. 
+ * Changed; `res.locals()` without object returns the locals + +2.0.0rc3 / 2011-03-17 +================== + + * Added `res.locals(obj)` to compliment `res.local(key, val)` + * Added `res.partial()` callback support + * Fixed recursive error reporting issue in `res.render()` + +2.0.0rc2 / 2011-03-17 +================== + + * Changed; `partial()` "locals" are now optional + * Fixed `SlowBuffer` support. Closes #584 [reported by tyrda01] + * Fixed .filename view engine option [reported by drudge] + * Fixed blog example + * Fixed `{req,res}.app` reference when mounting [Ben Weaver] + +2.0.0rc / 2011-03-14 +================== + + * Fixed; expose `HTTPSServer` constructor + * Fixed express(1) default test charset. Closes #579 [reported by secoif] + * Fixed; default charset to utf-8 instead of utf8 for lame IE [reported by NickP] + +2.0.0beta3 / 2011-03-09 +================== + + * Added support for `res.contentType()` literal + The original `res.contentType('.json')`, + `res.contentType('application/json')`, and `res.contentType('json')` + will work now. + * Added `res.render()` status option support back + * Added charset option for `res.render()` + * Added `.charset` support (via connect 1.0.4) + * Added view resolution hints when in development and a lookup fails + * Added layout lookup support relative to the page view. + For example while rendering `./views/user/index.jade` if you create + `./views/user/layout.jade` it will be used in favour of the root layout. + * Fixed `res.redirect()`. RFC states absolute url [reported by unlink] + * Fixed; default `res.send()` string charset to utf8 + * Removed `Partial` constructor (not currently used) + +2.0.0beta2 / 2011-03-07 +================== + + * Added res.render() `.locals` support back to aid in migration process + * Fixed flash example + +2.0.0beta / 2011-03-03 +================== + + * Added HTTPS support + * Added `res.cookie()` maxAge support + * Added `req.header()` _Referrer_ / _Referer_ special-case, either works + * Added mount support for `res.redirect()`, now respects the mount-point + * Added `union()` util, taking place of `merge(clone())` combo + * Added stylus support to express(1) generated app + * Added secret to session middleware used in examples and generated app + * Added `res.local(name, val)` for progressive view locals + * Added default param support to `req.param(name, default)` + * Added `app.disabled()` and `app.enabled()` + * Added `app.register()` support for omitting leading ".", either works + * Added `res.partial()`, using the same interface as `partial()` within a view. Closes #539 + * Added `app.param()` to map route params to async/sync logic + * Added; aliased `app.helpers()` as `app.locals()`. Closes #481 + * Added extname with no leading "." support to `res.contentType()` + * Added `cache views` setting, defaulting to enabled in "production" env + * Added index file partial resolution, eg: partial('user') may try _views/user/index.jade_. + * Added `req.accepts()` support for extensions + * Changed; `res.download()` and `res.sendfile()` now utilize Connect's + static file server `connect.static.send()`. + * Changed; replaced `connect.utils.mime()` with npm _mime_ module + * Changed; allow `req.query` to be pre-defined (via middleware or other parent + * Changed view partial resolution, now relative to parent view + * Changed view engine signature. no longer `engine.render(str, options, callback)`, now `engine.compile(str, options) -> Function`, the returned function accepts `fn(locals)`. 
+ * Fixed `req.param()` bug returning Array.prototype methods. Closes #552 + * Fixed; using `Stream#pipe()` instead of `sys.pump()` in `res.sendfile()` + * Fixed; using _qs_ module instead of _querystring_ + * Fixed; strip unsafe chars from jsonp callbacks + * Removed "stream threshold" setting + +1.0.8 / 2011-03-01 +================== + + * Allow `req.query` to be pre-defined (via middleware or other parent app) + * "connect": ">= 0.5.0 < 1.0.0". Closes #547 + * Removed the long deprecated __EXPRESS_ENV__ support + +1.0.7 / 2011-02-07 +================== + + * Fixed `render()` setting inheritance. + Mounted apps would not inherit "view engine" + +1.0.6 / 2011-02-07 +================== + + * Fixed `view engine` setting bug when period is in dirname + +1.0.5 / 2011-02-05 +================== + + * Added secret to generated app `session()` call + +1.0.4 / 2011-02-05 +================== + + * Added `qs` dependency to _package.json_ + * Fixed namespaced `require()`s for latest connect support + +1.0.3 / 2011-01-13 +================== + + * Remove unsafe characters from JSONP callback names [Ryan Grove] + +1.0.2 / 2011-01-10 +================== + + * Removed nested require, using `connect.router` + +1.0.1 / 2010-12-29 +================== + + * Fixed for middleware stacked via `createServer()` + previously the `foo` middleware passed to `createServer(foo)` + would not have access to Express methods such as `res.send()` + or props like `req.query` etc. + +1.0.0 / 2010-11-16 +================== + + * Added; deduce partial object names from the last segment. + For example by default `partial('forum/post', postObject)` will + give you the _post_ object, providing a meaningful default. + * Added http status code string representation to `res.redirect()` body + * Added; `res.redirect()` supporting _text/plain_ and _text/html_ via __Accept__. + * Added `req.is()` to aid in content negotiation + * Added partial local inheritance [suggested by masylum]. Closes #102 + providing access to parent template locals. + * Added _-s, --session[s]_ flag to express(1) to add session related middleware + * Added _--template_ flag to express(1) to specify the + template engine to use. + * Added _--css_ flag to express(1) to specify the + stylesheet engine to use (or just plain css by default). + * Added `app.all()` support [thanks aheckmann] + * Added partial direct object support. + You may now `partial('user', user)` providing the "user" local, + vs previously `partial('user', { object: user })`. + * Added _route-separation_ example since many people question ways + to do this with CommonJS modules. Also view the _blog_ example for + an alternative. + * Performance; caching view path derived partial object names + * Fixed partial local inheritance precedence. [reported by Nick Poulden] Closes #454 + * Fixed jsonp support; _text/javascript_ as per mailinglist discussion + +1.0.0rc4 / 2010-10-14 +================== + + * Added _NODE_ENV_ support, _EXPRESS_ENV_ is deprecated and will be removed in 1.0.0 + * Added route-middleware support (very helpful, see the [docs](http://expressjs.com/guide.html#Route-Middleware)) + * Added _jsonp callback_ setting to enable/disable jsonp autowrapping [Dav Glass] + * Added callback query check on response.send to autowrap JSON objects for simple webservice implementations [Dav Glass] + * Added `partial()` support for array-like collections. Closes #434 + * Added support for swappable querystring parsers + * Added session usage docs. Closes #443 + * Added dynamic helper caching. 
Closes #439 [suggested by maritz] + * Added authentication example + * Added basic Range support to `res.sendfile()` (and `res.download()` etc) + * Changed; `express(1)` generated app using 2 spaces instead of 4 + * Default env to "development" again [aheckmann] + * Removed _context_ option is no more, use "scope" + * Fixed; exposing _./support_ libs to examples so they can run without installs + * Fixed mvc example + +1.0.0rc3 / 2010-09-20 +================== + + * Added confirmation for `express(1)` app generation. Closes #391 + * Added extending of flash formatters via `app.flashFormatters` + * Added flash formatter support. Closes #411 + * Added streaming support to `res.sendfile()` using `sys.pump()` when >= "stream threshold" + * Added _stream threshold_ setting for `res.sendfile()` + * Added `res.send()` __HEAD__ support + * Added `res.clearCookie()` + * Added `res.cookie()` + * Added `res.render()` headers option + * Added `res.redirect()` response bodies + * Added `res.render()` status option support. Closes #425 [thanks aheckmann] + * Fixed `res.sendfile()` responding with 403 on malicious path + * Fixed `res.download()` bug; when an error occurs remove _Content-Disposition_ + * Fixed; mounted apps settings now inherit from parent app [aheckmann] + * Fixed; stripping Content-Length / Content-Type when 204 + * Fixed `res.send()` 204. Closes #419 + * Fixed multiple _Set-Cookie_ headers via `res.header()`. Closes #402 + * Fixed bug messing with error handlers when `listenFD()` is called instead of `listen()`. [thanks guillermo] + + +1.0.0rc2 / 2010-08-17 +================== + + * Added `app.register()` for template engine mapping. Closes #390 + * Added `res.render()` callback support as second argument (no options) + * Added callback support to `res.download()` + * Added callback support for `res.sendfile()` + * Added support for middleware access via `express.middlewareName()` vs `connect.middlewareName()` + * Added "partials" setting to docs + * Added default expresso tests to `express(1)` generated app. Closes #384 + * Fixed `res.sendfile()` error handling, defer via `next()` + * Fixed `res.render()` callback when a layout is used [thanks guillermo] + * Fixed; `make install` creating ~/.node_libraries when not present + * Fixed issue preventing error handlers from being defined anywhere. Closes #387 + +1.0.0rc / 2010-07-28 +================== + + * Added mounted hook. Closes #369 + * Added connect dependency to _package.json_ + + * Removed "reload views" setting and support code + development env never caches, production always caches. + + * Removed _param_ in route callbacks, signature is now + simply (req, res, next), previously (req, res, params, next). + Use _req.params_ for path captures, _req.query_ for GET params. + + * Fixed "home" setting + * Fixed middleware/router precedence issue. Closes #366 + * Fixed; _configure()_ callbacks called immediately. Closes #368 + +1.0.0beta2 / 2010-07-23 +================== + + * Added more examples + * Added; exporting `Server` constructor + * Added `Server#helpers()` for view locals + * Added `Server#dynamicHelpers()` for dynamic view locals. Closes #349 + * Added support for absolute view paths + * Added; _home_ setting defaults to `Server#route` for mounted apps. Closes #363 + * Added Guillermo Rauch to the contributor list + * Added support for "as" for non-collection partials. Closes #341 + * Fixed _install.sh_, ensuring _~/.node_libraries_ exists. 
Closes #362 [thanks jf] + * Fixed `res.render()` exceptions, now passed to `next()` when no callback is given [thanks guillermo] + * Fixed instanceof `Array` checks, now `Array.isArray()` + * Fixed express(1) expansion of public dirs. Closes #348 + * Fixed middleware precedence. Closes #345 + * Fixed view watcher, now async [thanks aheckmann] + +1.0.0beta / 2010-07-15 +================== + + * Re-write + - much faster + - much lighter + - Check [ExpressJS.com](http://expressjs.com) for migration guide and updated docs + +0.14.0 / 2010-06-15 +================== + + * Utilize relative requires + * Added Static bufferSize option [aheckmann] + * Fixed caching of view and partial subdirectories [aheckmann] + * Fixed mime.type() comments now that ".ext" is not supported + * Updated haml submodule + * Updated class submodule + * Removed bin/express + +0.13.0 / 2010-06-01 +================== + + * Added node v0.1.97 compatibility + * Added support for deleting cookies via Request#cookie('key', null) + * Updated haml submodule + * Fixed not-found page, now using using charset utf-8 + * Fixed show-exceptions page, now using using charset utf-8 + * Fixed view support due to fs.readFile Buffers + * Changed; mime.type() no longer accepts ".type" due to node extname() changes + +0.12.0 / 2010-05-22 +================== + + * Added node v0.1.96 compatibility + * Added view `helpers` export which act as additional local variables + * Updated haml submodule + * Changed ETag; removed inode, modified time only + * Fixed LF to CRLF for setting multiple cookies + * Fixed cookie complation; values are now urlencoded + * Fixed cookies parsing; accepts quoted values and url escaped cookies + +0.11.0 / 2010-05-06 +================== + + * Added support for layouts using different engines + - this.render('page.html.haml', { layout: 'super-cool-layout.html.ejs' }) + - this.render('page.html.haml', { layout: 'foo' }) // assumes 'foo.html.haml' + - this.render('page.html.haml', { layout: false }) // no layout + * Updated ext submodule + * Updated haml submodule + * Fixed EJS partial support by passing along the context. Issue #307 + +0.10.1 / 2010-05-03 +================== + + * Fixed binary uploads. + +0.10.0 / 2010-04-30 +================== + + * Added charset support via Request#charset (automatically assigned to 'UTF-8' when respond()'s + encoding is set to 'utf8' or 'utf-8'. + * Added "encoding" option to Request#render(). Closes #299 + * Added "dump exceptions" setting, which is enabled by default. + * Added simple ejs template engine support + * Added error response support for text/plain, application/json. Closes #297 + * Added callback function param to Request#error() + * Added Request#sendHead() + * Added Request#stream() + * Added support for Request#respond(304, null) for empty response bodies + * Added ETag support to Request#sendfile() + * Added options to Request#sendfile(), passed to fs.createReadStream() + * Added filename arg to Request#download() + * Performance enhanced due to pre-reversing plugins so that plugins.reverse() is not called on each request + * Performance enhanced by preventing several calls to toLowerCase() in Router#match() + * Changed; Request#sendfile() now streams + * Changed; Renamed Request#halt() to Request#respond(). Closes #289 + * Changed; Using sys.inspect() instead of JSON.encode() for error output + * Changed; run() returns the http.Server instance. 
Closes #298 + * Changed; Defaulting Server#host to null (INADDR_ANY) + * Changed; Logger "common" format scale of 0.4f + * Removed Logger "request" format + * Fixed; Catching ENOENT in view caching, preventing error when "views/partials" is not found + * Fixed several issues with http client + * Fixed Logger Content-Length output + * Fixed bug preventing Opera from retaining the generated session id. Closes #292 + +0.9.0 / 2010-04-14 +================== + + * Added DSL level error() route support + * Added DSL level notFound() route support + * Added Request#error() + * Added Request#notFound() + * Added Request#render() callback function. Closes #258 + * Added "max upload size" setting + * Added "magic" variables to collection partials (\_\_index\_\_, \_\_length\_\_, \_\_isFirst\_\_, \_\_isLast\_\_). Closes #254 + * Added [haml.js](http://github.com/visionmedia/haml.js) submodule; removed haml-js + * Added callback function support to Request#halt() as 3rd/4th arg + * Added preprocessing of route param wildcards using param(). Closes #251 + * Added view partial support (with collections etc) + * Fixed bug preventing falsey params (such as ?page=0). Closes #286 + * Fixed setting of multiple cookies. Closes #199 + * Changed; view naming convention is now NAME.TYPE.ENGINE (for example page.html.haml) + * Changed; session cookie is now httpOnly + * Changed; Request is no longer global + * Changed; Event is no longer global + * Changed; "sys" module is no longer global + * Changed; moved Request#download to Static plugin where it belongs + * Changed; Request instance created before body parsing. Closes #262 + * Changed; Pre-caching views in memory when "cache view contents" is enabled. Closes #253 + * Changed; Pre-caching view partials in memory when "cache view partials" is enabled + * Updated support to node --version 0.1.90 + * Updated dependencies + * Removed set("session cookie") in favour of use(Session, { cookie: { ... }}) + * Removed utils.mixin(); use Object#mergeDeep() + +0.8.0 / 2010-03-19 +================== + + * Added coffeescript example app. Closes #242 + * Changed; cache api now async friendly. Closes #240 + * Removed deprecated 'express/static' support. Use 'express/plugins/static' + +0.7.6 / 2010-03-19 +================== + + * Added Request#isXHR. Closes #229 + * Added `make install` (for the executable) + * Added `express` executable for setting up simple app templates + * Added "GET /public/*" to Static plugin, defaulting to /public + * Added Static plugin + * Fixed; Request#render() only calls cache.get() once + * Fixed; Namespacing View caches with "view:" + * Fixed; Namespacing Static caches with "static:" + * Fixed; Both example apps now use the Static plugin + * Fixed set("views"). Closes #239 + * Fixed missing space for combined log format + * Deprecated Request#sendfile() and 'express/static' + * Removed Server#running + +0.7.5 / 2010-03-16 +================== + + * Added Request#flash() support without args, now returns all flashes + * Updated ext submodule + +0.7.4 / 2010-03-16 +================== + + * Fixed session reaper + * Changed; class.js replacing js-oo Class implementation (quite a bit faster, no browser cruft) + +0.7.3 / 2010-03-16 +================== + + * Added package.json + * Fixed requiring of haml / sass due to kiwi removal + +0.7.2 / 2010-03-16 +================== + + * Fixed GIT submodules (HAH!) 
+ +0.7.1 / 2010-03-16 +================== + + * Changed; Express now using submodules again until a PM is adopted + * Changed; chat example using millisecond conversions from ext + +0.7.0 / 2010-03-15 +================== + + * Added Request#pass() support (finds the next matching route, or the given path) + * Added Logger plugin (default "common" format replaces CommonLogger) + * Removed Profiler plugin + * Removed CommonLogger plugin + +0.6.0 / 2010-03-11 +================== + + * Added seed.yml for kiwi package management support + * Added HTTP client query string support when method is GET. Closes #205 + + * Added support for arbitrary view engines. + For example "foo.engine.html" will now require('engine'), + the exports from this module are cached after the first require(). + + * Added async plugin support + + * Removed usage of RESTful route funcs as http client + get() etc, use http.get() and friends + + * Removed custom exceptions + +0.5.0 / 2010-03-10 +================== + + * Added ext dependency (library of js extensions) + * Removed extname() / basename() utils. Use path module + * Removed toArray() util. Use arguments.values + * Removed escapeRegexp() util. Use RegExp.escape() + * Removed process.mixin() dependency. Use utils.mixin() + * Removed Collection + * Removed ElementCollection + * Shameless self promotion of ebook "Advanced JavaScript" (http://dev-mag.com) ;) + +0.4.0 / 2010-02-11 +================== + + * Added flash() example to sample upload app + * Added high level restful http client module (express/http) + * Changed; RESTful route functions double as HTTP clients. Closes #69 + * Changed; throwing error when routes are added at runtime + * Changed; defaulting render() context to the current Request. Closes #197 + * Updated haml submodule + +0.3.0 / 2010-02-11 +================== + + * Updated haml / sass submodules. Closes #200 + * Added flash message support. Closes #64 + * Added accepts() now allows multiple args. fixes #117 + * Added support for plugins to halt. Closes #189 + * Added alternate layout support. Closes #119 + * Removed Route#run(). Closes #188 + * Fixed broken specs due to use(Cookie) missing + +0.2.1 / 2010-02-05 +================== + + * Added "plot" format option for Profiler (for gnuplot processing) + * Added request number to Profiler plugin + * Fixed binary encoding for multi-part file uploads, was previously defaulting to UTF8 + * Fixed issue with routes not firing when not files are present. Closes #184 + * Fixed process.Promise -> events.Promise + +0.2.0 / 2010-02-03 +================== + + * Added parseParam() support for name[] etc. (allows for file inputs with "multiple" attr) Closes #180 + * Added Both Cache and Session option "reapInterval" may be "reapEvery". Closes #174 + * Added expiration support to cache api with reaper. Closes #133 + * Added cache Store.Memory#reap() + * Added Cache; cache api now uses first class Cache instances + * Added abstract session Store. Closes #172 + * Changed; cache Memory.Store#get() utilizing Collection + * Renamed MemoryStore -> Store.Memory + * Fixed use() of the same plugin several time will always use latest options. 
Closes #176 + +0.1.0 / 2010-02-03 +================== + + * Changed; Hooks (before / after) pass request as arg as well as evaluated in their context + * Updated node support to 0.1.27 Closes #169 + * Updated dirname(__filename) -> __dirname + * Updated libxmljs support to v0.2.0 + * Added session support with memory store / reaping + * Added quick uid() helper + * Added multi-part upload support + * Added Sass.js support / submodule + * Added production env caching view contents and static files + * Added static file caching. Closes #136 + * Added cache plugin with memory stores + * Added support to StaticFile so that it works with non-textual files. + * Removed dirname() helper + * Removed several globals (now their modules must be required) + +0.0.2 / 2010-01-10 +================== + + * Added view benchmarks; currently haml vs ejs + * Added Request#attachment() specs. Closes #116 + * Added use of node's parseQuery() util. Closes #123 + * Added `make init` for submodules + * Updated Haml + * Updated sample chat app to show messages on load + * Updated libxmljs parseString -> parseHtmlString + * Fixed `make init` to work with older versions of git + * Fixed specs can now run independent specs for those who cant build deps. Closes #127 + * Fixed issues introduced by the node url module changes. Closes 126. + * Fixed two assertions failing due to Collection#keys() returning strings + * Fixed faulty Collection#toArray() spec due to keys() returning strings + * Fixed `make test` now builds libxmljs.node before testing + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/node_modules/express/LICENSE b/node_modules/express/LICENSE new file mode 100644 index 0000000..aa927e4 --- /dev/null +++ b/node_modules/express/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2009-2014 TJ Holowaychuk +Copyright (c) 2013-2014 Roman Shtylman +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/express/Readme.md b/node_modules/express/Readme.md new file mode 100644 index 0000000..e9bfaeb --- /dev/null +++ b/node_modules/express/Readme.md @@ -0,0 +1,142 @@ +[![Express Logo](https://i.cloudup.com/zfY6lL7eFa-3000x3000.png)](http://expressjs.com/) + + Fast, unopinionated, minimalist web framework for [node](http://nodejs.org). 
+ + [![NPM Version][npm-image]][npm-url] + [![NPM Downloads][downloads-image]][downloads-url] + [![Linux Build][travis-image]][travis-url] + [![Windows Build][appveyor-image]][appveyor-url] + [![Test Coverage][coveralls-image]][coveralls-url] + +```js +var express = require('express') +var app = express() + +app.get('/', function (req, res) { + res.send('Hello World') +}) + +app.listen(3000) +``` + +## Installation + +```bash +$ npm install express +``` + +## Features + + * Robust routing + * Focus on high performance + * Super-high test coverage + * HTTP helpers (redirection, caching, etc) + * View system supporting 14+ template engines + * Content negotiation + * Executable for generating applications quickly + +## Docs & Community + + * [Website and Documentation](http://expressjs.com/) - [[website repo](https://github.com/strongloop/expressjs.com)] + * [#express](https://webchat.freenode.net/?channels=express) on freenode IRC + * [Github Organization](https://github.com/expressjs) for Official Middleware & Modules + * Visit the [Wiki](https://github.com/expressjs/express/wiki) + * [Google Group](https://groups.google.com/group/express-js) for discussion + * [Gitter](https://gitter.im/expressjs/express) for support and discussion + * [Русскоязычная документация](http://jsman.ru/express/) + +**PROTIP** Be sure to read [Migrating from 3.x to 4.x](https://github.com/expressjs/express/wiki/Migrating-from-3.x-to-4.x) as well as [New features in 4.x](https://github.com/expressjs/express/wiki/New-features-in-4.x). + +###Security Issues + +If you discover a security vulnerability in Express, please see [Security Policies and Procedures](Security.md). + +## Quick Start + + The quickest way to get started with express is to utilize the executable [`express(1)`](https://github.com/expressjs/generator) to generate an application as shown below: + + Install the executable. The executable's major version will match Express's: + +```bash +$ npm install -g express-generator@4 +``` + + Create the app: + +```bash +$ express /tmp/foo && cd /tmp/foo +``` + + Install dependencies: + +```bash +$ npm install +``` + + Start the server: + +```bash +$ npm start +``` + +## Philosophy + + The Express philosophy is to provide small, robust tooling for HTTP servers, making + it a great solution for single page applications, web sites, hybrids, or public + HTTP APIs. + + Express does not force you to use any specific ORM or template engine. With support for over + 14 template engines via [Consolidate.js](https://github.com/tj/consolidate.js), + you can quickly craft your perfect framework. 
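+
+ For example, a different extension such as ".html" can be mapped to a
+ template engine with `app.engine()`. The sketch below assumes the `ejs`
+ package is installed and that a `views/index.html` template exists:
+
+```js
+var express = require('express')
+var app = express()
+
+// ejs.renderFile already uses the (path, options, callback) signature
+// that Express expects from a view engine
+app.engine('html', require('ejs').renderFile)
+app.set('view engine', 'html')
+
+app.get('/', function (req, res) {
+  res.render('index', { title: 'Hello' })
+})
+
+app.listen(3000)
+```
+
+ Engines wrapped by Consolidate.js follow the same `(path, options, callback)`
+ convention, so they can be registered the same way.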
+ +## Examples + + To view the examples, clone the Express repo and install the dependencies: + +```bash +$ git clone git://github.com/expressjs/express.git --depth 1 +$ cd express +$ npm install +``` + + Then run whichever example you want: + +```bash +$ node examples/content-negotiation +``` + +## Tests + + To run the test suite, first install the dependencies, then run `npm test`: + +```bash +$ npm install +$ npm test +``` + +## People + +The original author of Express is [TJ Holowaychuk](https://github.com/tj) [![TJ's Gratipay][gratipay-image-visionmedia]][gratipay-url-visionmedia] + +The current lead maintainer is [Douglas Christopher Wilson](https://github.com/dougwilson) [![Doug's Gratipay][gratipay-image-dougwilson]][gratipay-url-dougwilson] + +[List of all contributors](https://github.com/expressjs/express/graphs/contributors) + +## License + + [MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/express.svg +[npm-url]: https://npmjs.org/package/express +[downloads-image]: https://img.shields.io/npm/dm/express.svg +[downloads-url]: https://npmjs.org/package/express +[travis-image]: https://img.shields.io/travis/expressjs/express/master.svg?label=linux +[travis-url]: https://travis-ci.org/expressjs/express +[appveyor-image]: https://img.shields.io/appveyor/ci/dougwilson/express/master.svg?label=windows +[appveyor-url]: https://ci.appveyor.com/project/dougwilson/express +[coveralls-image]: https://img.shields.io/coveralls/expressjs/express/master.svg +[coveralls-url]: https://coveralls.io/r/expressjs/express?branch=master +[gratipay-image-visionmedia]: https://img.shields.io/gratipay/visionmedia.svg +[gratipay-url-visionmedia]: https://gratipay.com/visionmedia/ +[gratipay-image-dougwilson]: https://img.shields.io/gratipay/dougwilson.svg +[gratipay-url-dougwilson]: https://gratipay.com/dougwilson/ diff --git a/node_modules/express/index.js b/node_modules/express/index.js new file mode 100644 index 0000000..d219b0c --- /dev/null +++ b/node_modules/express/index.js @@ -0,0 +1,11 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +module.exports = require('./lib/express'); diff --git a/node_modules/express/lib/application.js b/node_modules/express/lib/application.js new file mode 100644 index 0000000..0ee4def --- /dev/null +++ b/node_modules/express/lib/application.js @@ -0,0 +1,643 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var finalhandler = require('finalhandler'); +var Router = require('./router'); +var methods = require('methods'); +var middleware = require('./middleware/init'); +var query = require('./middleware/query'); +var debug = require('debug')('express:application'); +var View = require('./view'); +var http = require('http'); +var compileETag = require('./utils').compileETag; +var compileQueryParser = require('./utils').compileQueryParser; +var compileTrust = require('./utils').compileTrust; +var deprecate = require('depd')('express'); +var flatten = require('array-flatten'); +var merge = require('utils-merge'); +var resolve = require('path').resolve; +var slice = Array.prototype.slice; + +/** + * Application prototype. 
+ */ + +var app = exports = module.exports = {}; + +/** + * Variable for trust proxy inheritance back-compat + * @private + */ + +var trustProxyDefaultSymbol = '@@symbol:trust_proxy_default'; + +/** + * Initialize the server. + * + * - setup default configuration + * - setup default middleware + * - setup route reflection methods + * + * @private + */ + +app.init = function init() { + this.cache = {}; + this.engines = {}; + this.settings = {}; + + this.defaultConfiguration(); +}; + +/** + * Initialize application configuration. + * @private + */ + +app.defaultConfiguration = function defaultConfiguration() { + var env = process.env.NODE_ENV || 'development'; + + // default settings + this.enable('x-powered-by'); + this.set('etag', 'weak'); + this.set('env', env); + this.set('query parser', 'extended'); + this.set('subdomain offset', 2); + this.set('trust proxy', false); + + // trust proxy inherit back-compat + Object.defineProperty(this.settings, trustProxyDefaultSymbol, { + configurable: true, + value: true + }); + + debug('booting in %s mode', env); + + this.on('mount', function onmount(parent) { + // inherit trust proxy + if (this.settings[trustProxyDefaultSymbol] === true + && typeof parent.settings['trust proxy fn'] === 'function') { + delete this.settings['trust proxy']; + delete this.settings['trust proxy fn']; + } + + // inherit protos + this.request.__proto__ = parent.request; + this.response.__proto__ = parent.response; + this.engines.__proto__ = parent.engines; + this.settings.__proto__ = parent.settings; + }); + + // setup locals + this.locals = Object.create(null); + + // top-most app is mounted at / + this.mountpath = '/'; + + // default locals + this.locals.settings = this.settings; + + // default configuration + this.set('view', View); + this.set('views', resolve('views')); + this.set('jsonp callback name', 'callback'); + + if (env === 'production') { + this.enable('view cache'); + } + + Object.defineProperty(this, 'router', { + get: function() { + throw new Error('\'app.router\' is deprecated!\nPlease see the 3.x to 4.x migration guide for details on how to update your app.'); + } + }); +}; + +/** + * lazily adds the base router if it has not yet been added. + * + * We cannot add the base router in the defaultConfiguration because + * it reads app settings which might be set after that has run. + * + * @private + */ +app.lazyrouter = function lazyrouter() { + if (!this._router) { + this._router = new Router({ + caseSensitive: this.enabled('case sensitive routing'), + strict: this.enabled('strict routing') + }); + + this._router.use(query(this.get('query parser fn'))); + this._router.use(middleware.init(this)); + } +}; + +/** + * Dispatch a req, res pair into the application. Starts pipeline processing. + * + * If no callback is provided, then default error handlers will respond + * in the event of an error bubbling through the stack. + * + * @private + */ + +app.handle = function handle(req, res, callback) { + var router = this._router; + + // final handler + var done = callback || finalhandler(req, res, { + env: this.get('env'), + onerror: logerror.bind(this) + }); + + // no routes + if (!router) { + debug('no routes defined on app'); + done(); + return; + } + + router.handle(req, res, done); +}; + +/** + * Proxy `Router#use()` to add middleware to the app router. + * See Router#use() documentation for details. + * + * If the _fn_ parameter is an express app, then it will be + * mounted at the _route_ specified. 
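+ *
+ * For example, a sub-application (a hypothetical `admin` app, shown here
+ * only for illustration) may be mounted under a path prefix:
+ *
+ *     var admin = express();
+ *     admin.get('/', function (req, res) { res.send('Admin index'); });
+ *
+ *     // requests under /admin are now dispatched to the sub-app
+ *     app.use('/admin', admin);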
+ * + * @public + */ + +app.use = function use(fn) { + var offset = 0; + var path = '/'; + + // default path to '/' + // disambiguate app.use([fn]) + if (typeof fn !== 'function') { + var arg = fn; + + while (Array.isArray(arg) && arg.length !== 0) { + arg = arg[0]; + } + + // first arg is the path + if (typeof arg !== 'function') { + offset = 1; + path = fn; + } + } + + var fns = flatten(slice.call(arguments, offset)); + + if (fns.length === 0) { + throw new TypeError('app.use() requires middleware functions'); + } + + // setup router + this.lazyrouter(); + var router = this._router; + + fns.forEach(function (fn) { + // non-express app + if (!fn || !fn.handle || !fn.set) { + return router.use(path, fn); + } + + debug('.use app under %s', path); + fn.mountpath = path; + fn.parent = this; + + // restore .app property on req and res + router.use(path, function mounted_app(req, res, next) { + var orig = req.app; + fn.handle(req, res, function (err) { + req.__proto__ = orig.request; + res.__proto__ = orig.response; + next(err); + }); + }); + + // mounted an app + fn.emit('mount', this); + }, this); + + return this; +}; + +/** + * Proxy to the app `Router#route()` + * Returns a new `Route` instance for the _path_. + * + * Routes are isolated middleware stacks for specific paths. + * See the Route api docs for details. + * + * @public + */ + +app.route = function route(path) { + this.lazyrouter(); + return this._router.route(path); +}; + +/** + * Register the given template engine callback `fn` + * as `ext`. + * + * By default will `require()` the engine based on the + * file extension. For example if you try to render + * a "foo.jade" file Express will invoke the following internally: + * + * app.engine('jade', require('jade').__express); + * + * For engines that do not provide `.__express` out of the box, + * or if you wish to "map" a different extension to the template engine + * you may use this method. For example mapping the EJS template engine to + * ".html" files: + * + * app.engine('html', require('ejs').renderFile); + * + * In this case EJS provides a `.renderFile()` method with + * the same signature that Express expects: `(path, options, callback)`, + * though note that it aliases this method as `ejs.__express` internally + * so if you're using ".ejs" extensions you dont need to do anything. + * + * Some template engines do not follow this convention, the + * [Consolidate.js](https://github.com/tj/consolidate.js) + * library was created to map all of node's popular template + * engines to follow this convention, thus allowing them to + * work seamlessly within Express. + * + * @param {String} ext + * @param {Function} fn + * @return {app} for chaining + * @public + */ + +app.engine = function engine(ext, fn) { + if (typeof fn !== 'function') { + throw new Error('callback function required'); + } + + // get file extension + var extension = ext[0] !== '.' + ? '.' + ext + : ext; + + // store engine + this.engines[extension] = fn; + + return this; +}; + +/** + * Proxy to `Router#param()` with one added api feature. The _name_ parameter + * can be an array of names. + * + * See the Router#param() docs for more details. 
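+ *
+ * For example, a param callback (using a hypothetical `loadUser` lookup,
+ * shown here only for illustration) can pre-load the object referenced by
+ * a route placeholder such as _/user/:user_id_:
+ *
+ *     app.param('user_id', function (req, res, next, id) {
+ *       loadUser(id, function (err, user) {
+ *         if (err) return next(err);
+ *         req.user = user;
+ *         next();
+ *       });
+ *     });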
+ * + * @param {String|Array} name + * @param {Function} fn + * @return {app} for chaining + * @public + */ + +app.param = function param(name, fn) { + this.lazyrouter(); + + if (Array.isArray(name)) { + for (var i = 0; i < name.length; i++) { + this.param(name[i], fn); + } + + return this; + } + + this._router.param(name, fn); + + return this; +}; + +/** + * Assign `setting` to `val`, or return `setting`'s value. + * + * app.set('foo', 'bar'); + * app.get('foo'); + * // => "bar" + * + * Mounted servers inherit their parent server's settings. + * + * @param {String} setting + * @param {*} [val] + * @return {Server} for chaining + * @public + */ + +app.set = function set(setting, val) { + if (arguments.length === 1) { + // app.get(setting) + return this.settings[setting]; + } + + debug('set "%s" to %o', setting, val); + + // set value + this.settings[setting] = val; + + // trigger matched settings + switch (setting) { + case 'etag': + this.set('etag fn', compileETag(val)); + break; + case 'query parser': + this.set('query parser fn', compileQueryParser(val)); + break; + case 'trust proxy': + this.set('trust proxy fn', compileTrust(val)); + + // trust proxy inherit back-compat + Object.defineProperty(this.settings, trustProxyDefaultSymbol, { + configurable: true, + value: false + }); + + break; + } + + return this; +}; + +/** + * Return the app's absolute pathname + * based on the parent(s) that have + * mounted it. + * + * For example if the application was + * mounted as "/admin", which itself + * was mounted as "/blog" then the + * return value would be "/blog/admin". + * + * @return {String} + * @private + */ + +app.path = function path() { + return this.parent + ? this.parent.path() + this.mountpath + : ''; +}; + +/** + * Check if `setting` is enabled (truthy). + * + * app.enabled('foo') + * // => false + * + * app.enable('foo') + * app.enabled('foo') + * // => true + * + * @param {String} setting + * @return {Boolean} + * @public + */ + +app.enabled = function enabled(setting) { + return Boolean(this.set(setting)); +}; + +/** + * Check if `setting` is disabled. + * + * app.disabled('foo') + * // => true + * + * app.enable('foo') + * app.disabled('foo') + * // => false + * + * @param {String} setting + * @return {Boolean} + * @public + */ + +app.disabled = function disabled(setting) { + return !this.set(setting); +}; + +/** + * Enable `setting`. + * + * @param {String} setting + * @return {app} for chaining + * @public + */ + +app.enable = function enable(setting) { + return this.set(setting, true); +}; + +/** + * Disable `setting`. + * + * @param {String} setting + * @return {app} for chaining + * @public + */ + +app.disable = function disable(setting) { + return this.set(setting, false); +}; + +/** + * Delegate `.VERB(...)` calls to `router.VERB(...)`. + */ + +methods.forEach(function(method){ + app[method] = function(path){ + if (method === 'get' && arguments.length === 1) { + // app.get(setting) + return this.set(path); + } + + this.lazyrouter(); + + var route = this._router.route(path); + route[method].apply(route, slice.call(arguments, 1)); + return this; + }; +}); + +/** + * Special-cased "all" method, applying the given route `path`, + * middleware, and callback to _every_ HTTP method. + * + * @param {String} path + * @param {Function} ... 
+ * @return {app} for chaining + * @public + */ + +app.all = function all(path) { + this.lazyrouter(); + + var route = this._router.route(path); + var args = slice.call(arguments, 1); + + for (var i = 0; i < methods.length; i++) { + route[methods[i]].apply(route, args); + } + + return this; +}; + +// del -> delete alias + +app.del = deprecate.function(app.delete, 'app.del: Use app.delete instead'); + +/** + * Render the given view `name` name with `options` + * and a callback accepting an error and the + * rendered template string. + * + * Example: + * + * app.render('email', { name: 'Tobi' }, function(err, html){ + * // ... + * }) + * + * @param {String} name + * @param {Object|Function} options or fn + * @param {Function} callback + * @public + */ + +app.render = function render(name, options, callback) { + var cache = this.cache; + var done = callback; + var engines = this.engines; + var opts = options; + var renderOptions = {}; + var view; + + // support callback function as second arg + if (typeof options === 'function') { + done = options; + opts = {}; + } + + // merge app.locals + merge(renderOptions, this.locals); + + // merge options._locals + if (opts._locals) { + merge(renderOptions, opts._locals); + } + + // merge options + merge(renderOptions, opts); + + // set .cache unless explicitly provided + if (renderOptions.cache == null) { + renderOptions.cache = this.enabled('view cache'); + } + + // primed cache + if (renderOptions.cache) { + view = cache[name]; + } + + // view + if (!view) { + var View = this.get('view'); + + view = new View(name, { + defaultEngine: this.get('view engine'), + root: this.get('views'), + engines: engines + }); + + if (!view.path) { + var dirs = Array.isArray(view.root) && view.root.length > 1 + ? 'directories "' + view.root.slice(0, -1).join('", "') + '" or "' + view.root[view.root.length - 1] + '"' + : 'directory "' + view.root + '"' + var err = new Error('Failed to lookup view "' + name + '" in views ' + dirs); + err.view = view; + return done(err); + } + + // prime the cache + if (renderOptions.cache) { + cache[name] = view; + } + } + + // render + tryRender(view, renderOptions, done); +}; + +/** + * Listen for connections. + * + * A node `http.Server` is returned, with this + * application (which is a `Function`) as its + * callback. If you wish to create both an HTTP + * and HTTPS server you may do so with the "http" + * and "https" modules as shown here: + * + * var http = require('http') + * , https = require('https') + * , express = require('express') + * , app = express(); + * + * http.createServer(app).listen(80); + * https.createServer({ ... }, app).listen(443); + * + * @return {http.Server} + * @public + */ + +app.listen = function listen() { + var server = http.createServer(this); + return server.listen.apply(server, arguments); +}; + +/** + * Log error using console.error. + * + * @param {Error} err + * @private + */ + +function logerror(err) { + /* istanbul ignore next */ + if (this.get('env') !== 'test') console.error(err.stack || err.toString()); +} + +/** + * Try rendering a view. + * @private + */ + +function tryRender(view, options, callback) { + try { + view.render(options, callback); + } catch (err) { + callback(err); + } +} diff --git a/node_modules/express/lib/express.js b/node_modules/express/lib/express.js new file mode 100644 index 0000000..540c8be --- /dev/null +++ b/node_modules/express/lib/express.js @@ -0,0 +1,103 @@ +/*! 
+ * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + */ + +var EventEmitter = require('events').EventEmitter; +var mixin = require('merge-descriptors'); +var proto = require('./application'); +var Route = require('./router/route'); +var Router = require('./router'); +var req = require('./request'); +var res = require('./response'); + +/** + * Expose `createApplication()`. + */ + +exports = module.exports = createApplication; + +/** + * Create an express application. + * + * @return {Function} + * @api public + */ + +function createApplication() { + var app = function(req, res, next) { + app.handle(req, res, next); + }; + + mixin(app, EventEmitter.prototype, false); + mixin(app, proto, false); + + app.request = { __proto__: req, app: app }; + app.response = { __proto__: res, app: app }; + app.init(); + return app; +} + +/** + * Expose the prototypes. + */ + +exports.application = proto; +exports.request = req; +exports.response = res; + +/** + * Expose constructors. + */ + +exports.Route = Route; +exports.Router = Router; + +/** + * Expose middleware + */ + +exports.query = require('./middleware/query'); +exports.static = require('serve-static'); + +/** + * Replace removed middleware with an appropriate error message. + */ + +[ + 'json', + 'urlencoded', + 'bodyParser', + 'compress', + 'cookieSession', + 'session', + 'logger', + 'cookieParser', + 'favicon', + 'responseTime', + 'errorHandler', + 'timeout', + 'methodOverride', + 'vhost', + 'csrf', + 'directory', + 'limit', + 'multipart', + 'staticCache', +].forEach(function (name) { + Object.defineProperty(exports, name, { + get: function () { + throw new Error('Most middleware (like ' + name + ') is no longer bundled with Express and must be installed separately. Please see https://github.com/senchalabs/connect#middleware.'); + }, + configurable: true + }); +}); diff --git a/node_modules/express/lib/middleware/init.js b/node_modules/express/lib/middleware/init.js new file mode 100644 index 0000000..f3119ed --- /dev/null +++ b/node_modules/express/lib/middleware/init.js @@ -0,0 +1,36 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Initialization middleware, exposing the + * request and response to each other, as well + * as defaulting the X-Powered-By header field. + * + * @param {Function} app + * @return {Function} + * @api private + */ + +exports.init = function(app){ + return function expressInit(req, res, next){ + if (app.enabled('x-powered-by')) res.setHeader('X-Powered-By', 'Express'); + req.res = res; + res.req = req; + req.next = next; + + req.__proto__ = app.request; + res.__proto__ = app.response; + + res.locals = res.locals || Object.create(null); + + next(); + }; +}; + diff --git a/node_modules/express/lib/middleware/query.js b/node_modules/express/lib/middleware/query.js new file mode 100644 index 0000000..5f76f84 --- /dev/null +++ b/node_modules/express/lib/middleware/query.js @@ -0,0 +1,46 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ */ + +var parseUrl = require('parseurl'); +var qs = require('qs'); + +/** + * @param {Object} options + * @return {Function} + * @api public + */ + +module.exports = function query(options) { + var opts = Object.create(options || null); + var queryparse = qs.parse; + + if (typeof options === 'function') { + queryparse = options; + opts = undefined; + } + + if (opts !== undefined && opts.allowPrototypes === undefined) { + // back-compat for qs module + opts.allowPrototypes = true; + } + + return function query(req, res, next){ + if (!req.query) { + var val = parseUrl(req).query; + req.query = queryparse(val, opts); + } + + next(); + }; +}; diff --git a/node_modules/express/lib/request.js b/node_modules/express/lib/request.js new file mode 100644 index 0000000..557d050 --- /dev/null +++ b/node_modules/express/lib/request.js @@ -0,0 +1,502 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var accepts = require('accepts'); +var deprecate = require('depd')('express'); +var isIP = require('net').isIP; +var typeis = require('type-is'); +var http = require('http'); +var fresh = require('fresh'); +var parseRange = require('range-parser'); +var parse = require('parseurl'); +var proxyaddr = require('proxy-addr'); + +/** + * Request prototype. + */ + +var req = exports = module.exports = { + __proto__: http.IncomingMessage.prototype +}; + +/** + * Return request header. + * + * The `Referrer` header field is special-cased, + * both `Referrer` and `Referer` are interchangeable. + * + * Examples: + * + * req.get('Content-Type'); + * // => "text/plain" + * + * req.get('content-type'); + * // => "text/plain" + * + * req.get('Something'); + * // => undefined + * + * Aliased as `req.header()`. + * + * @param {String} name + * @return {String} + * @public + */ + +req.get = +req.header = function header(name) { + if (!name) { + throw new TypeError('name argument is required to req.get'); + } + + if (typeof name !== 'string') { + throw new TypeError('name must be a string to req.get'); + } + + var lc = name.toLowerCase(); + + switch (lc) { + case 'referer': + case 'referrer': + return this.headers.referrer + || this.headers.referer; + default: + return this.headers[lc]; + } +}; + +/** + * To do: update docs. + * + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single MIME type string + * such as "application/json", an extension name + * such as "json", a comma-delimited list such as "json, html, text/plain", + * an argument list such as `"json", "html", "text/plain"`, + * or an array `["json", "html", "text/plain"]`. When a list + * or array is given, the _best_ match, if any is returned. 
+ * + * Examples: + * + * // Accept: text/html + * req.accepts('html'); + * // => "html" + * + * // Accept: text/*, application/json + * req.accepts('html'); + * // => "html" + * req.accepts('text/html'); + * // => "text/html" + * req.accepts('json, text'); + * // => "json" + * req.accepts('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * req.accepts('image/png'); + * req.accepts('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * req.accepts(['html', 'json']); + * req.accepts('html', 'json'); + * req.accepts('html, json'); + * // => "json" + * + * @param {String|Array} type(s) + * @return {String|Array|Boolean} + * @public + */ + +req.accepts = function(){ + var accept = accepts(this); + return accept.types.apply(accept, arguments); +}; + +/** + * Check if the given `encoding`s are accepted. + * + * @param {String} ...encoding + * @return {String|Array} + * @public + */ + +req.acceptsEncodings = function(){ + var accept = accepts(this); + return accept.encodings.apply(accept, arguments); +}; + +req.acceptsEncoding = deprecate.function(req.acceptsEncodings, + 'req.acceptsEncoding: Use acceptsEncodings instead'); + +/** + * Check if the given `charset`s are acceptable, + * otherwise you should respond with 406 "Not Acceptable". + * + * @param {String} ...charset + * @return {String|Array} + * @public + */ + +req.acceptsCharsets = function(){ + var accept = accepts(this); + return accept.charsets.apply(accept, arguments); +}; + +req.acceptsCharset = deprecate.function(req.acceptsCharsets, + 'req.acceptsCharset: Use acceptsCharsets instead'); + +/** + * Check if the given `lang`s are acceptable, + * otherwise you should respond with 406 "Not Acceptable". + * + * @param {String} ...lang + * @return {String|Array} + * @public + */ + +req.acceptsLanguages = function(){ + var accept = accepts(this); + return accept.languages.apply(accept, arguments); +}; + +req.acceptsLanguage = deprecate.function(req.acceptsLanguages, + 'req.acceptsLanguage: Use acceptsLanguages instead'); + +/** + * Parse Range header field, capping to the given `size`. + * + * Unspecified ranges such as "0-" require knowledge of your resource length. In + * the case of a byte range this is of course the total number of bytes. If the + * Range header field is not given `undefined` is returned, `-1` when unsatisfiable, + * and `-2` when syntactically invalid. + * + * When ranges are returned, the array has a "type" property which is the type of + * range that is required (most commonly, "bytes"). Each array element is an object + * with a "start" and "end" property for the portion of the range. + * + * The "combine" option can be set to `true` and overlapping & adjacent ranges + * will be combined into a single range. + * + * NOTE: remember that ranges are inclusive, so for example "Range: users=0-3" + * should respond with 4 users when available, not 3. + * + * @param {number} size + * @param {object} [options] + * @param {boolean} [options.combine=false] + * @return {number|array} + * @public + */ + +req.range = function range(size, options) { + var range = this.get('Range'); + if (!range) return; + return parseRange(size, range, options); +}; + +/** + * Return the value of param `name` when present or `defaultValue`. + * + * - Checks route placeholders, ex: _/user/:id_ + * - Checks body params, ex: id=12, {"id":12} + * - Checks query string params, ex: ?id=12 + * + * To utilize request bodies, `req.body` + * should be an object. 
This can be done by using + * the `bodyParser()` middleware. + * + * @param {String} name + * @param {Mixed} [defaultValue] + * @return {String} + * @public + */ + +req.param = function param(name, defaultValue) { + var params = this.params || {}; + var body = this.body || {}; + var query = this.query || {}; + + var args = arguments.length === 1 + ? 'name' + : 'name, default'; + deprecate('req.param(' + args + '): Use req.params, req.body, or req.query instead'); + + if (null != params[name] && params.hasOwnProperty(name)) return params[name]; + if (null != body[name]) return body[name]; + if (null != query[name]) return query[name]; + + return defaultValue; +}; + +/** + * Check if the incoming request contains the "Content-Type" + * header field, and it contains the give mime `type`. + * + * Examples: + * + * // With Content-Type: text/html; charset=utf-8 + * req.is('html'); + * req.is('text/html'); + * req.is('text/*'); + * // => true + * + * // When Content-Type is application/json + * req.is('json'); + * req.is('application/json'); + * req.is('application/*'); + * // => true + * + * req.is('html'); + * // => false + * + * @param {String|Array} types... + * @return {String|false|null} + * @public + */ + +req.is = function is(types) { + var arr = types; + + // support flattened arguments + if (!Array.isArray(types)) { + arr = new Array(arguments.length); + for (var i = 0; i < arr.length; i++) { + arr[i] = arguments[i]; + } + } + + return typeis(this, arr); +}; + +/** + * Return the protocol string "http" or "https" + * when requested with TLS. When the "trust proxy" + * setting trusts the socket address, the + * "X-Forwarded-Proto" header field will be trusted + * and used if present. + * + * If you're running behind a reverse proxy that + * supplies https for you this may be enabled. + * + * @return {String} + * @public + */ + +defineGetter(req, 'protocol', function protocol(){ + var proto = this.connection.encrypted + ? 'https' + : 'http'; + var trust = this.app.get('trust proxy fn'); + + if (!trust(this.connection.remoteAddress, 0)) { + return proto; + } + + // Note: X-Forwarded-Proto is normally only ever a + // single value, but this is to be safe. + proto = this.get('X-Forwarded-Proto') || proto; + return proto.split(/\s*,\s*/)[0]; +}); + +/** + * Short-hand for: + * + * req.protocol === 'https' + * + * @return {Boolean} + * @public + */ + +defineGetter(req, 'secure', function secure(){ + return this.protocol === 'https'; +}); + +/** + * Return the remote address from the trusted proxy. + * + * The is the remote address on the socket unless + * "trust proxy" is set. + * + * @return {String} + * @public + */ + +defineGetter(req, 'ip', function ip(){ + var trust = this.app.get('trust proxy fn'); + return proxyaddr(this, trust); +}); + +/** + * When "trust proxy" is set, trusted proxy addresses + client. + * + * For example if the value were "client, proxy1, proxy2" + * you would receive the array `["client", "proxy1", "proxy2"]` + * where "proxy2" is the furthest down-stream and "proxy1" and + * "proxy2" were trusted. + * + * @return {Array} + * @public + */ + +defineGetter(req, 'ips', function ips() { + var trust = this.app.get('trust proxy fn'); + var addrs = proxyaddr.all(this, trust); + return addrs.slice(1).reverse(); +}); + +/** + * Return subdomains as an array. + * + * Subdomains are the dot-separated parts of the host before the main domain of + * the app. By default, the domain of the app is assumed to be the last two + * parts of the host. 
This can be changed by setting "subdomain offset". + * + * For example, if the domain is "tobi.ferrets.example.com": + * If "subdomain offset" is not set, req.subdomains is `["ferrets", "tobi"]`. + * If "subdomain offset" is 3, req.subdomains is `["tobi"]`. + * + * @return {Array} + * @public + */ + +defineGetter(req, 'subdomains', function subdomains() { + var hostname = this.hostname; + + if (!hostname) return []; + + var offset = this.app.get('subdomain offset'); + var subdomains = !isIP(hostname) + ? hostname.split('.').reverse() + : [hostname]; + + return subdomains.slice(offset); +}); + +/** + * Short-hand for `url.parse(req.url).pathname`. + * + * @return {String} + * @public + */ + +defineGetter(req, 'path', function path() { + return parse(this).pathname; +}); + +/** + * Parse the "Host" header field to a hostname. + * + * When the "trust proxy" setting trusts the socket + * address, the "X-Forwarded-Host" header field will + * be trusted. + * + * @return {String} + * @public + */ + +defineGetter(req, 'hostname', function hostname(){ + var trust = this.app.get('trust proxy fn'); + var host = this.get('X-Forwarded-Host'); + + if (!host || !trust(this.connection.remoteAddress, 0)) { + host = this.get('Host'); + } + + if (!host) return; + + // IPv6 literal support + var offset = host[0] === '[' + ? host.indexOf(']') + 1 + : 0; + var index = host.indexOf(':', offset); + + return index !== -1 + ? host.substring(0, index) + : host; +}); + +// TODO: change req.host to return host in next major + +defineGetter(req, 'host', deprecate.function(function host(){ + return this.hostname; +}, 'req.host: Use req.hostname instead')); + +/** + * Check if the request is fresh, aka + * Last-Modified and/or the ETag + * still match. + * + * @return {Boolean} + * @public + */ + +defineGetter(req, 'fresh', function(){ + var method = this.method; + var s = this.res.statusCode; + + // GET or HEAD for weak freshness validation only + if ('GET' !== method && 'HEAD' !== method) return false; + + // 2xx or 304 as per rfc2616 14.26 + if ((s >= 200 && s < 300) || 304 === s) { + return fresh(this.headers, (this.res._headers || {})); + } + + return false; +}); + +/** + * Check if the request is stale, aka + * "Last-Modified" and / or the "ETag" for the + * resource has changed. + * + * @return {Boolean} + * @public + */ + +defineGetter(req, 'stale', function stale(){ + return !this.fresh; +}); + +/** + * Check if the request was an _XMLHttpRequest_. + * + * @return {Boolean} + * @public + */ + +defineGetter(req, 'xhr', function xhr(){ + var val = this.get('X-Requested-With') || ''; + return val.toLowerCase() === 'xmlhttprequest'; +}); + +/** + * Helper function for creating a getter on an object. + * + * @param {Object} obj + * @param {String} name + * @param {Function} getter + * @private + */ +function defineGetter(obj, name, getter) { + Object.defineProperty(obj, name, { + configurable: true, + enumerable: true, + get: getter + }); +}; diff --git a/node_modules/express/lib/response.js b/node_modules/express/lib/response.js new file mode 100644 index 0000000..6128f45 --- /dev/null +++ b/node_modules/express/lib/response.js @@ -0,0 +1,1065 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ * @private + */ + +var contentDisposition = require('content-disposition'); +var deprecate = require('depd')('express'); +var encodeUrl = require('encodeurl'); +var escapeHtml = require('escape-html'); +var http = require('http'); +var isAbsolute = require('./utils').isAbsolute; +var onFinished = require('on-finished'); +var path = require('path'); +var merge = require('utils-merge'); +var sign = require('cookie-signature').sign; +var normalizeType = require('./utils').normalizeType; +var normalizeTypes = require('./utils').normalizeTypes; +var setCharset = require('./utils').setCharset; +var statusCodes = http.STATUS_CODES; +var cookie = require('cookie'); +var send = require('send'); +var extname = path.extname; +var mime = send.mime; +var resolve = path.resolve; +var vary = require('vary'); + +/** + * Response prototype. + */ + +var res = module.exports = { + __proto__: http.ServerResponse.prototype +}; + +/** + * Module variables. + * @private + */ + +var charsetRegExp = /;\s*charset\s*=/; + +/** + * Set status `code`. + * + * @param {Number} code + * @return {ServerResponse} + * @public + */ + +res.status = function status(code) { + this.statusCode = code; + return this; +}; + +/** + * Set Link header field with the given `links`. + * + * Examples: + * + * res.links({ + * next: 'http://api.example.com/users?page=2', + * last: 'http://api.example.com/users?page=5' + * }); + * + * @param {Object} links + * @return {ServerResponse} + * @public + */ + +res.links = function(links){ + var link = this.get('Link') || ''; + if (link) link += ', '; + return this.set('Link', link + Object.keys(links).map(function(rel){ + return '<' + links[rel] + '>; rel="' + rel + '"'; + }).join(', ')); +}; + +/** + * Send a response. + * + * Examples: + * + * res.send(new Buffer('wahoo')); + * res.send({ some: 'json' }); + * res.send('

<p>some html</p>
'); + * + * @param {string|number|boolean|object|Buffer} body + * @public + */ + +res.send = function send(body) { + var chunk = body; + var encoding; + var len; + var req = this.req; + var type; + + // settings + var app = this.app; + + // allow status / body + if (arguments.length === 2) { + // res.send(body, status) backwards compat + if (typeof arguments[0] !== 'number' && typeof arguments[1] === 'number') { + deprecate('res.send(body, status): Use res.status(status).send(body) instead'); + this.statusCode = arguments[1]; + } else { + deprecate('res.send(status, body): Use res.status(status).send(body) instead'); + this.statusCode = arguments[0]; + chunk = arguments[1]; + } + } + + // disambiguate res.send(status) and res.send(status, num) + if (typeof chunk === 'number' && arguments.length === 1) { + // res.send(status) will set status message as text string + if (!this.get('Content-Type')) { + this.type('txt'); + } + + deprecate('res.send(status): Use res.sendStatus(status) instead'); + this.statusCode = chunk; + chunk = statusCodes[chunk]; + } + + switch (typeof chunk) { + // string defaulting to html + case 'string': + if (!this.get('Content-Type')) { + this.type('html'); + } + break; + case 'boolean': + case 'number': + case 'object': + if (chunk === null) { + chunk = ''; + } else if (Buffer.isBuffer(chunk)) { + if (!this.get('Content-Type')) { + this.type('bin'); + } + } else { + return this.json(chunk); + } + break; + } + + // write strings in utf-8 + if (typeof chunk === 'string') { + encoding = 'utf8'; + type = this.get('Content-Type'); + + // reflect this in content-type + if (typeof type === 'string') { + this.set('Content-Type', setCharset(type, 'utf-8')); + } + } + + // populate Content-Length + if (chunk !== undefined) { + if (!Buffer.isBuffer(chunk)) { + // convert chunk to Buffer; saves later double conversions + chunk = new Buffer(chunk, encoding); + encoding = undefined; + } + + len = chunk.length; + this.set('Content-Length', len); + } + + // populate ETag + var etag; + var generateETag = len !== undefined && app.get('etag fn'); + if (typeof generateETag === 'function' && !this.get('ETag')) { + if ((etag = generateETag(chunk, encoding))) { + this.set('ETag', etag); + } + } + + // freshness + if (req.fresh) this.statusCode = 304; + + // strip irrelevant headers + if (204 === this.statusCode || 304 === this.statusCode) { + this.removeHeader('Content-Type'); + this.removeHeader('Content-Length'); + this.removeHeader('Transfer-Encoding'); + chunk = ''; + } + + if (req.method === 'HEAD') { + // skip body for HEAD + this.end(); + } else { + // respond + this.end(chunk, encoding); + } + + return this; +}; + +/** + * Send JSON response. 
+ * + * Examples: + * + * res.json(null); + * res.json({ user: 'tj' }); + * + * @param {string|number|boolean|object} obj + * @public + */ + +res.json = function json(obj) { + var val = obj; + + // allow status / body + if (arguments.length === 2) { + // res.json(body, status) backwards compat + if (typeof arguments[1] === 'number') { + deprecate('res.json(obj, status): Use res.status(status).json(obj) instead'); + this.statusCode = arguments[1]; + } else { + deprecate('res.json(status, obj): Use res.status(status).json(obj) instead'); + this.statusCode = arguments[0]; + val = arguments[1]; + } + } + + // settings + var app = this.app; + var replacer = app.get('json replacer'); + var spaces = app.get('json spaces'); + var body = stringify(val, replacer, spaces); + + // content-type + if (!this.get('Content-Type')) { + this.set('Content-Type', 'application/json'); + } + + return this.send(body); +}; + +/** + * Send JSON response with JSONP callback support. + * + * Examples: + * + * res.jsonp(null); + * res.jsonp({ user: 'tj' }); + * + * @param {string|number|boolean|object} obj + * @public + */ + +res.jsonp = function jsonp(obj) { + var val = obj; + + // allow status / body + if (arguments.length === 2) { + // res.json(body, status) backwards compat + if (typeof arguments[1] === 'number') { + deprecate('res.jsonp(obj, status): Use res.status(status).json(obj) instead'); + this.statusCode = arguments[1]; + } else { + deprecate('res.jsonp(status, obj): Use res.status(status).jsonp(obj) instead'); + this.statusCode = arguments[0]; + val = arguments[1]; + } + } + + // settings + var app = this.app; + var replacer = app.get('json replacer'); + var spaces = app.get('json spaces'); + var body = stringify(val, replacer, spaces); + var callback = this.req.query[app.get('jsonp callback name')]; + + // content-type + if (!this.get('Content-Type')) { + this.set('X-Content-Type-Options', 'nosniff'); + this.set('Content-Type', 'application/json'); + } + + // fixup callback + if (Array.isArray(callback)) { + callback = callback[0]; + } + + // jsonp + if (typeof callback === 'string' && callback.length !== 0) { + this.charset = 'utf-8'; + this.set('X-Content-Type-Options', 'nosniff'); + this.set('Content-Type', 'text/javascript'); + + // restrict callback charset + callback = callback.replace(/[^\[\]\w$.]/g, ''); + + // replace chars not allowed in JavaScript that are in JSON + body = body + .replace(/\u2028/g, '\\u2028') + .replace(/\u2029/g, '\\u2029'); + + // the /**/ is a specific security mitigation for "Rosetta Flash JSONP abuse" + // the typeof check is just to reduce client error noise + body = '/**/ typeof ' + callback + ' === \'function\' && ' + callback + '(' + body + ');'; + } + + return this.send(body); +}; + +/** + * Send given HTTP status code. + * + * Sets the response status to `statusCode` and the body of the + * response to the standard description from node's http.STATUS_CODES + * or the statusCode number if no description. + * + * Examples: + * + * res.sendStatus(200); + * + * @param {number} statusCode + * @public + */ + +res.sendStatus = function sendStatus(statusCode) { + var body = statusCodes[statusCode] || String(statusCode); + + this.statusCode = statusCode; + this.type('txt'); + + return this.send(body); +}; + +/** + * Transfer the file at the given `path`. + * + * Automatically sets the _Content-Type_ response header field. + * The callback `callback(err)` is invoked when the transfer is complete + * or when an error occurs. 
Be sure to check `res.sentHeader` + * if you wish to attempt responding, as the header and some data + * may have already been transferred. + * + * Options: + * + * - `maxAge` defaulting to 0 (can be string converted by `ms`) + * - `root` root directory for relative filenames + * - `headers` object of headers to serve with file + * - `dotfiles` serve dotfiles, defaulting to false; can be `"allow"` to send them + * + * Other options are passed along to `send`. + * + * Examples: + * + * The following example illustrates how `res.sendFile()` may + * be used as an alternative for the `static()` middleware for + * dynamic situations. The code backing `res.sendFile()` is actually + * the same code, so HTTP cache support etc is identical. + * + * app.get('/user/:uid/photos/:file', function(req, res){ + * var uid = req.params.uid + * , file = req.params.file; + * + * req.user.mayViewFilesFrom(uid, function(yes){ + * if (yes) { + * res.sendFile('/uploads/' + uid + '/' + file); + * } else { + * res.send(403, 'Sorry! you cant see that.'); + * } + * }); + * }); + * + * @public + */ + +res.sendFile = function sendFile(path, options, callback) { + var done = callback; + var req = this.req; + var res = this; + var next = req.next; + var opts = options || {}; + + if (!path) { + throw new TypeError('path argument is required to res.sendFile'); + } + + // support function as second arg + if (typeof options === 'function') { + done = options; + opts = {}; + } + + if (!opts.root && !isAbsolute(path)) { + throw new TypeError('path must be absolute or specify root to res.sendFile'); + } + + // create file stream + var pathname = encodeURI(path); + var file = send(req, pathname, opts); + + // transfer + sendfile(res, file, opts, function (err) { + if (done) return done(err); + if (err && err.code === 'EISDIR') return next(); + + // next() all but write errors + if (err && err.code !== 'ECONNABORTED' && err.syscall !== 'write') { + next(err); + } + }); +}; + +/** + * Transfer the file at the given `path`. + * + * Automatically sets the _Content-Type_ response header field. + * The callback `callback(err)` is invoked when the transfer is complete + * or when an error occurs. Be sure to check `res.sentHeader` + * if you wish to attempt responding, as the header and some data + * may have already been transferred. + * + * Options: + * + * - `maxAge` defaulting to 0 (can be string converted by `ms`) + * - `root` root directory for relative filenames + * - `headers` object of headers to serve with file + * - `dotfiles` serve dotfiles, defaulting to false; can be `"allow"` to send them + * + * Other options are passed along to `send`. + * + * Examples: + * + * The following example illustrates how `res.sendfile()` may + * be used as an alternative for the `static()` middleware for + * dynamic situations. The code backing `res.sendfile()` is actually + * the same code, so HTTP cache support etc is identical. + * + * app.get('/user/:uid/photos/:file', function(req, res){ + * var uid = req.params.uid + * , file = req.params.file; + * + * req.user.mayViewFilesFrom(uid, function(yes){ + * if (yes) { + * res.sendfile('/uploads/' + uid + '/' + file); + * } else { + * res.send(403, 'Sorry! 
you cant see that.'); + * } + * }); + * }); + * + * @public + */ + +res.sendfile = function (path, options, callback) { + var done = callback; + var req = this.req; + var res = this; + var next = req.next; + var opts = options || {}; + + // support function as second arg + if (typeof options === 'function') { + done = options; + opts = {}; + } + + // create file stream + var file = send(req, path, opts); + + // transfer + sendfile(res, file, opts, function (err) { + if (done) return done(err); + if (err && err.code === 'EISDIR') return next(); + + // next() all but write errors + if (err && err.code !== 'ECONNABORT' && err.syscall !== 'write') { + next(err); + } + }); +}; + +res.sendfile = deprecate.function(res.sendfile, + 'res.sendfile: Use res.sendFile instead'); + +/** + * Transfer the file at the given `path` as an attachment. + * + * Optionally providing an alternate attachment `filename`, + * and optional callback `callback(err)`. The callback is invoked + * when the data transfer is complete, or when an error has + * ocurred. Be sure to check `res.headersSent` if you plan to respond. + * + * This method uses `res.sendfile()`. + * + * @public + */ + +res.download = function download(path, filename, callback) { + var done = callback; + var name = filename; + + // support function as second arg + if (typeof filename === 'function') { + done = filename; + name = null; + } + + // set Content-Disposition when file is sent + var headers = { + 'Content-Disposition': contentDisposition(name || path) + }; + + // Resolve the full path for sendFile + var fullPath = resolve(path); + + return this.sendFile(fullPath, { headers: headers }, done); +}; + +/** + * Set _Content-Type_ response header with `type` through `mime.lookup()` + * when it does not contain "/", or set the Content-Type to `type` otherwise. + * + * Examples: + * + * res.type('.html'); + * res.type('html'); + * res.type('json'); + * res.type('application/json'); + * res.type('png'); + * + * @param {String} type + * @return {ServerResponse} for chaining + * @public + */ + +res.contentType = +res.type = function contentType(type) { + var ct = type.indexOf('/') === -1 + ? mime.lookup(type) + : type; + + return this.set('Content-Type', ct); +}; + +/** + * Respond to the Acceptable formats using an `obj` + * of mime-type callbacks. + * + * This method uses `req.accepted`, an array of + * acceptable types ordered by their quality values. + * When "Accept" is not present the _first_ callback + * is invoked, otherwise the first match is used. When + * no match is performed the server responds with + * 406 "Not Acceptable". + * + * Content-Type is set for you, however if you choose + * you may alter this within the callback using `res.type()` + * or `res.set('Content-Type', ...)`. + * + * res.format({ + * 'text/plain': function(){ + * res.send('hey'); + * }, + * + * 'text/html': function(){ + * res.send('

<p>hey</p>
'); + * }, + * + * 'appliation/json': function(){ + * res.send({ message: 'hey' }); + * } + * }); + * + * In addition to canonicalized MIME types you may + * also use extnames mapped to these types: + * + * res.format({ + * text: function(){ + * res.send('hey'); + * }, + * + * html: function(){ + * res.send('

<p>hey</p>
'); + * }, + * + * json: function(){ + * res.send({ message: 'hey' }); + * } + * }); + * + * By default Express passes an `Error` + * with a `.status` of 406 to `next(err)` + * if a match is not made. If you provide + * a `.default` callback it will be invoked + * instead. + * + * @param {Object} obj + * @return {ServerResponse} for chaining + * @public + */ + +res.format = function(obj){ + var req = this.req; + var next = req.next; + + var fn = obj.default; + if (fn) delete obj.default; + var keys = Object.keys(obj); + + var key = keys.length > 0 + ? req.accepts(keys) + : false; + + this.vary("Accept"); + + if (key) { + this.set('Content-Type', normalizeType(key).value); + obj[key](req, this, next); + } else if (fn) { + fn(); + } else { + var err = new Error('Not Acceptable'); + err.status = err.statusCode = 406; + err.types = normalizeTypes(keys).map(function(o){ return o.value }); + next(err); + } + + return this; +}; + +/** + * Set _Content-Disposition_ header to _attachment_ with optional `filename`. + * + * @param {String} filename + * @return {ServerResponse} + * @public + */ + +res.attachment = function attachment(filename) { + if (filename) { + this.type(extname(filename)); + } + + this.set('Content-Disposition', contentDisposition(filename)); + + return this; +}; + +/** + * Append additional header `field` with value `val`. + * + * Example: + * + * res.append('Link', ['', '']); + * res.append('Set-Cookie', 'foo=bar; Path=/; HttpOnly'); + * res.append('Warning', '199 Miscellaneous warning'); + * + * @param {String} field + * @param {String|Array} val + * @return {ServerResponse} for chaining + * @public + */ + +res.append = function append(field, val) { + var prev = this.get(field); + var value = val; + + if (prev) { + // concat the new and prev vals + value = Array.isArray(prev) ? prev.concat(val) + : Array.isArray(val) ? [prev].concat(val) + : [prev, val]; + } + + return this.set(field, value); +}; + +/** + * Set header `field` to `val`, or pass + * an object of header fields. + * + * Examples: + * + * res.set('Foo', ['bar', 'baz']); + * res.set('Accept', 'application/json'); + * res.set({ Accept: 'text/plain', 'X-API-Key': 'tobi' }); + * + * Aliased as `res.header()`. + * + * @param {String|Object} field + * @param {String|Array} val + * @return {ServerResponse} for chaining + * @public + */ + +res.set = +res.header = function header(field, val) { + if (arguments.length === 2) { + var value = Array.isArray(val) + ? val.map(String) + : String(val); + + // add charset to content-type + if (field.toLowerCase() === 'content-type' && !charsetRegExp.test(value)) { + var charset = mime.charsets.lookup(value.split(';')[0]); + if (charset) value += '; charset=' + charset.toLowerCase(); + } + + this.setHeader(field, value); + } else { + for (var key in field) { + this.set(key, field[key]); + } + } + return this; +}; + +/** + * Get value for header `field`. + * + * @param {String} field + * @return {String} + * @public + */ + +res.get = function(field){ + return this.getHeader(field); +}; + +/** + * Clear cookie `name`. + * + * @param {String} name + * @param {Object} [options] + * @return {ServerResponse} for chaining + * @public + */ + +res.clearCookie = function clearCookie(name, options) { + var opts = merge({ expires: new Date(1), path: '/' }, options); + + return this.cookie(name, '', opts); +}; + +/** + * Set cookie `name` to `value`, with the given `options`. 
+ * + * Options: + * + * - `maxAge` max-age in milliseconds, converted to `expires` + * - `signed` sign the cookie + * - `path` defaults to "/" + * + * Examples: + * + * // "Remember Me" for 15 minutes + * res.cookie('rememberme', '1', { expires: new Date(Date.now() + 900000), httpOnly: true }); + * + * // save as above + * res.cookie('rememberme', '1', { maxAge: 900000, httpOnly: true }) + * + * @param {String} name + * @param {String|Object} value + * @param {Options} options + * @return {ServerResponse} for chaining + * @public + */ + +res.cookie = function (name, value, options) { + var opts = merge({}, options); + var secret = this.req.secret; + var signed = opts.signed; + + if (signed && !secret) { + throw new Error('cookieParser("secret") required for signed cookies'); + } + + var val = typeof value === 'object' + ? 'j:' + JSON.stringify(value) + : String(value); + + if (signed) { + val = 's:' + sign(val, secret); + } + + if ('maxAge' in opts) { + opts.expires = new Date(Date.now() + opts.maxAge); + opts.maxAge /= 1000; + } + + if (opts.path == null) { + opts.path = '/'; + } + + this.append('Set-Cookie', cookie.serialize(name, String(val), opts)); + + return this; +}; + +/** + * Set the location header to `url`. + * + * The given `url` can also be "back", which redirects + * to the _Referrer_ or _Referer_ headers or "/". + * + * Examples: + * + * res.location('/foo/bar').; + * res.location('http://example.com'); + * res.location('../login'); + * + * @param {String} url + * @return {ServerResponse} for chaining + * @public + */ + +res.location = function location(url) { + var loc = url; + + // "back" is an alias for the referrer + if (url === 'back') { + loc = this.req.get('Referrer') || '/'; + } + + // set location + return this.set('Location', encodeUrl(loc)); +}; + +/** + * Redirect to the given `url` with optional response `status` + * defaulting to 302. + * + * The resulting `url` is determined by `res.location()`, so + * it will play nicely with mounted apps, relative paths, + * `"back"` etc. + * + * Examples: + * + * res.redirect('/foo/bar'); + * res.redirect('http://example.com'); + * res.redirect(301, 'http://example.com'); + * res.redirect('../login'); // /blog/post/1 -> /blog/login + * + * @public + */ + +res.redirect = function redirect(url) { + var address = url; + var body; + var status = 302; + + // allow status / url + if (arguments.length === 2) { + if (typeof arguments[0] === 'number') { + status = arguments[0]; + address = arguments[1]; + } else { + deprecate('res.redirect(url, status): Use res.redirect(status, url) instead'); + status = arguments[1]; + } + } + + // Set location header + address = this.location(address).get('Location'); + + // Support text/{plain,html} by default + this.format({ + text: function(){ + body = statusCodes[status] + '. Redirecting to ' + address; + }, + + html: function(){ + var u = escapeHtml(address); + body = '

<p>' + statusCodes[status] + '. Redirecting to <a href="' + u + '">' + u + '</a></p>
'; + }, + + default: function(){ + body = ''; + } + }); + + // Respond + this.statusCode = status; + this.set('Content-Length', Buffer.byteLength(body)); + + if (this.req.method === 'HEAD') { + this.end(); + } else { + this.end(body); + } +}; + +/** + * Add `field` to Vary. If already present in the Vary set, then + * this call is simply ignored. + * + * @param {Array|String} field + * @return {ServerResponse} for chaining + * @public + */ + +res.vary = function(field){ + // checks for back-compat + if (!field || (Array.isArray(field) && !field.length)) { + deprecate('res.vary(): Provide a field name'); + return this; + } + + vary(this, field); + + return this; +}; + +/** + * Render `view` with the given `options` and optional callback `fn`. + * When a callback function is given a response will _not_ be made + * automatically, otherwise a response of _200_ and _text/html_ is given. + * + * Options: + * + * - `cache` boolean hinting to the engine it should cache + * - `filename` filename of the view being rendered + * + * @public + */ + +res.render = function render(view, options, callback) { + var app = this.req.app; + var done = callback; + var opts = options || {}; + var req = this.req; + var self = this; + + // support callback function as second arg + if (typeof options === 'function') { + done = options; + opts = {}; + } + + // merge res.locals + opts._locals = self.locals; + + // default callback to respond + done = done || function (err, str) { + if (err) return req.next(err); + self.send(str); + }; + + // render + app.render(view, opts, done); +}; + +// pipe the send file stream +function sendfile(res, file, options, callback) { + var done = false; + var streaming; + + // request aborted + function onaborted() { + if (done) return; + done = true; + + var err = new Error('Request aborted'); + err.code = 'ECONNABORTED'; + callback(err); + } + + // directory + function ondirectory() { + if (done) return; + done = true; + + var err = new Error('EISDIR, read'); + err.code = 'EISDIR'; + callback(err); + } + + // errors + function onerror(err) { + if (done) return; + done = true; + callback(err); + } + + // ended + function onend() { + if (done) return; + done = true; + callback(); + } + + // file + function onfile() { + streaming = false; + } + + // finished + function onfinish(err) { + if (err && err.code === 'ECONNRESET') return onaborted(); + if (err) return onerror(err); + if (done) return; + + setImmediate(function () { + if (streaming !== false && !done) { + onaborted(); + return; + } + + if (done) return; + done = true; + callback(); + }); + } + + // streaming + function onstream() { + streaming = true; + } + + file.on('directory', ondirectory); + file.on('end', onend); + file.on('error', onerror); + file.on('file', onfile); + file.on('stream', onstream); + onFinished(res, onfinish); + + if (options.headers) { + // set headers on successful transfer + file.on('headers', function headers(res) { + var obj = options.headers; + var keys = Object.keys(obj); + + for (var i = 0; i < keys.length; i++) { + var k = keys[i]; + res.setHeader(k, obj[k]); + } + }); + } + + // pipe + file.pipe(res); +} + +/** + * Stringify JSON, like JSON.stringify, but v8 optimized. + * @private + */ + +function stringify(value, replacer, spaces) { + // v8 checks arguments.length for optimizing simple call + // https://bugs.chromium.org/p/v8/issues/detail?id=4730 + return replacer || spaces + ? 
JSON.stringify(value, replacer, spaces) + : JSON.stringify(value); +} diff --git a/node_modules/express/lib/router/index.js b/node_modules/express/lib/router/index.js new file mode 100644 index 0000000..dac2514 --- /dev/null +++ b/node_modules/express/lib/router/index.js @@ -0,0 +1,645 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var Route = require('./route'); +var Layer = require('./layer'); +var methods = require('methods'); +var mixin = require('utils-merge'); +var debug = require('debug')('express:router'); +var deprecate = require('depd')('express'); +var flatten = require('array-flatten'); +var parseUrl = require('parseurl'); + +/** + * Module variables. + * @private + */ + +var objectRegExp = /^\[object (\S+)\]$/; +var slice = Array.prototype.slice; +var toString = Object.prototype.toString; + +/** + * Initialize a new `Router` with the given `options`. + * + * @param {Object} options + * @return {Router} which is an callable function + * @public + */ + +var proto = module.exports = function(options) { + var opts = options || {}; + + function router(req, res, next) { + router.handle(req, res, next); + } + + // mixin Router class functions + router.__proto__ = proto; + + router.params = {}; + router._params = []; + router.caseSensitive = opts.caseSensitive; + router.mergeParams = opts.mergeParams; + router.strict = opts.strict; + router.stack = []; + + return router; +}; + +/** + * Map the given param placeholder `name`(s) to the given callback. + * + * Parameter mapping is used to provide pre-conditions to routes + * which use normalized placeholders. For example a _:user_id_ parameter + * could automatically load a user's information from the database without + * any additional code, + * + * The callback uses the same signature as middleware, the only difference + * being that the value of the placeholder is passed, in this case the _id_ + * of the user. Once the `next()` function is invoked, just like middleware + * it will continue on to execute the route, or subsequent parameter functions. + * + * Just like in middleware, you must either respond to the request or call next + * to avoid stalling the request. 
+ * + * app.param('user_id', function(req, res, next, id){ + * User.find(id, function(err, user){ + * if (err) { + * return next(err); + * } else if (!user) { + * return next(new Error('failed to load user')); + * } + * req.user = user; + * next(); + * }); + * }); + * + * @param {String} name + * @param {Function} fn + * @return {app} for chaining + * @public + */ + +proto.param = function param(name, fn) { + // param logic + if (typeof name === 'function') { + deprecate('router.param(fn): Refactor to use path params'); + this._params.push(name); + return; + } + + // apply param functions + var params = this._params; + var len = params.length; + var ret; + + if (name[0] === ':') { + deprecate('router.param(' + JSON.stringify(name) + ', fn): Use router.param(' + JSON.stringify(name.substr(1)) + ', fn) instead'); + name = name.substr(1); + } + + for (var i = 0; i < len; ++i) { + if (ret = params[i](name, fn)) { + fn = ret; + } + } + + // ensure we end up with a + // middleware function + if ('function' !== typeof fn) { + throw new Error('invalid param() call for ' + name + ', got ' + fn); + } + + (this.params[name] = this.params[name] || []).push(fn); + return this; +}; + +/** + * Dispatch a req, res into the router. + * @private + */ + +proto.handle = function handle(req, res, out) { + var self = this; + + debug('dispatching %s %s', req.method, req.url); + + var search = 1 + req.url.indexOf('?'); + var pathlength = search ? search - 1 : req.url.length; + var fqdn = req.url[0] !== '/' && 1 + req.url.substr(0, pathlength).indexOf('://'); + var protohost = fqdn ? req.url.substr(0, req.url.indexOf('/', 2 + fqdn)) : ''; + var idx = 0; + var removed = ''; + var slashAdded = false; + var paramcalled = {}; + + // store options for OPTIONS request + // only used if OPTIONS request + var options = []; + + // middleware and routes + var stack = self.stack; + + // manage inter-router variables + var parentParams = req.params; + var parentUrl = req.baseUrl || ''; + var done = restore(out, req, 'baseUrl', 'next', 'params'); + + // setup next layer + req.next = next; + + // for options requests, respond with a default if nothing else responds + if (req.method === 'OPTIONS') { + done = wrap(done, function(old, err) { + if (err || options.length === 0) return old(err); + sendOptionsResponse(res, options, old); + }); + } + + // setup basic req values + req.baseUrl = parentUrl; + req.originalUrl = req.originalUrl || req.url; + + next(); + + function next(err) { + var layerError = err === 'route' + ? 
null + : err; + + // remove added slash + if (slashAdded) { + req.url = req.url.substr(1); + slashAdded = false; + } + + // restore altered req.url + if (removed.length !== 0) { + req.baseUrl = parentUrl; + req.url = protohost + removed + req.url.substr(protohost.length); + removed = ''; + } + + // no more matching layers + if (idx >= stack.length) { + setImmediate(done, layerError); + return; + } + + // get pathname of request + var path = getPathname(req); + + if (path == null) { + return done(layerError); + } + + // find next matching layer + var layer; + var match; + var route; + + while (match !== true && idx < stack.length) { + layer = stack[idx++]; + match = matchLayer(layer, path); + route = layer.route; + + if (typeof match !== 'boolean') { + // hold on to layerError + layerError = layerError || match; + } + + if (match !== true) { + continue; + } + + if (!route) { + // process non-route handlers normally + continue; + } + + if (layerError) { + // routes do not match with a pending error + match = false; + continue; + } + + var method = req.method; + var has_method = route._handles_method(method); + + // build up automatic options response + if (!has_method && method === 'OPTIONS') { + appendMethods(options, route._options()); + } + + // don't even bother matching route + if (!has_method && method !== 'HEAD') { + match = false; + continue; + } + } + + // no match + if (match !== true) { + return done(layerError); + } + + // store route for dispatch on change + if (route) { + req.route = route; + } + + // Capture one-time layer values + req.params = self.mergeParams + ? mergeParams(layer.params, parentParams) + : layer.params; + var layerPath = layer.path; + + // this should be done for the layer + self.process_params(layer, paramcalled, req, res, function (err) { + if (err) { + return next(layerError || err); + } + + if (route) { + return layer.handle_request(req, res, next); + } + + trim_prefix(layer, layerError, layerPath, path); + }); + } + + function trim_prefix(layer, layerError, layerPath, path) { + var c = path[layerPath.length]; + if (c && '/' !== c && '.' !== c) return next(layerError); + + // Trim off the part of the url that matches the route + // middleware (.use stuff) needs to have the path stripped + if (layerPath.length !== 0) { + debug('trim prefix (%s) from url %s', layerPath, req.url); + removed = layerPath; + req.url = protohost + req.url.substr(protohost.length + removed.length); + + // Ensure leading slash + if (!fqdn && req.url[0] !== '/') { + req.url = '/' + req.url; + slashAdded = true; + } + + // Setup base URL (no trailing slash) + req.baseUrl = parentUrl + (removed[removed.length - 1] === '/' + ? removed.substring(0, removed.length - 1) + : removed); + } + + debug('%s %s : %s', layer.name, layerPath, req.originalUrl); + + if (layerError) { + layer.handle_error(layerError, req, res, next); + } else { + layer.handle_request(req, res, next); + } + } +}; + +/** + * Process any parameters for the layer. 
+ * @private + */ + +proto.process_params = function process_params(layer, called, req, res, done) { + var params = this.params; + + // captured parameters from the layer, keys and values + var keys = layer.keys; + + // fast track + if (!keys || keys.length === 0) { + return done(); + } + + var i = 0; + var name; + var paramIndex = 0; + var key; + var paramVal; + var paramCallbacks; + var paramCalled; + + // process params in order + // param callbacks can be async + function param(err) { + if (err) { + return done(err); + } + + if (i >= keys.length ) { + return done(); + } + + paramIndex = 0; + key = keys[i++]; + + if (!key) { + return done(); + } + + name = key.name; + paramVal = req.params[name]; + paramCallbacks = params[name]; + paramCalled = called[name]; + + if (paramVal === undefined || !paramCallbacks) { + return param(); + } + + // param previously called with same value or error occurred + if (paramCalled && (paramCalled.match === paramVal + || (paramCalled.error && paramCalled.error !== 'route'))) { + // restore value + req.params[name] = paramCalled.value; + + // next param + return param(paramCalled.error); + } + + called[name] = paramCalled = { + error: null, + match: paramVal, + value: paramVal + }; + + paramCallback(); + } + + // single param callbacks + function paramCallback(err) { + var fn = paramCallbacks[paramIndex++]; + + // store updated value + paramCalled.value = req.params[key.name]; + + if (err) { + // store error + paramCalled.error = err; + param(err); + return; + } + + if (!fn) return param(); + + try { + fn(req, res, paramCallback, paramVal, key.name); + } catch (e) { + paramCallback(e); + } + } + + param(); +}; + +/** + * Use the given middleware function, with optional path, defaulting to "/". + * + * Use (like `.all`) will run for any http METHOD, but it will not add + * handlers for those methods so OPTIONS requests will not consider `.use` + * functions even if they could respond. + * + * The other difference is that _route_ path is stripped and not visible + * to the handler function. The main effect of this feature is that mounted + * handlers can operate without any code changes regardless of the "prefix" + * pathname. + * + * @public + */ + +proto.use = function use(fn) { + var offset = 0; + var path = '/'; + + // default path to '/' + // disambiguate router.use([fn]) + if (typeof fn !== 'function') { + var arg = fn; + + while (Array.isArray(arg) && arg.length !== 0) { + arg = arg[0]; + } + + // first arg is the path + if (typeof arg !== 'function') { + offset = 1; + path = fn; + } + } + + var callbacks = flatten(slice.call(arguments, offset)); + + if (callbacks.length === 0) { + throw new TypeError('Router.use() requires middleware functions'); + } + + for (var i = 0; i < callbacks.length; i++) { + var fn = callbacks[i]; + + if (typeof fn !== 'function') { + throw new TypeError('Router.use() requires middleware function but got a ' + gettype(fn)); + } + + // add the middleware + debug('use %s %s', path, fn.name || ''); + + var layer = new Layer(path, { + sensitive: this.caseSensitive, + strict: false, + end: false + }, fn); + + layer.route = undefined; + + this.stack.push(layer); + } + + return this; +}; + +/** + * Create a new Route for the given path. + * + * Each route contains a separate middleware stack and VERB handlers. + * + * See the Route api documentation for details on adding handlers + * and middleware to routes. 
+ * + * @param {String} path + * @return {Route} + * @public + */ + +proto.route = function route(path) { + var route = new Route(path); + + var layer = new Layer(path, { + sensitive: this.caseSensitive, + strict: this.strict, + end: true + }, route.dispatch.bind(route)); + + layer.route = route; + + this.stack.push(layer); + return route; +}; + +// create Router#VERB functions +methods.concat('all').forEach(function(method){ + proto[method] = function(path){ + var route = this.route(path) + route[method].apply(route, slice.call(arguments, 1)); + return this; + }; +}); + +// append methods to a list of methods +function appendMethods(list, addition) { + for (var i = 0; i < addition.length; i++) { + var method = addition[i]; + if (list.indexOf(method) === -1) { + list.push(method); + } + } +} + +// get pathname of request +function getPathname(req) { + try { + return parseUrl(req).pathname; + } catch (err) { + return undefined; + } +} + +// get type for error message +function gettype(obj) { + var type = typeof obj; + + if (type !== 'object') { + return type; + } + + // inspect [[Class]] for objects + return toString.call(obj) + .replace(objectRegExp, '$1'); +} + +/** + * Match path to a layer. + * + * @param {Layer} layer + * @param {string} path + * @private + */ + +function matchLayer(layer, path) { + try { + return layer.match(path); + } catch (err) { + return err; + } +} + +// merge params with parent params +function mergeParams(params, parent) { + if (typeof parent !== 'object' || !parent) { + return params; + } + + // make copy of parent for base + var obj = mixin({}, parent); + + // simple non-numeric merging + if (!(0 in params) || !(0 in parent)) { + return mixin(obj, params); + } + + var i = 0; + var o = 0; + + // determine numeric gaps + while (i in params) { + i++; + } + + while (o in parent) { + o++; + } + + // offset numeric indices in params before merge + for (i--; i >= 0; i--) { + params[i + o] = params[i]; + + // create holes for the merge when necessary + if (i < o) { + delete params[i]; + } + } + + return mixin(obj, params); +} + +// restore obj props after function +function restore(fn, obj) { + var props = new Array(arguments.length - 2); + var vals = new Array(arguments.length - 2); + + for (var i = 0; i < props.length; i++) { + props[i] = arguments[i + 2]; + vals[i] = obj[props[i]]; + } + + return function(err){ + // restore vals + for (var i = 0; i < props.length; i++) { + obj[props[i]] = vals[i]; + } + + return fn.apply(this, arguments); + }; +} + +// send an OPTIONS response +function sendOptionsResponse(res, options, next) { + try { + var body = options.join(','); + res.set('Allow', body); + res.send(body); + } catch (err) { + next(err); + } +} + +// wrap a function +function wrap(old, fn) { + return function proxy() { + var args = new Array(arguments.length + 1); + + args[0] = old; + for (var i = 0, len = arguments.length; i < len; i++) { + args[i + 1] = arguments[i]; + } + + fn.apply(this, args); + }; +} diff --git a/node_modules/express/lib/router/layer.js b/node_modules/express/lib/router/layer.js new file mode 100644 index 0000000..fe9210c --- /dev/null +++ b/node_modules/express/lib/router/layer.js @@ -0,0 +1,176 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ * @private + */ + +var pathRegexp = require('path-to-regexp'); +var debug = require('debug')('express:router:layer'); + +/** + * Module variables. + * @private + */ + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +/** + * Module exports. + * @public + */ + +module.exports = Layer; + +function Layer(path, options, fn) { + if (!(this instanceof Layer)) { + return new Layer(path, options, fn); + } + + debug('new %s', path); + var opts = options || {}; + + this.handle = fn; + this.name = fn.name || ''; + this.params = undefined; + this.path = undefined; + this.regexp = pathRegexp(path, this.keys = [], opts); + + if (path === '/' && opts.end === false) { + this.regexp.fast_slash = true; + } +} + +/** + * Handle the error for the layer. + * + * @param {Error} error + * @param {Request} req + * @param {Response} res + * @param {function} next + * @api private + */ + +Layer.prototype.handle_error = function handle_error(error, req, res, next) { + var fn = this.handle; + + if (fn.length !== 4) { + // not a standard error handler + return next(error); + } + + try { + fn(error, req, res, next); + } catch (err) { + next(err); + } +}; + +/** + * Handle the request for the layer. + * + * @param {Request} req + * @param {Response} res + * @param {function} next + * @api private + */ + +Layer.prototype.handle_request = function handle(req, res, next) { + var fn = this.handle; + + if (fn.length > 3) { + // not a standard request handler + return next(); + } + + try { + fn(req, res, next); + } catch (err) { + next(err); + } +}; + +/** + * Check if this route matches `path`, if so + * populate `.params`. + * + * @param {String} path + * @return {Boolean} + * @api private + */ + +Layer.prototype.match = function match(path) { + if (path == null) { + // no path, nothing matches + this.params = undefined; + this.path = undefined; + return false; + } + + if (this.regexp.fast_slash) { + // fast path non-ending match for / (everything matches) + this.params = {}; + this.path = ''; + return true; + } + + var m = this.regexp.exec(path); + + if (!m) { + this.params = undefined; + this.path = undefined; + return false; + } + + // store values + this.params = {}; + this.path = m[0]; + + var keys = this.keys; + var params = this.params; + + for (var i = 1; i < m.length; i++) { + var key = keys[i - 1]; + var prop = key.name; + var val = decode_param(m[i]); + + if (val !== undefined || !(hasOwnProperty.call(params, prop))) { + params[prop] = val; + } + } + + return true; +}; + +/** + * Decode param value. + * + * @param {string} val + * @return {string} + * @private + */ + +function decode_param(val) { + if (typeof val !== 'string' || val.length === 0) { + return val; + } + + try { + return decodeURIComponent(val); + } catch (err) { + if (err instanceof URIError) { + err.message = 'Failed to decode param \'' + val + '\''; + err.status = err.statusCode = 400; + } + + throw err; + } +} diff --git a/node_modules/express/lib/router/route.js b/node_modules/express/lib/router/route.js new file mode 100644 index 0000000..2788d7b --- /dev/null +++ b/node_modules/express/lib/router/route.js @@ -0,0 +1,210 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ * @private + */ + +var debug = require('debug')('express:router:route'); +var flatten = require('array-flatten'); +var Layer = require('./layer'); +var methods = require('methods'); + +/** + * Module variables. + * @private + */ + +var slice = Array.prototype.slice; +var toString = Object.prototype.toString; + +/** + * Module exports. + * @public + */ + +module.exports = Route; + +/** + * Initialize `Route` with the given `path`, + * + * @param {String} path + * @public + */ + +function Route(path) { + this.path = path; + this.stack = []; + + debug('new %s', path); + + // route handlers for various http methods + this.methods = {}; +} + +/** + * Determine if the route handles a given method. + * @private + */ + +Route.prototype._handles_method = function _handles_method(method) { + if (this.methods._all) { + return true; + } + + var name = method.toLowerCase(); + + if (name === 'head' && !this.methods['head']) { + name = 'get'; + } + + return Boolean(this.methods[name]); +}; + +/** + * @return {Array} supported HTTP methods + * @private + */ + +Route.prototype._options = function _options() { + var methods = Object.keys(this.methods); + + // append automatic head + if (this.methods.get && !this.methods.head) { + methods.push('head'); + } + + for (var i = 0; i < methods.length; i++) { + // make upper case + methods[i] = methods[i].toUpperCase(); + } + + return methods; +}; + +/** + * dispatch req, res into this route + * @private + */ + +Route.prototype.dispatch = function dispatch(req, res, done) { + var idx = 0; + var stack = this.stack; + if (stack.length === 0) { + return done(); + } + + var method = req.method.toLowerCase(); + if (method === 'head' && !this.methods['head']) { + method = 'get'; + } + + req.route = this; + + next(); + + function next(err) { + if (err && err === 'route') { + return done(); + } + + var layer = stack[idx++]; + if (!layer) { + return done(err); + } + + if (layer.method && layer.method !== method) { + return next(err); + } + + if (err) { + layer.handle_error(err, req, res, next); + } else { + layer.handle_request(req, res, next); + } + } +}; + +/** + * Add a handler for all HTTP verbs to this route. + * + * Behaves just like middleware and can respond or call `next` + * to continue processing. + * + * You can use multiple `.all` call to add multiple handlers. + * + * function check_something(req, res, next){ + * next(); + * }; + * + * function validate_user(req, res, next){ + * next(); + * }; + * + * route + * .all(validate_user) + * .all(check_something) + * .get(function(req, res, next){ + * res.send('hello world'); + * }); + * + * @param {function} handler + * @return {Route} for chaining + * @api public + */ + +Route.prototype.all = function all() { + var handles = flatten(slice.call(arguments)); + + for (var i = 0; i < handles.length; i++) { + var handle = handles[i]; + + if (typeof handle !== 'function') { + var type = toString.call(handle); + var msg = 'Route.all() requires callback functions but got a ' + type; + throw new TypeError(msg); + } + + var layer = Layer('/', {}, handle); + layer.method = undefined; + + this.methods._all = true; + this.stack.push(layer); + } + + return this; +}; + +methods.forEach(function(method){ + Route.prototype[method] = function(){ + var handles = flatten(slice.call(arguments)); + + for (var i = 0; i < handles.length; i++) { + var handle = handles[i]; + + if (typeof handle !== 'function') { + var type = toString.call(handle); + var msg = 'Route.' 
+ method + '() requires callback functions but got a ' + type; + throw new Error(msg); + } + + debug('%s %s', method, this.path); + + var layer = Layer('/', {}, handle); + layer.method = method; + + this.methods[method] = true; + this.stack.push(layer); + } + + return this; + }; +}); diff --git a/node_modules/express/lib/utils.js b/node_modules/express/lib/utils.js new file mode 100644 index 0000000..f418c58 --- /dev/null +++ b/node_modules/express/lib/utils.js @@ -0,0 +1,299 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @api private + */ + +var contentDisposition = require('content-disposition'); +var contentType = require('content-type'); +var deprecate = require('depd')('express'); +var flatten = require('array-flatten'); +var mime = require('send').mime; +var basename = require('path').basename; +var etag = require('etag'); +var proxyaddr = require('proxy-addr'); +var qs = require('qs'); +var querystring = require('querystring'); + +/** + * Return strong ETag for `body`. + * + * @param {String|Buffer} body + * @param {String} [encoding] + * @return {String} + * @api private + */ + +exports.etag = function (body, encoding) { + var buf = !Buffer.isBuffer(body) + ? new Buffer(body, encoding) + : body; + + return etag(buf, {weak: false}); +}; + +/** + * Return weak ETag for `body`. + * + * @param {String|Buffer} body + * @param {String} [encoding] + * @return {String} + * @api private + */ + +exports.wetag = function wetag(body, encoding){ + var buf = !Buffer.isBuffer(body) + ? new Buffer(body, encoding) + : body; + + return etag(buf, {weak: true}); +}; + +/** + * Check if `path` looks absolute. + * + * @param {String} path + * @return {Boolean} + * @api private + */ + +exports.isAbsolute = function(path){ + if ('/' === path[0]) return true; + if (':' === path[1] && ('\\' === path[2] || '/' === path[2])) return true; // Windows device path + if ('\\\\' === path.substring(0, 2)) return true; // Microsoft Azure absolute path +}; + +/** + * Flatten the given `arr`. + * + * @param {Array} arr + * @return {Array} + * @api private + */ + +exports.flatten = deprecate.function(flatten, + 'utils.flatten: use array-flatten npm module instead'); + +/** + * Normalize the given `type`, for example "html" becomes "text/html". + * + * @param {String} type + * @return {Object} + * @api private + */ + +exports.normalizeType = function(type){ + return ~type.indexOf('/') + ? acceptParams(type) + : { value: mime.lookup(type), params: {} }; +}; + +/** + * Normalize `types`, for example "html" becomes "text/html". + * + * @param {Array} types + * @return {Array} + * @api private + */ + +exports.normalizeTypes = function(types){ + var ret = []; + + for (var i = 0; i < types.length; ++i) { + ret.push(exports.normalizeType(types[i])); + } + + return ret; +}; + +/** + * Generate Content-Disposition header appropriate for the filename. + * non-ascii filenames are urlencoded and a filename* parameter is added + * + * @param {String} filename + * @return {String} + * @api private + */ + +exports.contentDisposition = deprecate.function(contentDisposition, + 'utils.contentDisposition: use content-disposition npm module instead'); + +/** + * Parse accept params `str` returning an + * object with `.value`, `.quality` and `.params`. 
+ * also includes `.originalIndex` for stable sorting + * + * @param {String} str + * @return {Object} + * @api private + */ + +function acceptParams(str, index) { + var parts = str.split(/ *; */); + var ret = { value: parts[0], quality: 1, params: {}, originalIndex: index }; + + for (var i = 1; i < parts.length; ++i) { + var pms = parts[i].split(/ *= */); + if ('q' === pms[0]) { + ret.quality = parseFloat(pms[1]); + } else { + ret.params[pms[0]] = pms[1]; + } + } + + return ret; +} + +/** + * Compile "etag" value to function. + * + * @param {Boolean|String|Function} val + * @return {Function} + * @api private + */ + +exports.compileETag = function(val) { + var fn; + + if (typeof val === 'function') { + return val; + } + + switch (val) { + case true: + fn = exports.wetag; + break; + case false: + break; + case 'strong': + fn = exports.etag; + break; + case 'weak': + fn = exports.wetag; + break; + default: + throw new TypeError('unknown value for etag function: ' + val); + } + + return fn; +} + +/** + * Compile "query parser" value to function. + * + * @param {String|Function} val + * @return {Function} + * @api private + */ + +exports.compileQueryParser = function compileQueryParser(val) { + var fn; + + if (typeof val === 'function') { + return val; + } + + switch (val) { + case true: + fn = querystring.parse; + break; + case false: + fn = newObject; + break; + case 'extended': + fn = parseExtendedQueryString; + break; + case 'simple': + fn = querystring.parse; + break; + default: + throw new TypeError('unknown value for query parser function: ' + val); + } + + return fn; +} + +/** + * Compile "proxy trust" value to function. + * + * @param {Boolean|String|Number|Array|Function} val + * @return {Function} + * @api private + */ + +exports.compileTrust = function(val) { + if (typeof val === 'function') return val; + + if (val === true) { + // Support plain true/false + return function(){ return true }; + } + + if (typeof val === 'number') { + // Support trusting hop count + return function(a, i){ return i < val }; + } + + if (typeof val === 'string') { + // Support comma-separated values + val = val.split(/ *, */); + } + + return proxyaddr.compile(val || []); +} + +/** + * Set the charset in a given Content-Type string. + * + * @param {String} type + * @param {String} charset + * @return {String} + * @api private + */ + +exports.setCharset = function setCharset(type, charset) { + if (!type || !charset) { + return type; + } + + // parse type + var parsed = contentType.parse(type); + + // set charset + parsed.parameters.charset = charset; + + // format type + return contentType.format(parsed); +}; + +/** + * Parse an extended query string with qs. + * + * @return {Object} + * @private + */ + +function parseExtendedQueryString(str) { + return qs.parse(str, { + allowPrototypes: true + }); +} + +/** + * Return new empty object. + * + * @return {Object} + * @api private + */ + +function newObject() { + return {}; +} diff --git a/node_modules/express/lib/view.js b/node_modules/express/lib/view.js new file mode 100644 index 0000000..52415d4 --- /dev/null +++ b/node_modules/express/lib/view.js @@ -0,0 +1,173 @@ +/*! + * express + * Copyright(c) 2009-2013 TJ Holowaychuk + * Copyright(c) 2013 Roman Shtylman + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ * @private + */ + +var debug = require('debug')('express:view'); +var path = require('path'); +var fs = require('fs'); +var utils = require('./utils'); + +/** + * Module variables. + * @private + */ + +var dirname = path.dirname; +var basename = path.basename; +var extname = path.extname; +var join = path.join; +var resolve = path.resolve; + +/** + * Module exports. + * @public + */ + +module.exports = View; + +/** + * Initialize a new `View` with the given `name`. + * + * Options: + * + * - `defaultEngine` the default template engine name + * - `engines` template engine require() cache + * - `root` root path for view lookup + * + * @param {string} name + * @param {object} options + * @public + */ + +function View(name, options) { + var opts = options || {}; + + this.defaultEngine = opts.defaultEngine; + this.ext = extname(name); + this.name = name; + this.root = opts.root; + + if (!this.ext && !this.defaultEngine) { + throw new Error('No default engine was specified and no extension was provided.'); + } + + var fileName = name; + + if (!this.ext) { + // get extension from default engine name + this.ext = this.defaultEngine[0] !== '.' + ? '.' + this.defaultEngine + : this.defaultEngine; + + fileName += this.ext; + } + + if (!opts.engines[this.ext]) { + // load engine + opts.engines[this.ext] = require(this.ext.substr(1)).__express; + } + + // store loaded engine + this.engine = opts.engines[this.ext]; + + // lookup path + this.path = this.lookup(fileName); +} + +/** + * Lookup view by the given `name` + * + * @param {string} name + * @private + */ + +View.prototype.lookup = function lookup(name) { + var path; + var roots = [].concat(this.root); + + debug('lookup "%s"', name); + + for (var i = 0; i < roots.length && !path; i++) { + var root = roots[i]; + + // resolve the path + var loc = resolve(root, name); + var dir = dirname(loc); + var file = basename(loc); + + // resolve the file + path = this.resolve(dir, file); + } + + return path; +}; + +/** + * Render with the given options. + * + * @param {object} options + * @param {function} callback + * @private + */ + +View.prototype.render = function render(options, callback) { + debug('render "%s"', this.path); + this.engine(this.path, options, callback); +}; + +/** + * Resolve the file within the given directory. + * + * @param {string} dir + * @param {string} file + * @private + */ + +View.prototype.resolve = function resolve(dir, file) { + var ext = this.ext; + + // . + var path = join(dir, file); + var stat = tryStat(path); + + if (stat && stat.isFile()) { + return path; + } + + // /index. + path = join(dir, basename(file, ext), 'index' + ext); + stat = tryStat(path); + + if (stat && stat.isFile()) { + return path; + } +}; + +/** + * Return a stat, maybe. 
+ * + * @param {string} path + * @return {fs.Stats} + * @private + */ + +function tryStat(path) { + debug('stat "%s"', path); + + try { + return fs.statSync(path); + } catch (e) { + return undefined; + } +} diff --git a/node_modules/express/package.json b/node_modules/express/package.json new file mode 100644 index 0000000..57dd160 --- /dev/null +++ b/node_modules/express/package.json @@ -0,0 +1,92 @@ +{ + "name": "express", + "description": "Fast, unopinionated, minimalist web framework", + "version": "4.14.0", + "author": "TJ Holowaychuk ", + "contributors": [ + "Aaron Heckmann ", + "Ciaran Jessup ", + "Douglas Christopher Wilson ", + "Guillermo Rauch ", + "Jonathan Ong ", + "Roman Shtylman ", + "Young Jae Sim " + ], + "license": "MIT", + "repository": "expressjs/express", + "homepage": "http://expressjs.com/", + "keywords": [ + "express", + "framework", + "sinatra", + "web", + "rest", + "restful", + "router", + "app", + "api" + ], + "dependencies": { + "accepts": "~1.3.3", + "array-flatten": "1.1.1", + "content-disposition": "0.5.1", + "content-type": "~1.0.2", + "cookie": "0.3.1", + "cookie-signature": "1.0.6", + "debug": "~2.2.0", + "depd": "~1.1.0", + "encodeurl": "~1.0.1", + "escape-html": "~1.0.3", + "etag": "~1.7.0", + "finalhandler": "0.5.0", + "fresh": "0.3.0", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.1", + "path-to-regexp": "0.1.7", + "proxy-addr": "~1.1.2", + "qs": "6.2.0", + "range-parser": "~1.2.0", + "send": "0.14.1", + "serve-static": "~1.11.1", + "type-is": "~1.6.13", + "utils-merge": "1.0.0", + "vary": "~1.1.0" + }, + "devDependencies": { + "after": "0.8.1", + "body-parser": "~1.15.1", + "cookie-parser": "~1.4.3", + "ejs": "2.4.2", + "istanbul": "0.4.3", + "marked": "0.3.5", + "method-override": "~2.3.6", + "mocha": "2.5.3", + "morgan": "~1.7.0", + "should": "9.0.2", + "supertest": "1.2.0", + "connect-redis": "~2.4.1", + "cookie-session": "~1.2.0", + "express-session": "~1.13.0", + "jade": "~1.11.0", + "multiparty": "~4.1.2", + "vhost": "~3.0.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "files": [ + "LICENSE", + "History.md", + "Readme.md", + "index.js", + "lib/" + ], + "scripts": { + "test": "mocha --require test/support/env --reporter spec --bail --check-leaks test/ test/acceptance/", + "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --require test/support/env --reporter spec --check-leaks test/ test/acceptance/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --require test/support/env --reporter dot --check-leaks test/ test/acceptance/", + "test-tap": "mocha --require test/support/env --reporter tap --check-leaks test/ test/acceptance/" + } +} diff --git a/node_modules/extend/.eslintrc b/node_modules/extend/.eslintrc new file mode 100644 index 0000000..d49f173 --- /dev/null +++ b/node_modules/extend/.eslintrc @@ -0,0 +1,192 @@ +{ + "env": { + "browser": false, + "node": true, + "amd": false, + "mocha": false, + "jasmine": false + }, + + "rules": { + "accessor-pairs": [2, { getWithoutSet: false, setWithoutGet: true }], + "array-bracket-spacing": [2, "never", { + "singleValue": false, + "objectsInArrays": false, + "arraysInArrays": false + }], + "block-scoped-var": [0], + "brace-style": [2, "1tbs", { "allowSingleLine": true }], + "camelcase": [2], + "comma-dangle": [2, "never"], + "comma-spacing": [2], + "comma-style": [2, "last"], + "complexity": [2, 15], + "computed-property-spacing": [2, "never"], + "consistent-return": [2], + "consistent-this": [0, 
"that"], + "constructor-super": [2], + "curly": [2, "all"], + "default-case": [2], + "dot-notation": [2, { "allowKeywords": true }], + "eol-last": [2], + "eqeqeq": [2], + "func-names": [0], + "func-style": [2, "expression"], + "generator-star-spacing": [2, { "before": false, "after": true }], + "global-strict": [0, "never"], + "guard-for-in": [0], + "handle-callback-err": [0], + "key-spacing": [2, { "beforeColon": false, "afterColon": true }], + "linebreak-style": [2, "unix"], + "lines-around-comment": [2, { + "beforeBlockComment": false, + "afterBlockComment": false, + "beforeLineComment": false, + "beforeLineComment": false, + "allowBlockStart": true, + "allowBlockEnd": true + }], + "quotes": [2, "single", "avoid-escape"], + "max-depth": [1, 4], + "max-len": [0, 80, 4], + "max-nested-callbacks": [2, 2], + "max-params": [2, 2], + "max-statements": [2, 21], + "new-parens": [2], + "new-cap": [2], + "newline-after-var": [0], + "no-alert": [2], + "no-array-constructor": [2], + "no-bitwise": [0], + "no-caller": [2], + "no-catch-shadow": [2], + "no-cond-assign": [2], + "no-console": [2], + "no-constant-condition": [2], + "no-continue": [2], + "no-control-regex": [2], + "no-debugger": [2], + "no-delete-var": [2], + "no-div-regex": [0], + "no-dupe-args": [2], + "no-dupe-keys": [2], + "no-duplicate-case": [2], + "no-else-return": [0], + "no-empty": [2], + "no-empty-character-class": [2], + "no-empty-label": [2], + "no-eq-null": [0], + "no-eval": [2], + "no-ex-assign": [2], + "no-extend-native": [2], + "no-extra-bind": [2], + "no-extra-boolean-cast": [2], + "no-extra-parens": [0], + "no-extra-semi": [2], + "no-fallthrough": [2], + "no-floating-decimal": [2], + "no-func-assign": [2], + "no-implied-eval": [2], + "no-inline-comments": [0], + "no-inner-declarations": [2, "functions"], + "no-invalid-regexp": [2], + "no-irregular-whitespace": [2], + "no-iterator": [2], + "no-label-var": [2], + "no-labels": [2], + "no-lone-blocks": [2], + "no-lonely-if": [2], + "no-loop-func": [2], + "no-mixed-requires": [0, false], + "no-mixed-spaces-and-tabs": [2, false], + "no-multi-spaces": [2], + "no-multi-str": [2], + "no-multiple-empty-lines": [2, {"max": 1}], + "no-native-reassign": [2], + "no-negated-in-lhs": [2], + "no-nested-ternary": [0], + "no-new": [2], + "no-new-func": [2], + "no-new-object": [2], + "no-new-require": [0], + "no-new-wrappers": [2], + "no-obj-calls": [2], + "no-octal": [2], + "no-octal-escape": [2], + "no-param-reassign": [2], + "no-path-concat": [0], + "no-plusplus": [0], + "no-process-env": [0], + "no-process-exit": [2], + "no-proto": [2], + "no-redeclare": [2], + "no-regex-spaces": [2], + "no-reserved-keys": [2], + "no-restricted-modules": [0], + "no-return-assign": [2, "always"], + "no-script-url": [2], + "no-self-compare": [0], + "no-sequences": [2], + "no-shadow": [2], + "no-shadow-restricted-names": [2], + "no-space-before-semi": [2], + "no-spaced-func": [2], + "no-sparse-arrays": [2], + "no-sync": [0], + "no-ternary": [0], + "no-this-before-super": [2], + "no-throw-literal": [2], + "no-trailing-spaces": [2, { "skipBlankLines": false }], + "no-undef": [2], + "no-undef-init": [2], + "no-undefined": [0], + "no-underscore-dangle": [2], + "no-unexpected-multiline": [2], + "no-unneeded-ternary": [2], + "no-unreachable": [2], + "no-unused-expressions": [2], + "no-unused-vars": [2, { "vars": "all", "args": "after-used" }], + "no-use-before-define": [2], + "no-void": [0], + "no-warning-comments": [0, { "terms": ["todo", "fixme", "xxx"], "location": "start" }], + "no-with": [2], + 
"no-wrap-func": [2], + "object-curly-spacing": [2, "always"], + "object-shorthand": [2, "never"], + "one-var": [0], + "operator-assignment": [0, "always"], + "operator-linebreak": [2, "none"], + "padded-blocks": [0], + "prefer-const": [0], + "quote-props": [0], + "radix": [0], + "semi": [2], + "semi-spacing": [2, { "before": false, "after": true }], + "sort-vars": [0], + "space-after-keywords": [2, "always"], + "space-before-function-paren": [2, { "anonymous": "always", "named": "never" }], + "space-before-blocks": [0, "always"], + "space-in-brackets": [0, "never", { + "singleValue": true, + "arraysInArrays": false, + "arraysInObjects": false, + "objectsInArrays": true, + "objectsInObjects": true, + "propertyName": false + }], + "space-in-parens": [2, "never"], + "space-infix-ops": [2], + "space-return-throw-case": [2], + "space-unary-ops": [2, { "words": true, "nonwords": false }], + "spaced-comment": [2, "always"], + "spaced-line-comment": [0, "always"], + "strict": [2, "global"], + "use-isnan": [2], + "valid-jsdoc": [0], + "valid-typeof": [2], + "vars-on-top": [0], + "wrap-iife": [2], + "wrap-regex": [2], + "yoda": [2, "never", { "exceptRange": true, "onlyEquality": false }] + } +} diff --git a/node_modules/extend/.jscs.json b/node_modules/extend/.jscs.json new file mode 100644 index 0000000..7e84b28 --- /dev/null +++ b/node_modules/extend/.jscs.json @@ -0,0 +1,104 @@ +{ + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": ["if", "else", "for", "while", "do", "try", "catch"], + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": "allButReserved", + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": true, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + 
"disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 6 + }, + + "requirePaddingNewLinesAfterUseStrict": true +} + diff --git a/node_modules/extend/.npmignore b/node_modules/extend/.npmignore new file mode 100644 index 0000000..30d74d2 --- /dev/null +++ b/node_modules/extend/.npmignore @@ -0,0 +1 @@ +test \ No newline at end of file diff --git a/node_modules/extend/.travis.yml b/node_modules/extend/.travis.yml new file mode 100644 index 0000000..ebef644 --- /dev/null +++ b/node_modules/extend/.travis.yml @@ -0,0 +1,44 @@ +language: node_js +node_js: + - "iojs-v2.3" + - "iojs-v2.2" + - "iojs-v2.1" + - "iojs-v2.0" + - "iojs-v1.8" + - "iojs-v1.7" + - "iojs-v1.6" + - "iojs-v1.5" + - "iojs-v1.4" + - "iojs-v1.3" + - "iojs-v1.2" + - "iojs-v1.1" + - "iojs-v1.0" + - "0.12" + - "0.11" + - "0.10" + - "0.9" + - "0.8" + - "0.6" + - "0.4" +before_install: + - '[ "${TRAVIS_NODE_VERSION}" = "0.6" ] || npm install -g npm@1.4.28 && npm install -g npm' +sudo: false +matrix: + fast_finish: true + allow_failures: + - node_js: "iojs-v2.2" + - node_js: "iojs-v2.1" + - node_js: "iojs-v2.0" + - node_js: "iojs-v1.7" + - node_js: "iojs-v1.6" + - node_js: "iojs-v1.5" + - node_js: "iojs-v1.4" + - node_js: "iojs-v1.3" + - node_js: "iojs-v1.2" + - node_js: "iojs-v1.1" + - node_js: "iojs-v1.0" + - node_js: "0.11" + - node_js: "0.9" + - node_js: "0.8" + - node_js: "0.6" + - node_js: "0.4" diff --git a/node_modules/extend/CHANGELOG.md b/node_modules/extend/CHANGELOG.md new file mode 100644 index 0000000..ee0cfd6 --- /dev/null +++ b/node_modules/extend/CHANGELOG.md @@ -0,0 +1,69 @@ +3.0.0 / 2015-07-01 +================== + * [Possible breaking change] Use global "strict" directive (#32) + * [Tests] `int` is an ES3 reserved word + * [Tests] Test up to `io.js` `v2.3` + * [Tests] Add `npm run eslint` + * [Dev Deps] Update `covert`, `jscs` + +2.0.1 / 2015-04-25 +================== + * Use an inline `isArray` check, for ES3 browsers. (#27) + * Some old browsers fail when an identifier is `toString` + * Test latest `node` and `io.js` versions on `travis-ci`; speed up builds + * Add license info to package.json (#25) + * Update `tape`, `jscs` + * Adding a CHANGELOG + +2.0.0 / 2014-10-01 +================== + * Increase code coverage to 100%; run code coverage as part of tests + * Add `npm run lint`; Run linter as part of tests + * Remove nodeType and setInterval checks in isPlainObject + * Updating `tape`, `jscs`, `covert` + * General style and README cleanup + +1.3.0 / 2014-06-20 +================== + * Add component.json for browser support (#18) + * Use SVG for badges in README (#16) + * Updating `tape`, `covert` + * Updating travis-ci to work with multiple node versions + * Fix `deep === false` bug (returning target as {}) (#14) + * Fixing constructor checks in isPlainObject + * Adding additional test coverage + * Adding `npm run coverage` + * Add LICENSE (#13) + * Adding a warning about `false`, per #11 + * General style and whitespace cleanup + +1.2.1 / 2013-09-14 +================== + * Fixing hasOwnProperty bugs that would only have shown up in specific browsers. 
Fixes #8 + * Updating `tape` + +1.2.0 / 2013-09-02 +================== + * Updating the README: add badges + * Adding a missing variable reference. + * Using `tape` instead of `buster` for tests; add more tests (#7) + * Adding node 0.10 to Travis CI (#6) + * Enabling "npm test" and cleaning up package.json (#5) + * Add Travis CI. + +1.1.3 / 2012-12-06 +================== + * Added unit tests. + * Ensure extend function is named. (Looks nicer in a stack trace.) + * README cleanup. + +1.1.1 / 2012-11-07 +================== + * README cleanup. + * Added installation instructions. + * Added a missing semicolon + +1.0.0 / 2012-04-08 +================== + * Initial commit + diff --git a/node_modules/extend/LICENSE b/node_modules/extend/LICENSE new file mode 100644 index 0000000..e16d6a5 --- /dev/null +++ b/node_modules/extend/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2014 Stefan Thomas + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/extend/README.md b/node_modules/extend/README.md new file mode 100644 index 0000000..632fb0f --- /dev/null +++ b/node_modules/extend/README.md @@ -0,0 +1,62 @@ +[![Build Status][travis-svg]][travis-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] + +# extend() for Node.js [![Version Badge][npm-version-png]][npm-url] + +`node-extend` is a port of the classic extend() method from jQuery. It behaves as you expect. It is simple, tried and true. + +## Installation + +This package is available on [npm][npm-url] as: `extend` + +``` sh +npm install extend +``` + +## Usage + +**Syntax:** extend **(** [`deep`], `target`, `object1`, [`objectN`] **)** + +*Extend one object with one or more others, returning the modified object.* + +Keep in mind that the target object will be modified, and will be returned from extend(). + +If a boolean true is specified as the first argument, extend performs a deep copy, recursively copying any objects it finds. Otherwise, the copy will share structure with the original object(s). +Undefined properties are not copied. However, properties inherited from the object's prototype will be copied over. +Warning: passing `false` as the first argument is not supported. + +### Arguments + +* `deep` *Boolean* (optional) +If set, the merge becomes recursive (i.e. deep copy). +* `target` *Object* +The object to extend. +* `object1` *Object* +The object that will be merged into the first. +* `objectN` *Object* (Optional) +More objects to merge into the first. 
+ +## License + +`node-extend` is licensed under the [MIT License][mit-license-url]. + +## Acknowledgements + +All credit to the jQuery authors for perfecting this amazing utility. + +Ported to Node.js by [Stefan Thomas][github-justmoon] with contributions by [Jonathan Buchanan][github-insin] and [Jordan Harband][github-ljharb]. + +[travis-svg]: https://travis-ci.org/justmoon/node-extend.svg +[travis-url]: https://travis-ci.org/justmoon/node-extend +[npm-url]: https://npmjs.org/package/extend +[mit-license-url]: http://opensource.org/licenses/MIT +[github-justmoon]: https://github.com/justmoon +[github-insin]: https://github.com/insin +[github-ljharb]: https://github.com/ljharb +[npm-version-png]: http://vb.teelaun.ch/justmoon/node-extend.svg +[deps-svg]: https://david-dm.org/justmoon/node-extend.svg +[deps-url]: https://david-dm.org/justmoon/node-extend +[dev-deps-svg]: https://david-dm.org/justmoon/node-extend/dev-status.svg +[dev-deps-url]: https://david-dm.org/justmoon/node-extend#info=devDependencies + diff --git a/node_modules/extend/component.json b/node_modules/extend/component.json new file mode 100644 index 0000000..1500a2f --- /dev/null +++ b/node_modules/extend/component.json @@ -0,0 +1,32 @@ +{ + "name": "extend", + "author": "Stefan Thomas (http://www.justmoon.net)", + "version": "3.0.0", + "description": "Port of jQuery.extend for node.js and the browser.", + "scripts": [ + "index.js" + ], + "contributors": [ + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "keywords": [ + "extend", + "clone", + "merge" + ], + "repository" : { + "type": "git", + "url": "https://github.com/justmoon/node-extend.git" + }, + "dependencies": { + }, + "devDependencies": { + "tape" : "~3.0.0", + "covert": "~0.4.0", + "jscs": "~1.6.2" + } +} + diff --git a/node_modules/extend/index.js b/node_modules/extend/index.js new file mode 100644 index 0000000..f5ec75d --- /dev/null +++ b/node_modules/extend/index.js @@ -0,0 +1,86 @@ +'use strict'; + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
+ var key; + for (key in obj) {/**/} + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[0], + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } else if ((typeof target !== 'object' && typeof target !== 'function') || target == null) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = target[name]; + copy = options[name]; + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? src : {}; + } + + // Never move original objects, clone them + target[name] = extend(deep, clone, copy); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + target[name] = copy; + } + } + } + } + } + + // Return the modified object + return target; +}; + diff --git a/node_modules/extend/package.json b/node_modules/extend/package.json new file mode 100644 index 0000000..ab1dfe4 --- /dev/null +++ b/node_modules/extend/package.json @@ -0,0 +1,39 @@ +{ + "name": "extend", + "author": "Stefan Thomas (http://www.justmoon.net)", + "version": "3.0.0", + "description": "Port of jQuery.extend for node.js and the browser", + "main": "index", + "scripts": { + "test": "npm run lint && node test/index.js && npm run coverage-quiet", + "coverage": "covert test/index.js", + "coverage-quiet": "covert test/index.js --quiet", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js" + }, + "contributors": [ + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "keywords": [ + "extend", + "clone", + "merge" + ], + "repository": { + "type": "git", + "url": "https://github.com/justmoon/node-extend.git" + }, + "dependencies": {}, + "devDependencies": { + "tape": "^4.0.0", + "covert": "^1.1.0", + "jscs": "^1.13.1", + "eslint": "^0.24.0" + }, + "license": "MIT" +} + diff --git a/node_modules/extglob/LICENSE b/node_modules/extglob/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/extglob/README.md b/node_modules/extglob/README.md new file mode 100644 index 0000000..6664406 --- /dev/null +++ b/node_modules/extglob/README.md @@ -0,0 +1,88 @@ +# extglob [![NPM version](https://badge.fury.io/js/extglob.svg)](http://badge.fury.io/js/extglob) [![Build Status](https://travis-ci.org/jonschlinkert/extglob.svg)](https://travis-ci.org/jonschlinkert/extglob) + +> Convert extended globs to regex-compatible strings. Add (almost) the expressive power of regular expressions to glob patterns. + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i extglob --save +``` + +Used by [micromatch](https://github.com/jonschlinkert/micromatch). + +**Features** + +* Convert an extglob string to a regex-compatible string. **Only converts extglobs**, to handle full globs use [micromatch](https://github.com/jonschlinkert/micromatch). +* Pass `{regex: true}` to return a regex +* Handles nested patterns +* More complete (and correct) support than [minimatch](https://github.com/isaacs/minimatch) + +## Usage + +```js +var extglob = require('extglob'); + +extglob('?(z)'); +//=> '(?:z)?' +extglob('*(z)'); +//=> '(?:z)*' +extglob('+(z)'); +//=> '(?:z)+' +extglob('@(z)'); +//=> '(?:z)' +extglob('!(z)'); +//=> '(?!^(?:(?!z)[^/]*?)).*$' +``` + +**Optionally return regex** + +```js +extglob('!(z)', {regex: true}); +//=> /(?!^(?:(?!z)[^/]*?)).*$/ +``` + +## Extglob patterns + +To learn more about how extglobs work, see the docs for [Bash pattern matching](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html): + +* `?(pattern)`: Match zero or one occurrence of the given pattern. +* `*(pattern)`: Match zero or more occurrences of the given pattern. +* `+(pattern)`: Match one or more occurrences of the given pattern. +* `@(pattern)`: Match one of the given pattern. +* `!(pattern)`: Match anything except one of the given pattern. + +## Related + +* [braces](https://github.com/jonschlinkert/braces): Fastest brace expansion for node.js, with the most complete support for the Bash 4.3 braces… [more](https://github.com/jonschlinkert/braces) +* [expand-brackets](https://github.com/jonschlinkert/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. +* [expand-range](https://github.com/jonschlinkert/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See… [more](https://github.com/jonschlinkert/expand-range) +* [fill-range](https://github.com/jonschlinkert/fill-range): Fill in a range of numbers or letters, optionally passing an increment or multiplier to… [more](https://github.com/jonschlinkert/fill-range) +* [micromatch](https://github.com/jonschlinkert/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just… [more](https://github.com/jonschlinkert/micromatch) + +## Run tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. 
For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/extglob/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on August 01, 2015._ \ No newline at end of file diff --git a/node_modules/extglob/index.js b/node_modules/extglob/index.js new file mode 100644 index 0000000..2e774d4 --- /dev/null +++ b/node_modules/extglob/index.js @@ -0,0 +1,178 @@ +/*! + * extglob + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +/** + * Module dependencies + */ + +var isExtglob = require('is-extglob'); +var re, cache = {}; + +/** + * Expose `extglob` + */ + +module.exports = extglob; + +/** + * Convert the given extglob `string` to a regex-compatible + * string. + * + * ```js + * var extglob = require('extglob'); + * extglob('!(a?(b))'); + * //=> '(?!a(?:b)?)[^/]*?' + * ``` + * + * @param {String} `str` The string to convert. + * @param {Object} `options` + * @option {Boolean} [options] `esc` If `false` special characters will not be escaped. Defaults to `true`. + * @option {Boolean} [options] `regex` If `true` a regular expression is returned instead of a string. + * @return {String} + * @api public + */ + + +function extglob(str, opts) { + opts = opts || {}; + var o = {}, i = 0; + + // fix common character reversals + // '*!(.js)' => '*.!(js)' + str = str.replace(/!\(([^\w*()])/g, '$1!('); + + // support file extension negation + str = str.replace(/([*\/])\.!\([*]\)/g, function (m, ch) { + if (ch === '/') { + return escape('\\/[^.]+'); + } + return escape('[^.]+'); + }); + + // create a unique key for caching by + // combining the string and options + var key = str + + String(!!opts.regex) + + String(!!opts.contains) + + String(!!opts.escape); + + if (cache.hasOwnProperty(key)) { + return cache[key]; + } + + if (!(re instanceof RegExp)) { + re = regex(); + } + + opts.negate = false; + var m; + + while (m = re.exec(str)) { + var prefix = m[1]; + var inner = m[3]; + if (prefix === '!') { + opts.negate = true; + } + + var id = '__EXTGLOB_' + (i++) + '__'; + // use the prefix of the _last_ (outtermost) pattern + o[id] = wrap(inner, prefix, opts.escape); + str = str.split(m[0]).join(id); + } + + var keys = Object.keys(o); + var len = keys.length; + + // we have to loop again to allow us to convert + // patterns in reverse order (starting with the + // innermost/last pattern first) + while (len--) { + var prop = keys[len]; + str = str.split(prop).join(o[prop]); + } + + var result = opts.regex + ? toRegex(str, opts.contains, opts.negate) + : str; + + result = result.split('.').join('\\.'); + + // cache the result and return it + return (cache[key] = result); +} + +/** + * Convert `string` to a regex string. + * + * @param {String} `str` + * @param {String} `prefix` Character that determines how to wrap the string. + * @param {Boolean} `esc` If `false` special characters will not be escaped. Defaults to `true`. + * @return {String} + */ + +function wrap(inner, prefix, esc) { + if (esc) inner = escape(inner); + + switch (prefix) { + case '!': + return '(?!' + inner + ')[^/]' + (esc ? '%%%~' : '*?'); + case '@': + return '(?:' + inner + ')'; + case '+': + return '(?:' + inner + ')+'; + case '*': + return '(?:' + inner + ')' + (esc ? 
'%%' : '*') + case '?': + return '(?:' + inner + '|)'; + default: + return inner; + } +} + +function escape(str) { + str = str.split('*').join('[^/]%%%~'); + str = str.split('.').join('\\.'); + return str; +} + +/** + * extglob regex. + */ + +function regex() { + return /(\\?[@?!+*$]\\?)(\(([^()]*?)\))/; +} + +/** + * Negation regex + */ + +function negate(str) { + return '(?!^' + str + ').*$'; +} + +/** + * Create the regex to do the matching. If + * the leading character in the `pattern` is `!` + * a negation regex is returned. + * + * @param {String} `pattern` + * @param {Boolean} `contains` Allow loose matching. + * @param {Boolean} `isNegated` True if the pattern is a negation pattern. + */ + +function toRegex(pattern, contains, isNegated) { + var prefix = contains ? '^' : ''; + var after = contains ? '$' : ''; + pattern = ('(?:' + pattern + ')' + after); + if (isNegated) { + pattern = prefix + negate(pattern); + } + return new RegExp(prefix + pattern); +} diff --git a/node_modules/extglob/package.json b/node_modules/extglob/package.json new file mode 100644 index 0000000..969cbe5 --- /dev/null +++ b/node_modules/extglob/package.json @@ -0,0 +1,60 @@ +{ + "name": "extglob", + "description": "Convert extended globs to regex-compatible strings. Add (almost) the expressive power of regular expressions to glob patterns.", + "version": "0.3.2", + "homepage": "https://github.com/jonschlinkert/extglob", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/extglob.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-extglob": "^1.0.0" + }, + "devDependencies": { + "ansi-green": "^0.1.1", + "micromatch": "^2.1.6", + "minimatch": "^2.0.1", + "minimist": "^1.1.0", + "mocha": "*", + "should": "*", + "success-symbol": "^0.1.0" + }, + "keywords": [ + "bash", + "extended", + "extglob", + "glob", + "ksh", + "match", + "wildcard" + ], + "verb": { + "related": { + "list": [ + "micromatch", + "expand-brackets", + "braces", + "fill-range", + "expand-range" + ] + } + } +} diff --git a/node_modules/extsprintf/.gitmodules b/node_modules/extsprintf/.gitmodules new file mode 100644 index 0000000..4e0f5e2 --- /dev/null +++ b/node_modules/extsprintf/.gitmodules @@ -0,0 +1,6 @@ +[submodule "deps/jsstyle"] + path = deps/jsstyle + url = git://github.com/davepacheco/jsstyle +[submodule "deps/javascriptlint"] + path = deps/javascriptlint + url = git://github.com/davepacheco/javascriptlint diff --git a/node_modules/extsprintf/LICENSE b/node_modules/extsprintf/LICENSE new file mode 100644 index 0000000..cbc0bb3 --- /dev/null +++ b/node_modules/extsprintf/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Joyent, Inc. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/extsprintf/Makefile b/node_modules/extsprintf/Makefile new file mode 100644 index 0000000..1deeb5f --- /dev/null +++ b/node_modules/extsprintf/Makefile @@ -0,0 +1,23 @@ +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile: top-level Makefile +# +# This Makefile contains only repo-specific logic and uses included makefiles +# to supply common targets (javascriptlint, jsstyle, restdown, etc.), which are +# used by other repos as well. +# + +# +# Files +# +JS_FILES := $(shell find lib -name '*.js') +JSL_FILES_NODE = $(JS_FILES) +JSSTYLE_FILES = $(JS_FILES) +JSL_CONF_NODE = jsl.node.conf + +# Default target is "check" +check: + +include ./Makefile.deps +include ./Makefile.targ diff --git a/node_modules/extsprintf/Makefile.deps b/node_modules/extsprintf/Makefile.deps new file mode 100644 index 0000000..2811bde --- /dev/null +++ b/node_modules/extsprintf/Makefile.deps @@ -0,0 +1,39 @@ +# -*- mode: makefile -*- +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile.deps: Makefile for including common tools as dependencies +# +# NOTE: This makefile comes from the "eng" repo. It's designed to be dropped +# into other repos as-is without requiring any modifications. If you find +# yourself changing this file, you should instead update the original copy in +# eng.git and then update your repo to use the new version. +# +# This file is separate from Makefile.targ so that teams can choose +# independently whether to use the common targets in Makefile.targ and the +# common tools here. +# + +# +# javascriptlint +# +JSL_EXEC ?= deps/javascriptlint/build/install/jsl +JSL ?= python2.6 $(JSL_EXEC) + +$(JSL_EXEC): | deps/javascriptlint/.git + cd deps/javascriptlint && make install + +# +# jsstyle +# +JSSTYLE_EXEC ?= deps/jsstyle/jsstyle +JSSTYLE ?= $(JSSTYLE_EXEC) + +$(JSSTYLE_EXEC): | deps/jsstyle/.git + +# +# restdown +# +RESTDOWN_EXEC ?= deps/restdown/bin/restdown +RESTDOWN ?= python2.6 $(RESTDOWN_EXEC) +$(RESTDOWN_EXEC): | deps/restdown/.git diff --git a/node_modules/extsprintf/Makefile.targ b/node_modules/extsprintf/Makefile.targ new file mode 100644 index 0000000..2a64fe7 --- /dev/null +++ b/node_modules/extsprintf/Makefile.targ @@ -0,0 +1,285 @@ +# -*- mode: makefile -*- +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile.targ: common targets. +# +# NOTE: This makefile comes from the "eng" repo. It's designed to be dropped +# into other repos as-is without requiring any modifications. If you find +# yourself changing this file, you should instead update the original copy in +# eng.git and then update your repo to use the new version. +# +# This Makefile defines several useful targets and rules. You can use it by +# including it from a Makefile that specifies some of the variables below. 
+# +# Targets defined in this Makefile: +# +# check Checks JavaScript files for lint and style +# Checks bash scripts for syntax +# Checks SMF manifests for validity against the SMF DTD +# +# clean Removes built files +# +# docs Builds restdown documentation in docs/ +# +# prepush Depends on "check" and "test" +# +# test Does nothing (you should override this) +# +# xref Generates cscope (source cross-reference index) +# +# For details on what these targets are supposed to do, see the Joyent +# Engineering Guide. +# +# To make use of these targets, you'll need to set some of these variables. Any +# variables left unset will simply not be used. +# +# BASH_FILES Bash scripts to check for syntax +# (paths relative to top-level Makefile) +# +# CLEAN_FILES Files to remove as part of the "clean" target. Note +# that files generated by targets in this Makefile are +# automatically included in CLEAN_FILES. These include +# restdown-generated HTML and JSON files. +# +# DOC_FILES Restdown (documentation source) files. These are +# assumed to be contained in "docs/", and must NOT +# contain the "docs/" prefix. +# +# JSL_CONF_NODE Specify JavaScriptLint configuration files +# JSL_CONF_WEB (paths relative to top-level Makefile) +# +# Node.js and Web configuration files are separate +# because you'll usually want different global variable +# configurations. If no file is specified, none is given +# to jsl, which causes it to use a default configuration, +# which probably isn't what you want. +# +# JSL_FILES_NODE JavaScript files to check with Node config file. +# JSL_FILES_WEB JavaScript files to check with Web config file. +# +# You can also override these variables: +# +# BASH Path to bash (default: bash) +# +# CSCOPE_DIRS Directories to search for source files for the cscope +# index. (default: ".") +# +# JSL Path to JavaScriptLint (default: "jsl") +# +# JSL_FLAGS_NODE Additional flags to pass through to JSL +# JSL_FLAGS_WEB +# JSL_FLAGS +# +# JSSTYLE Path to jsstyle (default: jsstyle) +# +# JSSTYLE_FLAGS Additional flags to pass through to jsstyle +# + +# +# Defaults for the various tools we use. +# +BASH ?= bash +BASHSTYLE ?= tools/bashstyle +CP ?= cp +CSCOPE ?= cscope +CSCOPE_DIRS ?= . +JSL ?= jsl +JSSTYLE ?= jsstyle +MKDIR ?= mkdir -p +MV ?= mv +RESTDOWN_FLAGS ?= +RMTREE ?= rm -rf +JSL_FLAGS ?= --nologo --nosummary + +ifeq ($(shell uname -s),SunOS) + TAR ?= gtar +else + TAR ?= tar +endif + + +# +# Defaults for other fixed values. +# +BUILD = build +DISTCLEAN_FILES += $(BUILD) +DOC_BUILD = $(BUILD)/docs/public + +# +# Configure JSL_FLAGS_{NODE,WEB} based on JSL_CONF_{NODE,WEB}. +# +ifneq ($(origin JSL_CONF_NODE), undefined) + JSL_FLAGS_NODE += --conf=$(JSL_CONF_NODE) +endif + +ifneq ($(origin JSL_CONF_WEB), undefined) + JSL_FLAGS_WEB += --conf=$(JSL_CONF_WEB) +endif + +# +# Targets. For descriptions on what these are supposed to do, see the +# Joyent Engineering Guide. +# + +# +# Instruct make to keep around temporary files. We have rules below that +# automatically update git submodules as needed, but they employ a deps/*/.git +# temporary file. Without this directive, make tries to remove these .git +# directories after the build has completed. +# +.SECONDARY: $($(wildcard deps/*):%=%/.git) + +# +# This rule enables other rules that use files from a git submodule to have +# those files depend on deps/module/.git and have "make" automatically check +# out the submodule as needed. 
+# +deps/%/.git: + git submodule update --init deps/$* + +# +# These recipes make heavy use of dynamically-created phony targets. The parent +# Makefile defines a list of input files like BASH_FILES. We then say that each +# of these files depends on a fake target called filename.bashchk, and then we +# define a pattern rule for those targets that runs bash in check-syntax-only +# mode. This mechanism has the nice properties that if you specify zero files, +# the rule becomes a noop (unlike a single rule to check all bash files, which +# would invoke bash with zero files), and you can check individual files from +# the command line with "make filename.bashchk". +# +.PHONY: check-bash +check-bash: $(BASH_FILES:%=%.bashchk) $(BASH_FILES:%=%.bashstyle) + +%.bashchk: % + $(BASH) -n $^ + +%.bashstyle: % + $(BASHSTYLE) $^ + +.PHONY: check-jsl check-jsl-node check-jsl-web +check-jsl: check-jsl-node check-jsl-web + +check-jsl-node: $(JSL_FILES_NODE:%=%.jslnodechk) + +check-jsl-web: $(JSL_FILES_WEB:%=%.jslwebchk) + +%.jslnodechk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_NODE) $< + +%.jslwebchk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_WEB) $< + +.PHONY: check-jsstyle +check-jsstyle: $(JSSTYLE_FILES:%=%.jsstylechk) + +%.jsstylechk: % $(JSSTYLE_EXEC) + $(JSSTYLE) $(JSSTYLE_FLAGS) $< + +.PHONY: check +check: check-jsl check-jsstyle check-bash + @echo check ok + +.PHONY: clean +clean:: + -$(RMTREE) $(CLEAN_FILES) + +.PHONY: distclean +distclean:: clean + -$(RMTREE) $(DISTCLEAN_FILES) + +CSCOPE_FILES = cscope.in.out cscope.out cscope.po.out +CLEAN_FILES += $(CSCOPE_FILES) + +.PHONY: xref +xref: cscope.files + $(CSCOPE) -bqR + +.PHONY: cscope.files +cscope.files: + find $(CSCOPE_DIRS) -name '*.c' -o -name '*.h' -o -name '*.cc' \ + -o -name '*.js' -o -name '*.s' -o -name '*.cpp' > $@ + +# +# The "docs" target is complicated because we do several things here: +# +# (1) Use restdown to build HTML and JSON files from each of DOC_FILES. +# +# (2) Copy these files into $(DOC_BUILD) (build/docs/public), which +# functions as a complete copy of the documentation that could be +# mirrored or served over HTTP. +# +# (3) Then copy any directories and media from docs/media into +# $(DOC_BUILD)/media. This allows projects to include their own media, +# including files that will override same-named files provided by +# restdown. +# +# Step (3) is the surprisingly complex part: in order to do this, we need to +# identify the subdirectories in docs/media, recreate them in +# $(DOC_BUILD)/media, then do the same with the files. +# +DOC_MEDIA_DIRS := $(shell find docs/media -type d 2>/dev/null | grep -v "^docs/media$$") +DOC_MEDIA_DIRS := $(DOC_MEDIA_DIRS:docs/media/%=%) +DOC_MEDIA_DIRS_BUILD := $(DOC_MEDIA_DIRS:%=$(DOC_BUILD)/media/%) + +DOC_MEDIA_FILES := $(shell find docs/media -type f 2>/dev/null) +DOC_MEDIA_FILES := $(DOC_MEDIA_FILES:docs/media/%=%) +DOC_MEDIA_FILES_BUILD := $(DOC_MEDIA_FILES:%=$(DOC_BUILD)/media/%) + +# +# Like the other targets, "docs" just depends on the final files we want to +# create in $(DOC_BUILD), leveraging other targets and recipes to define how +# to get there. +# +.PHONY: docs +docs: \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.html) \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.json) \ + $(DOC_MEDIA_FILES_BUILD) + +# +# We keep the intermediate files so that the next build can see whether the +# files in DOC_BUILD are up to date. 
+# +.PRECIOUS: \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%json) + +# +# We do clean those intermediate files, as well as all of DOC_BUILD. +# +CLEAN_FILES += \ + $(DOC_BUILD) \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%.json) + +# +# Before installing the files, we must make sure the directories exist. The | +# syntax tells make that the dependency need only exist, not be up to date. +# Otherwise, it might try to rebuild spuriously because the directory itself +# appears out of date. +# +$(DOC_MEDIA_FILES_BUILD): | $(DOC_MEDIA_DIRS_BUILD) + +$(DOC_BUILD)/%: docs/% | $(DOC_BUILD) + $(CP) $< $@ + +docs/%.json docs/%.html: docs/%.restdown | $(DOC_BUILD) $(RESTDOWN_EXEC) + $(RESTDOWN) $(RESTDOWN_FLAGS) -m $(DOC_BUILD) $< + +$(DOC_BUILD): + $(MKDIR) $@ + +$(DOC_MEDIA_DIRS_BUILD): + $(MKDIR) $@ + +# +# The default "test" target does nothing. This should usually be overridden by +# the parent Makefile. It's included here so we can define "prepush" without +# requiring the repo to define "test". +# +.PHONY: test +test: + +.PHONY: prepush +prepush: check test diff --git a/node_modules/extsprintf/README.md b/node_modules/extsprintf/README.md new file mode 100644 index 0000000..702e4e2 --- /dev/null +++ b/node_modules/extsprintf/README.md @@ -0,0 +1,39 @@ +# extsprintf: extended POSIX-style sprintf + +Stripped down version of s[n]printf(3c). We make a best effort to throw an +exception when given a format string we don't understand, rather than ignoring +it, so that we won't break existing programs if/when we go implement the rest +of this. + +This implementation currently supports specifying + +* field alignment ('-' flag), +* zero-pad ('0' flag) +* always show numeric sign ('+' flag), +* field width +* conversions for strings, decimal integers, and floats (numbers). +* argument size specifiers. These are all accepted but ignored, since + Javascript has no notion of the physical size of an argument. + +Everything else is currently unsupported, most notably: precision, unsigned +numbers, non-decimal numbers, and characters. + +Besides the usual POSIX conversions, this implementation supports: + +* `%j`: pretty-print a JSON object (using node's "inspect") +* `%r`: pretty-print an Error object + +# Example + +First, install it: + + # npm install extsprintf + +Now, use it: + + var mod_extsprintf = require('extsprintf'); + console.log(mod_extsprintf.sprintf('hello %25s', 'world')); + +outputs: + + hello world diff --git a/node_modules/extsprintf/examples/simple.js b/node_modules/extsprintf/examples/simple.js new file mode 100644 index 0000000..9f342f5 --- /dev/null +++ b/node_modules/extsprintf/examples/simple.js @@ -0,0 +1,2 @@ +var mod_extsprintf = require('extsprintf'); +console.log(mod_extsprintf.sprintf('hello %25s', 'world')); diff --git a/node_modules/extsprintf/jsl.node.conf b/node_modules/extsprintf/jsl.node.conf new file mode 100644 index 0000000..03f787f --- /dev/null +++ b/node_modules/extsprintf/jsl.node.conf @@ -0,0 +1,137 @@ +# +# Configuration File for JavaScript Lint +# +# This configuration file can be used to lint a collection of scripts, or to enable +# or disable warnings for scripts that are linted via the command line. +# + +### Warnings +# Enable or disable warnings based on requirements. +# Use "+WarningName" to display or "-WarningName" to suppress. 
+# ++ambiguous_else_stmt # the else statement could be matched with one of multiple if statements (use curly braces to indicate intent ++ambiguous_nested_stmt # block statements containing block statements should use curly braces to resolve ambiguity ++ambiguous_newline # unexpected end of line; it is ambiguous whether these lines are part of the same statement ++anon_no_return_value # anonymous function does not always return value ++assign_to_function_call # assignment to a function call +-block_without_braces # block statement without curly braces ++comma_separated_stmts # multiple statements separated by commas (use semicolons?) ++comparison_type_conv # comparisons against null, 0, true, false, or an empty string allowing implicit type conversion (use === or !==) ++default_not_at_end # the default case is not at the end of the switch statement ++dup_option_explicit # duplicate "option explicit" control comment ++duplicate_case_in_switch # duplicate case in switch statement ++duplicate_formal # duplicate formal argument {name} ++empty_statement # empty statement or extra semicolon ++identifier_hides_another # identifer {name} hides an identifier in a parent scope +-inc_dec_within_stmt # increment (++) and decrement (--) operators used as part of greater statement ++incorrect_version # Expected /*jsl:content-type*/ control comment. The script was parsed with the wrong version. ++invalid_fallthru # unexpected "fallthru" control comment ++invalid_pass # unexpected "pass" control comment ++jsl_cc_not_understood # couldn't understand control comment using /*jsl:keyword*/ syntax ++leading_decimal_point # leading decimal point may indicate a number or an object member ++legacy_cc_not_understood # couldn't understand control comment using /*@keyword@*/ syntax ++meaningless_block # meaningless block; curly braces have no impact ++mismatch_ctrl_comments # mismatched control comment; "ignore" and "end" control comments must have a one-to-one correspondence ++misplaced_regex # regular expressions should be preceded by a left parenthesis, assignment, colon, or comma ++missing_break # missing break statement ++missing_break_for_last_case # missing break statement for last case in switch ++missing_default_case # missing default case in switch statement ++missing_option_explicit # the "option explicit" control comment is missing ++missing_semicolon # missing semicolon ++missing_semicolon_for_lambda # missing semicolon for lambda assignment ++multiple_plus_minus # unknown order of operations for successive plus (e.g. x+++y) or minus (e.g. 
x---y) signs ++nested_comment # nested comment ++no_return_value # function {name} does not always return a value ++octal_number # leading zeros make an octal number ++parseint_missing_radix # parseInt missing radix parameter ++partial_option_explicit # the "option explicit" control comment, if used, must be in the first script tag ++redeclared_var # redeclaration of {name} ++trailing_comma_in_array # extra comma is not recommended in array initializers ++trailing_decimal_point # trailing decimal point may indicate a number or an object member ++undeclared_identifier # undeclared identifier: {name} ++unreachable_code # unreachable code +-unreferenced_argument # argument declared but never referenced: {name} +-unreferenced_function # function is declared but never referenced: {name} ++unreferenced_variable # variable is declared but never referenced: {name} ++unsupported_version # JavaScript {version} is not supported ++use_of_label # use of label ++useless_assign # useless assignment ++useless_comparison # useless comparison; comparing identical expressions +-useless_quotes # the quotation marks are unnecessary ++useless_void # use of the void type may be unnecessary (void is always undefined) ++var_hides_arg # variable {name} hides argument ++want_assign_or_call # expected an assignment or function call ++with_statement # with statement hides undeclared variables; use temporary variable instead + + +### Output format +# Customize the format of the error message. +# __FILE__ indicates current file path +# __FILENAME__ indicates current file name +# __LINE__ indicates current line +# __COL__ indicates current column +# __ERROR__ indicates error message (__ERROR_PREFIX__: __ERROR_MSG__) +# __ERROR_NAME__ indicates error name (used in configuration file) +# __ERROR_PREFIX__ indicates error prefix +# __ERROR_MSG__ indicates error message +# +# For machine-friendly output, the output format can be prefixed with +# "encode:". If specified, all items will be encoded with C-slashes. +# +# Visual Studio syntax (default): ++output-format __FILE__(__LINE__): __ERROR__ +# Alternative syntax: +#+output-format __FILE__:__LINE__: __ERROR__ + + +### Context +# Show the in-line position of the error. +# Use "+context" to display or "-context" to suppress. +# ++context + + +### Control Comments +# Both JavaScript Lint and the JScript interpreter confuse each other with the syntax for +# the /*@keyword@*/ control comments and JScript conditional comments. (The latter is +# enabled in JScript with @cc_on@). The /*jsl:keyword*/ syntax is preferred for this reason, +# although legacy control comments are enabled by default for backward compatibility. +# +-legacy_control_comments + + +### Defining identifiers +# By default, "option explicit" is enabled on a per-file basis. +# To enable this for all files, use "+always_use_option_explicit" +-always_use_option_explicit + +# Define certain identifiers of which the lint is not aware. +# (Use this in conjunction with the "undeclared identifier" warning.) 
+# +# Common uses for webpages might be: ++define __dirname ++define clearInterval ++define clearTimeout ++define console ++define exports ++define global ++define process ++define require ++define setInterval ++define setTimeout ++define Buffer ++define JSON ++define Math + +### JavaScript Version +# To change the default JavaScript version: +#+default-type text/javascript;version=1.5 +#+default-type text/javascript;e4x=1 + +### Files +# Specify which files to lint +# Use "+recurse" to enable recursion (disabled by default). +# To add a set of files, use "+process FileName", "+process Folder\Path\*.js", +# or "+process Folder\Path\*.htm". +# + diff --git a/node_modules/extsprintf/lib/extsprintf.js b/node_modules/extsprintf/lib/extsprintf.js new file mode 100644 index 0000000..61ff891 --- /dev/null +++ b/node_modules/extsprintf/lib/extsprintf.js @@ -0,0 +1,166 @@ +/* + * extsprintf.js: extended POSIX-style sprintf + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +/* + * Public interface + */ +exports.sprintf = jsSprintf; + +/* + * Stripped down version of s[n]printf(3c). We make a best effort to throw an + * exception when given a format string we don't understand, rather than + * ignoring it, so that we won't break existing programs if/when we go implement + * the rest of this. + * + * This implementation currently supports specifying + * - field alignment ('-' flag), + * - zero-pad ('0' flag) + * - always show numeric sign ('+' flag), + * - field width + * - conversions for strings, decimal integers, and floats (numbers). + * - argument size specifiers. These are all accepted but ignored, since + * Javascript has no notion of the physical size of an argument. + * + * Everything else is currently unsupported, most notably precision, unsigned + * numbers, non-decimal numbers, and characters. + */ +function jsSprintf(fmt) +{ + var regex = [ + '([^%]*)', /* normal text */ + '%', /* start of format */ + '([\'\\-+ #0]*?)', /* flags (optional) */ + '([1-9]\\d*)?', /* width (optional) */ + '(\\.([1-9]\\d*))?', /* precision (optional) */ + '[lhjztL]*?', /* length mods (ignored) */ + '([diouxXfFeEgGaAcCsSp%jr])' /* conversion */ + ].join(''); + + var re = new RegExp(regex); + var args = Array.prototype.slice.call(arguments, 1); + var flags, width, precision, conversion; + var left, pad, sign, arg, match; + var ret = ''; + var argn = 1; + + mod_assert.equal('string', typeof (fmt)); + + while ((match = re.exec(fmt)) !== null) { + ret += match[1]; + fmt = fmt.substring(match[0].length); + + flags = match[2] || ''; + width = match[3] || 0; + precision = match[4] || ''; + conversion = match[6]; + left = false; + sign = false; + pad = ' '; + + if (conversion == '%') { + ret += '%'; + continue; + } + + if (args.length === 0) + throw (new Error('too few args to sprintf')); + + arg = args.shift(); + argn++; + + if (flags.match(/[\' #]/)) + throw (new Error( + 'unsupported flags: ' + flags)); + + if (precision.length > 0) + throw (new Error( + 'non-zero precision not supported')); + + if (flags.match(/-/)) + left = true; + + if (flags.match(/0/)) + pad = '0'; + + if (flags.match(/\+/)) + sign = true; + + switch (conversion) { + case 's': + if (arg === undefined || arg === null) + throw (new Error('argument ' + argn + + ': attempted to print undefined or null ' + + 'as a string')); + ret += doPad(pad, width, left, arg.toString()); + break; + + case 'd': + arg = Math.floor(arg); + /*jsl:fallthru*/ + case 'f': + sign = sign && arg > 0 ? 
'+' : ''; + ret += sign + doPad(pad, width, left, + arg.toString()); + break; + + case 'j': /* non-standard */ + if (width === 0) + width = 10; + ret += mod_util.inspect(arg, false, width); + break; + + case 'r': /* non-standard */ + ret += dumpException(arg); + break; + + default: + throw (new Error('unsupported conversion: ' + + conversion)); + } + } + + ret += fmt; + return (ret); +} + +function doPad(chr, width, left, str) +{ + var ret = str; + + while (ret.length < width) { + if (left) + ret += chr; + else + ret = chr + ret; + } + + return (ret); +} + +/* + * This function dumps long stack traces for exceptions having a cause() method. + * See node-verror for an example. + */ +function dumpException(ex) +{ + var ret; + + if (!(ex instanceof Error)) + throw (new Error(jsSprintf('invalid type for %%r: %j', ex))); + + /* Note that V8 prepends "ex.stack" with ex.toString(). */ + ret = 'EXCEPTION: ' + ex.constructor.name + ': ' + ex.stack; + + if (ex.cause && typeof (ex.cause) === 'function') { + var cex = ex.cause(); + if (cex) { + ret += '\nCaused by: ' + dumpException(cex); + } + } + + return (ret); +} diff --git a/node_modules/extsprintf/package.json b/node_modules/extsprintf/package.json new file mode 100644 index 0000000..1de4651 --- /dev/null +++ b/node_modules/extsprintf/package.json @@ -0,0 +1,13 @@ +{ + "name": "extsprintf", + "version": "1.0.2", + "description": "extended POSIX-style sprintf", + "main": "./lib/extsprintf.js", + + "repository": { + "type": "git", + "url": "git://github.com/davepacheco/node-extsprintf.git" + }, + + "engines": [ "node >=0.6.0" ] +} diff --git a/node_modules/filename-regex/README.md b/node_modules/filename-regex/README.md new file mode 100644 index 0000000..d001e7a --- /dev/null +++ b/node_modules/filename-regex/README.md @@ -0,0 +1,51 @@ +# filename-regex [![NPM version](https://badge.fury.io/js/filename-regex.svg)](http://badge.fury.io/js/filename-regex) + +> Regular expression for matching file names, with or without extension. + + +## Install with [npm](npmjs.org) + +```bash +npm i filename-regex --save +``` + +## Usage + +```js +var regex = require('filename-regex'); + +'a/b/c/d.min.js'.match(regex()); +//=> match[0] = 'd.min.js' + +'a/b/c/.dotfile'.match(regex()); +//=> match[0] = '.dotfile' +``` + + +## Run tests + +Install dev dependencies: + +```bash +npm i -d && npm test +``` + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/regexps/filename-regex/issues) + + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + + +## License +Copyright (c) 2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb](https://github.com/assemble/verb) on January 24, 2015._ diff --git a/node_modules/filename-regex/index.js b/node_modules/filename-regex/index.js new file mode 100644 index 0000000..bb1888b --- /dev/null +++ b/node_modules/filename-regex/index.js @@ -0,0 +1,10 @@ +/*! + * filename-regex + * + * Copyright (c) 2014-2015, Jon Schlinkert + * Licensed under the MIT license. 
+ */ + +module.exports = function filenameRegex() { + return /([^\\\/]+)$/; +}; diff --git a/node_modules/filename-regex/package.json b/node_modules/filename-regex/package.json new file mode 100644 index 0000000..a078400 --- /dev/null +++ b/node_modules/filename-regex/package.json @@ -0,0 +1,41 @@ +{ + "name": "filename-regex", + "description": "Regular expression for matching file names, with or without extension.", + "version": "2.0.0", + "homepage": "https://github.com/regexps/filename-regex", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/regexps/filename-regex.git" + }, + "bugs": { + "url": "https://github.com/regexps/filename-regex/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/regexps/filename-regex/blob/master/LICENSE-MIT" + }, + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "files": ["index.js"], + "scripts": { + "test": "mocha -R spec" + }, + "keywords": [ + "basename", + "regular expression", + "file", + "filename", + "filepath", + "match", + "name", + "path", + "regex", + "regexp" + ] +} diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE new file mode 100755 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/fill-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md new file mode 100755 index 0000000..c69694a --- /dev/null +++ b/node_modules/fill-range/README.md @@ -0,0 +1,290 @@ +# fill-range [![NPM version](https://badge.fury.io/js/fill-range.svg)](http://badge.fury.io/js/fill-range) [![Build Status](https://travis-ci.org/jonschlinkert/fill-range.svg)](https://travis-ci.org/jonschlinkert/fill-range) + +> Fill in a range of numbers or letters, optionally passing an increment or multiplier to use. 
+ +## Install with [npm](npmjs.org) + +```bash +npm i fill-range --save +``` + + + +- [Usage](#usage) + * [Invalid ranges](#invalid-ranges) + * [Custom function](#custom-function) + * [Special characters](#special-characters) + + [plus](#plus) + + [pipe and tilde](#pipe-and-tilde) + + [angle bracket](#angle-bracket) + + [question mark](#question-mark) +- [Other useful libs](#other-useful-libs) +- [Running tests](#running-tests) +- [Contributing](#contributing) +- [Author](#author) +- [License](#license) + +_(Table of contents generated by [verb])_ + + + +## Usage + +```js +var range = require('fill-range'); + +range('a', 'e'); +//=> ['a', 'b', 'c', 'd', 'e'] +``` + +**Params** + +```js +range(start, stop, step, options, fn); +``` + + - `start`: **{String|Number}** the number or letter to start with + - `end`: **{String|Number}** the number or letter to end with + - `step`: **{String|Number}** optionally pass the step to use. works for letters or numbers. + - `options`: **{Object}**: + + `makeRe`: return a regex-compatible string (still returned as an array for consistency) + + `step`: pass the step on the options as an alternative to passing it as an argument + + `silent`: `true` by default, set to false to throw errors for invalid ranges. + - `fn`: **{Function}** optionally [pass a function](#custom-function) to modify each character + + +**Examples** + +```js +range(1, 3) +//=> ['1', '2', '3'] + +range('1', '3') +//=> ['1', '2', '3'] + +range('0', '-5') +//=> [ '0', '-1', '-2', '-3', '-4', '-5' ] + +range(-9, 9, 3) +//=> [ '-9', '-6', '-3', '0', '3', '6', '9' ]) + +range('-1', '-10', '-2') +//=> [ '-1', '-3', '-5', '-7', '-9' ] + +range('1', '10', '2') +//=> [ '1', '3', '5', '7', '9' ] + +range('a', 'e') +//=> ['a', 'b', 'c', 'd', 'e'] + +range('a', 'e', 2) +//=> ['a', 'c', 'e'] + +range('A', 'E', 2) +//=> ['A', 'C', 'E'] +``` + +### Invalid ranges + +When an invalid range is passed, `null` is returned. + +```js +range('1.1', '2'); +//=> null + +range('a', '2'); +//=> null + +range(1, 10, 'foo'); +//=> null +``` + +If you want errors to be throw, pass `silent: false` on the options: + + +### Custom function + +Optionally pass a custom function as the third or fourth argument: + +```js +range('a', 'e', function (val, isNumber, pad, i) { + if (!isNumber) { + return String.fromCharCode(val) + i; + } + return val; +}); +//=> ['a0', 'b1', 'c2', 'd3', 'e4'] +``` + +### Special characters + +A special character may be passed as the third arg instead of a step increment. These characters can be pretty useful for brace expansion, creating file paths, test fixtures and similar use case. + +```js +range('a', 'z', SPECIAL_CHARACTER_HERE); +``` + +**Supported characters** + + - `+`: repeat the given string `n` times + - `|`: create a regex-ready string, instead of an array + - `>`: join values to single array element + - `?`: randomize the given pattern using [randomatic] + +#### plus + +Character: _(`+`)_ + +Repeat the first argument the number of times passed on the second argument. + +**Examples:** + +```js +range('a', 3, '+'); +//=> ['a', 'a', 'a'] + +range('abc', 2, '+'); +//=> ['abc', 'abc'] +``` + +#### pipe and tilde + +Characters: _(`|` and `~`)_ + +Creates a regex-capable string (either a logical `or` or a character class) from the expanded arguments. 
+ +**Examples:** + +```js +range('a', 'c', '|'); +//=> ['(a|b|c)' + +range('a', 'c', '~'); +//=> ['[a-c]' + +range('a', 'z', '|5'); +//=> ['(a|f|k|p|u|z)' +``` + +**Automatic separator correction** + +To avoid this error: + +> `Range out of order in character class` + +Fill-range detects invalid sequences and uses the correct syntax. For example: + +**invalid** (regex) + +If you pass these: + +```js +range('a', 'z', '~5'); +// which would result in this +//=> ['[a-f-k-p-u-z]'] + +range('10', '20', '~'); +// which would result in this +//=> ['[10-20]'] +``` + +**valid** (regex) + +fill-range corrects them to this: + +```js +range('a', 'z', '~5'); +//=> ['(a|f|k|p|u|z)' + +range('10', '20', '~'); +//=> ['(10-20)' +``` + +#### angle bracket + +Character: _(`>`)_ + +Joins all values in the returned array to a single value. + +**Examples:** + +```js +range('a', 'e', '>'); +//=> ['abcde'] + +range('5', '8', '>'); +//=> ['5678'] + +range('2', '20', '2>'); +//=> ['2468101214161820'] +``` + + +#### question mark + +Character: _(`?`)_ + +Uses [randomatic] to generate randomized alpha, numeric, or alpha-numeric patterns based on the provided arguments. + +**Examples:** + +_(actual results would obviously be randomized)_ + +Generate a 5-character, uppercase, alphabetical string: + +```js +range('A', 5, '?'); +//=> ['NSHAK'] +``` + +Generate a 5-digit random number: + +```js +range('0', 5, '?'); +//=> ['36583'] +``` + +Generate a 10-character alpha-numeric string: + +```js +range('A0', 10, '?'); +//=> ['5YJD60VQNN'] +``` + +See the [randomatic] repo for all available options and or to create issues or feature requests related to randomization. + +## Other useful libs + * [micromatch](https://github.com/jonschlinkert/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just use `micromatch.isMatch()` instead of `minimatch()`, or use `micromatch()` instead of `multimatch()`. + * [expand-range](https://github.com/jonschlinkert/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See the benchmarks. Used by micromatch. + * [braces](https://github.com/jonschlinkert/braces): Fastest brace expansion for node.js, with the most complete support for the Bash 4.3 braces specification. + * [is-glob](https://github.com/jonschlinkert/is-glob): Returns `true` if the given string looks like a glob pattern. + +## Running tests +Install dev dependencies: + +```bash +npm i -d && npm test +``` + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/fill-range/issues) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2014-2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on April 07, 2015._ + +[randomatic]: https://github.com/jonschlinkert/randomatic +[expand-range]: https://github.com/jonschlinkert/expand-range +[micromatch]: https://github.com/jonschlinkert/micromatch +[braces]: https://github.com/jonschlinkert/braces \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js new file mode 100644 index 0000000..5657051 --- /dev/null +++ b/node_modules/fill-range/index.js @@ -0,0 +1,408 @@ +/*! 
+ * fill-range + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var isObject = require('isobject'); +var isNumber = require('is-number'); +var randomize = require('randomatic'); +var repeatStr = require('repeat-string'); +var repeat = require('repeat-element'); + +/** + * Expose `fillRange` + */ + +module.exports = fillRange; + +/** + * Return a range of numbers or letters. + * + * @param {String} `a` Start of the range + * @param {String} `b` End of the range + * @param {String} `step` Increment or decrement to use. + * @param {Function} `fn` Custom function to modify each element in the range. + * @return {Array} + */ + +function fillRange(a, b, step, options, fn) { + if (a == null || b == null) { + throw new Error('fill-range expects the first and second args to be strings.'); + } + + if (typeof step === 'function') { + fn = step; options = {}; step = null; + } + + if (typeof options === 'function') { + fn = options; options = {}; + } + + if (isObject(step)) { + options = step; step = ''; + } + + var expand, regex = false, sep = ''; + var opts = options || {}; + + if (typeof opts.silent === 'undefined') { + opts.silent = true; + } + + step = step || opts.step; + + // store a ref to unmodified arg + var origA = a, origB = b; + + b = (b.toString() === '-0') ? 0 : b; + + if (opts.optimize || opts.makeRe) { + step = step ? (step += '~') : step; + expand = true; + regex = true; + sep = '~'; + } + + // handle special step characters + if (typeof step === 'string') { + var match = stepRe().exec(step); + + if (match) { + var i = match.index; + var m = match[0]; + + // repeat string + if (m === '+') { + return repeat(a, b); + + // randomize a, `b` times + } else if (m === '?') { + return [randomize(a, b)]; + + // expand right, no regex reduction + } else if (m === '>') { + step = step.substr(0, i) + step.substr(i + 1); + expand = true; + + // expand to an array, or if valid create a reduced + // string for a regex logic `or` + } else if (m === '|') { + step = step.substr(0, i) + step.substr(i + 1); + expand = true; + regex = true; + sep = m; + + // expand to an array, or if valid create a reduced + // string for a regex range + } else if (m === '~') { + step = step.substr(0, i) + step.substr(i + 1); + expand = true; + regex = true; + sep = m; + } + } else if (!isNumber(step)) { + if (!opts.silent) { + throw new TypeError('fill-range: invalid step.'); + } + return null; + } + } + + if (/[.&*()[\]^%$#@!]/.test(a) || /[.&*()[\]^%$#@!]/.test(b)) { + if (!opts.silent) { + throw new RangeError('fill-range: invalid range arguments.'); + } + return null; + } + + // has neither a letter nor number, or has both letters and numbers + // this needs to be after the step logic + if (!noAlphaNum(a) || !noAlphaNum(b) || hasBoth(a) || hasBoth(b)) { + if (!opts.silent) { + throw new RangeError('fill-range: invalid range arguments.'); + } + return null; + } + + // validate arguments + var isNumA = isNumber(zeros(a)); + var isNumB = isNumber(zeros(b)); + + if ((!isNumA && isNumB) || (isNumA && !isNumB)) { + if (!opts.silent) { + throw new TypeError('fill-range: first range argument is incompatible with second.'); + } + return null; + } + + // by this point both are the same, so we + // can use A to check going forward. + var isNum = isNumA; + var num = formatStep(step); + + // is the range alphabetical? or numeric? 
+ if (isNum) { + // if numeric, coerce to an integer + a = +a; b = +b; + } else { + // otherwise, get the charCode to expand alpha ranges + a = a.charCodeAt(0); + b = b.charCodeAt(0); + } + + // is the pattern descending? + var isDescending = a > b; + + // don't create a character class if the args are < 0 + if (a < 0 || b < 0) { + expand = false; + regex = false; + } + + // detect padding + var padding = isPadded(origA, origB); + var res, pad, arr = []; + var ii = 0; + + // character classes, ranges and logical `or` + if (regex) { + if (shouldExpand(a, b, num, isNum, padding, opts)) { + // make sure the correct separator is used + if (sep === '|' || sep === '~') { + sep = detectSeparator(a, b, num, isNum, isDescending); + } + return wrap([origA, origB], sep, opts); + } + } + + while (isDescending ? (a >= b) : (a <= b)) { + if (padding && isNum) { + pad = padding(a); + } + + // custom function + if (typeof fn === 'function') { + res = fn(a, isNum, pad, ii++); + + // letters + } else if (!isNum) { + if (regex && isInvalidChar(a)) { + res = null; + } else { + res = String.fromCharCode(a); + } + + // numbers + } else { + res = formatPadding(a, pad); + } + + // add result to the array, filtering any nulled values + if (res !== null) arr.push(res); + + // increment or decrement + if (isDescending) { + a -= num; + } else { + a += num; + } + } + + // now that the array is expanded, we need to handle regex + // character classes, ranges or logical `or` that wasn't + // already handled before the loop + if ((regex || expand) && !opts.noexpand) { + // make sure the correct separator is used + if (sep === '|' || sep === '~') { + sep = detectSeparator(a, b, num, isNum, isDescending); + } + if (arr.length === 1 || a < 0 || b < 0) { return arr; } + return wrap(arr, sep, opts); + } + + return arr; +} + +/** + * Wrap the string with the correct regex + * syntax. + */ + +function wrap(arr, sep, opts) { + if (sep === '~') { sep = '-'; } + var str = arr.join(sep); + var pre = opts && opts.regexPrefix; + + // regex logical `or` + if (sep === '|') { + str = pre ? pre + str : str; + str = '(' + str + ')'; + } + + // regex character class + if (sep === '-') { + str = (pre && pre === '^') + ? pre + str + : str; + str = '[' + str + ']'; + } + return [str]; +} + +/** + * Check for invalid characters + */ + +function isCharClass(a, b, step, isNum, isDescending) { + if (isDescending) { return false; } + if (isNum) { return a <= 9 && b <= 9; } + if (a < b) { return step === 1; } + return false; +} + +/** + * Detect the correct separator to use + */ + +function shouldExpand(a, b, num, isNum, padding, opts) { + if (isNum && (a > 9 || b > 9)) { return false; } + return !padding && num === 1 && a < b; +} + +/** + * Detect the correct separator to use + */ + +function detectSeparator(a, b, step, isNum, isDescending) { + var isChar = isCharClass(a, b, step, isNum, isDescending); + if (!isChar) { + return '|'; + } + return '~'; +} + +/** + * Correctly format the step based on type + */ + +function formatStep(step) { + return Math.abs(step >> 0) || 1; +} + +/** + * Format padding, taking leading `-` into account + */ + +function formatPadding(ch, pad) { + var res = pad ? 
pad + ch : ch; + if (pad && ch.toString().charAt(0) === '-') { + res = '-' + pad + ch.toString().substr(1); + } + return res.toString(); +} + +/** + * Check for invalid characters + */ + +function isInvalidChar(str) { + var ch = toStr(str); + return ch === '\\' + || ch === '[' + || ch === ']' + || ch === '^' + || ch === '(' + || ch === ')' + || ch === '`'; +} + +/** + * Convert to a string from a charCode + */ + +function toStr(ch) { + return String.fromCharCode(ch); +} + + +/** + * Step regex + */ + +function stepRe() { + return /\?|>|\||\+|\~/g; +} + +/** + * Return true if `val` has either a letter + * or a number + */ + +function noAlphaNum(val) { + return /[a-z0-9]/i.test(val); +} + +/** + * Return true if `val` has both a letter and + * a number (invalid) + */ + +function hasBoth(val) { + return /[a-z][0-9]|[0-9][a-z]/i.test(val); +} + +/** + * Normalize zeros for checks + */ + +function zeros(val) { + if (/^-*0+$/.test(val.toString())) { + return '0'; + } + return val; +} + +/** + * Return true if `val` has leading zeros, + * or a similar valid pattern. + */ + +function hasZeros(val) { + return /[^.]\.|^-*0+[0-9]/.test(val); +} + +/** + * If the string is padded, returns a curried function with + * the a cached padding string, or `false` if no padding. + * + * @param {*} `origA` String or number. + * @return {String|Boolean} + */ + +function isPadded(origA, origB) { + if (hasZeros(origA) || hasZeros(origB)) { + var alen = length(origA); + var blen = length(origB); + + var len = alen >= blen + ? alen + : blen; + + return function (a) { + return repeatStr('0', len - length(a)); + }; + } + return false; +} + +/** + * Get the string length of `val` + */ + +function length(val) { + return val.toString().length; +} diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json new file mode 100644 index 0000000..a8a7bcf --- /dev/null +++ b/node_modules/fill-range/package.json @@ -0,0 +1,61 @@ +{ + "name": "fill-range", + "description": "Fill in a range of numbers or letters, optionally passing an increment or multiplier to use.", + "version": "2.2.3", + "homepage": "https://github.com/jonschlinkert/fill-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/fill-range", + "bugs": { + "url": "https://github.com/jonschlinkert/fill-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^2.1.0", + "isobject": "^2.0.0", + "randomatic": "^1.1.3", + "repeat-element": "^1.1.2", + "repeat-string": "^1.5.2" + }, + "devDependencies": { + "benchmarked": "^0.1.3", + "chalk": "^0.5.1", + "should": "*" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "expand", + "expansion", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "sh" + ], + "verb": { + "related": { + "list": [ + "micromatch", + "expand-range", + "braces", + "is-glob" + ] + } + } +} diff --git a/node_modules/finalhandler/HISTORY.md b/node_modules/finalhandler/HISTORY.md new file mode 100644 index 0000000..e7e144c --- /dev/null +++ b/node_modules/finalhandler/HISTORY.md @@ -0,0 +1,108 @@ +0.5.0 / 2016-06-15 +================== + + * Change invalid or non-numeric status code to 500 + * Overwrite status message to match set status code + * Prefer `err.statusCode` if `err.status` is invalid + * Set response headers from `err.headers` object + * Use 
`statuses` instead of `http` module for status messages + - Includes all defined status messages + +0.4.1 / 2015-12-02 +================== + + * deps: escape-html@~1.0.3 + - perf: enable strict mode + - perf: optimize string replacement + - perf: use faster string coercion + +0.4.0 / 2015-06-14 +================== + + * Fix a false-positive when unpiping in Node.js 0.8 + * Support `statusCode` property on `Error` objects + * Use `unpipe` module for unpiping requests + * deps: escape-html@1.0.2 + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * perf: enable strict mode + * perf: remove argument reassignment + +0.3.6 / 2015-05-11 +================== + + * deps: debug@~2.2.0 + - deps: ms@0.7.1 + +0.3.5 / 2015-04-22 +================== + + * deps: on-finished@~2.2.1 + - Fix `isFinished(req)` when data buffered + +0.3.4 / 2015-03-15 +================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + +0.3.3 / 2015-01-01 +================== + + * deps: debug@~2.1.1 + * deps: on-finished@~2.2.0 + +0.3.2 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + +0.3.1 / 2014-10-16 +================== + + * deps: debug@~2.1.0 + - Implement `DEBUG_FD` env variable support + +0.3.0 / 2014-09-17 +================== + + * Terminate in progress response only on error + * Use `on-finished` to determine request status + +0.2.0 / 2014-09-03 +================== + + * Set `X-Content-Type-Options: nosniff` header + * deps: debug@~2.0.0 + +0.1.0 / 2014-07-16 +================== + + * Respond after request fully read + - prevents hung responses and socket hang ups + * deps: debug@1.0.4 + +0.0.3 / 2014-07-11 +================== + + * deps: debug@1.0.3 + - Add support for multiple wildcards in namespaces + +0.0.2 / 2014-06-19 +================== + + * Handle invalid status codes + +0.0.1 / 2014-06-05 +================== + + * deps: debug@1.0.2 + +0.0.0 / 2014-06-05 +================== + + * Extracted from connect/express diff --git a/node_modules/finalhandler/LICENSE b/node_modules/finalhandler/LICENSE new file mode 100644 index 0000000..b60a5ad --- /dev/null +++ b/node_modules/finalhandler/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/finalhandler/README.md b/node_modules/finalhandler/README.md new file mode 100644 index 0000000..58bf720 --- /dev/null +++ b/node_modules/finalhandler/README.md @@ -0,0 +1,142 @@ +# finalhandler + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Node.js function to invoke as the final step to respond to HTTP request. + +## Installation + +```sh +$ npm install finalhandler +``` + +## API + +```js +var finalhandler = require('finalhandler') +``` + +### finalhandler(req, res, [options]) + +Returns function to be invoked as the final step for the given `req` and `res`. +This function is to be invoked as `fn(err)`. If `err` is falsy, the handler will +write out a 404 response to the `res`. If it is truthy, an error response will +be written out to the `res`. + +When an error is written, the following information is added to the response: + + * The `res.statusCode` is set from `err.status` (or `err.statusCode`). If + this value is outside the 4xx or 5xx range, it will be set to 500. + * The `res.statusMessage` is set according to the status code. + * The body will be the HTML of the status code message if `env` is + `'production'`, otherwise will be `err.stack`. + * Any headers specified in an `err.headers` object. + +The final handler will also unpipe anything from `req` when it is invoked. + +#### options.env + +By default, the environment is determined by `NODE_ENV` variable, but it can be +overridden by this option. + +#### options.onerror + +Provide a function to be called with the `err` when it exists. Can be used for +writing errors to a central location without excessive function generation. Called +as `onerror(err, req, res)`. 
+ +## Examples + +### always 404 + +```js +var finalhandler = require('finalhandler') +var http = require('http') + +var server = http.createServer(function (req, res) { + var done = finalhandler(req, res) + done() +}) + +server.listen(3000) +``` + +### perform simple action + +```js +var finalhandler = require('finalhandler') +var fs = require('fs') +var http = require('http') + +var server = http.createServer(function (req, res) { + var done = finalhandler(req, res) + + fs.readFile('index.html', function (err, buf) { + if (err) return done(err) + res.setHeader('Content-Type', 'text/html') + res.end(buf) + }) +}) + +server.listen(3000) +``` + +### use with middleware-style functions + +```js +var finalhandler = require('finalhandler') +var http = require('http') +var serveStatic = require('serve-static') + +var serve = serveStatic('public') + +var server = http.createServer(function (req, res) { + var done = finalhandler(req, res) + serve(req, res, done) +}) + +server.listen(3000) +``` + +### keep log of all errors + +```js +var finalhandler = require('finalhandler') +var fs = require('fs') +var http = require('http') + +var server = http.createServer(function (req, res) { + var done = finalhandler(req, res, {onerror: logerror}) + + fs.readFile('index.html', function (err, buf) { + if (err) return done(err) + res.setHeader('Content-Type', 'text/html') + res.end(buf) + }) +}) + +server.listen(3000) + +function logerror(err) { + console.error(err.stack || err.toString()) +} +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/finalhandler.svg +[npm-url]: https://npmjs.org/package/finalhandler +[node-image]: https://img.shields.io/node/v/finalhandler.svg +[node-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/pillarjs/finalhandler.svg +[travis-url]: https://travis-ci.org/pillarjs/finalhandler +[coveralls-image]: https://img.shields.io/coveralls/pillarjs/finalhandler.svg +[coveralls-url]: https://coveralls.io/r/pillarjs/finalhandler?branch=master +[downloads-image]: https://img.shields.io/npm/dm/finalhandler.svg +[downloads-url]: https://npmjs.org/package/finalhandler diff --git a/node_modules/finalhandler/index.js b/node_modules/finalhandler/index.js new file mode 100644 index 0000000..884d802 --- /dev/null +++ b/node_modules/finalhandler/index.js @@ -0,0 +1,189 @@ +/*! + * finalhandler + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var debug = require('debug')('finalhandler') +var escapeHtml = require('escape-html') +var onFinished = require('on-finished') +var statuses = require('statuses') +var unpipe = require('unpipe') + +/** + * Module variables. + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? setImmediate + : function (fn) { process.nextTick(fn.bind.apply(fn, arguments)) } +var isFinished = onFinished.isFinished + +/** + * Module exports. + * @public + */ + +module.exports = finalhandler + +/** + * Create a function to handle the final response. 
+ * + * @param {Request} req + * @param {Response} res + * @param {Object} [options] + * @return {Function} + * @public + */ + +function finalhandler (req, res, options) { + var opts = options || {} + + // get environment + var env = opts.env || process.env.NODE_ENV || 'development' + + // get error callback + var onerror = opts.onerror + + return function (err) { + var headers = Object.create(null) + var status + + // ignore 404 on in-flight response + if (!err && res._header) { + debug('cannot 404 after headers sent') + return + } + + // unhandled error + if (err) { + // respect status code from error + status = getErrorStatusCode(err) || res.statusCode + + // default status code to 500 if outside valid range + if (typeof status !== 'number' || status < 400 || status > 599) { + status = 500 + } + + // respect err.headers + if (err.headers && (err.status === status || err.statusCode === status)) { + var keys = Object.keys(err.headers) + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + headers[key] = err.headers[key] + } + } + + // production gets a basic error message + var msg = env === 'production' + ? statuses[status] + : err.stack || err.toString() + msg = escapeHtml(msg) + .replace(/\n/g, '
') + .replace(/\x20{2}/g, '  ') + '\n' + } else { + status = 404 + msg = 'Cannot ' + escapeHtml(req.method) + ' ' + escapeHtml(req.originalUrl || req.url) + '\n' + } + + debug('default %s', status) + + // schedule onerror callback + if (err && onerror) { + defer(onerror, err, req, res) + } + + // cannot actually respond + if (res._header) { + debug('cannot %d after headers sent', status) + req.socket.destroy() + return + } + + // send response + send(req, res, status, headers, msg) + } +} + +/** + * Get status code from Error object. + * + * @param {Error} err + * @return {number} + * @private + */ + +function getErrorStatusCode (err) { + // check err.status + if (typeof err.status === 'number' && err.status >= 400 && err.status < 600) { + return err.status + } + + // check err.statusCode + if (typeof err.statusCode === 'number' && err.statusCode >= 400 && err.statusCode < 600) { + return err.statusCode + } + + return undefined +} + +/** + * Send response. + * + * @param {IncomingMessage} req + * @param {OutgoingMessage} res + * @param {number} status + * @param {object} headers + * @param {string} body + * @private + */ + +function send (req, res, status, headers, body) { + function write () { + // response status + res.statusCode = status + res.statusMessage = statuses[status] + + // response headers + var keys = Object.keys(headers) + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + res.setHeader(key, headers[key]) + } + + // security header for content sniffing + res.setHeader('X-Content-Type-Options', 'nosniff') + + // standard headers + res.setHeader('Content-Type', 'text/html; charset=utf-8') + res.setHeader('Content-Length', Buffer.byteLength(body, 'utf8')) + + if (req.method === 'HEAD') { + res.end() + return + } + + res.end(body, 'utf8') + } + + if (isFinished(req)) { + write() + return + } + + // unpipe everything from the request + unpipe(req) + + // flush the request + onFinished(req, write) + req.resume() +} diff --git a/node_modules/finalhandler/package.json b/node_modules/finalhandler/package.json new file mode 100644 index 0000000..6e649dc --- /dev/null +++ b/node_modules/finalhandler/package.json @@ -0,0 +1,39 @@ +{ + "name": "finalhandler", + "description": "Node.js final http responder", + "version": "0.5.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "repository": "pillarjs/finalhandler", + "dependencies": { + "debug": "~2.2.0", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "statuses": "~1.3.0", + "unpipe": "~1.0.0" + }, + "devDependencies": { + "eslint": "2.12.0", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.3.2", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "2.5.3", + "readable-stream": "2.1.2", + "supertest": "1.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/for-in/LICENSE b/node_modules/for-in/LICENSE new file mode 100644 index 0000000..39245ac --- /dev/null +++ b/node_modules/for-in/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/for-in/README.md b/node_modules/for-in/README.md new file mode 100644 index 0000000..8514165 --- /dev/null +++ b/node_modules/for-in/README.md @@ -0,0 +1,74 @@ +# for-in [![NPM version](https://img.shields.io/npm/v/for-in.svg?style=flat)](https://www.npmjs.com/package/for-in) [![NPM downloads](https://img.shields.io/npm/dm/for-in.svg?style=flat)](https://npmjs.org/package/for-in) [![Build Status](https://img.shields.io/travis/jonschlinkert/for-in.svg?style=flat)](https://travis-ci.org/jonschlinkert/for-in) + +> Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install for-in --save +``` + +## Usage + +```js +var forIn = require('for-in'); + +var obj = {a: 'foo', b: 'bar', c: 'baz'}; +var values = []; +var keys = []; + +forIn(obj, function (value, key, o) { + keys.push(key); + values.push(value); +}); + +console.log(keys); +//=> ['a', 'b', 'c']; + +console.log(values); +//=> ['foo', 'bar', 'baz']; +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/for-in/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/for-in/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on March 27, 2016._ diff --git a/node_modules/for-in/index.js b/node_modules/for-in/index.js new file mode 100644 index 0000000..9657550 --- /dev/null +++ b/node_modules/for-in/index.js @@ -0,0 +1,16 @@ +/*! + * for-in + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +'use strict'; + +module.exports = function forIn(o, fn, thisArg) { + for (var key in o) { + if (fn.call(thisArg, o[key], key, o) === false) { + break; + } + } +}; diff --git a/node_modules/for-in/package.json b/node_modules/for-in/package.json new file mode 100644 index 0000000..ff921ef --- /dev/null +++ b/node_modules/for-in/package.json @@ -0,0 +1,56 @@ +{ + "name": "for-in", + "description": "Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js", + "version": "0.1.6", + "homepage": "https://github.com/jonschlinkert/for-in", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/for-in", + "bugs": { + "url": "https://github.com/jonschlinkert/for-in/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.4.5", + "should": "^8.3.0" + }, + "keywords": [ + "for-in", + "for-own", + "has", + "has-own", + "hasOwn", + "key", + "keys", + "object", + "own", + "value" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/for-own/LICENSE b/node_modules/for-own/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/for-own/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/for-own/README.md b/node_modules/for-own/README.md new file mode 100644 index 0000000..16831ab --- /dev/null +++ b/node_modules/for-own/README.md @@ -0,0 +1,52 @@ +# for-own [![NPM version](https://badge.fury.io/js/for-own.svg)](http://badge.fury.io/js/for-own) + +> Iterate over the own and inherited enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. 
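(Editor's note, not part of the vendored README: the "exit early by returning `false`" behaviour described above is easy to miss, so here is a minimal illustrative sketch; the object keys and values are made up for the example.)

```js
var forOwn = require('for-own');

var obj = {a: 'foo', b: 'bar', c: 'baz'};
var seen = [];

forOwn(obj, function (value, key) {
  seen.push(key);
  if (key === 'b') {
    // returning `false` stops the iteration early
    return false;
  }
});

console.log(seen);
//=> ['a', 'b']
```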
+ +## Install +#### Install with [npm](https://www.npmjs.com/): + +```bash +npm i for-own --save +``` + +## Run tests + +```bash +npm test +``` + +## Usage + +```js +var forOwn = require('for-own'); + +var obj = {a: 'foo', b: 'bar', c: 'baz'}; +var values = []; +var keys = []; + +forOwn(obj, function (value, key, o) { + keys.push(key); + values.push(value); +}); + +console.log(keys); +//=> ['a', 'b', 'c']; + +console.log(values); +//=> ['foo', 'bar', 'baz']; +``` + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2014 Jon Schlinkert, contributors. +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on September 20, 2014._ diff --git a/node_modules/for-own/index.js b/node_modules/for-own/index.js new file mode 100644 index 0000000..d07256c --- /dev/null +++ b/node_modules/for-own/index.js @@ -0,0 +1,19 @@ +/*! + * for-own + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var forIn = require('for-in'); +var hasOwn = Object.prototype.hasOwnProperty; + +module.exports = function forOwn(o, fn, thisArg) { + forIn(o, function(val, key) { + if (hasOwn.call(o, key)) { + return fn.call(thisArg, o[key], key, o); + } + }); +}; diff --git a/node_modules/for-own/package.json b/node_modules/for-own/package.json new file mode 100644 index 0000000..02ae917 --- /dev/null +++ b/node_modules/for-own/package.json @@ -0,0 +1,58 @@ +{ + "name": "for-own", + "description": "Iterate over the own enumerable properties of an object, and return an object with properties that evaluate to true from the callback. Exit early by returning `false`. JavaScript/Node.js.", + "version": "0.1.4", + "homepage": "https://github.com/jonschlinkert/for-own", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/for-own", + "bugs": { + "url": "https://github.com/jonschlinkert/for-own/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "for-in": "^0.1.5" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.4.5" + }, + "keywords": [ + "for-in", + "for-own", + "has", + "has-own", + "hasOwn", + "key", + "keys", + "object", + "own", + "value" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/forever-agent/LICENSE b/node_modules/forever-agent/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/forever-agent/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/forever-agent/README.md b/node_modules/forever-agent/README.md new file mode 100644 index 0000000..9d5b663 --- /dev/null +++ b/node_modules/forever-agent/README.md @@ -0,0 +1,4 @@ +forever-agent +============= + +HTTP Agent that keeps socket connections alive between keep-alive requests. Formerly part of mikeal/request, now a standalone module. diff --git a/node_modules/forever-agent/index.js b/node_modules/forever-agent/index.js new file mode 100644 index 0000000..416c7ab --- /dev/null +++ b/node_modules/forever-agent/index.js @@ -0,0 +1,138 @@ +module.exports = ForeverAgent +ForeverAgent.SSL = ForeverAgentSSL + +var util = require('util') + , Agent = require('http').Agent + , net = require('net') + , tls = require('tls') + , AgentSSL = require('https').Agent + +function getConnectionName(host, port) { + var name = '' + if (typeof host === 'string') { + name = host + ':' + port + } else { + // For node.js v012.0 and iojs-v1.5.1, host is an object. And any existing localAddress is part of the connection name. + name = host.host + ':' + host.port + ':' + (host.localAddress ? 
(host.localAddress + ':') : ':') + } + return name +} + +function ForeverAgent(options) { + var self = this + self.options = options || {} + self.requests = {} + self.sockets = {} + self.freeSockets = {} + self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets + self.minSockets = self.options.minSockets || ForeverAgent.defaultMinSockets + self.on('free', function(socket, host, port) { + var name = getConnectionName(host, port) + + if (self.requests[name] && self.requests[name].length) { + self.requests[name].shift().onSocket(socket) + } else if (self.sockets[name].length < self.minSockets) { + if (!self.freeSockets[name]) self.freeSockets[name] = [] + self.freeSockets[name].push(socket) + + // if an error happens while we don't use the socket anyway, meh, throw the socket away + var onIdleError = function() { + socket.destroy() + } + socket._onIdleError = onIdleError + socket.on('error', onIdleError) + } else { + // If there are no pending requests just destroy the + // socket and it will get removed from the pool. This + // gets us out of timeout issues and allows us to + // default to Connection:keep-alive. + socket.destroy() + } + }) + +} +util.inherits(ForeverAgent, Agent) + +ForeverAgent.defaultMinSockets = 5 + + +ForeverAgent.prototype.createConnection = net.createConnection +ForeverAgent.prototype.addRequestNoreuse = Agent.prototype.addRequest +ForeverAgent.prototype.addRequest = function(req, host, port) { + var name = getConnectionName(host, port) + + if (typeof host !== 'string') { + var options = host + port = options.port + host = options.host + } + + if (this.freeSockets[name] && this.freeSockets[name].length > 0 && !req.useChunkedEncodingByDefault) { + var idleSocket = this.freeSockets[name].pop() + idleSocket.removeListener('error', idleSocket._onIdleError) + delete idleSocket._onIdleError + req._reusedSocket = true + req.onSocket(idleSocket) + } else { + this.addRequestNoreuse(req, host, port) + } +} + +ForeverAgent.prototype.removeSocket = function(s, name, host, port) { + if (this.sockets[name]) { + var index = this.sockets[name].indexOf(s) + if (index !== -1) { + this.sockets[name].splice(index, 1) + } + } else if (this.sockets[name] && this.sockets[name].length === 0) { + // don't leak + delete this.sockets[name] + delete this.requests[name] + } + + if (this.freeSockets[name]) { + var index = this.freeSockets[name].indexOf(s) + if (index !== -1) { + this.freeSockets[name].splice(index, 1) + if (this.freeSockets[name].length === 0) { + delete this.freeSockets[name] + } + } + } + + if (this.requests[name] && this.requests[name].length) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(name, host, port).emit('free') + } +} + +function ForeverAgentSSL (options) { + ForeverAgent.call(this, options) +} +util.inherits(ForeverAgentSSL, ForeverAgent) + +ForeverAgentSSL.prototype.createConnection = createConnectionSSL +ForeverAgentSSL.prototype.addRequestNoreuse = AgentSSL.prototype.addRequest + +function createConnectionSSL (port, host, options) { + if (typeof port === 'object') { + options = port; + } else if (typeof host === 'object') { + options = host; + } else if (typeof options === 'object') { + options = options; + } else { + options = {}; + } + + if (typeof port === 'number') { + options.port = port; + } + + if (typeof host === 'string') { + options.host = host; + } + + return tls.connect(options); +} diff --git a/node_modules/forever-agent/package.json b/node_modules/forever-agent/package.json new file mode 100644 index 0000000..f760629 --- /dev/null +++ b/node_modules/forever-agent/package.json @@ -0,0 +1,17 @@ +{ + "author": "Mikeal Rogers (http://www.futurealoof.com)", + "name": "forever-agent", + "description": "HTTP Agent that keeps socket connections alive between keep-alive requests. Formerly part of mikeal/request, now a standalone module.", + "version": "0.6.1", + "license": "Apache-2.0", + "repository": { + "url": "https://github.com/mikeal/forever-agent" + }, + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + } +} diff --git a/node_modules/form-data/License b/node_modules/form-data/License new file mode 100644 index 0000000..c7ff12a --- /dev/null +++ b/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/form-data/README.md b/node_modules/form-data/README.md new file mode 100644 index 0000000..38d6d9f --- /dev/null +++ b/node_modules/form-data/README.md @@ -0,0 +1,217 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.1.1.svg?label=linux:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.1.1.svg?label=macos:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.1.1.svg?label=windows:0.12-6.x)](https://ci.appveyor.com/project/alexindigo/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.1.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) +[![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
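
For example, one possible sketch of working with that returned request object (the URL and the 5-second timeout are placeholder values, not library defaults):

```javascript
var request = form.submit('http://example.org/upload', function(err, res) {
  if (err) return console.error(err);
  res.resume();
});

// http.ClientRequest exposes lower-level controls, e.g. a socket timeout
request.setTimeout(5000, function() {
  request.abort(); // give up if the server takes too long to respond
});
```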
+ +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', + contentType: 'image/jpg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. + +## License + +Form-Data is released under the [MIT](License) license. 
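
One further sketch tied to the note about `getLengthSync()` above: the asynchronous `getLength(callback)` (implemented in `lib/form_data.js` later in this diff) can measure stream lengths; the file path below is a placeholder:

```javascript
var FormData = require('form-data');
var fs = require('fs');

var form = new FormData();
form.append('my_file', fs.createReadStream('/foo/bar.jpg'));

// unlike getLengthSync(), getLength() measures streams asynchronously
form.getLength(function(err, length) {
  if (err) return console.error(err);
  console.log('total body length:', length);
});
```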
diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000..f683b05 --- /dev/null +++ b/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = self.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000..d2cc924 --- /dev/null +++ b/node_modules/form-data/lib/form_data.js @@ -0,0 +1,440 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. + * + * @constructor + */ +function FormData() { + if (!(this instanceof FormData)) { + return new FormData(); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? 
+ this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var contentDisposition; + + // custom filename takes precedence + // fs- and request- streams have path property + // formidable and the browser add a name property. 
+ var filename = options.filename || value.name || value.path; + + // or try http response + if (!filename && value.readable && value.hasOwnProperty('httpVersion')) { + filename = value.client._httpMessage.path; + } + + if (filename) { + contentDisposition = 'filename="' + path.basename(filename) + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filename + if (!contentType && options.filename) { + contentType = mime.lookup(options.filename); + } + + // fallback to the default content type if `value` is not simple value + if (!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + request.on('error', cb); + request.on('response', cb.bind(this, null)); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000..4d35738 --- /dev/null +++ b/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json new file mode 100644 index 0000000..a0cfe83 --- /dev/null +++ b/node_modules/form-data/package.json @@ -0,0 +1,57 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. 
Can be used to submit forms and file uploads to other web applications.", + "version": "2.1.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "ci-lint": "is-node-modern && npm run lint || is-node-not-modern", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "test", + "check" + ], + "engines": { + "node": ">= 0.12" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.5", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "coveralls": "^2.11.14", + "cross-spawn": "^4.0.2", + "eslint": "^3.7.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.75.0", + "rimraf": "^2.5.4" + }, + "license": "MIT" +} diff --git a/node_modules/forwarded/HISTORY.md b/node_modules/forwarded/HISTORY.md new file mode 100644 index 0000000..97fa1d1 --- /dev/null +++ b/node_modules/forwarded/HISTORY.md @@ -0,0 +1,4 @@ +0.1.0 / 2014-09-21 +================== + + * Initial release diff --git a/node_modules/forwarded/LICENSE b/node_modules/forwarded/LICENSE new file mode 100644 index 0000000..b7dce6c --- /dev/null +++ b/node_modules/forwarded/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/forwarded/README.md b/node_modules/forwarded/README.md new file mode 100644 index 0000000..2b4988f --- /dev/null +++ b/node_modules/forwarded/README.md @@ -0,0 +1,53 @@ +# forwarded + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Parse HTTP X-Forwarded-For header + +## Installation + +```sh +$ npm install forwarded +``` + +## API + +```js +var forwarded = require('forwarded') +``` + +### forwarded(req) + +```js +var addresses = forwarded(req) +``` + +Parse the `X-Forwarded-For` header from the request. Returns an array +of the addresses, including the socket address for the `req`. In reverse +order (i.e. index `0` is the socket address and the last index is the +furthest address, typically the end-user). + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/forwarded.svg?style=flat +[npm-url]: https://npmjs.org/package/forwarded +[node-version-image]: https://img.shields.io/node/v/forwarded.svg?style=flat +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/forwarded.svg?style=flat +[travis-url]: https://travis-ci.org/jshttp/forwarded +[coveralls-image]: https://img.shields.io/coveralls/jshttp/forwarded.svg?style=flat +[coveralls-url]: https://coveralls.io/r/jshttp/forwarded?branch=master +[downloads-image]: https://img.shields.io/npm/dm/forwarded.svg?style=flat +[downloads-url]: https://npmjs.org/package/forwarded diff --git a/node_modules/forwarded/index.js b/node_modules/forwarded/index.js new file mode 100644 index 0000000..2f5c340 --- /dev/null +++ b/node_modules/forwarded/index.js @@ -0,0 +1,35 @@ +/*! + * forwarded + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = forwarded + +/** + * Get all addresses in the request, using the `X-Forwarded-For` header. 
+ * + * @param {Object} req + * @api public + */ + +function forwarded(req) { + if (!req) { + throw new TypeError('argument req is required') + } + + // simple header parsing + var proxyAddrs = (req.headers['x-forwarded-for'] || '') + .split(/ *, */) + .filter(Boolean) + .reverse() + var socketAddr = req.connection.remoteAddress + var addrs = [socketAddr].concat(proxyAddrs) + + // return all addresses + return addrs +} diff --git a/node_modules/forwarded/package.json b/node_modules/forwarded/package.json new file mode 100644 index 0000000..d447523 --- /dev/null +++ b/node_modules/forwarded/package.json @@ -0,0 +1,33 @@ +{ + "name": "forwarded", + "description": "Parse HTTP X-Forwarded-For header", + "version": "0.1.0", + "contributors": [ + "Douglas Christopher Wilson " + ], + "license": "MIT", + "keywords": [ + "x-forwarded-for", + "http", + "req" + ], + "repository": "jshttp/forwarded", + "devDependencies": { + "istanbul": "0.3.2", + "mocha": "~1.21.4" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/fresh/HISTORY.md b/node_modules/fresh/HISTORY.md new file mode 100644 index 0000000..3c95fbb --- /dev/null +++ b/node_modules/fresh/HISTORY.md @@ -0,0 +1,38 @@ +0.3.0 / 2015-05-12 +================== + + * Add weak `ETag` matching support + +0.2.4 / 2014-09-07 +================== + + * Support Node.js 0.6 + +0.2.3 / 2014-09-07 +================== + + * Move repository to jshttp + +0.2.2 / 2014-02-19 +================== + + * Revert "Fix for blank page on Safari reload" + +0.2.1 / 2014-01-29 +================== + + * Fix for blank page on Safari reload + +0.2.0 / 2013-08-11 +================== + + * Return stale for `Cache-Control: no-cache` + +0.1.0 / 2012-06-15 +================== + * Add `If-None-Match: *` support + +0.0.1 / 2012-06-10 +================== + + * Initial release diff --git a/node_modules/fresh/LICENSE b/node_modules/fresh/LICENSE new file mode 100644 index 0000000..f527394 --- /dev/null +++ b/node_modules/fresh/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2012 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
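
Stepping back to the `forwarded` module above, a minimal usage sketch (the HTTP server here is hypothetical and only illustrates the call):

```js
var http = require('http')
var forwarded = require('forwarded')

http.createServer(function (req, res) {
  // index 0 is the socket address; any X-Forwarded-For entries follow in reverse order
  var addresses = forwarded(req)
  res.end(JSON.stringify(addresses))
}).listen(3000)
```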
diff --git a/node_modules/fresh/README.md b/node_modules/fresh/README.md new file mode 100644 index 0000000..0813e30 --- /dev/null +++ b/node_modules/fresh/README.md @@ -0,0 +1,58 @@ +# fresh + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP response freshness testing + +## Installation + +``` +$ npm install fresh +``` + +## API + +```js +var fresh = require('fresh') +``` + +### fresh(req, res) + + Check freshness of `req` and `res` headers. + + When the cache is "fresh" __true__ is returned, + otherwise __false__ is returned to indicate that + the cache is now stale. + +## Example + +```js +var req = { 'if-none-match': 'tobi' }; +var res = { 'etag': 'luna' }; +fresh(req, res); +// => false + +var req = { 'if-none-match': 'tobi' }; +var res = { 'etag': 'tobi' }; +fresh(req, res); +// => true +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/fresh.svg +[npm-url]: https://npmjs.org/package/fresh +[node-version-image]: https://img.shields.io/node/v/fresh.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/fresh/master.svg +[travis-url]: https://travis-ci.org/jshttp/fresh +[coveralls-image]: https://img.shields.io/coveralls/jshttp/fresh/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/fresh?branch=master +[downloads-image]: https://img.shields.io/npm/dm/fresh.svg +[downloads-url]: https://npmjs.org/package/fresh diff --git a/node_modules/fresh/index.js b/node_modules/fresh/index.js new file mode 100644 index 0000000..a900873 --- /dev/null +++ b/node_modules/fresh/index.js @@ -0,0 +1,57 @@ + +/** + * Expose `fresh()`. + */ + +module.exports = fresh; + +/** + * Check freshness of `req` and `res` headers. + * + * When the cache is "fresh" __true__ is returned, + * otherwise __false__ is returned to indicate that + * the cache is now stale. + * + * @param {Object} req + * @param {Object} res + * @return {Boolean} + * @api public + */ + +function fresh(req, res) { + // defaults + var etagMatches = true; + var notModified = true; + + // fields + var modifiedSince = req['if-modified-since']; + var noneMatch = req['if-none-match']; + var lastModified = res['last-modified']; + var etag = res['etag']; + var cc = req['cache-control']; + + // unconditional request + if (!modifiedSince && !noneMatch) return false; + + // check for no-cache cache request directive + if (cc && cc.indexOf('no-cache') !== -1) return false; + + // parse if-none-match + if (noneMatch) noneMatch = noneMatch.split(/ *, */); + + // if-none-match + if (noneMatch) { + etagMatches = noneMatch.some(function (match) { + return match === '*' || match === etag || match === 'W/' + etag; + }); + } + + // if-modified-since + if (modifiedSince) { + modifiedSince = new Date(modifiedSince); + lastModified = new Date(lastModified); + notModified = lastModified <= modifiedSince; + } + + return !! 
(etagMatches && notModified); +} diff --git a/node_modules/fresh/package.json b/node_modules/fresh/package.json new file mode 100644 index 0000000..714f54c --- /dev/null +++ b/node_modules/fresh/package.json @@ -0,0 +1,35 @@ +{ + "name": "fresh", + "description": "HTTP response freshness testing", + "version": "0.3.0", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "fresh", + "http", + "conditional", + "cache" + ], + "repository": "jshttp/fresh", + "devDependencies": { + "istanbul": "0.3.9", + "mocha": "1.21.5" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/fs-extra/.npmignore b/node_modules/fs-extra/.npmignore new file mode 100644 index 0000000..68eefb7 --- /dev/null +++ b/node_modules/fs-extra/.npmignore @@ -0,0 +1,8 @@ +.nyc_output/ +coverage/ +test/ +.travis.yml +appveyor.yml +lib/**/__tests__/ +test/readme.md +test.js diff --git a/node_modules/fs-extra/CHANGELOG.md b/node_modules/fs-extra/CHANGELOG.md new file mode 100644 index 0000000..dc59a13 --- /dev/null +++ b/node_modules/fs-extra/CHANGELOG.md @@ -0,0 +1,726 @@ +0.26.7 / 2016-03-16 +------------------- +- fixed `copy()` if source and dest are the same. [#230][#230] + +0.26.6 / 2016-03-15 +------------------- +- fixed if `emptyDir()` does not have a callback: [#229][#229] + +0.26.5 / 2016-01-27 +------------------- +- `copy()` with two arguments (w/o callback) was broken. See: [#215][#215] + +0.26.4 / 2016-01-05 +------------------- +- `copySync()` made `preserveTimestamps` default consistent with `copy()` which is `false`. See: [#208][#208] + +0.26.3 / 2015-12-17 +------------------- +- fixed `copy()` hangup in copying blockDevice / characterDevice / `/dev/null`. See: [#193][#193] + +0.26.2 / 2015-11-02 +------------------- +- fixed `outputJson{Sync}()` spacing adherence to `fs.spaces` + +0.26.1 / 2015-11-02 +------------------- +- fixed `copySync()` when `clogger=true` and the destination is read only. See: [#190][#190] + +0.26.0 / 2015-10-25 +------------------- +- extracted the `walk()` function into its own module [`klaw`](https://github.com/jprichardson/node-klaw). + +0.25.0 / 2015-10-24 +------------------- +- now has a file walker `walk()` + +0.24.0 / 2015-08-28 +------------------- +- removed alias `delete()` and `deleteSync()`. See: [#171][#171] + +0.23.1 / 2015-08-07 +------------------- +- Better handling of errors for `move()` when moving across devices. [#170][#170] +- `ensureSymlink()` and `ensureLink()` should not throw errors if link exists. [#169][#169] + +0.23.0 / 2015-08-06 +------------------- +- added `ensureLink{Sync}()` and `ensureSymlink{Sync}()`. See: [#165][#165] + +0.22.1 / 2015-07-09 +------------------- +- Prevent calling `hasMillisResSync()` on module load. See: [#149][#149]. +Fixes regression that was introduced in `0.21.0`. + +0.22.0 / 2015-07-09 +------------------- +- preserve permissions / ownership in `copy()`. See: [#54][#54] + +0.21.0 / 2015-07-04 +------------------- +- add option to preserve timestamps in `copy()` and `copySync()`. 
See: [#141][#141] +- updated `graceful-fs@3.x` to `4.x`. This brings in features from `amazing-graceful-fs` (much cleaner code / less hacks) + +0.20.1 / 2015-06-23 +------------------- +- fixed regression caused by latest jsonfile update: See: https://github.com/jprichardson/node-jsonfile/issues/26 + +0.20.0 / 2015-06-19 +------------------- +- removed `jsonfile` aliases with `File` in the name, they weren't documented and probably weren't in use e.g. +this package had both `fs.readJsonFile` and `fs.readJson` that were aliases to each other, now use `fs.readJson`. +- preliminary walker created. Intentionally not documented. If you use it, it will almost certainly change and break your code. +- started moving tests inline +- upgraded to `jsonfile@2.1.0`, can now pass JSON revivers/replacers to `readJson()`, `writeJson()`, `outputJson()` + +0.19.0 / 2015-06-08 +------------------- +- `fs.copy()` had support for Node v0.8, dropped support + +0.18.4 / 2015-05-22 +------------------- +- fixed license field according to this: [#136][#136] and https://github.com/npm/npm/releases/tag/v2.10.0 + +0.18.3 / 2015-05-08 +------------------- +- bugfix: handle `EEXIST` when clobbering on some Linux systems. [#134][#134] + +0.18.2 / 2015-04-17 +------------------- +- bugfix: allow `F_OK` ([#120][#120]) + +0.18.1 / 2015-04-15 +------------------- +- improved windows support for `move()` a bit. https://github.com/jprichardson/node-fs-extra/commit/92838980f25dc2ee4ec46b43ee14d3c4a1d30c1b +- fixed a lot of tests for Windows (appveyor) + +0.18.0 / 2015-03-31 +------------------- +- added `emptyDir()` and `emptyDirSync()` + +0.17.0 / 2015-03-28 +------------------- +- `copySync` added `clobber` option (before always would clobber, now if `clobber` is `false` it throws an error if the destination exists). +**Only works with files at the moment.** +- `createOutputStream()` added. See: [#118][#118] + +0.16.5 / 2015-03-08 +------------------- +- fixed `fs.move` when `clobber` is `true` and destination is a directory, it should clobber. [#114][#114] + +0.16.4 / 2015-03-01 +------------------- +- `fs.mkdirs` fix infinite loop on Windows. See: See https://github.com/substack/node-mkdirp/pull/74 and https://github.com/substack/node-mkdirp/issues/66 + +0.16.3 / 2015-01-28 +------------------- +- reverted https://github.com/jprichardson/node-fs-extra/commit/1ee77c8a805eba5b99382a2591ff99667847c9c9 + + +0.16.2 / 2015-01-28 +------------------- +- fixed `fs.copy` for Node v0.8 (support is temporary and will be removed in the near future) + +0.16.1 / 2015-01-28 +------------------- +- if `setImmediate` is not available, fall back to `process.nextTick` + +0.16.0 / 2015-01-28 +------------------- +- bugfix `fs.move()` into itself. Closes #104 +- bugfix `fs.move()` moving directory across device. Closes #108 +- added coveralls support +- bugfix: nasty multiple callback `fs.copy()` bug. 
Closes #98 +- misc fs.copy code cleanups + +0.15.0 / 2015-01-21 +------------------- +- dropped `ncp`, imported code in +- because of previous, now supports `io.js` +- `graceful-fs` is now a dependency + +0.14.0 / 2015-01-05 +------------------- +- changed `copy`/`copySync` from `fs.copy(src, dest, [filters], callback)` to `fs.copy(src, dest, [options], callback)` [#100][#100] +- removed mockfs tests for mkdirp (this may be temporary, but was getting in the way of other tests) + +0.13.0 / 2014-12-10 +------------------- +- removed `touch` and `touchSync` methods (they didn't handle permissions like UNIX touch) +- updated `"ncp": "^0.6.0"` to `"ncp": "^1.0.1"` +- imported `mkdirp` => `minimist` and `mkdirp` are no longer dependences, should now appease people who wanted `mkdirp` to be `--use_strict` safe. See [#59]([#59][#59]) + +0.12.0 / 2014-09-22 +------------------- +- copy symlinks in `copySync()` [#85][#85] + +0.11.1 / 2014-09-02 +------------------- +- bugfix `copySync()` preserve file permissions [#80][#80] + +0.11.0 / 2014-08-11 +------------------- +- upgraded `"ncp": "^0.5.1"` to `"ncp": "^0.6.0"` +- upgrade `jsonfile": "^1.2.0"` to `jsonfile": "^2.0.0"` => on write, json files now have `\n` at end. Also adds `options.throws` to `readJsonSync()` +see https://github.com/jprichardson/node-jsonfile#readfilesyncfilename-options for more details. + +0.10.0 / 2014-06-29 +------------------ +* bugfix: upgaded `"jsonfile": "~1.1.0"` to `"jsonfile": "^1.2.0"`, bumped minor because of `jsonfile` dep change +from `~` to `^`. #67 + +0.9.1 / 2014-05-22 +------------------ +* removed Node.js `0.8.x` support, `0.9.0` was published moments ago and should have been done there + +0.9.0 / 2014-05-22 +------------------ +* upgraded `ncp` from `~0.4.2` to `^0.5.1`, #58 +* upgraded `rimraf` from `~2.2.6` to `^2.2.8` +* upgraded `mkdirp` from `0.3.x` to `^0.5.0` +* added methods `ensureFile()`, `ensureFileSync()` +* added methods `ensureDir()`, `ensureDirSync()` #31 +* added `move()` method. From: https://github.com/andrewrk/node-mv + + +0.8.1 / 2013-10-24 +------------------ +* copy failed to return an error to the callback if a file doesn't exist (ulikoehler #38, #39) + +0.8.0 / 2013-10-14 +------------------ +* `filter` implemented on `copy()` and `copySync()`. (Srirangan / #36) + +0.7.1 / 2013-10-12 +------------------ +* `copySync()` implemented (Srirangan / #33) +* updated to the latest `jsonfile` version `1.1.0` which gives `options` params for the JSON methods. Closes #32 + +0.7.0 / 2013-10-07 +------------------ +* update readme conventions +* `copy()` now works if destination directory does not exist. Closes #29 + +0.6.4 / 2013-09-05 +------------------ +* changed `homepage` field in package.json to remove NPM warning + +0.6.3 / 2013-06-28 +------------------ +* changed JSON spacing default from `4` to `2` to follow Node conventions +* updated `jsonfile` dep +* updated `rimraf` dep + +0.6.2 / 2013-06-28 +------------------ +* added .npmignore, #25 + +0.6.1 / 2013-05-14 +------------------ +* modified for `strict` mode, closes #24 +* added `outputJson()/outputJsonSync()`, closes #23 + +0.6.0 / 2013-03-18 +------------------ +* removed node 0.6 support +* added node 0.10 support +* upgraded to latest `ncp` and `rimraf`. +* optional `graceful-fs` support. Closes #17 + + +0.5.0 / 2013-02-03 +------------------ +* Removed `readTextFile`. +* Renamed `readJSONFile` to `readJSON` and `readJson`, same with write. +* Restructured documentation a bit. Added roadmap. 
+ +0.4.0 / 2013-01-28 +------------------ +* Set default spaces in `jsonfile` from 4 to 2. +* Updated `testutil` deps for tests. +* Renamed `touch()` to `createFile()` +* Added `outputFile()` and `outputFileSync()` +* Changed creation of testing diretories so the /tmp dir is not littered. +* Added `readTextFile()` and `readTextFileSync()`. + +0.3.2 / 2012-11-01 +------------------ +* Added `touch()` and `touchSync()` methods. + +0.3.1 / 2012-10-11 +------------------ +* Fixed some stray globals. + +0.3.0 / 2012-10-09 +------------------ +* Removed all CoffeeScript from tests. +* Renamed `mkdir` to `mkdirs`/`mkdirp`. + +0.2.1 / 2012-09-11 +------------------ +* Updated `rimraf` dep. + +0.2.0 / 2012-09-10 +------------------ +* Rewrote module into JavaScript. (Must still rewrite tests into JavaScript) +* Added all methods of [jsonfile][https://github.com/jprichardson/node-jsonfile] +* Added Travis-CI. + +0.1.3 / 2012-08-13 +------------------ +* Added method `readJSONFile`. + +0.1.2 / 2012-06-15 +------------------ +* Bug fix: `deleteSync()` didn't exist. +* Verified Node v0.8 compatibility. + +0.1.1 / 2012-06-15 +------------------ +* Fixed bug in `remove()`/`delete()` that wouldn't execute the function if a callback wasn't passed. + +0.1.0 / 2012-05-31 +------------------ +* Renamed `copyFile()` to `copy()`. `copy()` can now copy directories (recursively) too. +* Renamed `rmrf()` to `remove()`. +* `remove()` aliased with `delete()`. +* Added `mkdirp` capabilities. Named: `mkdir()`. Hides Node.js native `mkdir()`. +* Instead of exporting the native `fs` module with new functions, I now copy over the native methods to a new object and export that instead. + +0.0.4 / 2012-03-14 +------------------ +* Removed CoffeeScript dependency + +0.0.3 / 2012-01-11 +------------------ +* Added methods rmrf and rmrfSync +* Moved tests from Jasmine to Mocha + + +[#215]: https://github.com/jprichardson/node-fs-extra/pull/215 + +[#214]: https://github.com/jprichardson/node-fs-extra/pull/214 + +[#213]: https://github.com/jprichardson/node-fs-extra/issues/213 + +[#212]: https://github.com/jprichardson/node-fs-extra/pull/212 + +[#211]: https://github.com/jprichardson/node-fs-extra/issues/211 + +[#210]: https://github.com/jprichardson/node-fs-extra/issues/210 + +[#209]: https://github.com/jprichardson/node-fs-extra/issues/209 + +[#208]: https://github.com/jprichardson/node-fs-extra/pull/208 + +[#207]: https://github.com/jprichardson/node-fs-extra/issues/207 + +[#206]: https://github.com/jprichardson/node-fs-extra/issues/206 + +[#205]: https://github.com/jprichardson/node-fs-extra/issues/205 + +[#204]: https://github.com/jprichardson/node-fs-extra/pull/204 + +[#203]: https://github.com/jprichardson/node-fs-extra/issues/203 + +[#202]: https://github.com/jprichardson/node-fs-extra/issues/202 + +[#201]: https://github.com/jprichardson/node-fs-extra/issues/201 + +[#200]: https://github.com/jprichardson/node-fs-extra/issues/200 + +[#199]: https://github.com/jprichardson/node-fs-extra/issues/199 + +[#198]: https://github.com/jprichardson/node-fs-extra/issues/198 + +[#197]: https://github.com/jprichardson/node-fs-extra/issues/197 + +[#196]: https://github.com/jprichardson/node-fs-extra/issues/196 + +[#195]: https://github.com/jprichardson/node-fs-extra/issues/195 + +[#194]: https://github.com/jprichardson/node-fs-extra/pull/194 + +[#193]: https://github.com/jprichardson/node-fs-extra/issues/193 + +[#192]: https://github.com/jprichardson/node-fs-extra/issues/192 + +[#191]: 
https://github.com/jprichardson/node-fs-extra/issues/191 + +[#190]: https://github.com/jprichardson/node-fs-extra/pull/190 + +[#189]: https://github.com/jprichardson/node-fs-extra/pull/189 + +[#188]: https://github.com/jprichardson/node-fs-extra/issues/188 + +[#187]: https://github.com/jprichardson/node-fs-extra/issues/187 + +[#186]: https://github.com/jprichardson/node-fs-extra/issues/186 + +[#185]: https://github.com/jprichardson/node-fs-extra/issues/185 + +[#184]: https://github.com/jprichardson/node-fs-extra/issues/184 + +[#183]: https://github.com/jprichardson/node-fs-extra/issues/183 + +[#182]: https://github.com/jprichardson/node-fs-extra/issues/182 + +[#181]: https://github.com/jprichardson/node-fs-extra/issues/181 + +[#180]: https://github.com/jprichardson/node-fs-extra/issues/180 + +[#179]: https://github.com/jprichardson/node-fs-extra/issues/179 + +[#178]: https://github.com/jprichardson/node-fs-extra/issues/178 + +[#177]: https://github.com/jprichardson/node-fs-extra/issues/177 + +[#176]: https://github.com/jprichardson/node-fs-extra/issues/176 + +[#175]: https://github.com/jprichardson/node-fs-extra/issues/175 + +[#174]: https://github.com/jprichardson/node-fs-extra/pull/174 + +[#173]: https://github.com/jprichardson/node-fs-extra/issues/173 + +[#172]: https://github.com/jprichardson/node-fs-extra/issues/172 + +[#171]: https://github.com/jprichardson/node-fs-extra/issues/171 + +[#170]: https://github.com/jprichardson/node-fs-extra/pull/170 + +[#169]: https://github.com/jprichardson/node-fs-extra/pull/169 + +[#168]: https://github.com/jprichardson/node-fs-extra/pull/168 + +[#167]: https://github.com/jprichardson/node-fs-extra/pull/167 + +[#166]: https://github.com/jprichardson/node-fs-extra/pull/166 + +[#165]: https://github.com/jprichardson/node-fs-extra/pull/165 + +[#164]: https://github.com/jprichardson/node-fs-extra/issues/164 + +[#163]: https://github.com/jprichardson/node-fs-extra/issues/163 + +[#162]: https://github.com/jprichardson/node-fs-extra/pull/162 + +[#161]: https://github.com/jprichardson/node-fs-extra/pull/161 + +[#160]: https://github.com/jprichardson/node-fs-extra/pull/160 + +[#159]: https://github.com/jprichardson/node-fs-extra/pull/159 + +[#158]: https://github.com/jprichardson/node-fs-extra/issues/158 + +[#157]: https://github.com/jprichardson/node-fs-extra/issues/157 + +[#156]: https://github.com/jprichardson/node-fs-extra/issues/156 + +[#155]: https://github.com/jprichardson/node-fs-extra/issues/155 + +[#154]: https://github.com/jprichardson/node-fs-extra/issues/154 + +[#153]: https://github.com/jprichardson/node-fs-extra/pull/153 + +[#152]: https://github.com/jprichardson/node-fs-extra/issues/152 + +[#151]: https://github.com/jprichardson/node-fs-extra/issues/151 + +[#150]: https://github.com/jprichardson/node-fs-extra/issues/150 + +[#149]: https://github.com/jprichardson/node-fs-extra/issues/149 + +[#148]: https://github.com/jprichardson/node-fs-extra/issues/148 + +[#147]: https://github.com/jprichardson/node-fs-extra/issues/147 + +[#146]: https://github.com/jprichardson/node-fs-extra/pull/146 + +[#145]: https://github.com/jprichardson/node-fs-extra/issues/145 + +[#144]: https://github.com/jprichardson/node-fs-extra/issues/144 + +[#143]: https://github.com/jprichardson/node-fs-extra/issues/143 + +[#142]: https://github.com/jprichardson/node-fs-extra/issues/142 + +[#141]: https://github.com/jprichardson/node-fs-extra/pull/141 + +[#140]: https://github.com/jprichardson/node-fs-extra/issues/140 + +[#139]: 
https://github.com/jprichardson/node-fs-extra/pull/139 + +[#138]: https://github.com/jprichardson/node-fs-extra/issues/138 + +[#137]: https://github.com/jprichardson/node-fs-extra/issues/137 + +[#136]: https://github.com/jprichardson/node-fs-extra/pull/136 + +[#135]: https://github.com/jprichardson/node-fs-extra/issues/135 + +[#134]: https://github.com/jprichardson/node-fs-extra/pull/134 + +[#133]: https://github.com/jprichardson/node-fs-extra/pull/133 + +[#132]: https://github.com/jprichardson/node-fs-extra/pull/132 + +[#131]: https://github.com/jprichardson/node-fs-extra/issues/131 + +[#130]: https://github.com/jprichardson/node-fs-extra/pull/130 + +[#129]: https://github.com/jprichardson/node-fs-extra/pull/129 + +[#128]: https://github.com/jprichardson/node-fs-extra/issues/128 + +[#127]: https://github.com/jprichardson/node-fs-extra/issues/127 + +[#126]: https://github.com/jprichardson/node-fs-extra/issues/126 + +[#125]: https://github.com/jprichardson/node-fs-extra/issues/125 + +[#124]: https://github.com/jprichardson/node-fs-extra/issues/124 + +[#123]: https://github.com/jprichardson/node-fs-extra/issues/123 + +[#122]: https://github.com/jprichardson/node-fs-extra/pull/122 + +[#121]: https://github.com/jprichardson/node-fs-extra/issues/121 + +[#120]: https://github.com/jprichardson/node-fs-extra/issues/120 + +[#119]: https://github.com/jprichardson/node-fs-extra/issues/119 + +[#118]: https://github.com/jprichardson/node-fs-extra/pull/118 + +[#117]: https://github.com/jprichardson/node-fs-extra/pull/117 + +[#116]: https://github.com/jprichardson/node-fs-extra/issues/116 + +[#115]: https://github.com/jprichardson/node-fs-extra/issues/115 + +[#114]: https://github.com/jprichardson/node-fs-extra/issues/114 + +[#113]: https://github.com/jprichardson/node-fs-extra/issues/113 + +[#112]: https://github.com/jprichardson/node-fs-extra/pull/112 + +[#111]: https://github.com/jprichardson/node-fs-extra/pull/111 + +[#110]: https://github.com/jprichardson/node-fs-extra/pull/110 + +[#109]: https://github.com/jprichardson/node-fs-extra/issues/109 + +[#108]: https://github.com/jprichardson/node-fs-extra/issues/108 + +[#107]: https://github.com/jprichardson/node-fs-extra/pull/107 + +[#106]: https://github.com/jprichardson/node-fs-extra/issues/106 + +[#105]: https://github.com/jprichardson/node-fs-extra/issues/105 + +[#104]: https://github.com/jprichardson/node-fs-extra/issues/104 + +[#103]: https://github.com/jprichardson/node-fs-extra/issues/103 + +[#102]: https://github.com/jprichardson/node-fs-extra/issues/102 + +[#101]: https://github.com/jprichardson/node-fs-extra/issues/101 + +[#100]: https://github.com/jprichardson/node-fs-extra/pull/100 + +[#99]: https://github.com/jprichardson/node-fs-extra/issues/99 + +[#98]: https://github.com/jprichardson/node-fs-extra/issues/98 + +[#97]: https://github.com/jprichardson/node-fs-extra/pull/97 + +[#96]: https://github.com/jprichardson/node-fs-extra/issues/96 + +[#95]: https://github.com/jprichardson/node-fs-extra/pull/95 + +[#94]: https://github.com/jprichardson/node-fs-extra/issues/94 + +[#93]: https://github.com/jprichardson/node-fs-extra/issues/93 + +[#92]: https://github.com/jprichardson/node-fs-extra/issues/92 + +[#91]: https://github.com/jprichardson/node-fs-extra/issues/91 + +[#90]: https://github.com/jprichardson/node-fs-extra/issues/90 + +[#89]: https://github.com/jprichardson/node-fs-extra/issues/89 + +[#88]: https://github.com/jprichardson/node-fs-extra/issues/88 + +[#87]: https://github.com/jprichardson/node-fs-extra/issues/87 + +[#86]: 
https://github.com/jprichardson/node-fs-extra/issues/86 + +[#85]: https://github.com/jprichardson/node-fs-extra/pull/85 + +[#84]: https://github.com/jprichardson/node-fs-extra/issues/84 + +[#83]: https://github.com/jprichardson/node-fs-extra/issues/83 + +[#82]: https://github.com/jprichardson/node-fs-extra/pull/82 + +[#81]: https://github.com/jprichardson/node-fs-extra/issues/81 + +[#80]: https://github.com/jprichardson/node-fs-extra/pull/80 + +[#79]: https://github.com/jprichardson/node-fs-extra/issues/79 + +[#78]: https://github.com/jprichardson/node-fs-extra/pull/78 + +[#77]: https://github.com/jprichardson/node-fs-extra/issues/77 + +[#76]: https://github.com/jprichardson/node-fs-extra/issues/76 + +[#75]: https://github.com/jprichardson/node-fs-extra/issues/75 + +[#74]: https://github.com/jprichardson/node-fs-extra/pull/74 + +[#73]: https://github.com/jprichardson/node-fs-extra/pull/73 + +[#72]: https://github.com/jprichardson/node-fs-extra/pull/72 + +[#71]: https://github.com/jprichardson/node-fs-extra/issues/71 + +[#70]: https://github.com/jprichardson/node-fs-extra/issues/70 + +[#69]: https://github.com/jprichardson/node-fs-extra/issues/69 + +[#68]: https://github.com/jprichardson/node-fs-extra/issues/68 + +[#67]: https://github.com/jprichardson/node-fs-extra/issues/67 + +[#66]: https://github.com/jprichardson/node-fs-extra/issues/66 + +[#65]: https://github.com/jprichardson/node-fs-extra/issues/65 + +[#64]: https://github.com/jprichardson/node-fs-extra/issues/64 + +[#63]: https://github.com/jprichardson/node-fs-extra/issues/63 + +[#62]: https://github.com/jprichardson/node-fs-extra/issues/62 + +[#61]: https://github.com/jprichardson/node-fs-extra/issues/61 + +[#60]: https://github.com/jprichardson/node-fs-extra/issues/60 + +[#59]: https://github.com/jprichardson/node-fs-extra/issues/59 + +[#58]: https://github.com/jprichardson/node-fs-extra/issues/58 + +[#57]: https://github.com/jprichardson/node-fs-extra/issues/57 + +[#56]: https://github.com/jprichardson/node-fs-extra/issues/56 + +[#55]: https://github.com/jprichardson/node-fs-extra/issues/55 + +[#54]: https://github.com/jprichardson/node-fs-extra/issues/54 + +[#53]: https://github.com/jprichardson/node-fs-extra/issues/53 + +[#52]: https://github.com/jprichardson/node-fs-extra/pull/52 + +[#51]: https://github.com/jprichardson/node-fs-extra/pull/51 + +[#50]: https://github.com/jprichardson/node-fs-extra/issues/50 + +[#49]: https://github.com/jprichardson/node-fs-extra/pull/49 + +[#48]: https://github.com/jprichardson/node-fs-extra/issues/48 + +[#47]: https://github.com/jprichardson/node-fs-extra/issues/47 + +[#46]: https://github.com/jprichardson/node-fs-extra/pull/46 + +[#45]: https://github.com/jprichardson/node-fs-extra/pull/45 + +[#44]: https://github.com/jprichardson/node-fs-extra/pull/44 + +[#43]: https://github.com/jprichardson/node-fs-extra/issues/43 + +[#42]: https://github.com/jprichardson/node-fs-extra/issues/42 + +[#41]: https://github.com/jprichardson/node-fs-extra/pull/41 + +[#40]: https://github.com/jprichardson/node-fs-extra/issues/40 + +[#39]: https://github.com/jprichardson/node-fs-extra/pull/39 + +[#38]: https://github.com/jprichardson/node-fs-extra/pull/38 + +[#37]: https://github.com/jprichardson/node-fs-extra/issues/37 + +[#36]: https://github.com/jprichardson/node-fs-extra/pull/36 + +[#35]: https://github.com/jprichardson/node-fs-extra/pull/35 + +[#34]: https://github.com/jprichardson/node-fs-extra/issues/34 + +[#33]: https://github.com/jprichardson/node-fs-extra/pull/33 + +[#32]: 
https://github.com/jprichardson/node-fs-extra/issues/32 + +[#31]: https://github.com/jprichardson/node-fs-extra/issues/31 + +[#30]: https://github.com/jprichardson/node-fs-extra/issues/30 + +[#29]: https://github.com/jprichardson/node-fs-extra/issues/29 + +[#28]: https://github.com/jprichardson/node-fs-extra/issues/28 + +[#27]: https://github.com/jprichardson/node-fs-extra/issues/27 + +[#26]: https://github.com/jprichardson/node-fs-extra/issues/26 + +[#25]: https://github.com/jprichardson/node-fs-extra/pull/25 + +[#24]: https://github.com/jprichardson/node-fs-extra/issues/24 + +[#23]: https://github.com/jprichardson/node-fs-extra/issues/23 + +[#22]: https://github.com/jprichardson/node-fs-extra/pull/22 + +[#21]: https://github.com/jprichardson/node-fs-extra/issues/21 + +[#20]: https://github.com/jprichardson/node-fs-extra/issues/20 + +[#19]: https://github.com/jprichardson/node-fs-extra/issues/19 + +[#18]: https://github.com/jprichardson/node-fs-extra/pull/18 + +[#17]: https://github.com/jprichardson/node-fs-extra/issues/17 + +[#16]: https://github.com/jprichardson/node-fs-extra/pull/16 + +[#15]: https://github.com/jprichardson/node-fs-extra/issues/15 + +[#14]: https://github.com/jprichardson/node-fs-extra/issues/14 + +[#13]: https://github.com/jprichardson/node-fs-extra/issues/13 + +[#12]: https://github.com/jprichardson/node-fs-extra/issues/12 + +[#11]: https://github.com/jprichardson/node-fs-extra/issues/11 + +[#10]: https://github.com/jprichardson/node-fs-extra/issues/10 + +[#9]: https://github.com/jprichardson/node-fs-extra/issues/9 + +[#8]: https://github.com/jprichardson/node-fs-extra/pull/8 + +[#6]: https://github.com/jprichardson/node-fs-extra/issues/6 + +[#5]: https://github.com/jprichardson/node-fs-extra/issues/5 + +[#4]: https://github.com/jprichardson/node-fs-extra/issues/4 + +[#3]: https://github.com/jprichardson/node-fs-extra/pull/3 + +[#2]: https://github.com/jprichardson/node-fs-extra/issues/2 + +[#1]: https://github.com/jprichardson/node-fs-extra/issues/1 diff --git a/node_modules/fs-extra/LICENSE b/node_modules/fs-extra/LICENSE new file mode 100644 index 0000000..f109d23 --- /dev/null +++ b/node_modules/fs-extra/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2016 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/fs-extra/README.md b/node_modules/fs-extra/README.md new file mode 100644 index 0000000..00579c3 --- /dev/null +++ b/node_modules/fs-extra/README.md @@ -0,0 +1,586 @@ +Node.js: fs-extra +================= + +[![build status](https://api.travis-ci.org/jprichardson/node-fs-extra.svg)](http://travis-ci.org/jprichardson/node-fs-extra) +[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-fs-extra/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-fs-extra/branch/master) +[![downloads per month](http://img.shields.io/npm/dm/fs-extra.svg)](https://www.npmjs.org/package/fs-extra) +[![Coverage Status](https://img.shields.io/coveralls/jprichardson/node-fs-extra.svg)](https://coveralls.io/r/jprichardson/node-fs-extra) + + +`fs-extra` adds file system methods that aren't included in the native `fs` module. It is a drop-in replacement for `fs`. + +**NOTE (2016-01-13):** Node v0.10 will be unsupported AFTER Ubuntu LTS releases their next version AND [Amazon Lambda +upgrades](http://docs.aws.amazon.com/lambda/latest/dg/current-supported-versions.html) its Node.js runtime from v0.10. +I anticipate this will happen around late spring / summer 2016. Please prepare accordingly. After this, we'll make a strong push +for a 1.0.0 release. + + +Why? +---- + +I got tired of including `mkdirp`, `rimraf`, and `cp -r` in most of my projects. + + + + +Installation +------------ + + npm install --save fs-extra + + + +Usage +----- + +`fs-extra` is a drop-in replacement for native `fs`. All methods in `fs` are unmodified and attached to `fs-extra`. + +You don't ever need to include the original `fs` module again: + +```js +var fs = require('fs') // this is no longer necessary +``` + +You can now do this: + +```js +var fs = require('fs-extra') +``` + +Or, if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want +to name your `fs` variable `fse` like so: + +```js +var fse = require('fs-extra') +``` + +You can also keep both, but it's redundant: + +```js +var fs = require('fs') +var fse = require('fs-extra') +``` + +Sync vs Async +------------- +Most methods are async by default (they take a callback with an `Error` as the first argument). + +Sync methods, on the other hand, will throw if an error occurs.
+ +Example: + +```js +var fs = require('fs-extra') + +fs.copy('/tmp/myfile', '/tmp/mynewfile', function (err) { + if (err) return console.error(err) + console.log("success!") +}); + +try { + fs.copySync('/tmp/myfile', '/tmp/mynewfile') + console.log("success!") +} catch (err) { + console.error(err) +} +``` + + +Methods +------- +- [copy](#copy) +- [copySync](#copy) +- [createOutputStream](#createoutputstreamfile-options) +- [emptyDir](#emptydirdir-callback) +- [emptyDirSync](#emptydirdir-callback) +- [ensureFile](#ensurefilefile-callback) +- [ensureFileSync](#ensurefilefile-callback) +- [ensureDir](#ensuredirdir-callback) +- [ensureDirSync](#ensuredirdir-callback) +- [ensureLink](#ensurelinksrcpath-dstpath-callback) +- [ensureLinkSync](#ensurelinksrcpath-dstpath-callback) +- [ensureSymlink](#ensuresymlinksrcpath-dstpath-type-callback) +- [ensureSymlinkSync](#ensuresymlinksrcpath-dstpath-type-callback) +- [mkdirs](#mkdirsdir-callback) +- [mkdirsSync](#mkdirsdir-callback) +- [move](#movesrc-dest-options-callback) +- [outputFile](#outputfilefile-data-options-callback) +- [outputFileSync](#outputfilefile-data-options-callback) +- [outputJson](#outputjsonfile-data-options-callback) +- [outputJsonSync](#outputjsonfile-data-options-callback) +- [readJson](#readjsonfile-options-callback) +- [readJsonSync](#readjsonfile-options-callback) +- [remove](#removedir-callback) +- [removeSync](#removedir-callback) +- [walk](#walk) +- [writeJson](#writejsonfile-object-options-callback) +- [writeJsonSync](#writejsonfile-object-options-callback) + + +**NOTE:** You can still use the native Node.js methods. They are copied over to `fs-extra`. + + +### copy() + +**copy(src, dest, [options], callback)** + + +Copy a file or directory. The directory can have contents. Like `cp -r`. + +Options: +- clobber (boolean): overwrite existing file or directory +- preserveTimestamps (boolean): will set last modification and access times to the ones of the original source files, default is `false`. +- filter: Function or RegExp to filter copied files. If function, return true to include, false to exclude. If RegExp, same as function, where `filter` is `filter.test`. + +Sync: `copySync()` + +Example: + +```js +var fs = require('fs-extra') + +fs.copy('/tmp/myfile', '/tmp/mynewfile', function (err) { + if (err) return console.error(err) + console.log("success!") +}) // copies file + +fs.copy('/tmp/mydir', '/tmp/mynewdir', function (err) { + if (err) return console.error(err) + console.log('success!') +}) // copies directory, even if it has subdirectories or files +``` + +### createOutputStream(file, [options]) + +Exactly like `createWriteStream`, but if the directory does not exist, it's created. + +Examples: + +```js +var fs = require('fs-extra') + +// if /tmp/some does not exist, it is created +var ws = fs.createOutputStream('/tmp/some/file.txt') +ws.write('hello\n') +``` + +Note on naming: you'll notice that fs-extra has some methods like `fs.outputJson`, `fs.outputFile`, etc that use the +word `output` to denote that if the containing directory does not exist, it should be created. If you can think of a +better succinct nomenclature for these methods, please open an issue for discussion. Thanks. + + +### emptyDir(dir, [callback]) + +Ensures that a directory is empty. Deletes directory contents if the directory is not empty. If the directory does not exist, it is created. The directory itself is not deleted. 
+ +Alias: `emptydir()` + +Sync: `emptyDirSync()`, `emptydirSync()` + +Example: + +```js +var fs = require('fs-extra') + +// assume this directory has a lot of files and folders +fs.emptyDir('/tmp/some/dir', function (err) { + if (!err) console.log('success!') +}) +``` + + +### ensureFile(file, callback) + +Ensures that the file exists. If the file that is requested to be created is in directories that do not exist, these directories are created. If the file already exists, it is **NOT MODIFIED**. + +Alias: `createFile()` + +Sync: `createFileSync()`,`ensureFileSync()` + + +Example: + +```js +var fs = require('fs-extra') + +var file = '/tmp/this/path/does/not/exist/file.txt' +fs.ensureFile(file, function (err) { + console.log(err) // => null + // file has now been created, including the directory it is to be placed in +}) +``` + + +### ensureDir(dir, callback) + +Ensures that the directory exists. If the directory structure does not exist, it is created. + +Sync: `ensureDirSync()` + + +Example: + +```js +var fs = require('fs-extra') + +var dir = '/tmp/this/path/does/not/exist' +fs.ensureDir(dir, function (err) { + console.log(err) // => null + // dir has now been created, including the directory it is to be placed in +}) +``` + + +### ensureLink(srcpath, dstpath, callback) + +Ensures that the link exists. If the directory structure does not exist, it is created. + +Sync: `ensureLinkSync()` + + +Example: + +```js +var fs = require('fs-extra') + +var srcpath = '/tmp/file.txt' +var dstpath = '/tmp/this/path/does/not/exist/file.txt' +fs.ensureLink(srcpath, dstpath, function (err) { + console.log(err) // => null + // link has now been created, including the directory it is to be placed in +}) +``` + + +### ensureSymlink(srcpath, dstpath, [type], callback) + +Ensures that the symlink exists. If the directory structure does not exist, it is created. + +Sync: `ensureSymlinkSync()` + + +Example: + +```js +var fs = require('fs-extra') + +var srcpath = '/tmp/file.txt' +var dstpath = '/tmp/this/path/does/not/exist/file.txt' +fs.ensureSymlink(srcpath, dstpath, function (err) { + console.log(err) // => null + // symlink has now been created, including the directory it is to be placed in +}) +``` + + +### mkdirs(dir, callback) + +Creates a directory. If the parent hierarchy doesn't exist, it's created. Like `mkdir -p`. + +Alias: `mkdirp()` + +Sync: `mkdirsSync()` / `mkdirpSync()` + + +Examples: + +```js +var fs = require('fs-extra') + +fs.mkdirs('/tmp/some/long/path/that/prob/doesnt/exist', function (err) { + if (err) return console.error(err) + console.log("success!") +}) + +fs.mkdirsSync('/tmp/another/path') +``` + + +### move(src, dest, [options], callback) + +Moves a file or directory, even across devices. + +Options: +- clobber (boolean): overwrite existing file or directory +- limit (number): number of concurrent moves, see ncp for more information + +Example: + +```js +var fs = require('fs-extra') + +fs.move('/tmp/somefile', '/tmp/does/not/exist/yet/somefile', function (err) { + if (err) return console.error(err) + console.log("success!") +}) +``` + + +### outputFile(file, data, [options], callback) + +Almost the same as `writeFile` (i.e. it [overwrites](http://pages.citebite.com/v2o5n8l2f5reb)), except that if the parent directory does not exist, it's created. `options` are what you'd pass to [`fs.writeFile()`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback). 
+ +Sync: `outputFileSync()` + + +Example: + +```js +var fs = require('fs-extra') +var file = '/tmp/this/path/does/not/exist/file.txt' + +fs.outputFile(file, 'hello!', function (err) { + console.log(err) // => null + + fs.readFile(file, 'utf8', function (err, data) { + console.log(data) // => hello! + }) +}) +``` + + + +### outputJson(file, data, [options], callback) + +Almost the same as `writeJson`, except that if the directory does not exist, it's created. +`options` are what you'd pass to [`jsonFile.writeFile()`](https://github.com/jprichardson/node-jsonfile#writefilefilename-options-callback). + +Alias: `outputJSON()` + +Sync: `outputJsonSync()`, `outputJSONSync()` + + +Example: + +```js +var fs = require('fs-extra') +var file = '/tmp/this/path/does/not/exist/file.txt' + +fs.outputJson(file, {name: 'JP'}, function (err) { + console.log(err) // => null + + fs.readJson(file, function (err, data) { + console.log(data.name) // => JP + }) +}) +``` + + + +### readJson(file, [options], callback) + +Reads a JSON file and then parses it into an object. `options` are the same +that you'd pass to [`jsonFile.readFile`](https://github.com/jprichardson/node-jsonfile#readfilefilename-options-callback). + +Alias: `readJSON()` + +Sync: `readJsonSync()`, `readJSONSync()` + + +Example: + +```js +var fs = require('fs-extra') + +fs.readJson('./package.json', function (err, packageObj) { + console.log(packageObj.version) // => 0.1.3 +}) +``` + +`readJsonSync()` can take a `throws` option set to `false`, and it won't throw if the JSON is invalid. Example: + +```js +var fs = require('fs-extra') +var path = require('path') +var file = path.join('/tmp/some-invalid.json') +var data = '{not valid JSON' +fs.writeFileSync(file, data) + +var obj = fs.readJsonSync(file, {throws: false}) +console.log(obj) // => null +``` + + +### remove(dir, callback) + +Removes a file or directory. The directory can have contents. Like `rm -rf`. + +Sync: `removeSync()` + + +Examples: + +```js +var fs = require('fs-extra') + +fs.remove('/tmp/myfile', function (err) { + if (err) return console.error(err) + + console.log('success!') +}) + +fs.removeSync('/home/jprichardson') // I just deleted my entire HOME directory. +``` + +### walk() + +**walk(dir, [streamOptions])** + +The function `walk()` from the module [`klaw`](https://github.com/jprichardson/node-klaw). + +Returns a [Readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) that iterates +through every file and directory starting with `dir` as the root. Every `read()` or `data` event +returns an object with two properties: `path` and `stats`. `path` is the full path of the file and +`stats` is an instance of [fs.Stats](https://nodejs.org/api/fs.html#fs_class_fs_stats). + +Streams 1 (push) example: + +```js +var items = [] // files, directories, symlinks, etc +fse.walk(TEST_DIR) + .on('data', function (item) { + items.push(item.path) + }) + .on('end', function () { + console.dir(items) // => [ ... array of files] + }) +``` + +Streams 2 & 3 (pull) example: + +```js +var items = [] // files, directories, symlinks, etc +fse.walk(TEST_DIR) + .on('readable', function () { + var item + while ((item = this.read())) { + items.push(item.path) + } + }) + .on('end', function () { + console.dir(items) // => [ ... array of files] + }) +``` + +If you're not sure of the differences between Node.js streams 1, 2, and 3, then I'd +recommend this resource as a good starting point: https://strongloop.com/strongblog/whats-new-io-js-beta-streams3/.
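+ +A minimal sketch along the same lines (it assumes the `fse` and `TEST_DIR` from the examples above) keeps only directories by checking `item.stats`: + +```js +var dirs = [] // directories only +fse.walk(TEST_DIR) + .on('data', function (item) { + // item.stats is an fs.Stats instance, so the usual checks apply + if (item.stats.isDirectory()) dirs.push(item.path) + }) + .on('end', function () { + console.dir(dirs) // => [ ... array of directories] + }) +```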
+ +**See [`klaw` documentation](https://github.com/jprichardson/node-klaw) for more detailed usage.** + + +### writeJson(file, object, [options], callback) + +Writes an object to a JSON file. `options` are the same that +you'd pass to [`jsonFile.writeFile()`](https://github.com/jprichardson/node-jsonfile#writefilefilename-options-callback). + +Alias: `writeJSON()` + +Sync: `writeJsonSync()`, `writeJSONSync()` + +Example: + +```js +var fs = require('fs-extra') +fs.writeJson('./package.json', {name: 'fs-extra'}, function (err) { + console.log(err) +}) +``` + + +Third Party +----------- + +### Promises + +Use [Bluebird](https://github.com/petkaantonov/bluebird). See https://github.com/petkaantonov/bluebird/blob/master/API.md#promisification. `fs-extra` is +explicitly listed as supported. + +```js +var Promise = require('bluebird') +var fs = Promise.promisifyAll(require('fs-extra')) +``` + +Or you can use the package [`fs-extra-promise`](https://github.com/overlookmotel/fs-extra-promise) that marries the two together. + + +### TypeScript + +If you like TypeScript, you can use `fs-extra` with it: https://github.com/borisyankov/DefinitelyTyped/tree/master/fs-extra + + +### File / Directory Watching + +If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar). + + +### Misc. + +- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls. + + + +Hacking on fs-extra +------------------- + +Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project +uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you, +you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`. + +[![js-standard-style](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +What's needed? +- First, take a look at existing issues. Those are probably going to be where the priority lies. +- More tests for edge cases. Specifically on different platforms. There can never be enough tests. +- Really really help with the Windows tests. See appveyor outputs for more info. +- Improve test coverage. See coveralls output for more info. +- A directory walker. Probably this one: https://github.com/thlorenz/readdirp imported into `fs-extra`. +- After the directory walker is integrated, any function that needs to traverse directories like +`copy`, `remove`, or `mkdirs` should be built on top of it. + +Note: If you make any big changes, **you should definitely post an issue for discussion first.** + + +Naming +------ + +I put a lot of thought into the naming of these functions. Inspired by @coolaj86's request. So he deserves much of the credit for raising the issue. See discussion(s) here: + +* https://github.com/jprichardson/node-fs-extra/issues/2 +* https://github.com/flatiron/utile/issues/11 +* https://github.com/ryanmcgrath/wrench-js/issues/29 +* https://github.com/substack/node-mkdirp/issues/17 + +First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with the Node.js own naming schemes. + +For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. 
Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc. + +We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`? + +My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's supposed to be contained in does not exist, then in most cases you'll want to create that too. + +So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory, regardless of whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`. + + +Credit +------ + +`fs-extra` wouldn't be possible without using the modules from the following authors: + +- [Isaac Schlueter](https://github.com/isaacs) +- [Charlie McConnell](https://github.com/avianflu) +- [James Halliday](https://github.com/substack) +- [Andrew Kelley](https://github.com/andrewrk) + + + + +License +------- + +Licensed under MIT + +Copyright (c) 2011-2016 [JP Richardson](https://github.com/jprichardson) + +[1]: http://nodejs.org/docs/latest/api/fs.html + + +[jsonfile]: https://github.com/jprichardson/node-jsonfile diff --git a/node_modules/fs-extra/lib/copy-sync/copy-file-sync.js b/node_modules/fs-extra/lib/copy-sync/copy-file-sync.js new file mode 100644 index 0000000..c6c5711 --- /dev/null +++ b/node_modules/fs-extra/lib/copy-sync/copy-file-sync.js @@ -0,0 +1,39 @@ +var fs = require('graceful-fs') + +var BUF_LENGTH = 64 * 1024 +var _buff = new Buffer(BUF_LENGTH) + +function copyFileSync (srcFile, destFile, options) { + var clobber = options.clobber + var preserveTimestamps = options.preserveTimestamps + + if (fs.existsSync(destFile)) { + if (clobber) { + fs.chmodSync(destFile, parseInt('777', 8)) + fs.unlinkSync(destFile) + } else { + throw Error('EEXIST') + } + } + + var fdr = fs.openSync(srcFile, 'r') + var stat = fs.fstatSync(fdr) + var fdw = fs.openSync(destFile, 'w', stat.mode) + var bytesRead = 1 + var pos = 0 + + while (bytesRead > 0) { + bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos) + fs.writeSync(fdw, _buff, 0, bytesRead) + pos += bytesRead + } + + if (preserveTimestamps) { + fs.futimesSync(fdw, stat.atime, stat.mtime) + } + + fs.closeSync(fdr) + fs.closeSync(fdw) +} + +module.exports = copyFileSync diff --git a/node_modules/fs-extra/lib/copy-sync/copy-sync.js b/node_modules/fs-extra/lib/copy-sync/copy-sync.js new file mode 100644 index 0000000..fdd316a --- /dev/null +++ b/node_modules/fs-extra/lib/copy-sync/copy-sync.js @@ -0,0 +1,47 @@ +var fs = require('graceful-fs') +var path = require('path') +var copyFileSync = require('./copy-file-sync') +var mkdir = require('../mkdirs') + +function copySync (src, dest, options) { + if (typeof options === 'function' || options instanceof RegExp) { + options = {filter: options} + } + + options = options || {} + options.recursive = !!options.recursive + + // default to true for now + options.clobber = 'clobber' in options ? !!options.clobber : true + options.preserveTimestamps = 'preserveTimestamps' in options ?
!!options.preserveTimestamps : false + + options.filter = options.filter || function () { return true } + + var stats = options.recursive ? fs.lstatSync(src) : fs.statSync(src) + var destFolder = path.dirname(dest) + var destFolderExists = fs.existsSync(destFolder) + var performCopy = false + + if (stats.isFile()) { + if (options.filter instanceof RegExp) performCopy = options.filter.test(src) + else if (typeof options.filter === 'function') performCopy = options.filter(src) + + if (performCopy) { + if (!destFolderExists) mkdir.mkdirsSync(destFolder) + copyFileSync(src, dest, {clobber: options.clobber, preserveTimestamps: options.preserveTimestamps}) + } + } else if (stats.isDirectory()) { + if (!fs.existsSync(dest)) mkdir.mkdirsSync(dest) + var contents = fs.readdirSync(src) + contents.forEach(function (content) { + var opts = options + opts.recursive = true + copySync(path.join(src, content), path.join(dest, content), opts) + }) + } else if (options.recursive && stats.isSymbolicLink()) { + var srcPath = fs.readlinkSync(src) + fs.symlinkSync(srcPath, dest) + } +} + +module.exports = copySync diff --git a/node_modules/fs-extra/lib/copy-sync/index.js b/node_modules/fs-extra/lib/copy-sync/index.js new file mode 100644 index 0000000..ebc7e0b --- /dev/null +++ b/node_modules/fs-extra/lib/copy-sync/index.js @@ -0,0 +1,3 @@ +module.exports = { + copySync: require('./copy-sync') +} diff --git a/node_modules/fs-extra/lib/copy/copy.js b/node_modules/fs-extra/lib/copy/copy.js new file mode 100644 index 0000000..d9d2912 --- /dev/null +++ b/node_modules/fs-extra/lib/copy/copy.js @@ -0,0 +1,44 @@ +var fs = require('graceful-fs') +var path = require('path') +var ncp = require('./ncp') +var mkdir = require('../mkdirs') + +function copy (src, dest, options, callback) { + if (typeof options === 'function' && !callback) { + callback = options + options = {} + } else if (typeof options === 'function' || options instanceof RegExp) { + options = {filter: options} + } + callback = callback || function () {} + options = options || {} + + // don't allow src and dest to be the same + var basePath = process.cwd() + var currentPath = path.resolve(basePath, src) + var targetPath = path.resolve(basePath, dest) + if (currentPath === targetPath) return callback(new Error('Source and destination must not be the same.')) + + fs.lstat(src, function (err, stats) { + if (err) return callback(err) + + var dir = null + if (stats.isDirectory()) { + var parts = dest.split(path.sep) + parts.pop() + dir = parts.join(path.sep) + } else { + dir = path.dirname(dest) + } + + fs.exists(dir, function (dirExists) { + if (dirExists) return ncp(src, dest, options, callback) + mkdir.mkdirs(dir, function (err) { + if (err) return callback(err) + ncp(src, dest, options, callback) + }) + }) + }) +} + +module.exports = copy diff --git a/node_modules/fs-extra/lib/copy/index.js b/node_modules/fs-extra/lib/copy/index.js new file mode 100644 index 0000000..3e09016 --- /dev/null +++ b/node_modules/fs-extra/lib/copy/index.js @@ -0,0 +1,3 @@ +module.exports = { + copy: require('./copy') +} diff --git a/node_modules/fs-extra/lib/copy/ncp.js b/node_modules/fs-extra/lib/copy/ncp.js new file mode 100644 index 0000000..d30cae5 --- /dev/null +++ b/node_modules/fs-extra/lib/copy/ncp.js @@ -0,0 +1,243 @@ +// imported from ncp (this is temporary, will rewrite) + +var fs = require('graceful-fs') +var path = require('path') +var utimes = require('../util/utimes') + +function ncp (source, dest, options, callback) { + if (!callback) { + callback = options + 
options = {} + } + + var basePath = process.cwd() + var currentPath = path.resolve(basePath, source) + var targetPath = path.resolve(basePath, dest) + + var filter = options.filter + var transform = options.transform + var clobber = options.clobber !== false + var dereference = options.dereference + var preserveTimestamps = options.preserveTimestamps === true + + var errs = null + + var started = 0 + var finished = 0 + var running = 0 + // this is pretty useless now that we're using graceful-fs + // consider removing + var limit = options.limit || 512 + + startCopy(currentPath) + + function startCopy (source) { + started++ + if (filter) { + if (filter instanceof RegExp) { + if (!filter.test(source)) { + return doneOne(true) + } + } else if (typeof filter === 'function') { + if (!filter(source)) { + return doneOne(true) + } + } + } + return getStats(source) + } + + function getStats (source) { + var stat = dereference ? fs.stat : fs.lstat + if (running >= limit) { + return setImmediate(function () { + getStats(source) + }) + } + running++ + stat(source, function (err, stats) { + if (err) return onError(err) + + // We need to get the mode from the stats object and preserve it. + var item = { + name: source, + mode: stats.mode, + mtime: stats.mtime, // modified time + atime: stats.atime, // access time + stats: stats // temporary + } + + if (stats.isDirectory()) { + return onDir(item) + } else if (stats.isFile() || stats.isCharacterDevice() || stats.isBlockDevice()) { + return onFile(item) + } else if (stats.isSymbolicLink()) { + // Symlinks don't really need to know about the mode. + return onLink(source) + } + }) + } + + function onFile (file) { + var target = file.name.replace(currentPath, targetPath) + isWritable(target, function (writable) { + if (writable) { + copyFile(file, target) + } else { + if (clobber) { + rmFile(target, function () { + copyFile(file, target) + }) + } else { + doneOne() + } + } + }) + } + + function copyFile (file, target) { + var readStream = fs.createReadStream(file.name) + var writeStream = fs.createWriteStream(target, { mode: file.mode }) + + readStream.on('error', onError) + writeStream.on('error', onError) + + if (transform) { + transform(readStream, writeStream, file) + } else { + writeStream.on('open', function () { + readStream.pipe(writeStream) + }) + } + + writeStream.once('finish', function () { + fs.chmod(target, file.mode, function (err) { + if (err) return onError(err) + if (preserveTimestamps) { + utimes.utimesMillis(target, file.atime, file.mtime, function (err) { + if (err) return onError(err) + return doneOne() + }) + } else { + doneOne() + } + }) + }) + } + + function rmFile (file, done) { + fs.unlink(file, function (err) { + if (err) return onError(err) + return done() + }) + } + + function onDir (dir) { + var target = dir.name.replace(currentPath, targetPath) + isWritable(target, function (writable) { + if (writable) { + return mkDir(dir, target) + } + copyDir(dir.name) + }) + } + + function mkDir (dir, target) { + fs.mkdir(target, dir.mode, function (err) { + if (err) return onError(err) + // despite setting mode in fs.mkdir, doesn't seem to work + // so we set it here. 
+ fs.chmod(target, dir.mode, function (err) { + if (err) return onError(err) + copyDir(dir.name) + }) + }) + } + + function copyDir (dir) { + fs.readdir(dir, function (err, items) { + if (err) return onError(err) + items.forEach(function (item) { + startCopy(path.join(dir, item)) + }) + return doneOne() + }) + } + + function onLink (link) { + var target = link.replace(currentPath, targetPath) + fs.readlink(link, function (err, resolvedPath) { + if (err) return onError(err) + checkLink(resolvedPath, target) + }) + } + + function checkLink (resolvedPath, target) { + if (dereference) { + resolvedPath = path.resolve(basePath, resolvedPath) + } + isWritable(target, function (writable) { + if (writable) { + return makeLink(resolvedPath, target) + } + fs.readlink(target, function (err, targetDest) { + if (err) return onError(err) + + if (dereference) { + targetDest = path.resolve(basePath, targetDest) + } + if (targetDest === resolvedPath) { + return doneOne() + } + return rmFile(target, function () { + makeLink(resolvedPath, target) + }) + }) + }) + } + + function makeLink (linkPath, target) { + fs.symlink(linkPath, target, function (err) { + if (err) return onError(err) + return doneOne() + }) + } + + function isWritable (path, done) { + fs.lstat(path, function (err) { + if (err) { + if (err.code === 'ENOENT') return done(true) + return done(false) + } + return done(false) + }) + } + + function onError (err) { + if (options.stopOnError) { + return callback(err) + } else if (!errs && options.errs) { + errs = fs.createWriteStream(options.errs) + } else if (!errs) { + errs = [] + } + if (typeof errs.write === 'undefined') { + errs.push(err) + } else { + errs.write(err.stack + '\n\n') + } + return doneOne() + } + + function doneOne (skipped) { + if (!skipped) running-- + finished++ + if ((started === finished) && (running === 0)) { + if (callback !== undefined) { + return errs ? 
callback(errs) : callback(null) + } + } + } +} + +module.exports = ncp diff --git a/node_modules/fs-extra/lib/empty/index.js b/node_modules/fs-extra/lib/empty/index.js new file mode 100644 index 0000000..a17cbae --- /dev/null +++ b/node_modules/fs-extra/lib/empty/index.js @@ -0,0 +1,47 @@ +var fs = require('fs') +var path = require('path') +var mkdir = require('../mkdirs') +var remove = require('../remove') + +function emptyDir (dir, callback) { + callback = callback || function () {} + fs.readdir(dir, function (err, items) { + if (err) return mkdir.mkdirs(dir, callback) + + items = items.map(function (item) { + return path.join(dir, item) + }) + + deleteItem() + + function deleteItem () { + var item = items.pop() + if (!item) return callback() + remove.remove(item, function (err) { + if (err) return callback(err) + deleteItem() + }) + } + }) +} + +function emptyDirSync (dir) { + var items + try { + items = fs.readdirSync(dir) + } catch (err) { + return mkdir.mkdirsSync(dir) + } + + items.forEach(function (item) { + item = path.join(dir, item) + remove.removeSync(item) + }) +} + +module.exports = { + emptyDirSync: emptyDirSync, + emptydirSync: emptyDirSync, + emptyDir: emptyDir, + emptydir: emptyDir +} diff --git a/node_modules/fs-extra/lib/ensure/file.js b/node_modules/fs-extra/lib/ensure/file.js new file mode 100644 index 0000000..1c9c2de --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/file.js @@ -0,0 +1,43 @@ +var path = require('path') +var fs = require('graceful-fs') +var mkdir = require('../mkdirs') + +function createFile (file, callback) { + function makeFile () { + fs.writeFile(file, '', function (err) { + if (err) return callback(err) + callback() + }) + } + + fs.exists(file, function (fileExists) { + if (fileExists) return callback() + var dir = path.dirname(file) + fs.exists(dir, function (dirExists) { + if (dirExists) return makeFile() + mkdir.mkdirs(dir, function (err) { + if (err) return callback(err) + makeFile() + }) + }) + }) +} + +function createFileSync (file) { + if (fs.existsSync(file)) return + + var dir = path.dirname(file) + if (!fs.existsSync(dir)) { + mkdir.mkdirsSync(dir) + } + + fs.writeFileSync(file, '') +} + +module.exports = { + createFile: createFile, + createFileSync: createFileSync, + // alias + ensureFile: createFile, + ensureFileSync: createFileSync +} diff --git a/node_modules/fs-extra/lib/ensure/index.js b/node_modules/fs-extra/lib/ensure/index.js new file mode 100644 index 0000000..26e8705 --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/index.js @@ -0,0 +1,21 @@ +var file = require('./file') +var link = require('./link') +var symlink = require('./symlink') + +module.exports = { + // file + createFile: file.createFile, + createFileSync: file.createFileSync, + ensureFile: file.createFile, + ensureFileSync: file.createFileSync, + // link + createLink: link.createLink, + createLinkSync: link.createLinkSync, + ensureLink: link.createLink, + ensureLinkSync: link.createLinkSync, + // symlink + createSymlink: symlink.createSymlink, + createSymlinkSync: symlink.createSymlinkSync, + ensureSymlink: symlink.createSymlink, + ensureSymlinkSync: symlink.createSymlinkSync +} diff --git a/node_modules/fs-extra/lib/ensure/link.js b/node_modules/fs-extra/lib/ensure/link.js new file mode 100644 index 0000000..4e4e283 --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/link.js @@ -0,0 +1,58 @@ +var path = require('path') +var fs = require('graceful-fs') +var mkdir = require('../mkdirs') + +function createLink (srcpath, dstpath, callback) { + function makeLink 
(srcpath, dstpath) { + fs.link(srcpath, dstpath, function (err) { + if (err) return callback(err) + callback(null) + }) + } + + fs.exists(dstpath, function (destinationExists) { + if (destinationExists) return callback(null) + fs.lstat(srcpath, function (err, stat) { + if (err) { + err.message = err.message.replace('lstat', 'ensureLink') + return callback(err) + } + + var dir = path.dirname(dstpath) + fs.exists(dir, function (dirExists) { + if (dirExists) return makeLink(srcpath, dstpath) + mkdir.mkdirs(dir, function (err) { + if (err) return callback(err) + makeLink(srcpath, dstpath) + }) + }) + }) + }) +} + +function createLinkSync (srcpath, dstpath, callback) { + var destinationExists = fs.existsSync(dstpath) + if (destinationExists) return undefined + + try { + fs.lstatSync(srcpath) + } catch (err) { + err.message = err.message.replace('lstat', 'ensureLink') + throw err + } + + var dir = path.dirname(dstpath) + var dirExists = fs.existsSync(dir) + if (dirExists) return fs.linkSync(srcpath, dstpath) + mkdir.mkdirsSync(dir) + + return fs.linkSync(srcpath, dstpath) +} + +module.exports = { + createLink: createLink, + createLinkSync: createLinkSync, + // alias + ensureLink: createLink, + ensureLinkSync: createLinkSync +} diff --git a/node_modules/fs-extra/lib/ensure/symlink-paths.js b/node_modules/fs-extra/lib/ensure/symlink-paths.js new file mode 100644 index 0000000..cc27d04 --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/symlink-paths.js @@ -0,0 +1,97 @@ +var path = require('path') +// path.isAbsolute shim for Node.js 0.10 support +path.isAbsolute = (path.isAbsolute) ? path.isAbsolute : require('path-is-absolute') +var fs = require('graceful-fs') + +/** + * Function that returns two types of paths, one relative to symlink, and one + * relative to the current working directory. Checks if path is absolute or + * relative. If the path is relative, this function checks if the path is + * relative to symlink or relative to current working directory. This is an + * initiative to find a smarter `srcpath` to supply when building symlinks. + * This allows you to determine which path to use out of one of three possible + * types of source paths. The first is an absolute path. This is detected by + * `path.isAbsolute()`. When an absolute path is provided, it is checked to + * see if it exists. If it does it's used, if not an error is returned + * (callback)/ thrown (sync). The other two options for `srcpath` are a + * relative url. By default Node's `fs.symlink` works by creating a symlink + * using `dstpath` and expects the `srcpath` to be relative to the newly + * created symlink. If you provide a `srcpath` that does not exist on the file + * system it results in a broken symlink. To minimize this, the function + * checks to see if the 'relative to symlink' source file exists, and if it + * does it will use it. If it does not, it checks if there's a file that + * exists that is relative to the current working directory, if does its used. + * This preserves the expectations of the original fs.symlink spec and adds + * the ability to pass in `relative to current working direcotry` paths. 
+ */ + +function symlinkPaths (srcpath, dstpath, callback) { + if (path.isAbsolute(srcpath)) { + return fs.lstat(srcpath, function (err, stat) { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + 'toCwd': srcpath, + 'toDst': srcpath + }) + }) + } else { + var dstdir = path.dirname(dstpath) + var relativeToDst = path.join(dstdir, srcpath) + return fs.exists(relativeToDst, function (exists) { + if (exists) { + return callback(null, { + 'toCwd': relativeToDst, + 'toDst': srcpath + }) + } else { + return fs.lstat(srcpath, function (err, stat) { + if (err) { + err.message = err.message.replace('lstat', 'ensureSymlink') + return callback(err) + } + return callback(null, { + 'toCwd': srcpath, + 'toDst': path.relative(dstdir, srcpath) + }) + }) + } + }) + } +} + +function symlinkPathsSync (srcpath, dstpath) { + var exists + if (path.isAbsolute(srcpath)) { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('absolute srcpath does not exist') + return { + 'toCwd': srcpath, + 'toDst': srcpath + } + } else { + var dstdir = path.dirname(dstpath) + var relativeToDst = path.join(dstdir, srcpath) + exists = fs.existsSync(relativeToDst) + if (exists) { + return { + 'toCwd': relativeToDst, + 'toDst': srcpath + } + } else { + exists = fs.existsSync(srcpath) + if (!exists) throw new Error('relative srcpath does not exist') + return { + 'toCwd': srcpath, + 'toDst': path.relative(dstdir, srcpath) + } + } + } +} + +module.exports = { + 'symlinkPaths': symlinkPaths, + 'symlinkPathsSync': symlinkPathsSync +} diff --git a/node_modules/fs-extra/lib/ensure/symlink-type.js b/node_modules/fs-extra/lib/ensure/symlink-type.js new file mode 100644 index 0000000..81e3588 --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/symlink-type.js @@ -0,0 +1,27 @@ +var fs = require('graceful-fs') + +function symlinkType (srcpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type + if (type) return callback(null, type) + fs.lstat(srcpath, function (err, stats) { + if (err) return callback(null, 'file') + type = (stats && stats.isDirectory()) ? 'dir' : 'file' + callback(null, type) + }) +} + +function symlinkTypeSync (srcpath, type) { + if (type) return type + try { + var stats = fs.lstatSync(srcpath) + } catch (e) { + return 'file' + } + return (stats && stats.isDirectory()) ? 'dir' : 'file' +} + +module.exports = { + symlinkType: symlinkType, + symlinkTypeSync: symlinkTypeSync +} diff --git a/node_modules/fs-extra/lib/ensure/symlink.js b/node_modules/fs-extra/lib/ensure/symlink.js new file mode 100644 index 0000000..6244790 --- /dev/null +++ b/node_modules/fs-extra/lib/ensure/symlink.js @@ -0,0 +1,62 @@ +var path = require('path') +var fs = require('graceful-fs') +var _mkdirs = require('../mkdirs') +var mkdirs = _mkdirs.mkdirs +var mkdirsSync = _mkdirs.mkdirsSync + +var _symlinkPaths = require('./symlink-paths') +var symlinkPaths = _symlinkPaths.symlinkPaths +var symlinkPathsSync = _symlinkPaths.symlinkPathsSync + +var _symlinkType = require('./symlink-type') +var symlinkType = _symlinkType.symlinkType +var symlinkTypeSync = _symlinkType.symlinkTypeSync + +function createSymlink (srcpath, dstpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? 
false : type + + fs.exists(dstpath, function (destinationExists) { + if (destinationExists) return callback(null) + symlinkPaths(srcpath, dstpath, function (err, relative) { + if (err) return callback(err) + srcpath = relative.toDst + symlinkType(relative.toCwd, type, function (err, type) { + if (err) return callback(err) + var dir = path.dirname(dstpath) + fs.exists(dir, function (dirExists) { + if (dirExists) return fs.symlink(srcpath, dstpath, type, callback) + mkdirs(dir, function (err) { + if (err) return callback(err) + fs.symlink(srcpath, dstpath, type, callback) + }) + }) + }) + }) + }) +} + +function createSymlinkSync (srcpath, dstpath, type, callback) { + callback = (typeof type === 'function') ? type : callback + type = (typeof type === 'function') ? false : type + + var destinationExists = fs.existsSync(dstpath) + if (destinationExists) return undefined + + var relative = symlinkPathsSync(srcpath, dstpath) + srcpath = relative.toDst + type = symlinkTypeSync(relative.toCwd, type) + var dir = path.dirname(dstpath) + var exists = fs.existsSync(dir) + if (exists) return fs.symlinkSync(srcpath, dstpath, type) + mkdirsSync(dir) + return fs.symlinkSync(srcpath, dstpath, type) +} + +module.exports = { + createSymlink: createSymlink, + createSymlinkSync: createSymlinkSync, + // alias + ensureSymlink: createSymlink, + ensureSymlinkSync: createSymlinkSync +} diff --git a/node_modules/fs-extra/lib/index.js b/node_modules/fs-extra/lib/index.js new file mode 100644 index 0000000..2914f53 --- /dev/null +++ b/node_modules/fs-extra/lib/index.js @@ -0,0 +1,38 @@ +var assign = require('./util/assign') + +var fse = {} +var gfs = require('graceful-fs') + +// attach fs methods to fse +Object.keys(gfs).forEach(function (key) { + fse[key] = gfs[key] +}) + +var fs = fse + +assign(fs, require('./copy')) +assign(fs, require('./copy-sync')) +assign(fs, require('./mkdirs')) +assign(fs, require('./remove')) +assign(fs, require('./json')) +assign(fs, require('./move')) +assign(fs, require('./streams')) +assign(fs, require('./empty')) +assign(fs, require('./ensure')) +assign(fs, require('./output')) +assign(fs, require('./walk')) + +module.exports = fs + +// maintain backwards compatibility for awhile +var jsonfile = {} +Object.defineProperty(jsonfile, 'spaces', { + get: function () { + return fs.spaces // found in ./json + }, + set: function (val) { + fs.spaces = val + } +}) + +module.exports.jsonfile = jsonfile // so users of fs-extra can modify jsonFile.spaces diff --git a/node_modules/fs-extra/lib/json/index.js b/node_modules/fs-extra/lib/json/index.js new file mode 100644 index 0000000..b13cf54 --- /dev/null +++ b/node_modules/fs-extra/lib/json/index.js @@ -0,0 +1,9 @@ +var jsonFile = require('./jsonfile') + +jsonFile.outputJsonSync = require('./output-json-sync') +jsonFile.outputJson = require('./output-json') +// aliases +jsonFile.outputJSONSync = require('./output-json-sync') +jsonFile.outputJSON = require('./output-json') + +module.exports = jsonFile diff --git a/node_modules/fs-extra/lib/json/jsonfile.js b/node_modules/fs-extra/lib/json/jsonfile.js new file mode 100644 index 0000000..51d8390 --- /dev/null +++ b/node_modules/fs-extra/lib/json/jsonfile.js @@ -0,0 +1,14 @@ +var jsonFile = require('jsonfile') + +module.exports = { + // jsonfile exports + readJson: jsonFile.readFile, + readJSON: jsonFile.readFile, + readJsonSync: jsonFile.readFileSync, + readJSONSync: jsonFile.readFileSync, + writeJson: jsonFile.writeFile, + writeJSON: jsonFile.writeFile, + writeJsonSync: jsonFile.writeFileSync, + 
writeJSONSync: jsonFile.writeFileSync, + spaces: 2 // default in fs-extra +} diff --git a/node_modules/fs-extra/lib/json/output-json-sync.js b/node_modules/fs-extra/lib/json/output-json-sync.js new file mode 100644 index 0000000..7684843 --- /dev/null +++ b/node_modules/fs-extra/lib/json/output-json-sync.js @@ -0,0 +1,16 @@ +var fs = require('graceful-fs') +var path = require('path') +var jsonFile = require('./jsonfile') +var mkdir = require('../mkdirs') + +function outputJsonSync (file, data, options) { + var dir = path.dirname(file) + + if (!fs.existsSync(dir)) { + mkdir.mkdirsSync(dir) + } + + jsonFile.writeJsonSync(file, data, options) +} + +module.exports = outputJsonSync diff --git a/node_modules/fs-extra/lib/json/output-json.js b/node_modules/fs-extra/lib/json/output-json.js new file mode 100644 index 0000000..7824597 --- /dev/null +++ b/node_modules/fs-extra/lib/json/output-json.js @@ -0,0 +1,24 @@ +var fs = require('graceful-fs') +var path = require('path') +var jsonFile = require('./jsonfile') +var mkdir = require('../mkdirs') + +function outputJson (file, data, options, callback) { + if (typeof options === 'function') { + callback = options + options = {} + } + + var dir = path.dirname(file) + + fs.exists(dir, function (itDoes) { + if (itDoes) return jsonFile.writeJson(file, data, options, callback) + + mkdir.mkdirs(dir, function (err) { + if (err) return callback(err) + jsonFile.writeJson(file, data, options, callback) + }) + }) +} + +module.exports = outputJson diff --git a/node_modules/fs-extra/lib/mkdirs/index.js b/node_modules/fs-extra/lib/mkdirs/index.js new file mode 100644 index 0000000..2611217 --- /dev/null +++ b/node_modules/fs-extra/lib/mkdirs/index.js @@ -0,0 +1,9 @@ +module.exports = { + mkdirs: require('./mkdirs'), + mkdirsSync: require('./mkdirs-sync'), + // alias + mkdirp: require('./mkdirs'), + mkdirpSync: require('./mkdirs-sync'), + ensureDir: require('./mkdirs'), + ensureDirSync: require('./mkdirs-sync') +} diff --git a/node_modules/fs-extra/lib/mkdirs/mkdirs-sync.js b/node_modules/fs-extra/lib/mkdirs/mkdirs-sync.js new file mode 100644 index 0000000..2dc4f23 --- /dev/null +++ b/node_modules/fs-extra/lib/mkdirs/mkdirs-sync.js @@ -0,0 +1,49 @@ +var fs = require('graceful-fs') +var path = require('path') + +var o777 = parseInt('0777', 8) + +function mkdirsSync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts } + } + + var mode = opts.mode + var xfs = opts.fs || fs + + if (mode === undefined) { + mode = o777 & (~process.umask()) + } + if (!made) made = null + + p = path.resolve(p) + + try { + xfs.mkdirSync(p, mode) + made = made || p + } catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = mkdirsSync(path.dirname(p), opts, made) + mkdirsSync(p, opts, made) + break + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. 
+ default: + var stat + try { + stat = xfs.statSync(p) + } catch (err1) { + throw err0 + } + if (!stat.isDirectory()) throw err0 + break + } + } + + return made +} + +module.exports = mkdirsSync diff --git a/node_modules/fs-extra/lib/mkdirs/mkdirs.js b/node_modules/fs-extra/lib/mkdirs/mkdirs.js new file mode 100644 index 0000000..2bdfa04 --- /dev/null +++ b/node_modules/fs-extra/lib/mkdirs/mkdirs.js @@ -0,0 +1,54 @@ +var fs = require('graceful-fs') +var path = require('path') + +var o777 = parseInt('0777', 8) + +function mkdirs (p, opts, callback, made) { + if (typeof opts === 'function') { + callback = opts + opts = {} + } else if (!opts || typeof opts !== 'object') { + opts = { mode: opts } + } + + var mode = opts.mode + var xfs = opts.fs || fs + + if (mode === undefined) { + mode = o777 & (~process.umask()) + } + if (!made) made = null + + callback = callback || function () {} + p = path.resolve(p) + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p + return callback(null, made) + } + switch (er.code) { + case 'ENOENT': + if (path.dirname(p) === p) return callback(er) + mkdirs(path.dirname(p), opts, function (er, made) { + if (er) callback(er, made) + else mkdirs(p, opts, callback, made) + }) + break + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) callback(er, made) + else callback(null, made) + }) + break + } + }) +} + +module.exports = mkdirs diff --git a/node_modules/fs-extra/lib/move/index.js b/node_modules/fs-extra/lib/move/index.js new file mode 100644 index 0000000..f28152f --- /dev/null +++ b/node_modules/fs-extra/lib/move/index.js @@ -0,0 +1,161 @@ +// most of this code was written by Andrew Kelley +// licensed under the BSD license: see +// https://github.com/andrewrk/node-mv/blob/master/package.json + +// this needs a cleanup + +var fs = require('graceful-fs') +var ncp = require('../copy/ncp') +var path = require('path') +var rimraf = require('rimraf') +var mkdirp = require('../mkdirs').mkdirs + +function mv (source, dest, options, callback) { + if (typeof options === 'function') { + callback = options + options = {} + } + + var shouldMkdirp = ('mkdirp' in options) ? options.mkdirp : true + var clobber = ('clobber' in options) ? 
options.clobber : false + + var limit = options.limit || 16 + + if (shouldMkdirp) { + mkdirs() + } else { + doRename() + } + + function mkdirs () { + mkdirp(path.dirname(dest), function (err) { + if (err) return callback(err) + doRename() + }) + } + + function doRename () { + if (clobber) { + fs.rename(source, dest, function (err) { + if (!err) return callback() + + if (err.code === 'ENOTEMPTY' || err.code === 'EEXIST') { + rimraf(dest, function (err) { + if (err) return callback(err) + options.clobber = false // just clobbered it, no need to do it again + mv(source, dest, options, callback) + }) + return + } + + // weird Windows shit + if (err.code === 'EPERM') { + setTimeout(function () { + rimraf(dest, function (err) { + if (err) return callback(err) + options.clobber = false + mv(source, dest, options, callback) + }) + }, 200) + return + } + + if (err.code !== 'EXDEV') return callback(err) + moveAcrossDevice(source, dest, clobber, limit, callback) + }) + } else { + fs.link(source, dest, function (err) { + if (err) { + if (err.code === 'EXDEV' || err.code === 'EISDIR' || err.code === 'EPERM') { + moveAcrossDevice(source, dest, clobber, limit, callback) + return + } + callback(err) + return + } + fs.unlink(source, callback) + }) + } + } +} + +function moveAcrossDevice (source, dest, clobber, limit, callback) { + fs.stat(source, function (err, stat) { + if (err) { + callback(err) + return + } + + if (stat.isDirectory()) { + moveDirAcrossDevice(source, dest, clobber, limit, callback) + } else { + moveFileAcrossDevice(source, dest, clobber, limit, callback) + } + }) +} + +function moveFileAcrossDevice (source, dest, clobber, limit, callback) { + var outFlags = clobber ? 'w' : 'wx' + var ins = fs.createReadStream(source) + var outs = fs.createWriteStream(dest, {flags: outFlags}) + + ins.on('error', function (err) { + ins.destroy() + outs.destroy() + outs.removeListener('close', onClose) + + // may want to create a directory but `out` line above + // creates an empty file for us: See #108 + // don't care about error here + fs.unlink(dest, function () { + // note: `err` here is from the input stream errror + if (err.code === 'EISDIR' || err.code === 'EPERM') { + moveDirAcrossDevice(source, dest, clobber, limit, callback) + } else { + callback(err) + } + }) + }) + + outs.on('error', function (err) { + ins.destroy() + outs.destroy() + outs.removeListener('close', onClose) + callback(err) + }) + + outs.once('close', onClose) + ins.pipe(outs) + + function onClose () { + fs.unlink(source, callback) + } +} + +function moveDirAcrossDevice (source, dest, clobber, limit, callback) { + var options = { + stopOnErr: true, + clobber: false, + limit: limit + } + + function startNcp () { + ncp(source, dest, options, function (errList) { + if (errList) return callback(errList[0]) + rimraf(source, callback) + }) + } + + if (clobber) { + rimraf(dest, function (err) { + if (err) return callback(err) + startNcp() + }) + } else { + startNcp() + } +} + +module.exports = { + move: mv +} diff --git a/node_modules/fs-extra/lib/output/index.js b/node_modules/fs-extra/lib/output/index.js new file mode 100644 index 0000000..e8f45f3 --- /dev/null +++ b/node_modules/fs-extra/lib/output/index.js @@ -0,0 +1,35 @@ +var path = require('path') +var fs = require('graceful-fs') +var mkdir = require('../mkdirs') + +function outputFile (file, data, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = 'utf8' + } + + var dir = path.dirname(file) + fs.exists(dir, function (itDoes) { + if 
(itDoes) return fs.writeFile(file, data, encoding, callback) + + mkdir.mkdirs(dir, function (err) { + if (err) return callback(err) + + fs.writeFile(file, data, encoding, callback) + }) + }) +} + +function outputFileSync (file, data, encoding) { + var dir = path.dirname(file) + if (fs.existsSync(dir)) { + return fs.writeFileSync.apply(fs, arguments) + } + mkdir.mkdirsSync(dir) + fs.writeFileSync.apply(fs, arguments) +} + +module.exports = { + outputFile: outputFile, + outputFileSync: outputFileSync +} diff --git a/node_modules/fs-extra/lib/remove/index.js b/node_modules/fs-extra/lib/remove/index.js new file mode 100644 index 0000000..925de67 --- /dev/null +++ b/node_modules/fs-extra/lib/remove/index.js @@ -0,0 +1,14 @@ +var rimraf = require('rimraf') + +function removeSync (dir) { + return rimraf.sync(dir) +} + +function remove (dir, callback) { + return callback ? rimraf(dir, callback) : rimraf(dir, function () {}) +} + +module.exports = { + remove: remove, + removeSync: removeSync +} diff --git a/node_modules/fs-extra/lib/streams/create-output-stream.js b/node_modules/fs-extra/lib/streams/create-output-stream.js new file mode 100644 index 0000000..e6db2dd --- /dev/null +++ b/node_modules/fs-extra/lib/streams/create-output-stream.js @@ -0,0 +1,43 @@ +var path = require('path') +var fs = require('fs') +var mkdir = require('../mkdirs') +var WriteStream = fs.WriteStream + +function createOutputStream (file, options) { + var dirExists = false + var dir = path.dirname(file) + options = options || {} + + // if fd is set with an actual number, file is created, hence directory is too + if (options.fd) { + return fs.createWriteStream(file, options) + } else { + // this hacks the WriteStream constructor from calling open() + options.fd = -1 + } + + var ws = new WriteStream(file, options) + + var oldOpen = ws.open + ws.open = function () { + ws.fd = null // set actual fd + if (dirExists) return oldOpen.call(ws) + + // this only runs once on first write + mkdir.mkdirs(dir, function (err) { + if (err) { + ws.destroy() + ws.emit('error', err) + return + } + dirExists = true + oldOpen.call(ws) + }) + } + + ws.open() + + return ws +} + +module.exports = createOutputStream diff --git a/node_modules/fs-extra/lib/streams/index.js b/node_modules/fs-extra/lib/streams/index.js new file mode 100644 index 0000000..33be56e --- /dev/null +++ b/node_modules/fs-extra/lib/streams/index.js @@ -0,0 +1,3 @@ +module.exports = { + createOutputStream: require('./create-output-stream') +} diff --git a/node_modules/fs-extra/lib/util/assign.js b/node_modules/fs-extra/lib/util/assign.js new file mode 100644 index 0000000..8e41f9a --- /dev/null +++ b/node_modules/fs-extra/lib/util/assign.js @@ -0,0 +1,14 @@ +// simple mutable assign +function assign () { + var args = [].slice.call(arguments).filter(function (i) { return i }) + var dest = args.shift() + args.forEach(function (src) { + Object.keys(src).forEach(function (key) { + dest[key] = src[key] + }) + }) + + return dest +} + +module.exports = assign diff --git a/node_modules/fs-extra/lib/util/utimes.js b/node_modules/fs-extra/lib/util/utimes.js new file mode 100644 index 0000000..c99b010 --- /dev/null +++ b/node_modules/fs-extra/lib/util/utimes.js @@ -0,0 +1,69 @@ +var fs = require('graceful-fs') +var path = require('path') +var os = require('os') + +// HFS, ext{2,3}, FAT do not, Node.js v0.10 does not +function hasMillisResSync () { + var tmpfile = path.join('millis-test-sync' + Date.now().toString() + Math.random().toString().slice(2)) + tmpfile = path.join(os.tmpdir(), 
tmpfile) + + // 550 millis past UNIX epoch + var d = new Date(1435410243862) + fs.writeFileSync(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141') + var fd = fs.openSync(tmpfile, 'r+') + fs.futimesSync(fd, d, d) + fs.closeSync(fd) + return fs.statSync(tmpfile).mtime > 1435410243000 +} + +function hasMillisRes (callback) { + var tmpfile = path.join('millis-test' + Date.now().toString() + Math.random().toString().slice(2)) + tmpfile = path.join(os.tmpdir(), tmpfile) + + // 550 millis past UNIX epoch + var d = new Date(1435410243862) + fs.writeFile(tmpfile, 'https://github.com/jprichardson/node-fs-extra/pull/141', function (err) { + if (err) return callback(err) + fs.open(tmpfile, 'r+', function (err, fd) { + if (err) return callback(err) + fs.futimes(fd, d, d, function (err) { + if (err) return callback(err) + fs.close(fd, function (err) { + if (err) return callback(err) + fs.stat(tmpfile, function (err, stats) { + if (err) return callback(err) + callback(null, stats.mtime > 1435410243000) + }) + }) + }) + }) + }) +} + +function timeRemoveMillis (timestamp) { + if (typeof timestamp === 'number') { + return Math.floor(timestamp / 1000) * 1000 + } else if (timestamp instanceof Date) { + return new Date(Math.floor(timestamp.getTime() / 1000) * 1000) + } else { + throw new Error('fs-extra: timeRemoveMillis() unknown parameter type') + } +} + +function utimesMillis (path, atime, mtime, callback) { + // if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback) + fs.open(path, 'r+', function (err, fd) { + if (err) return callback(err) + fs.futimes(fd, atime, mtime, function (err) { + if (err) return callback(err) + fs.close(fd, callback) + }) + }) +} + +module.exports = { + hasMillisRes: hasMillisRes, + hasMillisResSync: hasMillisResSync, + timeRemoveMillis: timeRemoveMillis, + utimesMillis: utimesMillis +} diff --git a/node_modules/fs-extra/lib/walk/index.js b/node_modules/fs-extra/lib/walk/index.js new file mode 100644 index 0000000..8626d47 --- /dev/null +++ b/node_modules/fs-extra/lib/walk/index.js @@ -0,0 +1,5 @@ +var klaw = require('klaw') + +module.exports = { + walk: klaw +} diff --git a/node_modules/fs-extra/node_modules/.bin/rimraf b/node_modules/fs-extra/node_modules/.bin/rimraf new file mode 120000 index 0000000..632d6da --- /dev/null +++ b/node_modules/fs-extra/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../../../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/fs-extra/package.json b/node_modules/fs-extra/package.json new file mode 100644 index 0000000..deb4c9a --- /dev/null +++ b/node_modules/fs-extra/package.json @@ -0,0 +1,59 @@ +{ + "name": "fs-extra", + "version": "0.26.7", + "description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. 
Such as mkdir -p, cp -r, and rm -rf.", + "homepage": "https://github.com/jprichardson/node-fs-extra", + "repository": { + "type": "git", + "url": "https://github.com/jprichardson/node-fs-extra" + }, + "keywords": [ + "fs", + "file", + "file system", + "copy", + "directory", + "extra", + "mkdirp", + "mkdir", + "mkdirs", + "recursive", + "json", + "read", + "write", + "extra", + "delete", + "remove", + "touch", + "create", + "text", + "output", + "move" + ], + "author": "JP Richardson ", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.1.2", + "jsonfile": "^2.1.0", + "klaw": "^1.0.0", + "path-is-absolute": "^1.0.0", + "rimraf": "^2.2.8" + }, + "devDependencies": { + "coveralls": "^2.11.2", + "istanbul": "^0.3.5", + "minimist": "^1.1.1", + "mocha": "^2.1.0", + "read-dir-files": "^0.1.1", + "secure-random": "^1.1.1", + "semver": "^4.3.6", + "standard": "^5.3.1" + }, + "main": "./lib/index", + "scripts": { + "coverage": "istanbul cover test.js", + "coveralls": "npm run coverage && coveralls < coverage/lcov.info", + "test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha", + "test": "standard && node test.js" + } +} diff --git a/node_modules/fs.realpath/LICENSE b/node_modules/fs.realpath/LICENSE new file mode 100644 index 0000000..5bd884c --- /dev/null +++ b/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. 
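The fs-extra helpers vendored above — outputFile/outputFileSync, remove, createOutputStream, and the move logic shown earlier in this diff — are easiest to follow with a short usage sketch. The sketch below is illustrative only: it assumes the package's usual `require('fs-extra')` entry point re-exports these modules, and the file paths are hypothetical.

```js
var fse = require('fs-extra')

// outputFile creates any missing parent directories before writing (lib/output).
fse.outputFile('/tmp/demo/nested/dir/file.txt', 'hello', function (err) {
  if (err) return console.error(err)

  // createOutputStream defers the mkdirs call until the first write (lib/streams).
  var ws = fse.createOutputStream('/tmp/demo/logs/out.log')
  ws.end('stream data\n')

  // move renames when it can, and falls back to copy + delete across devices (lib/move).
  fse.move('/tmp/demo/nested', '/tmp/demo/moved', {clobber: true}, function (err) {
    if (err) return console.error(err)

    // remove is a thin wrapper around rimraf (lib/remove).
    fse.remove('/tmp/demo', function (err) {
      if (err) console.error(err)
    })
  })
})
```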
diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md new file mode 100644 index 0000000..a42ceac --- /dev/null +++ b/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. + +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! +// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/node_modules/fs.realpath/index.js b/node_modules/fs.realpath/index.js new file mode 100644 index 0000000..b09c7c7 --- /dev/null +++ b/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/node_modules/fs.realpath/old.js b/node_modules/fs.realpath/old.js new file mode 100644 index 0000000..b40305e --- /dev/null +++ b/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. 
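+  // Example: if p is '/a/link/c' and '/a/link' is a symlink to '/real/dir',
+  // the loop below resolves base '/a/link' to '/real/dir', rewrites p to
+  // '/real/dir/c' via pathModule.resolve(), and restarts the scan from the root.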
+ while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. 
+ return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/node_modules/fs.realpath/package.json b/node_modules/fs.realpath/package.json new file mode 100644 index 0000000..3edc57d --- /dev/null +++ b/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/node_modules/fsevents/.npmignore b/node_modules/fsevents/.npmignore new file mode 100644 index 0000000..65e3ba2 --- /dev/null +++ b/node_modules/fsevents/.npmignore @@ -0,0 +1 @@ +test/ diff --git a/node_modules/fsevents/.travis.yml b/node_modules/fsevents/.travis.yml new file mode 100644 index 0000000..bb0760f --- /dev/null +++ b/node_modules/fsevents/.travis.yml @@ -0,0 +1,102 @@ +language: objective-c + +env: + global: + - secure: "gve1nkeKkwFEG1VAT3i+JwYyAdF0gKXwKx0uxbkBTsmm2M+0MDusohQdFLoEIkSIFktXBIDefoa7iGpLKRfG2VsZLpwJgnvnD0HqbnuR+k+W+bu7BHt4CAaR6GTllsDCjyq9zNyhUThzSnf2WNIpOEF5kHspZlbGfawURuUJH/U=" + - secure: "jqVpmWxxBVXu2X8+XJMpKH0cooc2EKz9xKL2znBfYdNafJORSXcFAVbjCX5mZmVDcgIMwDtm2+gIG4P73hzJ2e3S+y2Z9ROJGyXHa3AxUTvXHQsxqzH8coHHqB8vTvfr0t2O5aKfpvpICpSea39r0hzNoMv6Ie5SwBdqj1YY9K0=" + matrix: + - NODE_VERSION="v6" + - NODE_VERSION="v5" + - NODE_VERSION="v4" + - NODE_VERSION="v0.12" + - NODE_VERSION="v0.10" + - NODE_VERSION="iojs-v3" + - NODE_VERSION="iojs-v2" + - NODE_VERSION="iojs-v1" + +before_install: + + - echo $TRAVIS_OS_NAME + + # commit + # ------------------------ + # The commit message is used to determine the whether to manually + # invoke a binary publish + + - COMMIT_MESSAGE=$(git show -s --format=%B $TRAVIS_COMMIT | tr -d '\n') + + # node + # ------------------------ + + - export PATH=./node_modules/.bin/:$PATH + - rm -rf ~/.nvm && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm + - source ~/.nvm/nvm.sh + - nvm install $NODE_VERSION + - nvm use $NODE_VERSION + - npm install -g npm@latest + - node --version + - npm --version + - nvm --version + + # publish dependencies + # ------------------------ + + - npm install node-gyp -g + - npm install aws-sdk + +install: + + # in the first instance we build from source to create the initial binary + # which can then be used to create a package + + - npm install --build-from-source + - npm test + +before_script: + + # Detemine if a publish is required. + # + # a) we are building a tag + # b) we put [publish binary] in the commit message + + - PUBLISH_BINARY=false + + - if [[ $TRAVIS_BRANCH == `git describe --tags --always HEAD` ]]; then PUBLISH_BINARY=true; fi; # a + - if test "${COMMIT_MESSAGE#*'[publish binary]'}" != "$COMMIT_MESSAGE"; then PUBLISH_BINARY=true; fi; # b + + # package & publish + # ------------------------ + + - if [[ $PUBLISH_BINARY == true ]]; then node-pre-gyp package publish; fi; + + # clean-up + # ------------------------ + + - node-pre-gyp clean + - node-gyp clean + +script: + + # validate + # ------------------------ + # Post publishing a release verify that installing will pull down latest + # binary from remote host + + - INSTALL_RESULT=0 + - if [[ $PUBLISH_BINARY == true ]]; then INSTALL_RESULT=$(npm install --fallback-to-build=false > /dev/null)$? || true; fi; + + - node-pre-gyp clean + + # failure? + # ------------------------ + # if install returned non zero (errored) then we first unpublish and then + # call false so travis will bail at this line. 
+ + - if [[ $INSTALL_RESULT != 0 ]]; then node-pre-gyp unpublish; fi; + - if [[ $INSTALL_RESULT != 0 ]]; then echo "returned $INSTALL_RESULT";false; fi; + +after_success: + + # display all published binaries + + - node-pre-gyp info diff --git a/node_modules/fsevents/LICENSE b/node_modules/fsevents/LICENSE new file mode 100644 index 0000000..4a49ad8 --- /dev/null +++ b/node_modules/fsevents/LICENSE @@ -0,0 +1,22 @@ +MIT License +----------- + +Copyright (C) 2010-2014 Philipp Dunkel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/Readme.md b/node_modules/fsevents/Readme.md new file mode 100644 index 0000000..03ddfa9 --- /dev/null +++ b/node_modules/fsevents/Readme.md @@ -0,0 +1,78 @@ +# fsevents [![NPM](https://nodei.co/npm/fsevents.png)](https://nodei.co/npm/fsevents/) + +Native access to OS X FSEvents in [Node.js](http://nodejs.org/) + +The FSEvents API in OS X allows applications to register for notifications of +changes to a given directory tree. It is a very fast and lightweight alternative +to kqueue. + +This is a low-level library. For a cross-compatible file watching module that +uses fsevents, check out [Chokidar](https://www.npmjs.com/package/chokidar). + +* [Module Site & GitHub](https://github.com/strongloop/fsevents) +* [NPM Page](https://npmjs.org/package/fsevents) + +## Installation + + $ npm install fsevents + +## Usage + +```js +var fsevents = require('fsevents'); +var watcher = fsevents(__dirname); +watcher.on('fsevent', function(path, flags, id) { }); // RAW Event as emitted by OS-X +watcher.on('change', function(path, info) { }); // Common Event for all changes +watcher.start() // To start observation +watcher.stop() // To end observation +``` + +### Events + + * *fsevent* - RAW Event as emitted by OS-X + * *change* - Common Event for all changes + * *created* - A File-System-Item has been created + * *deleted* - A File-System-Item has been deleted + * *modified* - A File-System-Item has been modified + * *moved-out* - A File-System-Item has been moved away from this location + * *moved-in* - A File-System-Item has been moved into this location + +All events except *fsevent* take an *info* object as the second parameter of the callback. 
The structure of this object is: + +```js +{ + "event": "", + "id": , + "path": "", + "type": "", + "changes": { + "inode": true, // Has the iNode Meta-Information changed + "finder": false, // Has the Finder Meta-Data changed + "access": false, // Have the access permissions changed + "xattrs": false // Have the xAttributes changed + }, + "flags": +} +``` + +## MIT License + +Copyright (C) 2010-2014 Philipp Dunkel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/binding.gyp b/node_modules/fsevents/binding.gyp new file mode 100644 index 0000000..06ca003 --- /dev/null +++ b/node_modules/fsevents/binding.gyp @@ -0,0 +1,29 @@ +{ + "targets": [ + { "target_name": "" } + ], + "conditions": [ + ['OS=="mac"', { + "targets": [{ + "target_name": "<(module_name)", + "sources": ["fsevents.cc"], + "xcode_settings": { + "OTHER_LDFLAGS": [ + "-framework CoreFoundation -framework CoreServices" + ] + }, + "include_dirs": [ + "> $(depfile) +# Add extra rules as in (2). +# We remove slashes and replace spaces with new lines; +# remove blank lines; +# delete the first line and append a colon to the remaining lines. +sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ + grep -v '^$$' |\ + sed -e 1d -e 's|$$|:|' \ + >> $(depfile) +rm $(depfile).raw +endef + +# Command definitions: +# - cmd_foo is the actual command to run; +# - quiet_cmd_foo is the brief-output summary of the command. + +quiet_cmd_cc = CC($(TOOLSET)) $@ +cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< + +quiet_cmd_cxx = CXX($(TOOLSET)) $@ +cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< + +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. 
+# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. +quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" + +quiet_cmd_touch = TOUCH $@ +cmd_touch = touch $@ + +quiet_cmd_copy = COPY $@ +# send stderr to /dev/null to ignore messages when linking directories. +cmd_copy = rm -rf "$@" && cp -af "$<" "$@" + +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) + + +# Define an escape_quotes function to escape single quotes. +# This allows us to handle quotes properly as long as we always use +# use single quotes and escape_quotes. +escape_quotes = $(subst ','\'',$(1)) +# This comment is here just to include a ' to unconfuse syntax highlighting. +# Define an escape_vars function to escape '$' variable syntax. +# This allows us to read/write command lines with shell variables (e.g. +# $LD_LIBRARY_PATH), without triggering make substitution. +escape_vars = $(subst $$,$$$$,$(1)) +# Helper that expands to a shell command to echo a string exactly as it is in +# make. This uses printf instead of echo because printf's behaviour with respect +# to escape sequences is more portable than echo's across different shells +# (e.g., dash, bash). +exact_echo = printf '%s\n' '$(call escape_quotes,$(1))' + +# Helper to compare the command we're about to run against the command +# we logged the last time we ran the command. Produces an empty +# string (false) when the commands match. +# Tricky point: Make has no string-equality test function. +# The kernel uses the following, but it seems like it would have false +# positives, where one string reordered its arguments. +# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \ +# $(filter-out $(cmd_$@), $(cmd_$(1)))) +# We instead substitute each for the empty string into the other, and +# say they're equal if both substitutions produce the empty string. +# .d files contain ? instead of spaces, take that into account. +command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\ + $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) + +# Helper that is non-empty when a prerequisite changes. +# Normally make does this implicitly, but we force rules to always run +# so we can check their command lines. +# $? -- new prerequisites +# $| -- order-only dependencies +prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) + +# Helper that executes all postbuilds until one fails. +define do_postbuilds + @E=0;\ + for p in $(POSTBUILDS); do\ + eval $$p;\ + E=$$?;\ + if [ $$E -ne 0 ]; then\ + break;\ + fi;\ + done;\ + if [ $$E -ne 0 ]; then\ + rm -rf "$@";\ + exit $$E;\ + fi +endef + +# do_cmd: run a command via the above cmd_foo names, if necessary. 
+# Should always run for a given target to handle command-line changes. +# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. +# Third argument, if non-zero, makes it do POSTBUILDS processing. +# Note: We intentionally do NOT call dirx for depfile, since it contains ? for +# spaces already and dirx strips the ? characters. +define do_cmd +$(if $(or $(command_changed),$(prereq_changed)), + @$(call exact_echo, $($(quiet)cmd_$(1))) + @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" + $(if $(findstring flock,$(word 2,$(cmd_$1))), + @$(cmd_$(1)) + @echo " $(quiet_cmd_$(1)): Finished", + @$(cmd_$(1)) + ) + @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) + @$(if $(2),$(fixup_dep)) + $(if $(and $(3), $(POSTBUILDS)), + $(call do_postbuilds) + ) +) +endef + +# Declare the "all" target first so it is the default, +# even though we don't have the deps yet. +.PHONY: all +all: + +# make looks for ways to re-generate included makefiles, but in our case, we +# don't have a direct way. Explicitly telling make that it has nothing to do +# for them makes it go faster. +%.d: ; + +# Use FORCE_DO_CMD to force a target to run. Should be coupled with +# do_cmd. +.PHONY: FORCE_DO_CMD +FORCE_DO_CMD: + +TOOLSET := target +# Suffix rules, putting all outputs into $(obj). +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD + @$(call do_cmd,cc,1) + +# Try building from generated source, too. 
+$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD + @$(call do_cmd,cc,1) + +$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD + @$(call do_cmd,cxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.m FORCE_DO_CMD + @$(call do_cmd,objc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.mm FORCE_DO_CMD + @$(call do_cmd,objcxx,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD + @$(call do_cmd,cc,1) +$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD + @$(call do_cmd,cc,1) + + +ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ + $(findstring $(join ^,$(prefix)),\ + $(join ^,.target.mk)))),) + include .target.mk +endif +ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ + $(findstring $(join ^,$(prefix)),\ + $(join ^,action_after_build.target.mk)))),) + include action_after_build.target.mk +endif +ifeq ($(strip $(foreach prefix,$(NO_LOAD),\ + $(findstring $(join ^,$(prefix)),\ + $(join ^,fse.target.mk)))),) + include fse.target.mk +endif + +quiet_cmd_regen_makefile = ACTION Regenerating $@ +cmd_regen_makefile = cd $(srcdir); /Users/eshanker/.nvm/versions/node/v5.0.0/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/Users/eshanker/Code/fsevents/build/config.gypi -I/Users/eshanker/.nvm/versions/node/v5.0.0/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/Users/eshanker/.node-gyp/5.0.0/include/node/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/Users/eshanker/.node-gyp/5.0.0" "-Dnode_gyp_dir=/Users/eshanker/.nvm/versions/node/v5.0.0/lib/node_modules/npm/node_modules/node-gyp" "-Dnode_lib_file=node.lib" "-Dmodule_root_dir=/Users/eshanker/Code/fsevents" binding.gyp +Makefile: $(srcdir)/../../.nvm/versions/node/v5.0.0/lib/node_modules/npm/node_modules/node-gyp/addon.gypi $(srcdir)/../../.node-gyp/5.0.0/include/node/common.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp + $(call do_cmd,regen_makefile) + +# "all" is a concatenation of the "all" targets from all the included +# sub-makefiles. This is just here to clarify. +all: + +# Add in dependency-tracking rules. $(all_deps) is the list of every single +# target in our tree. Only consider the ones with .d (dependency) info: +d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) +ifneq ($(d_files),) + include $(d_files) +endif diff --git a/node_modules/fsevents/build/action_after_build.target.mk b/node_modules/fsevents/build/action_after_build.target.mk new file mode 100644 index 0000000..cf7a250 --- /dev/null +++ b/node_modules/fsevents/build/action_after_build.target.mk @@ -0,0 +1,32 @@ +# This file is generated by gyp; do not edit. + +TOOLSET := target +TARGET := action_after_build +### Generated for copy rule. 
+/Users/eshanker/Code/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node: TOOLSET := $(TOOLSET) +/Users/eshanker/Code/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node: $(builddir)/fse.node FORCE_DO_CMD + $(call do_cmd,copy) + +all_deps += /Users/eshanker/Code/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node +binding_gyp_action_after_build_target_copies = /Users/eshanker/Code/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node + +### Rules for final target. +# Build our special outputs first. +$(obj).target/action_after_build.stamp: | $(binding_gyp_action_after_build_target_copies) + +# Preserve order dependency of special output on deps. +$(binding_gyp_action_after_build_target_copies): | $(builddir)/fse.node + +$(obj).target/action_after_build.stamp: TOOLSET := $(TOOLSET) +$(obj).target/action_after_build.stamp: $(builddir)/fse.node FORCE_DO_CMD + $(call do_cmd,touch) + +all_deps += $(obj).target/action_after_build.stamp +# Add target alias +.PHONY: action_after_build +action_after_build: $(obj).target/action_after_build.stamp + +# Add target alias to "all" target. +.PHONY: all +all: action_after_build + diff --git a/node_modules/fsevents/build/binding.Makefile b/node_modules/fsevents/build/binding.Makefile new file mode 100644 index 0000000..f2ba1f7 --- /dev/null +++ b/node_modules/fsevents/build/binding.Makefile @@ -0,0 +1,6 @@ +# This file is generated by gyp; do not edit. + +export builddir_name ?= ./build/. +.PHONY: all +all: + $(MAKE) fse action_after_build diff --git a/node_modules/fsevents/build/fse.target.mk b/node_modules/fsevents/build/fse.target.mk new file mode 100644 index 0000000..b09ec0e --- /dev/null +++ b/node_modules/fsevents/build/fse.target.mk @@ -0,0 +1,169 @@ +# This file is generated by gyp; do not edit. + +TOOLSET := target +TARGET := fse +DEFS_Debug := \ + '-DNODE_GYP_MODULE_NAME=fse' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DBUILDING_NODE_EXTENSION' \ + '-DDEBUG' \ + '-D_DEBUG' + +# Flags passed to all source files. +CFLAGS_Debug := \ + -O0 \ + -gdwarf-2 \ + -mmacosx-version-min=10.5 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Debug := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Debug := \ + -std=gnu++0x \ + -fno-rtti \ + -fno-exceptions \ + -fno-threadsafe-statics \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Debug := + +# Flags passed to only ObjC++ files. +CFLAGS_OBJCC_Debug := + +INCS_Debug := \ + -I/Users/eshanker/.node-gyp/5.0.0/include/node \ + -I/Users/eshanker/.node-gyp/5.0.0/src \ + -I/Users/eshanker/.node-gyp/5.0.0/deps/uv/include \ + -I/Users/eshanker/.node-gyp/5.0.0/deps/v8/include \ + -I$(srcdir)/node_modules/nan + +DEFS_Release := \ + '-DNODE_GYP_MODULE_NAME=fse' \ + '-D_DARWIN_USE_64_BIT_INODE=1' \ + '-D_LARGEFILE_SOURCE' \ + '-D_FILE_OFFSET_BITS=64' \ + '-DBUILDING_NODE_EXTENSION' + +# Flags passed to all source files. +CFLAGS_Release := \ + -Os \ + -gdwarf-2 \ + -mmacosx-version-min=10.5 \ + -arch x86_64 \ + -Wall \ + -Wendif-labels \ + -W \ + -Wno-unused-parameter + +# Flags passed to only C files. +CFLAGS_C_Release := \ + -fno-strict-aliasing + +# Flags passed to only C++ files. +CFLAGS_CC_Release := \ + -std=gnu++0x \ + -fno-rtti \ + -fno-exceptions \ + -fno-threadsafe-statics \ + -fno-strict-aliasing + +# Flags passed to only ObjC files. +CFLAGS_OBJC_Release := + +# Flags passed to only ObjC++ files. 
+CFLAGS_OBJCC_Release := + +INCS_Release := \ + -I/Users/eshanker/.node-gyp/5.0.0/include/node \ + -I/Users/eshanker/.node-gyp/5.0.0/src \ + -I/Users/eshanker/.node-gyp/5.0.0/deps/uv/include \ + -I/Users/eshanker/.node-gyp/5.0.0/deps/v8/include \ + -I$(srcdir)/node_modules/nan + +OBJS := \ + $(obj).target/$(TARGET)/fsevents.o + +# Add to the list of files we specially track dependencies for. +all_deps += $(OBJS) + +# CFLAGS et al overrides must be target-local. +# See "Target-specific Variable Values" in the GNU Make manual. +$(OBJS): TOOLSET := $(TOOLSET) +$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) +$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE)) +$(OBJS): GYP_OBJCXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE)) + +# Suffix rules, putting all outputs into $(obj). + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# Try building from generated source, too. + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD + @$(call do_cmd,cxx,1) + +# End of this set of suffix rules +### Rules for final target. +LDFLAGS_Debug := \ + -framework CoreFoundation -framework CoreServices \ + -undefined dynamic_lookup \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.5 \ + -arch x86_64 \ + -L$(builddir) + +LIBTOOLFLAGS_Debug := \ + -framework CoreFoundation -framework CoreServices \ + -undefined dynamic_lookup \ + -Wl,-search_paths_first + +LDFLAGS_Release := \ + -framework CoreFoundation -framework CoreServices \ + -undefined dynamic_lookup \ + -Wl,-search_paths_first \ + -mmacosx-version-min=10.5 \ + -arch x86_64 \ + -L$(builddir) + +LIBTOOLFLAGS_Release := \ + -framework CoreFoundation -framework CoreServices \ + -undefined dynamic_lookup \ + -Wl,-search_paths_first + +LIBS := + +$(builddir)/fse.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE)) +$(builddir)/fse.node: LIBS := $(LIBS) +$(builddir)/fse.node: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE)) +$(builddir)/fse.node: TOOLSET := $(TOOLSET) +$(builddir)/fse.node: $(OBJS) FORCE_DO_CMD + $(call do_cmd,solink_module) + +all_deps += $(builddir)/fse.node +# Add target alias +.PHONY: fse +fse: $(builddir)/fse.node + +# Short alias for building this executable. +.PHONY: fse.node +fse.node: $(builddir)/fse.node + +# Add executable to "all" target. +.PHONY: all +all: $(builddir)/fse.node + diff --git a/node_modules/fsevents/build/gyp-mac-tool b/node_modules/fsevents/build/gyp-mac-tool new file mode 100755 index 0000000..976c598 --- /dev/null +++ b/node_modules/fsevents/build/gyp-mac-tool @@ -0,0 +1,612 @@ +#!/usr/bin/env python +# Generated by gyp. Do not edit. +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. 
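+For example, the generated Makefile's cmd_alink rule runs
+"./gyp-mac-tool filter-libtool libtool ... -static -o <archive> <objects>",
+which Dispatch() below routes to ExecFilterLibtool.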
+""" + +import fcntl +import fnmatch +import glob +import json +import os +import plistlib +import re +import shutil +import string +import subprocess +import sys +import tempfile + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool(object): + """This class performs all the Mac tooling steps. The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace('-', '') + + def ExecCopyBundleResource(self, source, dest, convert_to_binary): + """Copies a resource file to the bundle/Resources directory, performing any + necessary compilation on each resource.""" + extension = os.path.splitext(source)[1].lower() + if os.path.isdir(source): + # Copy tree. + # TODO(thakis): This copies file attributes like mtime, while the + # single-file branch below doesn't. This should probably be changed to + # be consistent with the single-file branch. + if os.path.exists(dest): + shutil.rmtree(dest) + shutil.copytree(source, dest) + elif extension == '.xib': + return self._CopyXIBFile(source, dest) + elif extension == '.storyboard': + return self._CopyXIBFile(source, dest) + elif extension == '.strings': + self._CopyStringsFile(source, dest, convert_to_binary) + else: + shutil.copy(source, dest) + + def _CopyXIBFile(self, source, dest): + """Compiles a XIB file with ibtool into a binary plist in the bundle.""" + + # ibtool sometimes crashes with relative paths. See crbug.com/314728. + base = os.path.dirname(os.path.realpath(__file__)) + if os.path.relpath(source): + source = os.path.join(base, source) + if os.path.relpath(dest): + dest = os.path.join(base, dest) + + args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices', + '--output-format', 'human-readable-text', '--compile', dest, source] + ibtool_section_re = re.compile(r'/\*.*\*/') + ibtool_re = re.compile(r'.*note:.*is clipping its content') + ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) + current_section_header = None + for line in ibtoolout.stdout: + if ibtool_section_re.match(line): + current_section_header = line + elif not ibtool_re.match(line): + if current_section_header: + sys.stdout.write(current_section_header) + current_section_header = None + sys.stdout.write(line) + return ibtoolout.returncode + + def _ConvertToBinary(self, dest): + subprocess.check_call([ + 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest]) + + def _CopyStringsFile(self, source, dest, convert_to_binary): + """Copies a .strings file using iconv to reconvert the input into UTF-16.""" + input_code = self._DetectInputEncoding(source) or "UTF-8" + + # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call + # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints + # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing + # semicolon in dictionary. + # on invalid files. Do the same kind of validation. 
+ import CoreFoundation + s = open(source, 'rb').read() + d = CoreFoundation.CFDataCreate(None, s, len(s)) + _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) + if error: + return + + fp = open(dest, 'wb') + fp.write(s.decode(input_code).encode('UTF-16')) + fp.close() + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _DetectInputEncoding(self, file_name): + """Reads the first few bytes from file_name and tries to guess the text + encoding. Returns None as a guess if it can't detect it.""" + fp = open(file_name, 'rb') + try: + header = fp.read(3) + except e: + fp.close() + return None + fp.close() + if header.startswith("\xFE\xFF"): + return "UTF-16" + elif header.startswith("\xFF\xFE"): + return "UTF-16" + elif header.startswith("\xEF\xBB\xBF"): + return "UTF-8" + else: + return None + + def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys): + """Copies the |source| Info.plist to the destination directory |dest|.""" + # Read the source Info.plist into memory. + fd = open(source, 'r') + lines = fd.read() + fd.close() + + # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild). + plist = plistlib.readPlistFromString(lines) + if keys: + plist = dict(plist.items() + json.loads(keys[0]).items()) + lines = plistlib.writePlistToString(plist) + + # Go through all the environment variables and replace them as variables in + # the file. + IDENT_RE = re.compile(r'[/\s]') + for key in os.environ: + if key.startswith('_'): + continue + evar = '${%s}' % key + evalue = os.environ[key] + lines = string.replace(lines, evar, evalue) + + # Xcode supports various suffices on environment variables, which are + # all undocumented. :rfc1034identifier is used in the standard project + # template these days, and :identifier was used earlier. They are used to + # convert non-url characters into things that look like valid urls -- + # except that the replacement character for :identifier, '_' isn't valid + # in a URL either -- oops, hence :rfc1034identifier was born. + evar = '${%s:identifier}' % key + evalue = IDENT_RE.sub('_', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + evar = '${%s:rfc1034identifier}' % key + evalue = IDENT_RE.sub('-', os.environ[key]) + lines = string.replace(lines, evar, evalue) + + # Remove any keys with values that haven't been replaced. + lines = lines.split('\n') + for i in range(len(lines)): + if lines[i].strip().startswith("${"): + lines[i] = None + lines[i - 1] = None + lines = '\n'.join(filter(lambda x: x is not None, lines)) + + # Write out the file with variables replaced. + fd = open(dest, 'w') + fd.write(lines) + fd.close() + + # Now write out PkgInfo file now that the Info.plist file has been + # "compiled". + self._WritePkgInfo(dest) + + if convert_to_binary == 'True': + self._ConvertToBinary(dest) + + def _WritePkgInfo(self, info_plist): + """This writes the PkgInfo file from the data stored in Info.plist.""" + plist = plistlib.readPlist(info_plist) + if not plist: + return + + # Only create PkgInfo for executable types. + package_type = plist['CFBundlePackageType'] + if package_type != 'APPL': + return + + # The format of PkgInfo is eight characters, representing the bundle type + # and bundle signature, each four characters. If that is missing, four + # '?' characters are used instead. + signature_code = plist.get('CFBundleSignature', '????') + if len(signature_code) != 4: # Wrong length resets everything, too. + signature_code = '?' 
* 4 + + dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') + fp = open(dest, 'w') + fp.write('%s%s' % (package_type, signature_code)) + fp.close() + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. + fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) + fcntl.flock(fd, fcntl.LOCK_EX) + return subprocess.call(cmd_list) + + def ExecFilterLibtool(self, *cmd_list): + """Calls libtool and filters out '/path/to/libtool: file: foo.o has no + symbols'.""" + libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$') + libtool_re5 = re.compile( + r'^.*libtool: warning for library: ' + + r'.* the table of contents is empty ' + + r'\(no object file members in the library define global symbols\)$') + env = os.environ.copy() + # Ref: + # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c + # The problem with this flag is that it resets the file mtime on the file to + # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone. + env['ZERO_AR_DATE'] = '1' + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + _, err = libtoolout.communicate() + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print >>sys.stderr, line + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'): + os.utime(cmd_list[i+1], None) + break + return libtoolout.returncode + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split('.')[0] + + CURRENT = 'Current' + RESOURCES = 'Resources' + VERSIONS = 'Versions' + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + def ExecCompileXcassets(self, keys, *inputs): + """Compiles multiple .xcassets files into a single .car file. + + This invokes 'actool' to compile all the inputs .xcassets files. The + |keys| arguments is a json-encoded dictionary of extra arguments to + pass to 'actool' when the asset catalogs contains an application icon + or a launch image. + + Note that 'actool' does not create the Assets.car file if the asset + catalogs does not contains imageset. 
+ """ + command_line = [ + 'xcrun', 'actool', '--output-format', 'human-readable-text', + '--compress-pngs', '--notices', '--warnings', '--errors', + ] + is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ + if is_iphone_target: + platform = os.environ['CONFIGURATION'].split('-')[-1] + if platform not in ('iphoneos', 'iphonesimulator'): + platform = 'iphonesimulator' + command_line.extend([ + '--platform', platform, '--target-device', 'iphone', + '--target-device', 'ipad', '--minimum-deployment-target', + os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile', + os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']), + ]) + else: + command_line.extend([ + '--platform', 'macosx', '--target-device', 'mac', + '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'], + '--compile', + os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']), + ]) + if keys: + keys = json.loads(keys) + for key, value in keys.iteritems(): + arg_name = '--' + key + if isinstance(value, bool): + if value: + command_line.append(arg_name) + elif isinstance(value, list): + for v in value: + command_line.append(arg_name) + command_line.append(str(v)) + else: + command_line.append(arg_name) + command_line.append(str(value)) + # Note: actool crashes if inputs path are relative, so use os.path.abspath + # to get absolute path name for inputs. + command_line.extend(map(os.path.abspath, inputs)) + subprocess.check_call(command_line) + + def ExecMergeInfoPlist(self, output, *inputs): + """Merge multiple .plist files into a single .plist file.""" + merged_plist = {} + for path in inputs: + plist = self._LoadPlistMaybeBinary(path) + self._MergePlist(merged_plist, plist) + plistlib.writePlist(merged_plist, output) + + def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. copy ResourceRules.plist from the user or the SDK into the bundle, + 2. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 3. copy Entitlements.plist from user or SDK next to the bundle, + 4. code sign the bundle. + """ + resource_rules_path = self._InstallResourceRules(resource_rules) + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier()) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides) + subprocess.check_call([ + 'codesign', '--force', '--sign', key, '--resource-rules', + resource_rules_path, '--entitlements', entitlements_path, + os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['FULL_PRODUCT_NAME'])]) + + def _InstallResourceRules(self, resource_rules): + """Installs ResourceRules.plist from user or SDK into the bundle. + + Args: + resource_rules: string, optional, path to the ResourceRules.plist file + to use, default to "${SDKROOT}/ResourceRules.plist" + + Returns: + Path to the copy of ResourceRules.plist into the bundle. + """ + source_path = resource_rules + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'ResourceRules.plist') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], 'ResourceRules.plist') + shutil.copy2(source_path, target_path) + return target_path + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. 
+ + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. + """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier) + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['CONTENTS_FOLDER_PATH'], + 'embedded.mobileprovision') + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.') + return substitutions, provisioning_data['Entitlements'] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') + if not os.path.isdir(profiles_dir): + print >>sys.stderr, ( + 'cannot find mobile provisioning for %s' % bundle_identifier) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + '.mobileprovision') + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, '*.mobileprovision')) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get( + 'Entitlements', {}).get('application-identifier', '') + for team_identifier in profile_data.get('TeamIdentifier', []): + app_id = '%s.%s' % (team_identifier, bundle_identifier) + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, profile_data, team_identifier) + if not valid_provisioning_profiles: + print >>sys.stderr, ( + 'cannot find mobile provisioning for %s' % bundle_identifier) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). + selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. 
+ """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call([ + 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name]) + return self._LoadPlistMaybeBinary(temp.name) + + def _MergePlist(self, merged_plist, plist): + """Merge |plist| into |merged_plist|.""" + for key, value in plist.iteritems(): + if isinstance(value, dict): + merged_value = merged_plist.get(key, {}) + if isinstance(merged_value, dict): + self._MergePlist(merged_value, value) + merged_plist[key] = merged_value + else: + merged_plist[key] = value + else: + merged_plist[key] = value + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(['plutil', '-convert', 'xml1', temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + 'CFBundleIdentifier': bundle_identifier, + 'AppIdentifierPrefix': app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ['TARGET_BUILD_DIR'], + os.environ['INFOPLIST_PATH']) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data['CFBundleIdentifier'] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ['BUILT_PRODUCTS_DIR'], + os.environ['PRODUCT_NAME'] + '.xcent') + if not source_path: + source_path = os.path.join( + os.environ['SDKROOT'], + 'Entitlements.plist') + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.iteritems(): + data = data.replace('$(%s)' % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return dict((k, self._ExpandVariables(data[k], + substitutions)) for k in data) + return data + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/fsevents/fsevents.cc b/node_modules/fsevents/fsevents.cc new file mode 100644 index 0000000..a347db1 --- /dev/null +++ b/node_modules/fsevents/fsevents.cc @@ -0,0 +1,102 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. +*/ + +#include "nan.h" +#include "uv.h" +#include "v8.h" +#include "pthread.h" +#include "CoreFoundation/CoreFoundation.h" +#include "CoreServices/CoreServices.h" +#include +#include + +#include "src/storage.cc" +namespace fse { + class FSEvents : public node::ObjectWrap { + public: + FSEvents(const char *path, Nan::Callback *handler); + ~FSEvents(); + + // locking.cc + bool lockStarted; + pthread_mutex_t lockmutex; + void lockingStart(); + void lock(); + void unlock(); + void lockingStop(); + + // async.cc + uv_async_t async; + void asyncStart(); + void asyncTrigger(); + void asyncStop(); + + // thread.cc + pthread_t thread; + CFRunLoopRef threadloop; + void threadStart(); + static void *threadRun(void *ctx); + void threadStop(); + + // methods.cc - internal + Nan::Callback *handler; + void emitEvent(const char *path, UInt32 flags, UInt64 id); + + // Common + CFArrayRef paths; + std::vector events; + static void Initialize(v8::Handle exports); + + // methods.cc - exposed + static NAN_METHOD(New); + static NAN_METHOD(Stop); + static NAN_METHOD(Start); + + }; +} + +using namespace fse; + +FSEvents::FSEvents(const char *path, Nan::Callback *handler): handler(handler) { + CFStringRef dirs[] = { CFStringCreateWithCString(NULL, path, kCFStringEncodingUTF8) }; + paths = CFArrayCreate(NULL, (const void **)&dirs, 1, NULL); + threadloop = NULL; + lockingStart(); +} +FSEvents::~FSEvents() { + std::cout << "YIKES" << std::endl; + lockingStop(); + delete handler; + handler = NULL; + + CFRelease(paths); +} + +#ifndef kFSEventStreamEventFlagItemCreated +#define kFSEventStreamEventFlagItemCreated 0x00000010 +#endif + +#include "src/locking.cc" +#include "src/async.cc" +#include "src/thread.cc" +#include "src/constants.cc" +#include "src/methods.cc" + +void FSEvents::Initialize(v8::Handle exports) { + v8::Local tpl = Nan::New(FSEvents::New); + tpl->SetClassName(Nan::New("FSEvents").ToLocalChecked()); + 
tpl->InstanceTemplate()->SetInternalFieldCount(1); + tpl->PrototypeTemplate()->Set( + Nan::New("start").ToLocalChecked(), + Nan::New(FSEvents::Start)); + tpl->PrototypeTemplate()->Set( + Nan::New("stop").ToLocalChecked(), + Nan::New(FSEvents::Stop)); + exports->Set(Nan::New("Constants").ToLocalChecked(), Constants()); + exports->Set(Nan::New("FSEvents").ToLocalChecked(), + tpl->GetFunction()); +} + +NODE_MODULE(fse, FSEvents::Initialize) diff --git a/node_modules/fsevents/fsevents.js b/node_modules/fsevents/fsevents.js new file mode 100644 index 0000000..4f36824 --- /dev/null +++ b/node_modules/fsevents/fsevents.js @@ -0,0 +1,105 @@ +/* + ** © 2014 by Philipp Dunkel + ** Licensed under MIT License. + */ + +/* jshint node:true */ +'use strict'; + +var path = require('path'); +var binary = require('node-pre-gyp'); +var Native = require(binary.find(path.join(__dirname, 'package.json'))); + +var EventEmitter = require('events').EventEmitter; +var fs = require('fs'); +var inherits = require('util').inherits; + +function FSEvents(path, handler) { + EventEmitter.call(this); + + Object.defineProperty(this, '_impl', { + value: new Native.FSEvents(String(path || ''), handler), + enumerable: false, + writable: false + }); +} + +inherits(FSEvents, EventEmitter); +proxies(FSEvents, Native.FSEvents); + +module.exports = watch; +module.exports.getInfo = getInfo; +module.exports.FSEvents = Native.FSEvents; +module.exports.Constants = Native.Constants; + +var defer = global.setImmediate || process.nextTick; + +function watch(path) { + var fse = new FSEvents(String(path || ''), handler); + EventEmitter.call(fse); + return fse; + + function handler(path, flags, id) { + defer(function() { + fse.emit('fsevent', path, flags, id); + var info = getInfo(path, flags); + info.id = id; + if (info.event === 'moved') { + fs.stat(info.path, function(err, stat) { + info.event = (err || !stat) ? 'moved-out' : 'moved-in'; + fse.emit('change', path, info); + fse.emit(info.event, path, info); + }); + } else { + fse.emit('change', path, info); + fse.emit(info.event, path, info); + } + }); + } +} + +function proxies(ctor, target) { + Object.keys(target.prototype).filter(function(key) { + return typeof target.prototype[key] === 'function'; + }).forEach(function(key) { + ctor.prototype[key] = function() { + this._impl[key].apply(this._impl, arguments); + return this; + } + }); +} + +function getFileType(flags) { + if (Native.Constants.kFSEventStreamEventFlagItemIsFile & flags) return 'file'; + if (Native.Constants.kFSEventStreamEventFlagItemIsDir & flags) return 'directory'; + if (Native.Constants.kFSEventStreamEventFlagItemIsSymlink & flags) return 'symlink'; +} + +function getEventType(flags) { + if (Native.Constants.kFSEventStreamEventFlagItemRemoved & flags) return 'deleted'; + if (Native.Constants.kFSEventStreamEventFlagItemRenamed & flags) return 'moved'; + if (Native.Constants.kFSEventStreamEventFlagItemCreated & flags) return 'created'; + if (Native.Constants.kFSEventStreamEventFlagItemModified & flags) return 'modified'; + if (Native.Constants.kFSEventStreamEventFlagRootChanged & flags) return 'root-changed'; + + return 'unknown'; +} + +function getFileChanges(flags) { + return { + inode: !! (Native.Constants.kFSEventStreamEventFlagItemInodeMetaMod & flags), + finder: !! (Native.Constants.kFSEventStreamEventFlagItemFinderInfoMod & flags), + access: !! (Native.Constants.kFSEventStreamEventFlagItemChangeOwner & flags), + xattrs: !! 
(Native.Constants.kFSEventStreamEventFlagItemXattrMod & flags) + }; +} + +function getInfo(path, flags) { + return { + path: path, + event: getEventType(flags), + type: getFileType(flags), + changes: getFileChanges(flags), + flags: flags + }; +} diff --git a/node_modules/fsevents/lib/binding/Release/node-v11-darwin-x64/fse.node b/node_modules/fsevents/lib/binding/Release/node-v11-darwin-x64/fse.node new file mode 100755 index 0000000..1d4e2d1 Binary files /dev/null and b/node_modules/fsevents/lib/binding/Release/node-v11-darwin-x64/fse.node differ diff --git a/node_modules/fsevents/lib/binding/Release/node-v46-darwin-x64/fse.node b/node_modules/fsevents/lib/binding/Release/node-v46-darwin-x64/fse.node new file mode 100755 index 0000000..ca473e6 Binary files /dev/null and b/node_modules/fsevents/lib/binding/Release/node-v46-darwin-x64/fse.node differ diff --git a/node_modules/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node b/node_modules/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node new file mode 100755 index 0000000..393572a Binary files /dev/null and b/node_modules/fsevents/lib/binding/Release/node-v47-darwin-x64/fse.node differ diff --git a/node_modules/fsevents/lib/binding/Release/node-v48-darwin-x64/fse.node b/node_modules/fsevents/lib/binding/Release/node-v48-darwin-x64/fse.node new file mode 100755 index 0000000..25e58f2 Binary files /dev/null and b/node_modules/fsevents/lib/binding/Release/node-v48-darwin-x64/fse.node differ diff --git a/node_modules/fsevents/node_modules/.bin/node-pre-gyp b/node_modules/fsevents/node_modules/.bin/node-pre-gyp new file mode 120000 index 0000000..47a90a5 --- /dev/null +++ b/node_modules/fsevents/node_modules/.bin/node-pre-gyp @@ -0,0 +1 @@ +../node-pre-gyp/bin/node-pre-gyp \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/abbrev/LICENSE b/node_modules/fsevents/node_modules/abbrev/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/abbrev/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/abbrev/README.md b/node_modules/fsevents/node_modules/abbrev/README.md new file mode 100644 index 0000000..99746fe --- /dev/null +++ b/node_modules/fsevents/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. 
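To illustrate the command-line shorthand use case the abbrev README describes, here is a minimal sketch. It is not part of the vendored package; the command names and the `resolve` helper are made up for illustration, and it only relies on the documented behavior that `abbrev()` returns a map from every unambiguous prefix to its full string.

```js
// Resolve a user-typed shorthand to a full command name using the map
// returned by abbrev(). Illustrative only; not from the vendored module.
var abbrev = require('abbrev');

var commands = abbrev('install', 'init', 'info', 'list');

function resolve(input) {
  // abbrev() maps every unambiguous prefix to its full command, so a
  // missing key means the shorthand was ambiguous or unknown.
  return commands[input] || null;
}

console.log(resolve('ins')); // 'install'
console.log(resolve('li'));  // 'list'
console.log(resolve('in'));  // null – ambiguous between install/init/info
```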
diff --git a/node_modules/fsevents/node_modules/abbrev/abbrev.js b/node_modules/fsevents/node_modules/abbrev/abbrev.js new file mode 100644 index 0000000..69cfeac --- /dev/null +++ b/node_modules/fsevents/node_modules/abbrev/abbrev.js @@ -0,0 +1,62 @@ + +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/fsevents/node_modules/abbrev/package.json b/node_modules/fsevents/node_modules/abbrev/package.json new file mode 100644 index 0000000..7e08838 --- /dev/null +++ b/node_modules/fsevents/node_modules/abbrev/package.json @@ -0,0 +1,82 @@ +{ + "_args": [ + [ + "abbrev@1", + "/Users/eshanker/Code/fsevents/node_modules/nopt" + ] + ], + "_from": "abbrev@>=1.0.0 <2.0.0", + "_id": "abbrev@1.0.9", + "_inCache": true, + "_installable": true, + "_location": "/abbrev", + "_nodeVersion": "4.4.4", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/abbrev-1.0.9.tgz_1466016055839_0.7825860097073019" + }, + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "3.9.1", + "_phantomChildren": {}, + "_requested": { + "name": "abbrev", + "raw": "abbrev@1", + "rawSpec": "1", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/nopt", + "/tap/nopt" + ], + "_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz", + "_shasum": "91b4792588a7738c25f35dd6f63752a2f8776135", + "_shrinkwrap": null, + "_spec": "abbrev@1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/nopt", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. 
Schlueter" + }, + "bugs": { + "url": "https://github.com/isaacs/abbrev-js/issues" + }, + "dependencies": {}, + "description": "Like ruby's abbrev module, but in js", + "devDependencies": { + "tap": "^5.7.2" + }, + "directories": {}, + "dist": { + "shasum": "91b4792588a7738c25f35dd6f63752a2f8776135", + "tarball": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz" + }, + "files": [ + "abbrev.js" + ], + "gitHead": "c386cd9dbb1d8d7581718c54d4ba944cc9298d6f", + "homepage": "https://github.com/isaacs/abbrev-js#readme", + "license": "ISC", + "main": "abbrev.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "abbrev", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/isaacs/abbrev-js.git" + }, + "scripts": { + "test": "tap test.js --cov" + }, + "version": "1.0.9" +} diff --git a/node_modules/fsevents/node_modules/ansi-regex/index.js b/node_modules/fsevents/node_modules/ansi-regex/index.js new file mode 100644 index 0000000..4906755 --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-regex/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = function () { + return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; +}; diff --git a/node_modules/fsevents/node_modules/ansi-regex/license b/node_modules/fsevents/node_modules/ansi-regex/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-regex/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/ansi-regex/package.json b/node_modules/fsevents/node_modules/ansi-regex/package.json new file mode 100644 index 0000000..19c404f --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-regex/package.json @@ -0,0 +1,113 @@ +{ + "_args": [ + [ + "ansi-regex@^2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/strip-ansi" + ] + ], + "_from": "ansi-regex@>=2.0.0 <3.0.0", + "_id": "ansi-regex@2.0.0", + "_inCache": true, + "_installable": true, + "_location": "/ansi-regex", + "_nodeVersion": "0.12.5", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.11.2", + "_phantomChildren": {}, + "_requested": { + "name": "ansi-regex", + "raw": "ansi-regex@^2.0.0", + "rawSpec": "^2.0.0", + "scope": null, + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/has-ansi", + "/strip-ansi" + ], + "_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz", + "_shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107", + "_shrinkwrap": null, + "_spec": "ansi-regex@^2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/strip-ansi", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/ansi-regex/issues" + }, + "dependencies": {}, + "description": "Regular expression for matching ANSI escape codes", + "devDependencies": { + "mocha": "*" + }, + "directories": {}, + "dist": { + "shasum": "c5061b6e0ef8a81775e50f5d66151bf6bf371107", + "tarball": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.0.0.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "57c3f2941a73079fa8b081e02a522e3d29913e2f", + "homepage": "https://github.com/sindresorhus/ansi-regex", + "keywords": [ + "256", + "ansi", + "cli", + "color", + "colors", + "colour", + "command-line", + "console", + "escape", + "find", + "formatting", + "match", + "pattern", + "re", + "regex", + "regexp", + "rgb", + "shell", + "string", + "styles", + "terminal", + "test", + "text", + "tty", + "xterm" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + }, + { + "name": "jbnicolai", + "email": "jappelman@xebia.com" + } + ], + "name": "ansi-regex", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/ansi-regex.git" + }, + "scripts": { + "test": "mocha test/test.js", + "view-supported": "node test/viewCodes.js" + }, + "version": "2.0.0" +} diff --git a/node_modules/fsevents/node_modules/ansi-regex/readme.md b/node_modules/fsevents/node_modules/ansi-regex/readme.md new file mode 100644 index 0000000..1a4894e --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-regex/readme.md @@ -0,0 +1,31 @@ +# ansi-regex [![Build Status](https://travis-ci.org/sindresorhus/ansi-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ansi-regex) + +> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save ansi-regex +``` + + +## Usage + +```js +var ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001b[4mcake\u001b[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001b[4mcake\u001b[0m'.match(ansiRegex()); +//=> ['\u001b[4m', '\u001b[0m'] +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git 
a/node_modules/fsevents/node_modules/ansi-styles/index.js b/node_modules/fsevents/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..7894527 --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-styles/index.js @@ -0,0 +1,65 @@ +'use strict'; + +function assembleStyles () { + var styles = { + modifiers: { + reset: [0, 0], + bold: [1, 22], // 21 isn't widely supported and 22 does the same thing + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + colors: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39] + }, + bgColors: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49] + } + }; + + // fix humans + styles.colors.grey = styles.colors.gray; + + Object.keys(styles).forEach(function (groupName) { + var group = styles[groupName]; + + Object.keys(group).forEach(function (styleName) { + var style = group[styleName]; + + styles[styleName] = group[styleName] = { + open: '\u001b[' + style[0] + 'm', + close: '\u001b[' + style[1] + 'm' + }; + }); + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + }); + + return styles; +} + +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/fsevents/node_modules/ansi-styles/license b/node_modules/fsevents/node_modules/ansi-styles/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-styles/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/ansi-styles/package.json b/node_modules/fsevents/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..3920ede --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-styles/package.json @@ -0,0 +1,106 @@ +{ + "_args": [ + [ + "ansi-styles@^2.2.1", + "/Users/eshanker/Code/fsevents/node_modules/chalk" + ] + ], + "_from": "ansi-styles@>=2.2.1 <3.0.0", + "_id": "ansi-styles@2.2.1", + "_inCache": true, + "_installable": true, + "_location": "/ansi-styles", + "_nodeVersion": "4.3.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/ansi-styles-2.2.1.tgz_1459197317833_0.9694824463222176" + }, + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "3.8.3", + "_phantomChildren": {}, + "_requested": { + "name": "ansi-styles", + "raw": "ansi-styles@^2.2.1", + "rawSpec": "^2.2.1", + "scope": null, + "spec": ">=2.2.1 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/chalk" + ], + "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "_shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe", + "_shrinkwrap": null, + "_spec": "ansi-styles@^2.2.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/chalk", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/ansi-styles/issues" + }, + "dependencies": {}, + "description": "ANSI escape codes for styling strings in the terminal", + "devDependencies": { + "mocha": "*" + }, + "directories": {}, + "dist": { + "shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe", + "tarball": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "95c59b23be760108b6530ca1c89477c21b258032", + "homepage": "https://github.com/chalk/ansi-styles#readme", + "keywords": [ + "256", + "ansi", + "cli", + "color", + "colors", + "colour", + "command-line", + "console", + "escape", + "formatting", + "log", + "logging", + "rgb", + "shell", + "string", + "styles", + "terminal", + "text", + "tty", + "xterm" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "ansi-styles", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/ansi-styles.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "2.2.1" +} diff --git a/node_modules/fsevents/node_modules/ansi-styles/readme.md b/node_modules/fsevents/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..3f933f6 --- /dev/null +++ b/node_modules/fsevents/node_modules/ansi-styles/readme.md @@ -0,0 +1,86 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + +![](screenshot.png) + + +## Install + +``` +$ npm install --save ansi-styles +``` + + +## Usage + +```js +var ansi = require('ansi-styles'); + +console.log(ansi.green.open + 'Hello world!' + ansi.green.close); +``` + + +## API + +Each style has an `open` and `close` property. 
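As a concrete illustration of those `open`/`close` pairs, here is a minimal sketch (not part of the vendored readme); it only uses the `bold` and `red` styles listed below and shows that styles can be combined by simple concatenation.

```js
// Wrap a string in a style by concatenating its open and close codes.
// Nesting two wrappers combines a modifier with a color.
var ansi = require('ansi-styles');

function bold(str) {
  return ansi.bold.open + str + ansi.bold.close;
}

function red(str) {
  return ansi.red.open + str + ansi.red.close;
}

console.log(red(bold('error:')) + ' something went wrong');
```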
+ + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` + + +## Advanced usage + +By default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `ansi.modifiers` +- `ansi.colors` +- `ansi.bgColors` + + +###### Example + +```js +console.log(ansi.colors.green.open); +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/aproba/LICENSE b/node_modules/fsevents/node_modules/aproba/LICENSE new file mode 100644 index 0000000..f4be44d --- /dev/null +++ b/node_modules/fsevents/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/fsevents/node_modules/aproba/README.md b/node_modules/fsevents/node_modules/aproba/README.md new file mode 100644 index 0000000..8246a9c --- /dev/null +++ b/node_modules/fsevents/node_modules/aproba/README.md @@ -0,0 +1,59 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +type | description +---- | ----------- +* | matches any type +A | Array.isArray OR an arguments object +S | typeof == string +N | typeof == number +F | typeof == function +O | typeof == object and not type A and not type E +B | typeof == boolean +E | instanceof Error OR null + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an error argument is found and is not null then the remaining arguments +will not be validated. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. 
+ +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. + diff --git a/node_modules/fsevents/node_modules/aproba/index.js b/node_modules/fsevents/node_modules/aproba/index.js new file mode 100644 index 0000000..bb5ac3c --- /dev/null +++ b/node_modules/fsevents/node_modules/aproba/index.js @@ -0,0 +1,62 @@ +'use strict' + +function isArguments (thingy) { + return typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +var types = { + '*': ['any', function () { return true }], + A: ['array', function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }], + S: ['string', function (thingy) { return typeof thingy === 'string' }], + N: ['number', function (thingy) { return typeof thingy === 'number' }], + F: ['function', function (thingy) { return typeof thingy === 'function' }], + O: ['object', function (thingy) { return typeof thingy === 'object' && !types.A[1](thingy) && !types.E[1](thingy) }], + B: ['boolean', function (thingy) { return typeof thingy === 'boolean' }], + E: ['error', function (thingy) { return thingy instanceof Error }] +} + +var validate = module.exports = function (schema, args) { + if (!schema) throw missingRequiredArg(0, 'schema') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S[1](schema)) throw invalidType(0, 'string', schema) + if (!types.A[1](args)) throw invalidType(1, 'array', args) + for (var ii = 0; ii < schema.length; ++ii) { + var type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + var typeLabel = types[type][0] + var typeCheck = types[type][1] + if (type === 'E' && args[ii] == null) continue + if (args[ii] == null) throw missingRequiredArg(ii) + if (!typeCheck(args[ii])) throw invalidType(ii, typeLabel, args[ii]) + if (type === 'E') return + } + if (schema.length < args.length) throw tooManyArgs(schema.length, args.length) +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedType, value) { + var valueType + Object.keys(types).forEach(function (typeCode) { + if (types[typeCode][1](value)) valueType = types[typeCode][0] + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + expectedType + ' but got ' + valueType) +} + +function tooManyArgs (expected, got) { + return newException('ETOOMANYARGS', 'Too many arguments, expected ' + expected + ' and got ' + got) +} + +function newException (code, msg) { + var e = new Error(msg) + e.code = code + Error.captureStackTrace(e, validate) + return e +} diff --git a/node_modules/fsevents/node_modules/aproba/package.json b/node_modules/fsevents/node_modules/aproba/package.json new file mode 100644 index 0000000..f0cfc9d --- /dev/null +++ b/node_modules/fsevents/node_modules/aproba/package.json @@ -0,0 +1,88 @@ +{ + "_args": [ + [ + "aproba@^1.0.3", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "aproba@>=1.0.3 <2.0.0", + "_id": "aproba@1.0.4", + "_inCache": true, + "_installable": true, + "_location": "/aproba", + "_nodeVersion": "4.4.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/aproba-1.0.4.tgz_1466718885402_0.5348939662799239" + }, + "_npmUser": { + "email": 
"me@re-becca.org", + "name": "iarna" + }, + "_npmVersion": "3.10.2", + "_phantomChildren": {}, + "_requested": { + "name": "aproba", + "raw": "aproba@^1.0.3", + "rawSpec": "^1.0.3", + "scope": null, + "spec": ">=1.0.3 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/aproba/-/aproba-1.0.4.tgz", + "_shasum": "2713680775e7614c8ba186c065d4e2e52d1072c0", + "_shrinkwrap": null, + "_spec": "aproba@^1.0.3", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "me@re-becca.org", + "name": "Rebecca Turner" + }, + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "dependencies": {}, + "description": "A rediculously light-weight argument validator", + "devDependencies": { + "standard": "^7.1.2", + "tap": "^5.7.3" + }, + "directories": { + "test": "test" + }, + "dist": { + "shasum": "2713680775e7614c8ba186c065d4e2e52d1072c0", + "tarball": "https://registry.npmjs.org/aproba/-/aproba-1.0.4.tgz" + }, + "files": [ + "index.js" + ], + "gitHead": "c6c8f82d519b9ec3816f20f23a9101083c022200", + "homepage": "https://github.com/iarna/aproba", + "keywords": [ + "argument", + "validate" + ], + "license": "ISC", + "main": "index.js", + "maintainers": [ + { + "name": "iarna", + "email": "me@re-becca.org" + } + ], + "name": "aproba", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/aproba.git" + }, + "scripts": { + "test": "standard && tap test/*.js" + }, + "version": "1.0.4" +} diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/.npmignore b/node_modules/fsevents/node_modules/are-we-there-yet/.npmignore new file mode 100644 index 0000000..bc81897 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/.npmignore @@ -0,0 +1,5 @@ +*~ +.#* +node_modules +coverage +.nyc_output diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/CHANGES.md b/node_modules/fsevents/node_modules/are-we-there-yet/CHANGES.md new file mode 100644 index 0000000..e990b86 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/CHANGES.md @@ -0,0 +1,19 @@ +Hi, figured we could actually use a changelog now: + +## 1.1.1 2016-01-29 + +* Fix a typo in stream completion tracker + +## 1.1.0 2016-01-29 + +* Rewrote completion percent computation to be low impact– no more walking a + tree of completion groups every time we need this info. Previously, with + medium sized tree of completion groups, even a relatively modest number of + calls to the top level `completed()` method would result in absurd numbers + of calls overall as it walked down the tree. We now, instead, keep track as + we bubble up changes, so the computation is limited to when data changes and + to the depth of that one branch, instead of _every_ node. (Plus, we were already + incurring _this_ cost, since we already bubbled out changes.) +* Moved different tracker types out to their own files. +* Made tests test for TOO MANY events too. 
+* Standarized the source code formatting diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/LICENSE b/node_modules/fsevents/node_modules/are-we-there-yet/LICENSE new file mode 100644 index 0000000..af45880 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/LICENSE @@ -0,0 +1,5 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/README.md b/node_modules/fsevents/node_modules/are-we-there-yet/README.md new file mode 100644 index 0000000..c41d392 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/README.md @@ -0,0 +1,194 @@ +are-we-there-yet +---------------- + +Track complex hiearchies of asynchronous task completion statuses. This is +intended to give you a way of recording and reporting the progress of the big +recursive fan-out and gather type workflows that are so common in async. + +What you do with this completion data is up to you, but the most common use case is to +feed it to one of the many progress bar modules. + +Most progress bar modules include a rudamentary version of this, but my +needs were more complex. + +Usage +===== + +```javascript +var TrackerGroup = require("are-we-there-yet").TrackerGroup + +var top = new TrackerGroup("program") + +var single = top.newItem("one thing", 100) +single.completeWork(20) + +console.log(top.completed()) // 0.2 + +fs.stat("file", function(er, stat) { + if (er) throw er + var stream = top.newStream("file", stat.size) + console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete + // and 50% * 20% == 10% + fs.createReadStream("file").pipe(stream).on("data", function (chunk) { + // do stuff with chunk + }) + top.on("change", function (name) { + // called each time a chunk is read from "file" + // top.completed() will start at 0.1 and fill up to 0.6 as the file is read + }) +}) +``` + +Shared Methods +============== + +All tracker objects described below have the following methods, they, along +with the event comprise the interface for consumers of tracker objects. + +* var completed = tracker.completed() + +Returns the ratio of completed work to work to be done. Range of 0 to 1. + +* tracker.finish() + +Marks the tracker as completed. With a TrackerGroup this marks all of its +components as completed. + +Marks all of the components of this tracker as finished, which in turn means +that `tracker.completed()` for this will now be 1. + +This will result in one or more `change` events being emitted. + +Events +====== + +All tracker objects emit `change` events with the following arguments: + +``` +function (name, completed, tracker) +``` + +`name` is the name of the tracker that originally emitted the event, +or if it didn't have one, the first containing tracker group that had one. 
+ +`completed` is the percent complete (as returned by `tracker.completed()` method). + +`tracker` is the tracker object that you are listening for events on. + +TrackerGroup +============ + +* var tracker = new TrackerGroup(**name**) + + * **name** *(optional)* - The name of this tracker group, used in change + notifications if the component updating didn't have a name. Defaults to undefined. + +Creates a new empty tracker aggregation group. These are trackers whose +completion status is determined by the completion status of other trackers. + +* tracker.addUnit(**otherTracker**, **weight**) + + * **otherTracker** - Any of the other are-we-there-yet tracker objects + * **weight** *(optional)* - The weight to give the tracker, defaults to 1. + +Adds the **otherTracker** to this aggregation group. The weight determines +how long you expect this tracker to take to complete in proportion to other +units. So for instance, if you add one tracker with a weight of 1 and +another with a weight of 2, you're saying the second will take twice as long +to complete as the first. As such, the first will account for 33% of the +completion of this tracker and the second will account for the other 67%. + +Returns **otherTracker**. + +* var subGroup = tracker.newGroup(**name**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subGroup = tracker.addUnit(new TrackerGroup(name), weight) +``` + +* var subItem = tracker.newItem(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subItem = tracker.addUnit(new Tracker(name, todo), weight) +``` + +* var subStream = tracker.newStream(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subStream = tracker.addUnit(new TrackerStream(name, todo), weight) +``` + +* console.log( tracker.debug() ) + +Returns a tree showing the completion of this tracker group and all of its +children, including recursively entering all of the children. + +Tracker +======= + +* var tracker = new Tracker(**name**, **todo**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **todo** *(optional)* The amount of work todo (a number). Defaults to 0. + +Ordinarily these are constructed as a part of a tracker group (via +`newItem`). + +* var completed = tracker.completed() + +Returns the ratio of completed work to work to be done. Range of 0 to 1. If +total work to be done is 0 then it will return 0. + +* tracker.addWork(**todo**) + + * **todo** A number to add to the amount of work to be done. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. + +* tracker.completeWork(**completed**) + + * **completed** A number to add to the work complete + +Increase the amount of work complete, thus increasing the completion percentage. +Will never increase the work completed past the amount of work todo. That is, +percentages > 100% are not allowed. Triggers a `change` event. + +* tracker.finish() + +Marks this tracker as finished, tracker.completed() will now be 1. Triggers +a `change` event. + +TrackerStream +============= + +* var tracker = new TrackerStream(**name**, **size**, **options**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **size** *(optional)* The number of bytes being sent through this stream. 
+ * **options** *(optional)* A hash of stream options + +The tracker stream object is a pass through stream that updates an internal +tracker object each time a block passes through. It's intended to track +downloads, file extraction and other related activities. You use it by piping +your data source into it and then using it as your data source. + +If your data has a length attribute then that's used as the amount of work +completed when the chunk is passed through. If it does not (eg, object +streams) then each chunk counts as completing 1 unit of work, so your size +should be the total number of objects being streamed. + +* tracker.addWork(**todo**) + + * **todo** Increase the expected overall size by **todo** bytes. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/index.js b/node_modules/fsevents/node_modules/are-we-there-yet/index.js new file mode 100644 index 0000000..57d8743 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/index.js @@ -0,0 +1,4 @@ +'use strict' +exports.TrackerGroup = require('./tracker-group.js') +exports.Tracker = require('./tracker.js') +exports.TrackerStream = require('./tracker-stream.js') diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/package.json b/node_modules/fsevents/node_modules/are-we-there-yet/package.json new file mode 100644 index 0000000..420716e --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/package.json @@ -0,0 +1,82 @@ +{ + "_args": [ + [ + "are-we-there-yet@~1.1.2", + "/Users/eshanker/Code/fsevents/node_modules/npmlog" + ] + ], + "_from": "are-we-there-yet@>=1.1.2 <1.2.0", + "_id": "are-we-there-yet@1.1.2", + "_inCache": true, + "_installable": true, + "_location": "/are-we-there-yet", + "_nodeVersion": "4.2.2", + "_npmOperationalInternal": { + "host": "packages-13-west.internal.npmjs.com", + "tmp": "tmp/are-we-there-yet-1.1.2.tgz_1458084397358_0.15847722673788667" + }, + "_npmUser": { + "email": "me@re-becca.org", + "name": "iarna" + }, + "_npmVersion": "3.8.1", + "_phantomChildren": {}, + "_requested": { + "name": "are-we-there-yet", + "raw": "are-we-there-yet@~1.1.2", + "rawSpec": "~1.1.2", + "scope": null, + "spec": ">=1.1.2 <1.2.0", + "type": "range" + }, + "_requiredBy": [ + "/npmlog" + ], + "_resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.2.tgz", + "_shasum": "80e470e95a084794fe1899262c5667c6e88de1b3", + "_shrinkwrap": null, + "_spec": "are-we-there-yet@~1.1.2", + "_where": "/Users/eshanker/Code/fsevents/node_modules/npmlog", + "author": { + "name": "Rebecca Turner", + "url": "http://re-becca.org" + }, + "bugs": { + "url": "https://github.com/iarna/are-we-there-yet/issues" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.0 || ^1.1.13" + }, + "description": "Keep track of the overall completion of many dispirate processes", + "devDependencies": { + "standard": "^6.0.8", + "tap": "^5.7.0" + }, + "directories": {}, + "dist": { + "shasum": "80e470e95a084794fe1899262c5667c6e88de1b3", + "tarball": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.2.tgz" + }, + "gitHead": "dd5706e2204cb681e78031d0ffe156ed4cc75823", + "homepage": "https://github.com/iarna/are-we-there-yet", + "license": "ISC", + "main": "index.js", + "maintainers": [ + { + "name": "iarna", + "email": "me@re-becca.org" + } + ], + "name": "are-we-there-yet", + "optionalDependencies": {}, + "readme": "ERROR: No 
README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/are-we-there-yet.git" + }, + "scripts": { + "test": "standard && tap test/*.js" + }, + "version": "1.1.2" +} diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/test/lib/test-event.js b/node_modules/fsevents/node_modules/are-we-there-yet/test/lib/test-event.js new file mode 100644 index 0000000..2aa7c05 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/test/lib/test-event.js @@ -0,0 +1,29 @@ +'use strict' +var util = require('util') + +module.exports = function (obj, event, next) { + var timeout = setTimeout(gotTimeout, 10) + obj.once(event, gotResult) + + function gotTimeout () { + obj.removeListener(event, gotResult) + next(new Error('Timeout listening for ' + event)) + } + var result = [] + function gotResult () { + result = Array.prototype.slice.call(arguments) + clearTimeout(timeout) + timeout = setTimeout(gotNoMoreResults, 10) + obj.once(event, gotTooManyResults) + } + function gotNoMoreResults () { + obj.removeListener(event, gotTooManyResults) + var args = [null].concat(result) + next.apply(null, args) + } + function gotTooManyResults () { + var secondResult = Array.prototype.slice.call(arguments) + clearTimeout(timeout) + next(new Error('Got too many results, first ' + util.inspect(result) + ' and then ' + util.inspect(secondResult))) + } +} diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/test/tracker.js b/node_modules/fsevents/node_modules/are-we-there-yet/test/tracker.js new file mode 100644 index 0000000..374c773 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/test/tracker.js @@ -0,0 +1,57 @@ +'use strict' +var test = require('tap').test +var Tracker = require('../index.js').Tracker + +var testEvent = require('./lib/test-event.js') + +var name = 'test' + +test('initialization', function (t) { + var simple = new Tracker(name) + + t.is(simple.completed(), 0, 'Nothing todo is 0 completion') + t.done() +}) + +var track +var todo = 100 +test('completion', function (t) { + track = new Tracker(name, todo) + t.is(track.completed(), 0, 'Nothing done is 0 completion') + + testEvent(track, 'change', afterCompleteWork) + + track.completeWork(todo) + t.is(track.completed(), 1, 'completeWork: 100% completed') + + function afterCompleteWork (er, onChangeName) { + t.is(er, null, 'completeWork: on change event fired') + t.is(onChangeName, name, 'completeWork: on change emits the correct name') + t.done() + } +}) + +test('add more work', function (t) { + testEvent(track, 'change', afterAddWork) + track.addWork(todo) + t.is(track.completed(), 0.5, 'addWork: 50% completed') + function afterAddWork (er, onChangeName) { + t.is(er, null, 'addWork: on change event fired') + t.is(onChangeName, name, 'addWork: on change emits the correct name') + t.done() + } +}) + +test('complete more work', function (t) { + track.completeWork(200) + t.is(track.completed(), 1, 'completeWork: Over completion is still only 100% complete') + t.done() +}) + +test('finish is always 100%', function (t) { + var finishtest = new Tracker(name, todo) + finishtest.completeWork(50) + finishtest.finish() + t.is(finishtest.completed(), 1, 'finish: Explicitly finishing moves to 100%') + t.done() +}) diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/test/trackergroup.js b/node_modules/fsevents/node_modules/are-we-there-yet/test/trackergroup.js new file mode 100644 index 0000000..799a767 --- /dev/null +++ 
b/node_modules/fsevents/node_modules/are-we-there-yet/test/trackergroup.js @@ -0,0 +1,96 @@ +'use strict' +var test = require('tap').test +var TrackerGroup = require('../index.js').TrackerGroup +var testEvent = require('./lib/test-event.js') + +test('TrackerGroup', function (t) { + var name = 'test' + + var track = new TrackerGroup(name) + t.is(track.completed(), 0, 'Nothing todo is 0 completion') + testEvent(track, 'change', afterFinishEmpty) + track.finish() + var a, b + function afterFinishEmpty (er, onChangeName, completion) { + t.is(er, null, 'finishEmpty: on change event fired') + t.is(onChangeName, name, 'finishEmpty: on change emits the correct name') + t.is(completion, 1, 'finishEmpty: passed through completion was correct') + t.is(track.completed(), 1, 'finishEmpty: Finishing an empty group actually finishes it') + + track = new TrackerGroup(name) + a = track.newItem('a', 10, 1) + b = track.newItem('b', 10, 1) + t.is(track.completed(), 0, 'Initially empty') + testEvent(track, 'change', afterCompleteWork) + a.completeWork(5) + } + function afterCompleteWork (er, onChangeName, completion) { + t.is(er, null, 'on change event fired') + t.is(onChangeName, 'a', 'on change emits the correct name') + t.is(completion, 0.25, 'Complete half of one is a quarter overall') + t.is(track.completed(), 0.25, 'Complete half of one is a quarter overall') + testEvent(track, 'change', afterFinishAll) + track.finish() + } + function afterFinishAll (er, onChangeName, completion) { + t.is(er, null, 'finishAll: on change event fired') + t.is(onChangeName, name, 'finishAll: on change emits the correct name') + t.is(completion, 1, 'Finishing everything ') + t.is(track.completed(), 1, 'Finishing everything ') + + track = new TrackerGroup(name) + a = track.newItem('a', 10, 2) + b = track.newItem('b', 10, 1) + t.is(track.completed(), 0, 'weighted: Initially empty') + testEvent(track, 'change', afterWeightedCompleteWork) + a.completeWork(5) + } + function afterWeightedCompleteWork (er, onChangeName, completion) { + t.is(er, null, 'weighted: on change event fired') + t.is(onChangeName, 'a', 'weighted: on change emits the correct name') + t.is(Math.floor(completion * 100), 33, 'weighted: Complete half of double weighted') + t.is(Math.floor(track.completed() * 100), 33, 'weighted: Complete half of double weighted') + testEvent(track, 'change', afterWeightedFinishAll) + track.finish() + } + function afterWeightedFinishAll (er, onChangeName, completion) { + t.is(er, null, 'weightedFinishAll: on change event fired') + t.is(onChangeName, name, 'weightedFinishAll: on change emits the correct name') + t.is(completion, 1, 'weightedFinishaAll: Finishing everything ') + t.is(track.completed(), 1, 'weightedFinishaAll: Finishing everything ') + + track = new TrackerGroup(name) + a = track.newGroup('a', 10) + b = track.newGroup('b', 10) + var a1 = a.newItem('a.1', 10) + a1.completeWork(5) + t.is(track.completed(), 0.25, 'nested: Initially quarter done') + testEvent(track, 'change', afterNestedComplete) + b.finish() + } + function afterNestedComplete (er, onChangeName, completion) { + t.is(er, null, 'nestedComplete: on change event fired') + t.is(onChangeName, 'b', 'nestedComplete: on change emits the correct name') + t.is(completion, 0.75, 'nestedComplete: Finishing everything ') + t.is(track.completed(), 0.75, 'nestedComplete: Finishing everything ') + t.end() + } +}) + +test('cycles', function (t) { + var track = new TrackerGroup('top') + testCycle(track, track) + var layer1 = track.newGroup('layer1') + testCycle(layer1, 
track) + t.end() + + function testCycle (addTo, toAdd) { + try { + addTo.addUnit(toAdd) + t.fail(toAdd.name) + } catch (ex) { + console.log(ex) + t.pass(toAdd.name) + } + } +}) diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/test/trackerstream.js b/node_modules/fsevents/node_modules/are-we-there-yet/test/trackerstream.js new file mode 100644 index 0000000..65f04b0 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/test/trackerstream.js @@ -0,0 +1,51 @@ +'use strict' +var test = require('tap').test +var util = require('util') +var stream = require('readable-stream') +var TrackerStream = require('../index.js').TrackerStream +var testEvent = require('./lib/test-event.js') + +var Sink = function () { + stream.Writable.apply(this, arguments) +} +util.inherits(Sink, stream.Writable) +Sink.prototype._write = function (data, encoding, cb) { + cb() +} + +test('TrackerStream', function (t) { + t.plan(9) + + var name = 'test' + var track = new TrackerStream(name) + + t.is(track.completed(), 0, 'Nothing todo is 0 completion') + + var todo = 10 + track = new TrackerStream(name, todo) + t.is(track.completed(), 0, 'Nothing done is 0 completion') + + track.pipe(new Sink()) + + testEvent(track, 'change', afterCompleteWork) + track.write('0123456789') + function afterCompleteWork (er, onChangeName) { + t.is(er, null, 'write: on change event fired') + t.is(onChangeName, name, 'write: on change emits the correct name') + t.is(track.completed(), 1, 'write: 100% completed') + + testEvent(track, 'change', afterAddWork) + track.addWork(10) + } + function afterAddWork (er, onChangeName) { + t.is(er, null, 'addWork: on change event fired') + t.is(track.completed(), 0.5, 'addWork: 50% completed') + + testEvent(track, 'change', afterAllWork) + track.write('ABCDEFGHIJKLMNOPQRST') + } + function afterAllWork (er) { + t.is(er, null, 'allWork: on change event fired') + t.is(track.completed(), 1, 'allWork: 100% completed') + } +}) diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/tracker-base.js b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-base.js new file mode 100644 index 0000000..6f43687 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/tracker-group.js b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-group.js new file mode 100644 index 0000000..9759e12 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-group.js @@ -0,0 +1,107 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) return + trackerGroup.emit('change', 
name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit) + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) return 0 + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) this.addUnit(new Tracker(), 1, true) + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/tracker-stream.js b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-stream.js new file mode 100644 index 0000000..fb9598e --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/tracker-stream.js @@ -0,0 +1,35 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? 
data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') diff --git a/node_modules/fsevents/node_modules/are-we-there-yet/tracker.js b/node_modules/fsevents/node_modules/are-we-there-yet/tracker.js new file mode 100644 index 0000000..68c2339 --- /dev/null +++ b/node_modules/fsevents/node_modules/are-we-there-yet/tracker.js @@ -0,0 +1,30 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) this.workDone = this.workTodo + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/node_modules/fsevents/node_modules/asn1/.npmignore b/node_modules/fsevents/node_modules/asn1/.npmignore new file mode 100644 index 0000000..eb03e3e --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/.npmignore @@ -0,0 +1,2 @@ +node_modules +*.log diff --git a/node_modules/fsevents/node_modules/asn1/.travis.yml b/node_modules/fsevents/node_modules/asn1/.travis.yml new file mode 100644 index 0000000..09d3ef3 --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.8 + - 0.10 diff --git a/node_modules/fsevents/node_modules/asn1/LICENSE b/node_modules/fsevents/node_modules/asn1/LICENSE new file mode 100644 index 0000000..9b5dcdb --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011 Mark Cavage, All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/fsevents/node_modules/asn1/README.md b/node_modules/fsevents/node_modules/asn1/README.md new file mode 100644 index 0000000..7cebf7a --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/README.md @@ -0,0 +1,50 @@ +node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS. +Currently BER encoding is supported; at some point I'll likely have to do DER. + +## Usage + +Mostly, if you're *actually* needing to read and write ASN.1, you probably don't +need this readme to explain what and why. If you have no idea what ASN.1 is, +see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +The source is pretty much self-explanatory, and has read/write methods for the +common types out there. + +### Decoding + +The following reads an ASN.1 sequence with a boolean. + + var Ber = require('asn1').Ber; + + var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff])); + + reader.readSequence(); + console.log('Sequence len: ' + reader.length); + if (reader.peek() === Ber.Boolean) + console.log(reader.readBoolean()); + +### Encoding + +The following generates the same payload as above. + + var Ber = require('asn1').Ber; + + var writer = new Ber.Writer(); + + writer.startSequence(); + writer.writeBoolean(true); + writer.endSequence(); + + console.log(writer.buffer); + +## Installation + + npm install asn1 + +## License + +MIT. + +## Bugs + +See . diff --git a/node_modules/fsevents/node_modules/asn1/lib/ber/errors.js b/node_modules/fsevents/node_modules/asn1/lib/ber/errors.js new file mode 100644 index 0000000..ff21d4f --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/ber/errors.js @@ -0,0 +1,13 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + + newInvalidAsn1Error: function(msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } + +}; diff --git a/node_modules/fsevents/node_modules/asn1/lib/ber/index.js b/node_modules/fsevents/node_modules/asn1/lib/ber/index.js new file mode 100644 index 0000000..4fb90ae --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/ber/index.js @@ -0,0 +1,27 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var errors = require('./errors'); +var types = require('./types'); + +var Reader = require('./reader'); +var Writer = require('./writer'); + + +///--- Exports + +module.exports = { + + Reader: Reader, + + Writer: Writer + +}; + +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; +} +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; +} diff --git a/node_modules/fsevents/node_modules/asn1/lib/ber/reader.js b/node_modules/fsevents/node_modules/asn1/lib/ber/reader.js new file mode 100644 index 0000000..0a00e98 --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/ber/reader.js @@ -0,0 +1,261 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var assert = require('assert'); + +var ASN1 = require('./types'); +var errors = require('./errors'); + + +///--- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + + + +///--- API + +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); + + this._buf = data; + this._size = data.length; + + // These hold the "current" state + this._len = 0; + this._offset = 0; +} + +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); + +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); + + +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. + */ +Reader.prototype.readByte = function(peek) { + if (this._size - this._offset < 1) + return null; + + var b = this._buf[this._offset] & 0xff; + + if (!peek) + this._offset += 1; + + return b; +}; + + +Reader.prototype.peek = function() { + return this.readByte(true); +}; + + +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function(offset) { + if (offset === undefined) + offset = this._offset; + + if (offset >= this._size) + return null; + + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; + + if ((lenB & 0x80) == 0x80) { + lenB &= 0x7f; + + if (lenB == 0) + throw newInvalidAsn1Error('Indefinite length not supported'); + + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); + + if (this._size - offset < lenB) + return null; + + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); + + } else { + // Wasn't a variable length + this._len = lenB; + } + + return offset; +}; + + +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. + */ +Reader.prototype.readSequence = function(tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + this._offset = o; + return seq; +}; + + +Reader.prototype.readInt = function() { + return this._readTag(ASN1.Integer); +}; + + +Reader.prototype.readBoolean = function() { + return (this._readTag(ASN1.Boolean) === 0 ? 
false : true); +}; + + +Reader.prototype.readEnumeration = function() { + return this._readTag(ASN1.Enumeration); +}; + + +Reader.prototype.readString = function(tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; + + var b = this.peek(); + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + + if (o === null) + return null; + + if (this.length > this._size - o) + return null; + + this._offset = o; + + if (this.length === 0) + return retbuf ? new Buffer(0) : ''; + + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; + + return retbuf ? str : str.toString('utf8'); +}; + +Reader.prototype.readOID = function(tag) { + if (!tag) + tag = ASN1.OID; + + var b = this.readString(tag, true); + if (b === null) + return null; + + var values = []; + var value = 0; + + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; + + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) == 0) { + values.push(value); + value = 0; + } + } + + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); + + return values.join('.'); +}; + + +Reader.prototype._readTag = function(tag) { + assert.ok(tag !== undefined); + + var b = this.peek(); + + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + + if (this.length > this._size - o) + return null; + this._offset = o; + + var fb = this._buf[this._offset]; + var value = 0; + + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); + } + + if ((fb & 0x80) == 0x80 && i !== 4) + value -= (1 << (i * 8)); + + return value >> 0; +}; + + + +///--- Exported API + +module.exports = Reader; diff --git a/node_modules/fsevents/node_modules/asn1/lib/ber/types.js b/node_modules/fsevents/node_modules/asn1/lib/ber/types.js new file mode 100644 index 0000000..8aea000 --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/ber/types.js @@ -0,0 +1,36 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; diff --git a/node_modules/fsevents/node_modules/asn1/lib/ber/writer.js b/node_modules/fsevents/node_modules/asn1/lib/ber/writer.js new file mode 100644 index 0000000..d9d99af --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/ber/writer.js @@ -0,0 +1,316 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var assert = require('assert'); +var ASN1 = require('./types'); +var errors = require('./errors'); + + +///--- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 +}; + + +///--- Helpers + +function merge(from, to) { + assert.ok(from); + assert.equal(typeof(from), 'object'); + assert.ok(to); + assert.equal(typeof(to), 'object'); + + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function(key) { + if (to[key]) + return; + + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); + + return to; +} + + + +///--- API + +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); + + this._buf = new Buffer(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; + + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. + this._seq = []; +} + +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw new InvalidAsn1Error(this._seq.length + ' unended sequence(s)'); + + return (this._buf.slice(0, this._offset)); + } +}); + +Writer.prototype.writeByte = function(b) { + if (typeof(b) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(1); + this._buf[this._offset++] = b; +}; + + +Writer.prototype.writeInt = function(i, tag) { + if (typeof(i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof(tag) !== 'number') + tag = ASN1.Integer; + + var sz = 4; + + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; + } + + if (sz > 4) + throw new InvalidAsn1Error('BER ints cannot be > 0xffffffff'); + + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; + + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; + } + +}; + + +Writer.prototype.writeNull = function() { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; + + +Writer.prototype.writeEnumeration = function(i, tag) { + if (typeof(i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof(tag) !== 'number') + tag = ASN1.Enumeration; + + return this.writeInt(i, tag); +}; + + +Writer.prototype.writeBoolean = function(b, tag) { + if (typeof(b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof(tag) !== 'number') + tag = ASN1.Boolean; + + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 
0xff : 0x00; +}; + + +Writer.prototype.writeString = function(s, tag) { + if (typeof(s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof(s) + ')'); + if (typeof(tag) !== 'number') + tag = ASN1.OctetString; + + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; + + +Writer.prototype.writeBuffer = function(buf, tag) { + if (typeof(tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; + + +Writer.prototype.writeStringArray = function(strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + + var self = this; + strings.forEach(function(s) { + self.writeString(s); + }); +}; + +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function(s, tag) { + if (typeof(s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof(tag) !== 'number') + tag = ASN1.OID; + + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); + + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) | 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } + } + + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function(b) { + encodeOctet(bytes, parseInt(b, 10)); + }); + + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function(b) { + self.writeByte(b); + }); +}; + + +Writer.prototype.writeLength = function(len) { + if (typeof(len) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(4); + + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else { + throw new InvalidAsn1ERror('Length too long (> 4 bytes)'); + } +}; + +Writer.prototype.startSequence = function(tag) { + if (typeof(tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; + + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; + + +Writer.prototype.endSequence = function() { + var seq = this._seq.pop(); + var start = seq + 3; + var len 
= this._offset - start; + + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; + } else { + throw new InvalidAsn1Error('Sequence too long'); + } +}; + + +Writer.prototype._shift = function(start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); + + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; + +Writer.prototype._ensure = function(len) { + assert.ok(len); + + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; + + var buf = new Buffer(sz); + + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; + } +}; + + + +///--- Exported API + +module.exports = Writer; diff --git a/node_modules/fsevents/node_modules/asn1/lib/index.js b/node_modules/fsevents/node_modules/asn1/lib/index.js new file mode 100644 index 0000000..d1766e7 --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/lib/index.js @@ -0,0 +1,20 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +var Ber = require('./ber/index'); + + + +///--- Exported API + +module.exports = { + + Ber: Ber, + + BerReader: Ber.Reader, + + BerWriter: Ber.Writer + +}; diff --git a/node_modules/fsevents/node_modules/asn1/package.json b/node_modules/fsevents/node_modules/asn1/package.json new file mode 100644 index 0000000..d842a54 --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/package.json @@ -0,0 +1,90 @@ +{ + "_args": [ + [ + "asn1@~0.2.3", + "/Users/eshanker/Code/fsevents/node_modules/sshpk" + ] + ], + "_from": "asn1@>=0.2.3 <0.3.0", + "_id": "asn1@0.2.3", + "_inCache": true, + "_installable": true, + "_location": "/asn1", + "_npmUser": { + "email": "patrick.f.mooney@gmail.com", + "name": "pfmooney" + }, + "_npmVersion": "1.4.28", + "_phantomChildren": {}, + "_requested": { + "name": "asn1", + "raw": "asn1@~0.2.3", + "rawSpec": "~0.2.3", + "scope": null, + "spec": ">=0.2.3 <0.3.0", + "type": "range" + }, + "_requiredBy": [ + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "_shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86", + "_shrinkwrap": null, + "_spec": "asn1@~0.2.3", + "_where": "/Users/eshanker/Code/fsevents/node_modules/sshpk", + "author": { + "email": "mcavage@gmail.com", + "name": "Mark Cavage" + }, + "bugs": { + "url": "https://github.com/mcavage/node-asn1/issues" + }, + "contributors": [ + { + "name": "David Gwynne", + "email": "loki@animata.net" + }, + { + "name": "Yunong Xiao", + "email": "yunong@joyent.com" + }, + { + "name": "Alex Wilson", + "email": "alex.wilson@joyent.com" + } + ], + "dependencies": {}, + "description": "Contains parsers and serializers for ASN.1 (currently BER only)", + "devDependencies": { + "tap": "0.4.8" + }, + "directories": {}, + "dist": { + "shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86", + "tarball": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz" + }, + "homepage": "https://github.com/mcavage/node-asn1", + 
"license": "MIT", + "main": "lib/index.js", + "maintainers": [ + { + "name": "mcavage", + "email": "mcavage@gmail.com" + }, + { + "name": "pfmooney", + "email": "patrick.f.mooney@gmail.com" + } + ], + "name": "asn1", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/mcavage/node-asn1.git" + }, + "scripts": { + "test": "tap ./tst" + }, + "version": "0.2.3" +} diff --git a/node_modules/fsevents/node_modules/asn1/tst/ber/reader.test.js b/node_modules/fsevents/node_modules/asn1/tst/ber/reader.test.js new file mode 100644 index 0000000..062fd7e --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/tst/ber/reader.test.js @@ -0,0 +1,208 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var test = require('tap').test; + + + +///--- Globals + +var BerReader; + + + +///--- Tests + +test('load library', function(t) { + BerReader = require('../../lib/index').BerReader; + t.ok(BerReader); + try { + new BerReader(); + t.fail('Should have thrown'); + } catch (e) { + t.ok(e instanceof TypeError, 'Should have been a type error'); + } + t.end(); +}); + + +test('read byte', function(t) { + var reader = new BerReader(new Buffer([0xde])); + t.ok(reader); + t.equal(reader.readByte(), 0xde, 'wrong value'); + t.end(); +}); + + +test('read 1 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x01, 0x03])); + t.ok(reader); + t.equal(reader.readInt(), 0x03, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read 2 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x02, 0x7e, 0xde])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede, 'wrong value'); + t.equal(reader.length, 0x02, 'wrong length'); + t.end(); +}); + + +test('read 3 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x03, 0x7e, 0xde, 0x03])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede03, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.end(); +}); + + +test('read 4 byte int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x04, 0x7e, 0xde, 0x03, 0x01])); + t.ok(reader); + t.equal(reader.readInt(), 0x7ede0301, 'wrong value'); + t.equal(reader.length, 0x04, 'wrong length'); + t.end(); +}); + + +test('read 1 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x01, 0xdc])); + t.ok(reader); + t.equal(reader.readInt(), -36, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read 2 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x02, 0xc0, 0x4e])); + t.ok(reader); + t.equal(reader.readInt(), -16306, 'wrong value'); + t.equal(reader.length, 0x02, 'wrong length'); + t.end(); +}); + + +test('read 3 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x03, 0xff, 0x00, 0x19])); + t.ok(reader); + t.equal(reader.readInt(), -65511, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.end(); +}); + + +test('read 4 byte negative int', function(t) { + var reader = new BerReader(new Buffer([0x02, 0x04, 0x91, 0x7c, 0x22, 0x1f])); + t.ok(reader); + t.equal(reader.readInt(), -1854135777, 'wrong value'); + t.equal(reader.length, 0x04, 'wrong length'); + t.end(); +}); + + +test('read boolean true', function(t) { + var reader = new BerReader(new Buffer([0x01, 0x01, 0xff])); + t.ok(reader); + t.equal(reader.readBoolean(), true, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); 
+ t.end(); +}); + + +test('read boolean false', function(t) { + var reader = new BerReader(new Buffer([0x01, 0x01, 0x00])); + t.ok(reader); + t.equal(reader.readBoolean(), false, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read enumeration', function(t) { + var reader = new BerReader(new Buffer([0x0a, 0x01, 0x20])); + t.ok(reader); + t.equal(reader.readEnumeration(), 0x20, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('read string', function(t) { + var dn = 'cn=foo,ou=unit,o=test'; + var buf = new Buffer(dn.length + 2); + buf[0] = 0x04; + buf[1] = Buffer.byteLength(dn); + buf.write(dn, 2); + var reader = new BerReader(buf); + t.ok(reader); + t.equal(reader.readString(), dn, 'wrong value'); + t.equal(reader.length, dn.length, 'wrong length'); + t.end(); +}); + + +test('read sequence', function(t) { + var reader = new BerReader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff])); + t.ok(reader); + t.equal(reader.readSequence(), 0x30, 'wrong value'); + t.equal(reader.length, 0x03, 'wrong length'); + t.equal(reader.readBoolean(), true, 'wrong value'); + t.equal(reader.length, 0x01, 'wrong length'); + t.end(); +}); + + +test('anonymous LDAPv3 bind', function(t) { + var BIND = new Buffer(14); + BIND[0] = 0x30; // Sequence + BIND[1] = 12; // len + BIND[2] = 0x02; // ASN.1 Integer + BIND[3] = 1; // len + BIND[4] = 0x04; // msgid (make up 4) + BIND[5] = 0x60; // Bind Request + BIND[6] = 7; // len + BIND[7] = 0x02; // ASN.1 Integer + BIND[8] = 1; // len + BIND[9] = 0x03; // v3 + BIND[10] = 0x04; // String (bind dn) + BIND[11] = 0; // len + BIND[12] = 0x80; // ContextSpecific (choice) + BIND[13] = 0; // simple bind + + // Start testing ^^ + var ber = new BerReader(BIND); + t.equal(ber.readSequence(), 48, 'Not an ASN.1 Sequence'); + t.equal(ber.length, 12, 'Message length should be 12'); + t.equal(ber.readInt(), 4, 'Message id should have been 4'); + t.equal(ber.readSequence(), 96, 'Bind Request should have been 96'); + t.equal(ber.length, 7, 'Bind length should have been 7'); + t.equal(ber.readInt(), 3, 'LDAP version should have been 3'); + t.equal(ber.readString(), '', 'Bind DN should have been empty'); + t.equal(ber.length, 0, 'string length should have been 0'); + t.equal(ber.readByte(), 0x80, 'Should have been ContextSpecific (choice)'); + t.equal(ber.readByte(), 0, 'Should have been simple bind'); + t.equal(null, ber.readByte(), 'Should be out of data'); + t.end(); +}); + + +test('long string', function(t) { + var buf = new Buffer(256); + var o; + var s = + '2;649;CN=Red Hat CS 71GA Demo,O=Red Hat CS 71GA Demo,C=US;' + + 'CN=RHCS Agent - admin01,UID=admin01,O=redhat,C=US [1] This is ' + + 'Teena Vradmin\'s description.'; + buf[0] = 0x04; + buf[1] = 0x81; + buf[2] = 0x94; + buf.write(s, 3); + var ber = new BerReader(buf.slice(0, 3 + s.length)); + t.equal(ber.readString(), s); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/asn1/tst/ber/writer.test.js b/node_modules/fsevents/node_modules/asn1/tst/ber/writer.test.js new file mode 100644 index 0000000..d87cb7b --- /dev/null +++ b/node_modules/fsevents/node_modules/asn1/tst/ber/writer.test.js @@ -0,0 +1,370 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var test = require('tap').test; +var sys = require('sys'); + +///--- Globals + +var BerWriter; + +var BerReader; + + +///--- Tests + +test('load library', function(t) { + BerWriter = require('../../lib/index').BerWriter; + t.ok(BerWriter); + t.ok(new BerWriter()); + t.end(); +}); + + +test('write byte', function(t) { + var writer = new BerWriter(); + + writer.writeByte(0xC2); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 1, 'Wrong length'); + t.equal(ber[0], 0xC2, 'value wrong'); + + t.end(); +}); + + +test('write 1 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7f); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 3, 'Wrong length for an int: ' + ber.length); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong (2) -> ' + ber[0]); + t.equal(ber[1], 0x01, 'length wrong(1) -> ' + ber[1]); + t.equal(ber[2], 0x7f, 'value wrong(3) -> ' + ber[2]); + + t.end(); +}); + + +test('write 2 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffe); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 4, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x02, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xfe, 'value wrong (byte 2)'); + + t.end(); +}); + + +test('write 3 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffffe); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 5, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x03, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xff, 'value wrong (byte 2)'); + t.equal(ber[4], 0xfe, 'value wrong (byte 3)'); + + t.end(); +}); + + +test('write 4 byte int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(0x7ffffffe); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 6, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x04, 'length wrong'); + t.equal(ber[2], 0x7f, 'value wrong (byte 1)'); + t.equal(ber[3], 0xff, 'value wrong (byte 2)'); + t.equal(ber[4], 0xff, 'value wrong (byte 3)'); + t.equal(ber[5], 0xfe, 'value wrong (byte 4)'); + + t.end(); +}); + + +test('write 1 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-128); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 3, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x01, 'length wrong'); + t.equal(ber[2], 0x80, 'value wrong (byte 1)'); + + t.end(); +}); + + +test('write 2 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-22400); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 4, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x02, 'length wrong'); + t.equal(ber[2], 0xa8, 'value wrong (byte 1)'); + t.equal(ber[3], 0x80, 'value wrong (byte 2)'); + + t.end(); +}); + + +test('write 3 byte negative int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-481653); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 5, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x03, 'length wrong'); + t.equal(ber[2], 0xf8, 'value wrong (byte 1)'); + t.equal(ber[3], 0xa6, 'value wrong (byte 2)'); + t.equal(ber[4], 0x8b, 'value wrong (byte 3)'); + + t.end(); +}); + + +test('write 4 byte negative 
int', function(t) { + var writer = new BerWriter(); + + writer.writeInt(-1522904131); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 6, 'Wrong length for an int'); + t.equal(ber[0], 0x02, 'ASN.1 tag wrong'); + t.equal(ber[1], 0x04, 'length wrong'); + t.equal(ber[2], 0xa5, 'value wrong (byte 1)'); + t.equal(ber[3], 0x3a, 'value wrong (byte 2)'); + t.equal(ber[4], 0x53, 'value wrong (byte 3)'); + t.equal(ber[5], 0xbd, 'value wrong (byte 4)'); + + t.end(); +}); + + +test('write boolean', function(t) { + var writer = new BerWriter(); + + writer.writeBoolean(true); + writer.writeBoolean(false); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 6, 'Wrong length'); + t.equal(ber[0], 0x01, 'tag wrong'); + t.equal(ber[1], 0x01, 'length wrong'); + t.equal(ber[2], 0xff, 'value wrong'); + t.equal(ber[3], 0x01, 'tag wrong'); + t.equal(ber[4], 0x01, 'length wrong'); + t.equal(ber[5], 0x00, 'value wrong'); + + t.end(); +}); + + +test('write string', function(t) { + var writer = new BerWriter(); + writer.writeString('hello world'); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 13, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + +test('write buffer', function(t) { + var writer = new BerWriter(); + // write some stuff to start with + writer.writeString('hello world'); + var ber = writer.buffer; + var buf = new Buffer([0x04, 0x0b, 0x30, 0x09, 0x02, 0x01, 0x0f, 0x01, 0x01, + 0xff, 0x01, 0x01, 0xff]); + writer.writeBuffer(buf.slice(2, buf.length), 0x04); + ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 26, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value'); + t.equal(ber[13], buf[0], 'wrong tag'); + t.equal(ber[14], buf[1], 'wrong length'); + for (var i = 13, j = 0; i < ber.length && j < buf.length; i++, j++) { + t.equal(ber[i], buf[j], 'buffer contents not identical'); + } + t.end(); +}); + +test('write string array', function(t) { + var writer = new BerWriter(); + writer.writeStringArray(['hello world', 'fubar!']); + var ber = writer.buffer; + + t.ok(ber); + + t.equal(ber.length, 21, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value'); + + t.equal(ber[13], 0x04, 'wrong tag'); + t.equal(ber[14], 6, 'wrong length'); + t.equal(ber.slice(15).toString('utf8'), 'fubar!', 'wrong value'); + + t.end(); +}); + + +test('resize internal buffer', function(t) { + var writer = new BerWriter({size: 2}); + writer.writeString('hello world'); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 13, 'wrong length'); + t.equal(ber[0], 0x04, 'wrong tag'); + t.equal(ber[1], 11, 'wrong length'); + t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + +test('sequence', function(t) { + var writer = new BerWriter({size: 25}); + writer.startSequence(); + writer.writeString('hello world'); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + console.log(ber); + t.equal(ber.length, 15, 'wrong length'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 13, 'wrong length'); + t.equal(ber[2], 0x04, 'wrong tag'); + t.equal(ber[3], 11, 'wrong length'); + t.equal(ber.slice(4).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + 
+test('nested sequence', function(t) { + var writer = new BerWriter({size: 25}); + writer.startSequence(); + writer.writeString('hello world'); + writer.startSequence(); + writer.writeString('hello world'); + writer.endSequence(); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 30, 'wrong length'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 28, 'wrong length'); + t.equal(ber[2], 0x04, 'wrong tag'); + t.equal(ber[3], 11, 'wrong length'); + t.equal(ber.slice(4, 15).toString('utf8'), 'hello world', 'wrong value'); + t.equal(ber[15], 0x30, 'wrong tag'); + t.equal(ber[16], 13, 'wrong length'); + t.equal(ber[17], 0x04, 'wrong tag'); + t.equal(ber[18], 11, 'wrong length'); + t.equal(ber.slice(19, 30).toString('utf8'), 'hello world', 'wrong value'); + + t.end(); +}); + + +test('LDAP bind message', function(t) { + var dn = 'cn=foo,ou=unit,o=test'; + var writer = new BerWriter(); + writer.startSequence(); + writer.writeInt(3); // msgid = 3 + writer.startSequence(0x60); // ldap bind + writer.writeInt(3); // ldap v3 + writer.writeString(dn); + writer.writeByte(0x80); + writer.writeByte(0x00); + writer.endSequence(); + writer.endSequence(); + var ber = writer.buffer; + + t.ok(ber); + t.equal(ber.length, 35, 'wrong length (buffer)'); + t.equal(ber[0], 0x30, 'wrong tag'); + t.equal(ber[1], 33, 'wrong length'); + t.equal(ber[2], 0x02, 'wrong tag'); + t.equal(ber[3], 1, 'wrong length'); + t.equal(ber[4], 0x03, 'wrong value'); + t.equal(ber[5], 0x60, 'wrong tag'); + t.equal(ber[6], 28, 'wrong length'); + t.equal(ber[7], 0x02, 'wrong tag'); + t.equal(ber[8], 1, 'wrong length'); + t.equal(ber[9], 0x03, 'wrong value'); + t.equal(ber[10], 0x04, 'wrong tag'); + t.equal(ber[11], dn.length, 'wrong length'); + t.equal(ber.slice(12, 33).toString('utf8'), dn, 'wrong value'); + t.equal(ber[33], 0x80, 'wrong tag'); + t.equal(ber[34], 0x00, 'wrong len'); + + t.end(); +}); + + +test('Write OID', function(t) { + var oid = '1.2.840.113549.1.1.1'; + var writer = new BerWriter(); + writer.writeOID(oid); + + var ber = writer.buffer; + t.ok(ber); + console.log(require('util').inspect(ber)); + console.log(require('util').inspect(new Buffer([0x06, 0x09, 0x2a, 0x86, + 0x48, 0x86, 0xf7, 0x0d, + 0x01, 0x01, 0x01]))); + + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/assert-plus/AUTHORS b/node_modules/fsevents/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/fsevents/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/fsevents/node_modules/assert-plus/CHANGES.md b/node_modules/fsevents/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..d249d9b --- /dev/null +++ b/node_modules/fsevents/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,8 @@ +# assert-plus Changelog + +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/fsevents/node_modules/assert-plus/README.md b/node_modules/fsevents/node_modules/assert-plus/README.md new file mode 100644 index 0000000..0b39593 --- /dev/null +++ b/node_modules/fsevents/node_modules/assert-plus/README.md @@ -0,0 +1,155 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment 
variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. 
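As a quick sketch of the opt-out described above (assuming `assert-plus` is installed; `greet` is a made-up function, and the toggle matches the `NODE_NDEBUG` check in assert.js further down in this diff):

```javascript
var assert = require('assert-plus');

function greet(name) {
  // Throws "name (string) is required" on a normal run;
  // becomes a no-op when the process is started with NODE_NDEBUG=1.
  assert.string(name, 'name');
  return 'hello ' + name;
}

greet(42); // AssertionError normally; returns 'hello 42' under NODE_NDEBUG=1
```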
+ +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regex +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . diff --git a/node_modules/fsevents/node_modules/assert-plus/assert.js b/node_modules/fsevents/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..6bce4d8 --- /dev/null +++ b/node_modules/fsevents/node_modules/assert-plus/assert.js @@ -0,0 +1,206 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? 
typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? +} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + 
out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/fsevents/node_modules/assert-plus/package.json b/node_modules/fsevents/node_modules/assert-plus/package.json new file mode 100644 index 0000000..6ccbac5 --- /dev/null +++ b/node_modules/fsevents/node_modules/assert-plus/package.json @@ -0,0 +1,107 @@ +{ + "_args": [ + [ + "assert-plus@^0.2.0", + "/Users/eshanker/Code/fsevents/node_modules/http-signature" + ] + ], + "_from": "assert-plus@>=0.2.0 <0.3.0", + "_id": "assert-plus@0.2.0", + "_inCache": true, + "_installable": true, + "_location": "/assert-plus", + "_nodeVersion": "0.10.36", + "_npmUser": { + "email": "patrick.f.mooney@gmail.com", + "name": "pfmooney" + }, + "_npmVersion": "3.3.8", + "_phantomChildren": {}, + "_requested": { + "name": "assert-plus", + "raw": "assert-plus@^0.2.0", + "rawSpec": "^0.2.0", + "scope": null, + "spec": ">=0.2.0 <0.3.0", + "type": "range" + }, + "_requiredBy": [ + "/http-signature" + ], + "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz", + "_shasum": "d74e1b87e7affc0db8aadb7021f3fe48101ab234", + "_shrinkwrap": null, + "_spec": "assert-plus@^0.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/http-signature", + "author": { + "email": "mcavage@gmail.com", + "name": "Mark Cavage" + }, + "bugs": { + "url": "https://github.com/mcavage/node-assert-plus/issues" + }, + "contributors": [ + { + "name": "Dave Eddy", + "email": "dave@daveeddy.com" + }, + { + "name": "Fred Kuo", + "email": "fred.kuo@joyent.com" + }, + { + "name": "Lars-Magnus Skog", + "email": "ralphtheninja@riseup.net" + }, + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "Patrick Mooney", + "email": "pmooney@pfmooney.com" + }, + { + "name": "Rob Gulewich", + "email": "robert.gulewich@joyent.com" + } + ], + "dependencies": {}, + "description": "Extra assertions on top of node's assert module", + "devDependencies": { + "faucet": "0.0.1", + "tape": "4.2.2" + }, + "directories": {}, + "dist": { + "shasum": "d74e1b87e7affc0db8aadb7021f3fe48101ab234", + "tarball": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz" + }, + "engines": { + "node": ">=0.8" + }, + "homepage": "https://github.com/mcavage/node-assert-plus#readme", + "license": "MIT", + "main": "./assert.js", + "maintainers": [ + { + "name": "mcavage", + "email": "mcavage@gmail.com" + }, + { + "name": "pfmooney", + "email": "patrick.f.mooney@gmail.com" + } + ], + "name": "assert-plus", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/mcavage/node-assert-plus.git" + }, + "scripts": { + "test": "tape tests/*.js | ./node_modules/.bin/faucet" + }, + "version": "0.2.0" +} diff --git a/node_modules/fsevents/node_modules/async/CHANGELOG.md b/node_modules/fsevents/node_modules/async/CHANGELOG.md new file mode 100644 index 0000000..f15e081 --- /dev/null +++ b/node_modules/fsevents/node_modules/async/CHANGELOG.md @@ -0,0 +1,125 @@ +# v1.5.2 +- Allow using `"consructor"` as an argument in `memoize` (#998) +- Give a better error messsage when `auto` dependency checking fails (#994) +- Various doc updates (#936, #956, #979, #1002) + +# v1.5.1 +- Fix issue with `pause` in `queue` with concurrency enabled (#946) +- `while` and `until` now pass the final result to callback (#963) +- `auto` will properly handle concurrency when 
there is no callback (#966) +- `auto` will now properly stop execution when an error occurs (#988, #993) +- Various doc fixes (#971, #980) + +# v1.5.0 + +- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) (#892) +- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. (#873) +- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks (#637) +- Added `queue#workersList()`, to retrieve the list of currently running tasks. (#891) +- Various code simplifications (#896, #904) +- Various doc fixes :scroll: (#890, #894, #903, #905, #912) + +# v1.4.2 + +- Ensure coverage files don't get published on npm (#879) + +# v1.4.1 + +- Add in overlooked `detectLimit` method (#866) +- Removed unnecessary files from npm releases (#861) +- Removed usage of a reserved word to prevent :boom: in older environments (#870) + +# v1.4.0 + +- `asyncify` now supports promises (#840) +- Added `Limit` versions of `filter` and `reject` (#836) +- Add `Limit` versions of `detect`, `some` and `every` (#828, #829) +- `some`, `every` and `detect` now short circuit early (#828, #829) +- Improve detection of the global object (#804), enabling use in WebWorkers +- `whilst` now called with arguments from iterator (#823) +- `during` now gets called with arguments from iterator (#824) +- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0)) + + +# v1.3.0 + +New Features: +- Added `constant` +- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. (#671, #806) +- Added `during` and `doDuring`, which are like `whilst` with an async truth test. (#800) +- `retry` now accepts an `interval` parameter to specify a delay between retries. (#793) +- `async` should work better in Web Workers due to better `root` detection (#804) +- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` (#642) +- Various internal updates (#786, #801, #802, #803) +- Various doc fixes (#790, #794) + +Bug Fixes: +- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. (#740, #744, #783) + + +# v1.2.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782) + + +# v1.2.0 + +New Features: + +- Added `timesLimit` (#743) +- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. (#747, #772) + +Bug Fixes: + +- Fixed a regression in `each` and family with empty arrays that have additional properties. (#775, #777) + + +# v1.1.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782) + + +# v1.1.0 + +New Features: + +- `cargo` now supports all of the same methods and event callbacks as `queue`. +- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. 
(#769) +- Optimized `map`, `eachOf`, and `waterfall` families of functions +- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array (#667). +- The callback is now optional for the composed results of `compose` and `seq`. (#618) +- Reduced file size by 4kb, (minified version by 1kb) +- Added code coverage through `nyc` and `coveralls` (#768) + +Bug Fixes: + +- `forever` will no longer stack overflow with a synchronous iterator (#622) +- `eachLimit` and other limit functions will stop iterating once an error occurs (#754) +- Always pass `null` in callbacks when there is no error (#439) +- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue (#668) +- `each` and family will properly handle an empty array (#578) +- `eachSeries` and family will finish if the underlying array is modified during execution (#557) +- `queue` will throw if a non-function is passed to `q.push()` (#593) +- Doc fixes (#629, #766) + + +# v1.0.0 + +No known breaking changes, we are simply complying with semver from here on out. + +Changes: + +- Start using a changelog! +- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) (#168 #704 #321) +- Detect deadlocks in `auto` (#663) +- Better support for require.js (#527) +- Throw if queue created with concurrency `0` (#714) +- Fix unneeded iteration in `queue.resume()` (#758) +- Guard against timer mocking overriding `setImmediate` (#609 #611) +- Miscellaneous doc fixes (#542 #596 #615 #628 #631 #690 #729) +- Use single noop function internally (#546) +- Optimize internal `_each`, `_map` and `_keys` functions. diff --git a/node_modules/fsevents/node_modules/async/LICENSE b/node_modules/fsevents/node_modules/async/LICENSE new file mode 100644 index 0000000..8f29698 --- /dev/null +++ b/node_modules/fsevents/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010-2014 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/async/README.md b/node_modules/fsevents/node_modules/async/README.md new file mode 100644 index 0000000..316c405 --- /dev/null +++ b/node_modules/fsevents/node_modules/async/README.md @@ -0,0 +1,1877 @@ +# Async.js + +[![Build Status via Travis CI](https://travis-ci.org/caolan/async.svg?branch=master)](https://travis-ci.org/caolan/async) +[![NPM version](http://img.shields.io/npm/v/async.svg)](https://www.npmjs.org/package/async) +[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master) +[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + + +Async is a utility module which provides straight-forward, powerful functions +for working with asynchronous JavaScript. Although originally designed for +use with [Node.js](http://nodejs.org) and installable via `npm install async`, +it can also be used directly in the browser. + +Async is also installable via: + +- [bower](http://bower.io/): `bower install async` +- [component](https://github.com/component/component): `component install + caolan/async` +- [jam](http://jamjs.org/): `jam install async` +- [spm](http://spmjs.io/): `spm install async` + +Async provides around 20 functions that include the usual 'functional' +suspects (`map`, `reduce`, `filter`, `each`…) as well as some common patterns +for asynchronous control flow (`parallel`, `series`, `waterfall`…). All these +functions assume you follow the Node.js convention of providing a single +callback as the last argument of your `async` function. + + +## Quick Examples + +```javascript +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); + +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); + +async.parallel([ + function(){ ... }, + function(){ ... } +], callback); + +async.series([ + function(){ ... }, + function(){ ... } +]); +``` + +There are many more functions available so take a look at the docs below for a +full list. This module aims to be comprehensive, so if you feel anything is +missing please create a GitHub issue for it. + +## Common Pitfalls [(StackOverflow)](http://stackoverflow.com/questions/tagged/async.js) +### Synchronous iteration functions + +If you get an error like `RangeError: Maximum call stack size exceeded.` or other stack overflow issues when using async, you are likely using a synchronous iterator. By *synchronous* we mean a function that calls its callback on the same tick in the javascript event loop, without doing any I/O or using any timers. Calling many callbacks iteratively will quickly overflow the stack. If you run into this issue, just defer your callback with `async.setImmediate` to start a new call stack on the next tick of the event loop. + +This can also arise by accident if you callback early in certain cases: + +```js +async.eachSeries(hugeArray, function iterator(item, callback) { + if (inCache(item)) { + callback(null, cache[item]); // if many items are cached, you'll overflow + } else { + doSomeIO(item, callback); + } +}, function done() { + //... 
+}); +``` + +Just change it to: + +```js +async.eachSeries(hugeArray, function iterator(item, callback) { + if (inCache(item)) { + async.setImmediate(function () { + callback(null, cache[item]); + }); + } else { + doSomeIO(item, callback); + //... +``` + +Async guards against synchronous functions in some, but not all, cases. If you are still running into stack overflows, you can defer as suggested above, or wrap functions with [`async.ensureAsync`](#ensureAsync) Functions that are asynchronous by their nature do not have this problem and don't need the extra callback deferral. + +If JavaScript's event loop is still a bit nebulous, check out [this article](http://blog.carbonfive.com/2013/10/27/the-javascript-event-loop-explained/) or [this talk](http://2014.jsconf.eu/speakers/philip-roberts-what-the-heck-is-the-event-loop-anyway.html) for more detailed information about how it works. + + +### Multiple callbacks + +Make sure to always `return` when calling a callback early, otherwise you will cause multiple callbacks and unpredictable behavior in many cases. + +```js +async.waterfall([ + function (callback) { + getSomething(options, function (err, result) { + if (err) { + callback(new Error("failed getting something:" + err.message)); + // we should return here + } + // since we did not return, this callback still will be called and + // `processData` will be called twice + callback(null, result); + }); + }, + processData +], done) +``` + +It is always good practice to `return callback(err, result)` whenever a callback call is not the last statement of a function. + + +### Binding a context to an iterator + +This section is really about `bind`, not about `async`. If you are wondering how to +make `async` execute your iterators in a given context, or are confused as to why +a method of another library isn't working as an iterator, study this example: + +```js +// Here is a simple object with an (unnecessarily roundabout) squaring method +var AsyncSquaringLibrary = { + squareExponent: 2, + square: function(number, callback){ + var result = Math.pow(number, this.squareExponent); + setTimeout(function(){ + callback(null, result); + }, 200); + } +}; + +async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){ + // result is [NaN, NaN, NaN] + // This fails because the `this.squareExponent` expression in the square + // function is not evaluated in the context of AsyncSquaringLibrary, and is + // therefore undefined. +}); + +async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){ + // result is [1, 4, 9] + // With the help of bind we can attach a context to the iterator before + // passing it to async. Now the square function will be executed in its + // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent` + // will be as expected. +}); +``` + +## Download + +The source is available for download from +[GitHub](https://github.com/caolan/async/blob/master/lib/async.js). +Alternatively, you can install using Node Package Manager (`npm`): + + npm install async + +As well as using Bower: + + bower install async + +__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed + +## In the Browser + +So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. 
+
+Usage:
+
+```html
+<script type="text/javascript" src="async.js"></script>
+<script type="text/javascript">
+
+    async.map(data, asyncProcess, function(err, results){
+        alert(results);
+    });
+
+</script>
+```
+
+## Documentation
+
+Some functions are also available in the following forms:
+* `<name>Series` - the same as `<name>` but runs only a single async operation at a time
+* `<name>Limit` - the same as `<name>` but runs a maximum of `limit` async operations at a time
+
+### Collections
+
+* [`each`](#each), `eachSeries`, `eachLimit`
+* [`forEachOf`](#forEachOf), `forEachOfSeries`, `forEachOfLimit`
+* [`map`](#map), `mapSeries`, `mapLimit`
+* [`filter`](#filter), `filterSeries`, `filterLimit`
+* [`reject`](#reject), `rejectSeries`, `rejectLimit`
+* [`reduce`](#reduce), [`reduceRight`](#reduceRight)
+* [`detect`](#detect), `detectSeries`, `detectLimit`
+* [`sortBy`](#sortBy)
+* [`some`](#some), `someLimit`
+* [`every`](#every), `everyLimit`
+* [`concat`](#concat), `concatSeries`
+
+### Control Flow
+
+* [`series`](#seriestasks-callback)
+* [`parallel`](#parallel), `parallelLimit`
+* [`whilst`](#whilst), [`doWhilst`](#doWhilst)
+* [`until`](#until), [`doUntil`](#doUntil)
+* [`during`](#during), [`doDuring`](#doDuring)
+* [`forever`](#forever)
+* [`waterfall`](#waterfall)
+* [`compose`](#compose)
+* [`seq`](#seq)
+* [`applyEach`](#applyEach), `applyEachSeries`
+* [`queue`](#queue), [`priorityQueue`](#priorityQueue)
+* [`cargo`](#cargo)
+* [`auto`](#auto)
+* [`retry`](#retry)
+* [`iterator`](#iterator)
+* [`times`](#times), `timesSeries`, `timesLimit`
+
+### Utils
+
+* [`apply`](#apply)
+* [`nextTick`](#nextTick)
+* [`memoize`](#memoize)
+* [`unmemoize`](#unmemoize)
+* [`ensureAsync`](#ensureAsync)
+* [`constant`](#constant)
+* [`asyncify`](#asyncify)
+* [`wrapSync`](#wrapSync)
+* [`log`](#log)
+* [`dir`](#dir)
+* [`noConflict`](#noConflict)
+
+## Collections
+
+
+### each(arr, iterator, [callback])
+
+Applies the function `iterator` to each item in `arr`, in parallel.
+The `iterator` is called with an item from the list, and a callback for when it
+has finished. If the `iterator` passes an error to its `callback`, the main
+`callback` (for the `each` function) is immediately called with the error.
+
+Note, that since this function applies `iterator` to each item in parallel,
+there is no guarantee that the iterator functions will complete in order.
+
+__Arguments__
+
+* `arr` - An array to iterate over.
+* `iterator(item, callback)` - A function to apply to each item in `arr`.
+  The iterator is passed a `callback(err)` which must be called once it has
+  completed. If no error has occurred, the `callback` should be run without
+  arguments or with an explicit `null` argument. The array index is not passed
+  to the iterator. If you need the index, use [`forEachOf`](#forEachOf).
+* `callback(err)` - *Optional* A callback which is called when all `iterator` functions
+  have finished, or an error occurs.
+
+__Examples__
+
+```js
+// assuming openFiles is an array of file names and saveFile is a function
+// to save the modified contents of that file:
+
+async.each(openFiles, saveFile, function(err){
+    // if any of the saves produced an error, err would equal that error
+});
+```
+
+```js
+// assuming openFiles is an array of file names
+
+async.each(openFiles, function(file, callback) {
+
+  // Perform operation on file here.
+ console.log('Processing file ' + file); + + if( file.length > 32 ) { + console.log('This file name is too long'); + callback('File name too long'); + } else { + // Do work to process file here + console.log('File processed'); + callback(); + } +}, function(err){ + // if any of the file processing produced an error, err would equal that error + if( err ) { + // One of the iterations produced an error. + // All processing will now stop. + console.log('A file failed to process'); + } else { + console.log('All files have been processed successfully'); + } +}); +``` + +__Related__ + +* eachSeries(arr, iterator, [callback]) +* eachLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + + + +### forEachOf(obj, iterator, [callback]) + +Like `each`, except that it iterates over objects, and passes the key as the second argument to the iterator. + +__Arguments__ + +* `obj` - An object or array to iterate over. +* `iterator(item, key, callback)` - A function to apply to each item in `obj`. +The `key` is the item's key, or index in the case of an array. The iterator is +passed a `callback(err)` which must be called once it has completed. If no +error has occurred, the callback should be run without arguments or with an +explicit `null` argument. +* `callback(err)` - *Optional* A callback which is called when all `iterator` functions have finished, or an error occurs. + +__Example__ + +```js +var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; +var configs = {}; + +async.forEachOf(obj, function (value, key, callback) { + fs.readFile(__dirname + value, "utf8", function (err, data) { + if (err) return callback(err); + try { + configs[key] = JSON.parse(data); + } catch (e) { + return callback(e); + } + callback(); + }) +}, function (err) { + if (err) console.error(err.message); + // configs is now a map of JSON data + doSomethingWith(configs); +}) +``` + +__Related__ + +* forEachOfSeries(obj, iterator, [callback]) +* forEachOfLimit(obj, limit, iterator, [callback]) + +--------------------------------------- + + +### map(arr, iterator, [callback]) + +Produces a new array of values by mapping each value in `arr` through +the `iterator` function. The `iterator` is called with an item from `arr` and a +callback for when it has finished processing. Each of these callback takes 2 arguments: +an `error`, and the transformed item from `arr`. If `iterator` passes an error to its +callback, the main `callback` (for the `map` function) is immediately called with the error. + +Note, that since this function applies the `iterator` to each item in parallel, +there is no guarantee that the `iterator` functions will complete in order. +However, the results array will be in the same order as the original `arr`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, transformed)` which must be called once + it has completed with an error (which can be `null`) and a transformed item. +* `callback(err, results)` - *Optional* A callback which is called when all `iterator` + functions have finished, or an error occurs. Results is an array of the + transformed items from the `arr`. 
+ +__Example__ + +```js +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +__Related__ +* mapSeries(arr, iterator, [callback]) +* mapLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + + +### filter(arr, iterator, [callback]) + +__Alias:__ `select` + +Returns a new array of all the values in `arr` which pass an async truth test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. This operation is +performed in parallel, but the results array will be in the same order as the +original. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The `iterator` is passed a `callback(truthValue)`, which must be called with a + boolean argument once it has completed. +* `callback(results)` - *Optional* A callback which is called after all the `iterator` + functions have finished. + +__Example__ + +```js +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); +``` + +__Related__ + +* filterSeries(arr, iterator, [callback]) +* filterLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### reject(arr, iterator, [callback]) + +The opposite of [`filter`](#filter). Removes values that pass an `async` truth test. + +__Related__ + +* rejectSeries(arr, iterator, [callback]) +* rejectLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### reduce(arr, memo, iterator, [callback]) + +__Aliases:__ `inject`, `foldl` + +Reduces `arr` into a single value using an async `iterator` to return +each successive step. `memo` is the initial state of the reduction. +This function only operates in series. + +For performance reasons, it may make sense to split a call to this function into +a parallel map, and then use the normal `Array.prototype.reduce` on the results. +This function is for situations where each step in the reduction needs to be async; +if you can get the data before reducing it, then it's probably a good idea to do so. + +__Arguments__ + +* `arr` - An array to iterate over. +* `memo` - The initial state of the reduction. +* `iterator(memo, item, callback)` - A function applied to each item in the + array to produce the next step in the reduction. The `iterator` is passed a + `callback(err, reduction)` which accepts an optional error as its first + argument, and the state of the reduction as the second. If an error is + passed to the callback, the reduction is stopped and the main `callback` is + immediately called with the error. +* `callback(err, result)` - *Optional* A callback which is called after all the `iterator` + functions have finished. Result is the reduced value. + +__Example__ + +```js +async.reduce([1,2,3], 0, function(memo, item, callback){ + // pointless async: + process.nextTick(function(){ + callback(null, memo + item) + }); +}, function(err, result){ + // result is now equal to the last value of memo, which is 6 +}); +``` + +--------------------------------------- + + +### reduceRight(arr, memo, iterator, [callback]) + +__Alias:__ `foldr` + +Same as [`reduce`](#reduce), only operates on `arr` in reverse order. 
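+
+As an illustrative sketch (not part of the original documentation), reversing a
+list of strings shows the right-to-left visiting order; it reuses the same
+iterator shape as the `reduce` example above:
+
+```js
+// Illustrative only: items are visited from right to left, so the memo
+// accumulates in reverse order.
+async.reduceRight(['a', 'b', 'c'], '', function(memo, item, callback){
+    process.nextTick(function(){
+        callback(null, memo + item);
+    });
+}, function(err, result){
+    // result is now 'cba'
+});
+```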
+ + +--------------------------------------- + + +### detect(arr, iterator, [callback]) + +Returns the first value in `arr` that passes an async truth test. The +`iterator` is applied in parallel, meaning the first iterator to return `true` will +fire the detect `callback` with that result. That means the result might not be +the first item in the original `arr` (in terms of order) that passes the test. + +If order within the original `arr` is important, then look at [`detectSeries`](#detectSeries). + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The iterator is passed a `callback(truthValue)` which must be called with a + boolean argument once it has completed. **Note: this callback does not take an error as its first argument.** +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `true`, or after all the `iterator` functions have finished. Result will be + the first item in the array that passes the truth test (iterator) or the + value `undefined` if none passed. **Note: this callback does not take an error as its first argument.** + +__Example__ + +```js +async.detect(['file1','file2','file3'], fs.exists, function(result){ + // result now equals the first file in the list that exists +}); +``` + +__Related__ + +* detectSeries(arr, iterator, [callback]) +* detectLimit(arr, limit, iterator, [callback]) + +--------------------------------------- + + +### sortBy(arr, iterator, [callback]) + +Sorts a list by the results of running each `arr` value through an async `iterator`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, sortValue)` which must be called once it + has completed with an error (which can be `null`) and a value to use as the sort + criteria. +* `callback(err, results)` - *Optional* A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is the items from + the original `arr` sorted by the values returned by the `iterator` calls. + +__Example__ + +```js +async.sortBy(['file1','file2','file3'], function(file, callback){ + fs.stat(file, function(err, stats){ + callback(err, stats.mtime); + }); +}, function(err, results){ + // results is now the original array of files sorted by + // modified date +}); +``` + +__Sort Order__ + +By modifying the callback parameter the sorting order can be influenced: + +```js +//ascending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x); +}, function(err,result){ + //result callback +} ); + +//descending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x*-1); //<- x*-1 instead of x, turns the order around +}, function(err,result){ + //result callback +} ); +``` + +--------------------------------------- + + +### some(arr, iterator, [callback]) + +__Alias:__ `any` + +Returns `true` if at least one element in the `arr` satisfies an async test. +_The callback for each iterator call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. Once any iterator +call returns `true`, the main `callback` is immediately called. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. 
The iterator is passed a `callback(truthValue)`` which must be + called with a boolean argument once it has completed. +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `true`, or after all the iterator functions have finished. Result will be + either `true` or `false` depending on the values of the async tests. + + **Note: the callbacks do not take an error as their first argument.** +__Example__ + +```js +async.some(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then at least one of the files exists +}); +``` + +__Related__ + +* someLimit(arr, limit, iterator, callback) + +--------------------------------------- + + +### every(arr, iterator, [callback]) + +__Alias:__ `all` + +Returns `true` if every element in `arr` satisfies an async test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. The iterator is passed a `callback(truthValue)` which must be + called with a boolean argument once it has completed. +* `callback(result)` - *Optional* A callback which is called as soon as any iterator returns + `false`, or after all the iterator functions have finished. Result will be + either `true` or `false` depending on the values of the async tests. + + **Note: the callbacks do not take an error as their first argument.** + +__Example__ + +```js +async.every(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then every file exists +}); +``` + +__Related__ + +* everyLimit(arr, limit, iterator, callback) + +--------------------------------------- + + +### concat(arr, iterator, [callback]) + +Applies `iterator` to each item in `arr`, concatenating the results. Returns the +concatenated list. The `iterator`s are called in parallel, and the results are +concatenated as they return. There is no guarantee that the results array will +be returned in the original order of `arr` passed to the `iterator` function. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, results)` which must be called once it + has completed with an error (which can be `null`) and an array of results. +* `callback(err, results)` - *Optional* A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is an array containing + the concatenated results of the `iterator` function. + +__Example__ + +```js +async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){ + // files is now a list of filenames that exist in the 3 directories +}); +``` + +__Related__ + +* concatSeries(arr, iterator, [callback]) + + +## Control Flow + + +### series(tasks, [callback]) + +Run the functions in the `tasks` array in series, each one running once the previous +function has completed. If any functions in the series pass an error to its +callback, no more functions are run, and `callback` is immediately called with the value of the error. +Otherwise, `callback` receives an array of results when `tasks` have completed. + +It is also possible to use an object instead of an array. 
Each property will be +run as a function, and the results will be passed to the final `callback` as an object +instead of an array. This can be a more readable way of handling results from +[`series`](#series). + +**Note** that while many implementations preserve the order of object properties, the +[ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) +explicitly states that + +> The mechanics and order of enumerating the properties is not specified. + +So if you rely on the order in which your series of functions are executed, and want +this to work on all platforms, consider using an array. + +__Arguments__ + +* `tasks` - An array or object containing functions to run, each function is passed + a `callback(err, result)` it must call on completion with an error `err` (which can + be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the `task` callbacks. + +__Example__ + +```js +async.series([ + function(callback){ + // do some stuff ... + callback(null, 'one'); + }, + function(callback){ + // do some more stuff ... + callback(null, 'two'); + } +], +// optional callback +function(err, results){ + // results is now equal to ['one', 'two'] +}); + + +// an example using an object instead of an array +async.series({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equal to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its +callback, the main `callback` is immediately called with the value of the error. +Once the `tasks` have completed, the results are passed to the final `callback` as an +array. + +**Note:** `parallel` is about kicking-off I/O tasks in parallel, not about parallel execution of code. If your tasks do not use any timers or perform any I/O, they will actually be executed in series. Any synchronous setup sections for each task will happen one after the other. JavaScript remains single-threaded. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final `callback` as an object +instead of an array. This can be a more readable way of handling results from +[`parallel`](#parallel). + + +__Arguments__ + +* `tasks` - An array or object containing functions to run. Each function is passed + a `callback(err, result)` which it must call on completion with an error `err` + (which can be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed successfully. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. 
+ +__Example__ + +```js +async.parallel([ + function(callback){ + setTimeout(function(){ + callback(null, 'one'); + }, 200); + }, + function(callback){ + setTimeout(function(){ + callback(null, 'two'); + }, 100); + } +], +// optional callback +function(err, results){ + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}); + + +// an example using an object instead of an array +async.parallel({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equals to: {one: 1, two: 2} +}); +``` + +__Related__ + +* parallelLimit(tasks, limit, [callback]) + +--------------------------------------- + + +### whilst(test, fn, callback) + +Repeatedly call `fn`, while `test` returns `true`. Calls `callback` when stopped, +or an error occurs. + +__Arguments__ + +* `test()` - synchronous truth test to perform before each execution of `fn`. +* `fn(callback)` - A function which is called each time `test` passes. The function is + passed a `callback(err)`, which must be called once it has completed with an + optional `err` argument. +* `callback(err, [results])` - A callback which is called after the test + function has failed and repeated execution of `fn` has stopped. `callback` + will be passed an error and any arguments passed to the final `fn`'s callback. + +__Example__ + +```js +var count = 0; + +async.whilst( + function () { return count < 5; }, + function (callback) { + count++; + setTimeout(function () { + callback(null, count); + }, 1000); + }, + function (err, n) { + // 5 seconds have passed, n = 5 + } +); +``` + +--------------------------------------- + + +### doWhilst(fn, test, callback) + +The post-check version of [`whilst`](#whilst). To reflect the difference in +the order of operations, the arguments `test` and `fn` are switched. + +`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + +--------------------------------------- + + +### until(test, fn, callback) + +Repeatedly call `fn` until `test` returns `true`. Calls `callback` when stopped, +or an error occurs. `callback` will be passed an error and any arguments passed +to the final `fn`'s callback. + +The inverse of [`whilst`](#whilst). + +--------------------------------------- + + +### doUntil(fn, test, callback) + +Like [`doWhilst`](#doWhilst), except the `test` is inverted. Note the argument ordering differs from `until`. + +--------------------------------------- + + +### during(test, fn, callback) + +Like [`whilst`](#whilst), except the `test` is an asynchronous function that is passed a callback in the form of `function (err, truth)`. If error is passed to `test` or `fn`, the main callback is immediately called with the value of the error. + +__Example__ + +```js +var count = 0; + +async.during( + function (callback) { + return callback(null, count < 5); + }, + function (callback) { + count++; + setTimeout(callback, 1000); + }, + function (err) { + // 5 seconds have passed + } +); +``` + +--------------------------------------- + + +### doDuring(fn, test, callback) + +The post-check version of [`during`](#during). To reflect the difference in +the order of operations, the arguments `test` and `fn` are switched. + +Also a version of [`doWhilst`](#doWhilst) with asynchronous `test` function. 
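+
+As an illustrative sketch (not part of the original documentation), the `during`
+example above can be rewritten with `doDuring`; the only difference is that the
+body runs once before the first truth test:
+
+```js
+var count = 0;
+
+async.doDuring(
+    function (callback) {
+        count++;
+        setTimeout(callback, 1000);
+    },
+    function (callback) {
+        return callback(null, count < 5);
+    },
+    function (err) {
+        // 5 seconds have passed; the body ran before the first test
+    }
+);
+```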
+ +--------------------------------------- + + +### forever(fn, [errback]) + +Calls the asynchronous function `fn` with a callback parameter that allows it to +call itself again, in series, indefinitely. + +If an error is passed to the callback then `errback` is called with the +error, and execution stops, otherwise it will never be called. + +```js +async.forever( + function(next) { + // next is suitable for passing to things that need a callback(err [, whatever]); + // it will result in this function being called again. + }, + function(err) { + // if next is called with a value in its first parameter, it will appear + // in here as 'err', and execution will stop. + } +); +``` + +--------------------------------------- + + +### waterfall(tasks, [callback]) + +Runs the `tasks` array of functions in series, each passing their results to the next in +the array. However, if any of the `tasks` pass an error to their own callback, the +next function is not executed, and the main `callback` is immediately called with +the error. + +__Arguments__ + +* `tasks` - An array of functions to run, each function is passed a + `callback(err, result1, result2, ...)` it must call on completion. The first + argument is an error (which can be `null`) and any further arguments will be + passed as arguments in order to the next task. +* `callback(err, [results])` - An optional callback to run once all the functions + have completed. This will be passed the results of the last task's callback. + + + +__Example__ + +```js +async.waterfall([ + function(callback) { + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); + }, + function(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); + } +], function (err, result) { + // result now equals 'done' +}); +``` +Or, with named functions: + +```js +async.waterfall([ + myFirstFunction, + mySecondFunction, + myLastFunction, +], function (err, result) { + // result now equals 'done' +}); +function myFirstFunction(callback) { + callback(null, 'one', 'two'); +} +function mySecondFunction(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); +} +function myLastFunction(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); +} +``` + +Or, if you need to pass any argument to the first function: + +```js +async.waterfall([ + async.apply(myFirstFunction, 'zero'), + mySecondFunction, + myLastFunction, +], function (err, result) { + // result now equals 'done' +}); +function myFirstFunction(arg1, callback) { + // arg1 now equals 'zero' + callback(null, 'one', 'two'); +} +function mySecondFunction(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); +} +function myLastFunction(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); +} +``` + +--------------------------------------- + +### compose(fn1, fn2...) + +Creates a function which is a composition of the passed asynchronous +functions. Each function consumes the return value of the function that +follows. Composing functions `f()`, `g()`, and `h()` would produce the result of +`f(g(h()))`, only this version uses callbacks to obtain the return values. + +Each function is executed with the `this` binding of the composed function. 
+ +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +function add1(n, callback) { + setTimeout(function () { + callback(null, n + 1); + }, 10); +} + +function mul3(n, callback) { + setTimeout(function () { + callback(null, n * 3); + }, 10); +} + +var add1mul3 = async.compose(mul3, add1); + +add1mul3(4, function (err, result) { + // result now equals 15 +}); +``` + +--------------------------------------- + +### seq(fn1, fn2...) + +Version of the compose function that is more natural to read. +Each function consumes the return value of the previous function. +It is the equivalent of [`compose`](#compose) with the arguments reversed. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +// Requires lodash (or underscore), express3 and dresende's orm2. +// Part of an app, that fetches cats of the logged user. +// This example uses `seq` function to avoid overnesting and error +// handling clutter. +app.get('/cats', function(request, response) { + var User = request.models.User; + async.seq( + _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + function(user, fn) { + user.getCats(fn); // 'getCats' has signature (callback(err, data)) + } + )(req.session.user_id, function (err, cats) { + if (err) { + console.error(err); + response.json({ status: 'error', message: err.message }); + } else { + response.json({ status: 'ok', message: 'Cats found', data: cats }); + } + }); +}); +``` + +--------------------------------------- + +### applyEach(fns, args..., callback) + +Applies the provided arguments to each function in the array, calling +`callback` after all functions have completed. If you only provide the first +argument, then it will return a function which lets you pass in the +arguments as if it were a single function call. + +__Arguments__ + +* `fns` - the asynchronous functions to all call with the same arguments +* `args...` - any number of separate arguments to pass to the function +* `callback` - the final argument should be the callback, called when all + functions have completed processing + + +__Example__ + +```js +async.applyEach([enableSearch, updateSchema], 'bucket', callback); + +// partial application example: +async.each( + buckets, + async.applyEach([enableSearch, updateSchema]), + callback +); +``` + +__Related__ + +* applyEachSeries(tasks, args..., [callback]) + +--------------------------------------- + + +### queue(worker, [concurrency]) + +Creates a `queue` object with the specified `concurrency`. Tasks added to the +`queue` are processed in parallel (up to the `concurrency` limit). If all +`worker`s are in progress, the task is queued until one becomes available. +Once a `worker` completes a `task`, that `task`'s callback is called. + +__Arguments__ + +* `worker(task, callback)` - An asynchronous function for processing a queued + task, which must call its `callback(err)` argument when finished, with an + optional `error` as an argument. If you want to handle errors from an individual task, pass a callback to `q.push()`. +* `concurrency` - An `integer` for determining how many `worker` functions should be + run in parallel. If omitted, the concurrency defaults to `1`. If the concurrency is `0`, an error is thrown. 
+ +__Queue objects__ + +The `queue` object returned by this function has the following properties and +methods: + +* `length()` - a function returning the number of items waiting to be processed. +* `started` - a function returning whether or not any items have been pushed and processed by the queue +* `running()` - a function returning the number of items currently being processed. +* `workersList()` - a function returning the array of items currently being processed. +* `idle()` - a function returning false if there are items waiting or being processed, or true if not. +* `concurrency` - an integer for determining how many `worker` functions should be + run in parallel. This property can be changed after a `queue` is created to + alter the concurrency on-the-fly. +* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once + the `worker` has finished processing the task. Instead of a single task, a `tasks` array + can be submitted. The respective callback is used for every task in the list. +* `unshift(task, [callback])` - add a new task to the front of the `queue`. +* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit, + and further tasks will be queued. +* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`. +* `paused` - a boolean for determining whether the queue is in a paused state +* `pause()` - a function that pauses the processing of tasks until `resume()` is called. +* `resume()` - a function that resumes the processing of queued tasks when the queue is paused. +* `kill()` - a function that removes the `drain` callback and empties remaining tasks from the queue forcing it to go idle. + +__Example__ + +```js +// create a queue object with concurrency 2 + +var q = async.queue(function (task, callback) { + console.log('hello ' + task.name); + callback(); +}, 2); + + +// assign a callback +q.drain = function() { + console.log('all items have been processed'); +} + +// add some items to the queue + +q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); + +// add some items to the queue (batch-wise) + +q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) { + console.log('finished processing item'); +}); + +// add some items to the front of the queue + +q.unshift({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +``` + + +--------------------------------------- + + +### priorityQueue(worker, concurrency) + +The same as [`queue`](#queue) only tasks are assigned a priority and completed in ascending priority order. There are two differences between `queue` and `priorityQueue` objects: + +* `push(task, priority, [callback])` - `priority` should be a number. If an array of + `tasks` is given, all tasks will be assigned the same priority. +* The `unshift` method was removed. + +--------------------------------------- + + +### cargo(worker, [payload]) + +Creates a `cargo` object with the specified payload. Tasks added to the +cargo will be processed altogether (up to the `payload` limit). If the +`worker` is in progress, the task is queued until it becomes available. Once +the `worker` has completed some tasks, each callback of those tasks is called. 
+Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) for how `cargo` and `queue` work.
+
+While [queue](#queue) passes only one task to one of a group of workers
+at a time, cargo passes an array of tasks to a single worker, repeating
+when the worker is finished.
+
+__Arguments__
+
+* `worker(tasks, callback)` - An asynchronous function for processing an array of
+  queued tasks, which must call its `callback(err)` argument when finished, with
+  an optional `err` argument.
+* `payload` - An optional `integer` for determining how many tasks should be
+  processed per round; if omitted, the default is unlimited.
+
+__Cargo objects__
+
+The `cargo` object returned by this function has the following properties and
+methods:
+
+* `length()` - A function returning the number of items waiting to be processed.
+* `payload` - An `integer` for determining how many tasks should be
+  processed per round. This property can be changed after a `cargo` is created to
+  alter the payload on-the-fly.
+* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called
+  once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
+  can be submitted. The respective callback is used for every task in the list.
+* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued.
+* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`.
+* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`.
+* `idle()`, `pause()`, `resume()`, `kill()` - cargo inherits all of the same methods and event callbacks as [`queue`](#queue)
+
+__Example__
+
+```js
+// create a cargo object with payload 2
+
+var cargo = async.cargo(function (tasks, callback) {
+    for(var i=0; i<tasks.length; i++){
+        console.log('hello ' + tasks[i].name);
+    }
+    callback();
+}, 2);
+
+// add some items
+
+cargo.push({name: 'foo'}, function (err) {
+    console.log('finished processing foo');
+});
+cargo.push({name: 'bar'}, function (err) {
+    console.log('finished processing bar');
+});
+```
+
+---------------------------------------
+
+
+### auto(tasks, [concurrency], [callback])
+
+Determines the best order for running the functions in `tasks`, based on their requirements. Each function can optionally depend on other functions being completed first, and each function is run as soon as its requirements are satisfied.
+
+If any of the functions pass an error to their callback, the `auto` sequence will stop. Further tasks will not execute (so any other functions depending on it will not run), and the main `callback` is immediately called with the error. Functions also receive an object containing the results of functions which have completed so far.
+
+Note, all functions are called with a `results` object as a second argument,
+so it is unsafe to pass functions in the `tasks` object which cannot handle the
+extra argument.
+ +For example, this snippet of code: + +```js +async.auto({ + readData: async.apply(fs.readFile, 'data.txt', 'utf-8') +}, callback); +``` + +will have the effect of calling `readFile` with the results object as the last +argument, which will fail: + +```js +fs.readFile('data.txt', 'utf-8', cb, {}); +``` + +Instead, wrap the call to `readFile` in a function which does not forward the +`results` object: + +```js +async.auto({ + readData: function(cb, results){ + fs.readFile('data.txt', 'utf-8', cb); + } +}, callback); +``` + +__Arguments__ + +* `tasks` - An object. Each of its properties is either a function or an array of + requirements, with the function itself the last item in the array. The object's key + of a property serves as the name of the task defined by that property, + i.e. can be used when specifying requirements for other tasks. + The function receives two arguments: (1) a `callback(err, result)` which must be + called when finished, passing an `error` (which can be `null`) and the result of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions. +* `concurrency` - An optional `integer` for determining the maximum number of tasks that can be run in parallel. By default, as many as possible. +* `callback(err, results)` - An optional callback which is called when all the + tasks have been completed. It receives the `err` argument if any `tasks` + pass an error to their callback. Results are always returned; however, if + an error occurs, no further `tasks` will be performed, and the results + object will only contain partial results. + + +__Example__ + +```js +async.auto({ + get_data: function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + make_folder: function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + }, + write_file: ['get_data', 'make_folder', function(callback, results){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + callback(null, 'filename'); + }], + email_link: ['write_file', function(callback, results){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + // results.write_file contains the filename returned by write_file. 
+ callback(null, {'file':results.write_file, 'email':'user@example.com'}); + }] +}, function(err, results) { + console.log('err = ', err); + console.log('results = ', results); +}); +``` + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + +```js +async.parallel([ + function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + } +], +function(err, results){ + async.series([ + function(callback){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + results.push('filename'); + callback(null); + }, + function(callback){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + callback(null, {'file':results.pop(), 'email':'user@example.com'}); + } + ]); +}); +``` + +For a complicated series of `async` tasks, using the [`auto`](#auto) function makes adding +new tasks much easier (and the code more readable). + + +--------------------------------------- + + +### retry([opts = {times: 5, interval: 0}| 5], task, [callback]) + +Attempts to get a successful response from `task` no more than `times` times before +returning an error. If the task is successful, the `callback` will be passed the result +of the successful task. If all attempts fail, the callback will be passed the error and +result (if any) of the final attempt. + +__Arguments__ + +* `opts` - Can be either an object with `times` and `interval` or a number. + * `times` - The number of attempts to make before giving up. The default is `5`. + * `interval` - The time to wait between retries, in milliseconds. The default is `0`. + * If `opts` is a number, the number specifies the number of times to retry, with the default interval of `0`. +* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)` + which must be called when finished, passing `err` (which can be `null`) and the `result` of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions (if nested inside another control flow). +* `callback(err, results)` - An optional callback which is called when the + task has succeeded, or after the final failed attempt. It receives the `err` and `result` arguments of the last attempt at completing the `task`. 
+ +The [`retry`](#retry) function can be used as a stand-alone control flow by passing a callback, as shown below: + +```js +// try calling apiMethod 3 times +async.retry(3, apiMethod, function(err, result) { + // do something with the result +}); +``` + +```js +// try calling apiMethod 3 times, waiting 200 ms between each retry +async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + // do something with the result +}); +``` + +```js +// try calling apiMethod the default 5 times no delay between each retry +async.retry(apiMethod, function(err, result) { + // do something with the result +}); +``` + +It can also be embedded within other control flow functions to retry individual methods +that are not as reliable, like this: + +```js +async.auto({ + users: api.getUsers.bind(api), + payments: async.retry(3, api.getPayments.bind(api)) +}, function(err, results) { + // do something with the results +}); +``` + + +--------------------------------------- + + +### iterator(tasks) + +Creates an iterator function which calls the next function in the `tasks` array, +returning a continuation to call the next one after that. It's also possible to +“peek” at the next iterator with `iterator.next()`. + +This function is used internally by the `async` module, but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* `tasks` - An array of functions to run. + +__Example__ + +```js +var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } +]); + +node> var iterator2 = iterator(); +'one' +node> var iterator3 = iterator2(); +'two' +node> iterator3(); +'three' +node> var nextfn = iterator2.next(); +node> nextfn(); +'three' +``` + +--------------------------------------- + + +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied. + +Useful as a shorthand when combined with other control flow functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to automatically apply when the + continuation is called. + +__Example__ + +```js +// using apply + +async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), +]); + + +// the same process without using apply + +async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + } +]); +``` + +It's possible to pass any number of additional arguments when calling the +continuation: + +```js +node> var fn = async.apply(sys.puts, 'one'); +node> fn('two', 'three'); +one +two +three +``` + +--------------------------------------- + + +### nextTick(callback), setImmediate(callback) + +Calls `callback` on a later loop around the event loop. In Node.js this just +calls `process.nextTick`; in the browser it falls back to `setImmediate(callback)` +if available, otherwise `setTimeout(callback, 0)`, which means other higher priority +events may precede the execution of `callback`. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* `callback` - The function to call on a later loop around the event loop. 
+ +__Example__ + +```js +var call_order = []; +async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two'] +}); +call_order.push('one') +``` + + +### times(n, iterator, [callback]) + +Calls the `iterator` function `n` times, and accumulates results in the same manner +you would use with [`map`](#map). + +__Arguments__ + +* `n` - The number of times to run the function. +* `iterator` - The function to call `n` times. +* `callback` - see [`map`](#map) + +__Example__ + +```js +// Pretend this is some complicated async factory +var createUser = function(id, callback) { + callback(null, { + id: 'user' + id + }) +} +// generate 5 users +async.times(5, function(n, next){ + createUser(n, function(err, user) { + next(err, user) + }) +}, function(err, users) { + // we should now have 5 users +}); +``` + +__Related__ + +* timesSeries(n, iterator, [callback]) +* timesLimit(n, limit, iterator, [callback]) + + +## Utils + + +### memoize(fn, [hasher]) + +Caches the results of an `async` function. When creating a hash to store function +results against, the callback is omitted from the hash and an optional hash +function can be used. + +If no hash function is specified, the first argument is used as a hash key, which may work reasonably if it is a string or a data type that converts to a distinct string. Note that objects and arrays will not behave reasonably. Neither will cases where the other arguments are significant. In such cases, specify your own hash function. + +The cache of results is exposed as the `memo` property of the function returned +by `memoize`. + +__Arguments__ + +* `fn` - The function to proxy and cache results from. +* `hasher` - An optional function for generating a custom hash for storing + results. It has all the arguments applied to it apart from the callback, and + must be synchronous. + +__Example__ + +```js +var slow_fn = function (name, callback) { + // do something + callback(null, result); +}; +var fn = async.memoize(slow_fn); + +// fn can now be used as if it were slow_fn +fn('some name', function () { + // callback +}); +``` + + +### unmemoize(fn) + +Undoes a [`memoize`](#memoize)d function, reverting it to the original, unmemoized +form. Handy for testing. + +__Arguments__ + +* `fn` - the memoized function + +--------------------------------------- + + +### ensureAsync(fn) + +Wrap an async function and ensure it calls its callback on a later tick of the event loop. If the function already calls its callback on a next tick, no extra deferral is added. This is useful for preventing stack overflows (`RangeError: Maximum call stack size exceeded`) and generally keeping [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) contained. + +__Arguments__ + +* `fn` - an async function, one that expects a node-style callback as its last argument + +Returns a wrapped function with the exact same call signature as the function passed in. + +__Example__ + +```js +function sometimesAsync(arg, callback) { + if (cache[arg]) { + return callback(null, cache[arg]); // this would be synchronous!! + } else { + doSomeIO(arg, callback); // this IO would be asynchronous + } +} + +// this has a risk of stack overflows if many results are cached in a row +async.mapSeries(args, sometimesAsync, done); + +// this will defer sometimesAsync's callback if necessary, +// preventing stack overflows +async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + +``` + +--------------------------------------- + + +### constant(values...) 
+ +Returns a function that when called, calls-back with the values provided. Useful as the first function in a `waterfall`, or for plugging values in to `auto`. + +__Example__ + +```js +async.waterfall([ + async.constant(42), + function (value, next) { + // value === 42 + }, + //... +], callback); + +async.waterfall([ + async.constant(filename, "utf8"), + fs.readFile, + function (fileData, next) { + //... + } + //... +], callback); + +async.auto({ + hostname: async.constant("https://server.net/"), + port: findFreePort, + launchServer: ["hostname", "port", function (cb, options) { + startServer(options, cb); + }], + //... +}, callback); + +``` + +--------------------------------------- + + + +### asyncify(func) + +__Alias:__ `wrapSync` + +Take a sync function and make it async, passing its return value to a callback. This is useful for plugging sync functions into a waterfall, series, or other async functions. Any arguments passed to the generated function will be passed to the wrapped function (except for the final callback argument). Errors thrown will be passed to the callback. + +__Example__ + +```js +async.waterfall([ + async.apply(fs.readFile, filename, "utf8"), + async.asyncify(JSON.parse), + function (data, next) { + // data is the result of parsing the text. + // If there was a parsing error, it would have been caught. + } +], callback) +``` + +If the function passed to `asyncify` returns a Promise, that promises's resolved/rejected state will be used to call the callback, rather than simply the synchronous return value. Example: + +```js +async.waterfall([ + async.apply(fs.readFile, filename, "utf8"), + async.asyncify(function (contents) { + return db.model.create(contents); + }), + function (model, next) { + // `model` is the instantiated model object. + // If there was an error, this function would be skipped. + } +], callback) +``` + +This also means you can asyncify ES2016 `async` functions. + +```js +var q = async.queue(async.asyncify(async function (file) { + var intermediateStep = await processFile(file); + return await somePromise(intermediateStep) +})); + +q.push(files); +``` + +--------------------------------------- + + +### log(function, arguments) + +Logs the result of an `async` function to the `console`. Only works in Node.js or +in browsers that support `console.log` and `console.error` (such as FF and Chrome). +If multiple arguments are returned from the async function, `console.log` is +called on each argument in order. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to apply to the function. + +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, 'hello ' + name); + }, 1000); +}; +``` +```js +node> async.log(hello, 'world'); +'hello world' +``` + +--------------------------------------- + + +### dir(function, arguments) + +Logs the result of an `async` function to the `console` using `console.dir` to +display the properties of the resulting object. Only works in Node.js or +in browsers that support `console.dir` and `console.error` (such as FF and Chrome). +If multiple arguments are returned from the async function, `console.dir` is +called on each argument in order. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to apply to the function. 
+ +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, {hello: name}); + }, 1000); +}; +``` +```js +node> async.dir(hello, 'world'); +{hello: 'world'} +``` + +--------------------------------------- + + +### noConflict() + +Changes the value of `async` back to its original value, returning a reference to the +`async` object. diff --git a/node_modules/fsevents/node_modules/async/dist/async.js b/node_modules/fsevents/node_modules/async/dist/async.js new file mode 100644 index 0000000..31e7620 --- /dev/null +++ b/node_modules/fsevents/node_modules/async/dist/async.js @@ -0,0 +1,1265 @@ +/*! + * async + * https://github.com/caolan/async + * + * Copyright 2010-2014 Caolan McMahon + * Released under the MIT license + */ +(function () { + + var async = {}; + function noop() {} + function identity(v) { + return v; + } + function toBool(v) { + return !!v; + } + function notId(v) { + return !v; + } + + // global on the server, window in the browser + var previous_async; + + // Establish the root object, `window` (`self`) in the browser, `global` + // on the server, or `this` in some virtual machines. We use `self` + // instead of `window` for `WebWorker` support. + var root = typeof self === 'object' && self.self === self && self || + typeof global === 'object' && global.global === global && global || + this; + + if (root != null) { + previous_async = root.async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + function only_once(fn) { + return function() { + if (fn === null) throw new Error("Callback was already called."); + fn.apply(this, arguments); + fn = null; + }; + } + + function _once(fn) { + return function() { + if (fn === null) return; + fn.apply(this, arguments); + fn = null; + }; + } + + //// cross-browser compatiblity functions //// + + var _toString = Object.prototype.toString; + + var _isArray = Array.isArray || function (obj) { + return _toString.call(obj) === '[object Array]'; + }; + + // Ported from underscore.js isObject + var _isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + function _isArrayLike(arr) { + return _isArray(arr) || ( + // has a positive integer length property + typeof arr.length === "number" && + arr.length >= 0 && + arr.length % 1 === 0 + ); + } + + function _arrayEach(arr, iterator) { + var index = -1, + length = arr.length; + + while (++index < length) { + iterator(arr[index], index, arr); + } + } + + function _map(arr, iterator) { + var index = -1, + length = arr.length, + result = Array(length); + + while (++index < length) { + result[index] = iterator(arr[index], index, arr); + } + return result; + } + + function _range(count) { + return _map(Array(count), function (v, i) { return i; }); + } + + function _reduce(arr, iterator, memo) { + _arrayEach(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + } + + function _forEachOf(object, iterator) { + _arrayEach(_keys(object), function (key) { + iterator(object[key], key); + }); + } + + function _indexOf(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] === item) return i; + } + return -1; + } + + var _keys = Object.keys || function (obj) { + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + function _keyIterator(coll) { + var i = -1; + var len; + var keys; + if (_isArrayLike(coll)) { + len = coll.length; + return function next() { + 
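+            // array-like branch: next() walks the numeric indices 0..len-1 and returns null once they are exhausted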
i++; + return i < len ? i : null; + }; + } else { + keys = _keys(coll); + len = keys.length; + return function next() { + i++; + return i < len ? keys[i] : null; + }; + } + } + + // Similar to ES6's rest param (http://ariya.ofilabs.com/2013/03/es6-and-rest-parameter.html) + // This accumulates the arguments passed into an array, after a given index. + // From underscore.js (https://github.com/jashkenas/underscore/pull/2140). + function _restParam(func, startIndex) { + startIndex = startIndex == null ? func.length - 1 : +startIndex; + return function() { + var length = Math.max(arguments.length - startIndex, 0); + var rest = Array(length); + for (var index = 0; index < length; index++) { + rest[index] = arguments[index + startIndex]; + } + switch (startIndex) { + case 0: return func.call(this, rest); + case 1: return func.call(this, arguments[0], rest); + } + // Currently unused but handle cases outside of the switch statement: + // var args = Array(startIndex + 1); + // for (index = 0; index < startIndex; index++) { + // args[index] = arguments[index]; + // } + // args[startIndex] = rest; + // return func.apply(this, args); + }; + } + + function _withoutIndex(iterator) { + return function (value, index, callback) { + return iterator(value, callback); + }; + } + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + + // capture the global reference to guard against fakeTimer mocks + var _setImmediate = typeof setImmediate === 'function' && setImmediate; + + var _delay = _setImmediate ? function(fn) { + // not a direct alias for IE10 compatibility + _setImmediate(fn); + } : function(fn) { + setTimeout(fn, 0); + }; + + if (typeof process === 'object' && typeof process.nextTick === 'function') { + async.nextTick = process.nextTick; + } else { + async.nextTick = _delay; + } + async.setImmediate = _setImmediate ? _delay : async.nextTick; + + + async.forEach = + async.each = function (arr, iterator, callback) { + return async.eachOf(arr, _withoutIndex(iterator), callback); + }; + + async.forEachSeries = + async.eachSeries = function (arr, iterator, callback) { + return async.eachOfSeries(arr, _withoutIndex(iterator), callback); + }; + + + async.forEachLimit = + async.eachLimit = function (arr, limit, iterator, callback) { + return _eachOfLimit(limit)(arr, _withoutIndex(iterator), callback); + }; + + async.forEachOf = + async.eachOf = function (object, iterator, callback) { + callback = _once(callback || noop); + object = object || []; + + var iter = _keyIterator(object); + var key, completed = 0; + + while ((key = iter()) != null) { + completed += 1; + iterator(object[key], key, only_once(done)); + } + + if (completed === 0) callback(null); + + function done(err) { + completed--; + if (err) { + callback(err); + } + // Check key is null in case iterator isn't exhausted + // and done resolved synchronously. 
+ else if (key === null && completed <= 0) { + callback(null); + } + } + }; + + async.forEachOfSeries = + async.eachOfSeries = function (obj, iterator, callback) { + callback = _once(callback || noop); + obj = obj || []; + var nextKey = _keyIterator(obj); + var key = nextKey(); + function iterate() { + var sync = true; + if (key === null) { + return callback(null); + } + iterator(obj[key], key, only_once(function (err) { + if (err) { + callback(err); + } + else { + key = nextKey(); + if (key === null) { + return callback(null); + } else { + if (sync) { + async.setImmediate(iterate); + } else { + iterate(); + } + } + } + })); + sync = false; + } + iterate(); + }; + + + + async.forEachOfLimit = + async.eachOfLimit = function (obj, limit, iterator, callback) { + _eachOfLimit(limit)(obj, iterator, callback); + }; + + function _eachOfLimit(limit) { + + return function (obj, iterator, callback) { + callback = _once(callback || noop); + obj = obj || []; + var nextKey = _keyIterator(obj); + if (limit <= 0) { + return callback(null); + } + var done = false; + var running = 0; + var errored = false; + + (function replenish () { + if (done && running <= 0) { + return callback(null); + } + + while (running < limit && !errored) { + var key = nextKey(); + if (key === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iterator(obj[key], key, only_once(function (err) { + running -= 1; + if (err) { + callback(err); + errored = true; + } + else { + replenish(); + } + })); + } + })(); + }; + } + + + function doParallel(fn) { + return function (obj, iterator, callback) { + return fn(async.eachOf, obj, iterator, callback); + }; + } + function doParallelLimit(fn) { + return function (obj, limit, iterator, callback) { + return fn(_eachOfLimit(limit), obj, iterator, callback); + }; + } + function doSeries(fn) { + return function (obj, iterator, callback) { + return fn(async.eachOfSeries, obj, iterator, callback); + }; + } + + function _asyncMap(eachfn, arr, iterator, callback) { + callback = _once(callback || noop); + arr = arr || []; + var results = _isArrayLike(arr) ? [] : {}; + eachfn(arr, function (value, index, callback) { + iterator(value, function (err, v) { + results[index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + async.mapLimit = doParallelLimit(_asyncMap); + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. + async.inject = + async.foldl = + async.reduce = function (arr, memo, iterator, callback) { + async.eachOfSeries(arr, function (x, i, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + + async.foldr = + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, identity).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + + async.transform = function (arr, memo, iterator, callback) { + if (arguments.length === 3) { + callback = iterator; + iterator = memo; + memo = _isArray(arr) ? 
[] : {}; + } + + async.eachOf(arr, function(v, k, cb) { + iterator(memo, v, k, cb); + }, function(err) { + callback(err, memo); + }); + }; + + function _filter(eachfn, arr, iterator, callback) { + var results = []; + eachfn(arr, function (x, index, callback) { + iterator(x, function (v) { + if (v) { + results.push({index: index, value: x}); + } + callback(); + }); + }, function () { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + } + + async.select = + async.filter = doParallel(_filter); + + async.selectLimit = + async.filterLimit = doParallelLimit(_filter); + + async.selectSeries = + async.filterSeries = doSeries(_filter); + + function _reject(eachfn, arr, iterator, callback) { + _filter(eachfn, arr, function(value, cb) { + iterator(value, function(v) { + cb(!v); + }); + }, callback); + } + async.reject = doParallel(_reject); + async.rejectLimit = doParallelLimit(_reject); + async.rejectSeries = doSeries(_reject); + + function _createTester(eachfn, check, getResult) { + return function(arr, limit, iterator, cb) { + function done() { + if (cb) cb(getResult(false, void 0)); + } + function iteratee(x, _, callback) { + if (!cb) return callback(); + iterator(x, function (v) { + if (cb && check(v)) { + cb(getResult(true, x)); + cb = iterator = false; + } + callback(); + }); + } + if (arguments.length > 3) { + eachfn(arr, limit, iteratee, done); + } else { + cb = iterator; + iterator = limit; + eachfn(arr, iteratee, done); + } + }; + } + + async.any = + async.some = _createTester(async.eachOf, toBool, identity); + + async.someLimit = _createTester(async.eachOfLimit, toBool, identity); + + async.all = + async.every = _createTester(async.eachOf, notId, notId); + + async.everyLimit = _createTester(async.eachOfLimit, notId, notId); + + function _findGetResult(v, x) { + return x; + } + async.detect = _createTester(async.eachOf, identity, _findGetResult); + async.detectSeries = _createTester(async.eachOfSeries, identity, _findGetResult); + async.detectLimit = _createTester(async.eachOfLimit, identity, _findGetResult); + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + callback(null, _map(results.sort(comparator), function (x) { + return x.value; + })); + } + + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 1 : 0; + } + }; + + async.auto = function (tasks, concurrency, callback) { + if (typeof arguments[1] === 'function') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = _once(callback || noop); + var keys = _keys(tasks); + var remainingTasks = keys.length; + if (!remainingTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = remainingTasks; + } + + var results = {}; + var runningTasks = 0; + + var hasError = false; + + var listeners = []; + function addListener(fn) { + listeners.unshift(fn); + } + function removeListener(fn) { + var idx = _indexOf(listeners, fn); + if (idx >= 0) listeners.splice(idx, 1); + } + function taskComplete() { + remainingTasks--; + _arrayEach(listeners.slice(0), function (fn) { + fn(); + }); + } + + addListener(function () { + if (!remainingTasks) { + callback(null, results); + } + }); + + _arrayEach(keys, function (k) { + if (hasError) return; + var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]]; + var taskCallback = _restParam(function(err, args) { + runningTasks--; + if (args.length <= 1) { + args = args[0]; + } + if (err) { + var safeResults = {}; + _forEachOf(results, function(val, rkey) { + safeResults[rkey] = val; + }); + safeResults[k] = args; + hasError = true; + + callback(err, safeResults); + } + else { + results[k] = args; + async.setImmediate(taskComplete); + } + }); + var requires = task.slice(0, task.length - 1); + // prevent dead-locks + var len = requires.length; + var dep; + while (len--) { + if (!(dep = tasks[requires[len]])) { + throw new Error('Has nonexistent dependency in ' + requires.join(', ')); + } + if (_isArray(dep) && _indexOf(dep, k) >= 0) { + throw new Error('Has cyclic dependencies'); + } + } + function ready() { + return runningTasks < concurrency && _reduce(requires, function (a, x) { + return (a && results.hasOwnProperty(x)); + }, true) && !results.hasOwnProperty(k); + } + if (ready()) { + runningTasks++; + task[task.length - 1](taskCallback, results); + } + else { + addListener(listener); + } + function listener() { + if (ready()) { + runningTasks++; + removeListener(listener); + task[task.length - 1](taskCallback, results); + } + } + }); + }; + + + + async.retry = function(times, task, callback) { + var DEFAULT_TIMES = 5; + var DEFAULT_INTERVAL = 0; + + var attempts = []; + + var opts = { + times: DEFAULT_TIMES, + interval: DEFAULT_INTERVAL + }; + + function parseTimes(acc, t){ + if(typeof t === 'number'){ + acc.times = parseInt(t, 10) || DEFAULT_TIMES; + } else if(typeof t === 'object'){ + acc.times = parseInt(t.times, 10) || DEFAULT_TIMES; + acc.interval = parseInt(t.interval, 10) || DEFAULT_INTERVAL; + } else { + throw new Error('Unsupported argument type for \'times\': ' + typeof t); + } + } + + var length = arguments.length; + if (length < 1 || length > 3) { + throw new Error('Invalid arguments - must be either (task), (task, callback), (times, task) or (times, task, callback)'); + } else if (length <= 2 && typeof times === 'function') { + callback = task; + task = times; + } + if (typeof times !== 'function') { + parseTimes(opts, times); + } + opts.callback = callback; + opts.task = task; + + function wrappedTask(wrappedCallback, wrappedResults) { + function retryAttempt(task, finalAttempt) { + return function(seriesCallback) { + task(function(err, result){ + seriesCallback(!err || finalAttempt, {err: err, result: result}); + }, wrappedResults); + }; + } + + function retryInterval(interval){ + return function(seriesCallback){ + setTimeout(function(){ + seriesCallback(null); + }, interval); + }; + } + + while (opts.times) { + + var finalAttempt = !(opts.times-=1); + 
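+                // one retryAttempt step is queued per remaining try; a retryInterval delay step follows each non-final attempt below when opts.interval > 0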
attempts.push(retryAttempt(opts.task, finalAttempt)); + if(!finalAttempt && opts.interval > 0){ + attempts.push(retryInterval(opts.interval)); + } + } + + async.series(attempts, function(done, data){ + data = data[data.length - 1]; + (wrappedCallback || opts.callback)(data.err, data.result); + }); + } + + // If a callback is passed, run this as a controll flow + return opts.callback ? wrappedTask() : wrappedTask; + }; + + async.waterfall = function (tasks, callback) { + callback = _once(callback || noop); + if (!_isArray(tasks)) { + var err = new Error('First argument to waterfall must be an array of functions'); + return callback(err); + } + if (!tasks.length) { + return callback(); + } + function wrapIterator(iterator) { + return _restParam(function (err, args) { + if (err) { + callback.apply(null, [err].concat(args)); + } + else { + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + ensureAsync(iterator).apply(null, args); + } + }); + } + wrapIterator(async.iterator(tasks))(); + }; + + function _parallel(eachfn, tasks, callback) { + callback = callback || noop; + var results = _isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, function (task, key, callback) { + task(_restParam(function (err, args) { + if (args.length <= 1) { + args = args[0]; + } + results[key] = args; + callback(err); + })); + }, function (err) { + callback(err, results); + }); + } + + async.parallel = function (tasks, callback) { + _parallel(async.eachOf, tasks, callback); + }; + + async.parallelLimit = function(tasks, limit, callback) { + _parallel(_eachOfLimit(limit), tasks, callback); + }; + + async.series = function(tasks, callback) { + _parallel(async.eachOfSeries, tasks, callback); + }; + + async.iterator = function (tasks) { + function makeCallback(index) { + function fn() { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + } + fn.next = function () { + return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; + }; + return fn; + } + return makeCallback(0); + }; + + async.apply = _restParam(function (fn, args) { + return _restParam(function (callArgs) { + return fn.apply( + null, args.concat(callArgs) + ); + }); + }); + + function _concat(eachfn, arr, fn, callback) { + var result = []; + eachfn(arr, function (x, index, cb) { + fn(x, function (err, y) { + result = result.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, result); + }); + } + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + callback = callback || noop; + if (test()) { + var next = _restParam(function(err, args) { + if (err) { + callback(err); + } else if (test.apply(this, args)) { + iterator(next); + } else { + callback.apply(null, [null].concat(args)); + } + }); + iterator(next); + } else { + callback(null); + } + }; + + async.doWhilst = function (iterator, test, callback) { + var calls = 0; + return async.whilst(function() { + return ++calls <= 1 || test.apply(this, arguments); + }, iterator, callback); + }; + + async.until = function (test, iterator, callback) { + return async.whilst(function() { + return !test.apply(this, arguments); + }, iterator, callback); + }; + + async.doUntil = function (iterator, test, callback) { + return async.doWhilst(iterator, function() { + return !test.apply(this, arguments); + }, callback); + }; + + async.during = function (test, iterator, callback) { + callback = callback || noop; + + var next = _restParam(function(err, args) { + if (err) { + callback(err); + } else { + args.push(check); + test.apply(this, args); + } + }); + + var check = function(err, truth) { + if (err) { + callback(err); + } else if (truth) { + iterator(next); + } else { + callback(null); + } + }; + + test(check); + }; + + async.doDuring = function (iterator, test, callback) { + var calls = 0; + async.during(function(next) { + if (calls++ < 1) { + next(null, true); + } else { + test.apply(this, arguments); + } + }, iterator, callback); + }; + + function _queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new Error('Concurrency must not be zero'); + } + function _insert(q, data, pos, callback) { + if (callback != null && typeof callback !== "function") { + throw new Error("task callback must be a function"); + } + q.started = true; + if (!_isArray(data)) { + data = [data]; + } + if(data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + q.drain(); + }); + } + _arrayEach(data, function(task) { + var item = { + data: task, + callback: callback || noop + }; + + if (pos) { + q.tasks.unshift(item); + } else { + q.tasks.push(item); + } + + if (q.tasks.length === q.concurrency) { + q.saturated(); + } + }); + async.setImmediate(q.process); + } + function _next(q, tasks) { + return function(){ + workers -= 1; + + var removed = false; + var args = arguments; + _arrayEach(tasks, function (task) { + _arrayEach(workersList, function (worker, index) { + if (worker === task && !removed) { + workersList.splice(index, 1); + removed = true; + } + }); + + task.callback.apply(task, args); + }); + if (q.tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + } + + var workers = 0; + var workersList = []; + var q = { + tasks: [], + concurrency: concurrency, + payload: payload, + saturated: noop, + empty: noop, + drain: noop, + started: false, + 
paused: false, + push: function (data, callback) { + _insert(q, data, false, callback); + }, + kill: function () { + q.drain = noop; + q.tasks = []; + }, + unshift: function (data, callback) { + _insert(q, data, true, callback); + }, + process: function () { + while(!q.paused && workers < q.concurrency && q.tasks.length){ + + var tasks = q.payload ? + q.tasks.splice(0, q.payload) : + q.tasks.splice(0, q.tasks.length); + + var data = _map(tasks, function (task) { + return task.data; + }); + + if (q.tasks.length === 0) { + q.empty(); + } + workers += 1; + workersList.push(tasks[0]); + var cb = only_once(_next(q, tasks)); + worker(data, cb); + } + }, + length: function () { + return q.tasks.length; + }, + running: function () { + return workers; + }, + workersList: function () { + return workersList; + }, + idle: function() { + return q.tasks.length + workers === 0; + }, + pause: function () { + q.paused = true; + }, + resume: function () { + if (q.paused === false) { return; } + q.paused = false; + var resumeCount = Math.min(q.concurrency, q.tasks.length); + // Need to call q.process once per concurrent + // worker to preserve full concurrency after pause + for (var w = 1; w <= resumeCount; w++) { + async.setImmediate(q.process); + } + } + }; + return q; + } + + async.queue = function (worker, concurrency) { + var q = _queue(function (items, cb) { + worker(items[0], cb); + }, concurrency, 1); + + return q; + }; + + async.priorityQueue = function (worker, concurrency) { + + function _compareTasks(a, b){ + return a.priority - b.priority; + } + + function _binarySearch(sequence, item, compare) { + var beg = -1, + end = sequence.length - 1; + while (beg < end) { + var mid = beg + ((end - beg + 1) >>> 1); + if (compare(item, sequence[mid]) >= 0) { + beg = mid; + } else { + end = mid - 1; + } + } + return beg; + } + + function _insert(q, data, priority, callback) { + if (callback != null && typeof callback !== "function") { + throw new Error("task callback must be a function"); + } + q.started = true; + if (!_isArray(data)) { + data = [data]; + } + if(data.length === 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + q.drain(); + }); + } + _arrayEach(data, function(task) { + var item = { + data: task, + priority: priority, + callback: typeof callback === 'function' ? 
callback : noop + }; + + q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item); + + if (q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + // Start with a normal queue + var q = async.queue(worker, concurrency); + + // Override push to accept second parameter representing priority + q.push = function (data, priority, callback) { + _insert(q, data, priority, callback); + }; + + // Remove unshift function + delete q.unshift; + + return q; + }; + + async.cargo = function (worker, payload) { + return _queue(worker, 1, payload); + }; + + function _console_fn(name) { + return _restParam(function (fn, args) { + fn.apply(null, args.concat([_restParam(function (err, args) { + if (typeof console === 'object') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _arrayEach(args, function (x) { + console[name](x); + }); + } + } + })])); + }); + } + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + var queues = {}; + var has = Object.prototype.hasOwnProperty; + hasher = hasher || identity; + var memoized = _restParam(function memoized(args) { + var callback = args.pop(); + var key = hasher.apply(null, args); + if (has.call(memo, key)) { + async.setImmediate(function () { + callback.apply(null, memo[key]); + }); + } + else if (has.call(queues, key)) { + queues[key].push(callback); + } + else { + queues[key] = [callback]; + fn.apply(null, args.concat([_restParam(function (args) { + memo[key] = args; + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i].apply(null, args); + } + })])); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + }; + + async.unmemoize = function (fn) { + return function () { + return (fn.unmemoized || fn).apply(null, arguments); + }; + }; + + function _times(mapper) { + return function (count, iterator, callback) { + mapper(_range(count), iterator, callback); + }; + } + + async.times = _times(async.map); + async.timesSeries = _times(async.mapSeries); + async.timesLimit = function (count, limit, iterator, callback) { + return async.mapLimit(_range(count), limit, iterator, callback); + }; + + async.seq = function (/* functions... */) { + var fns = arguments; + return _restParam(function (args) { + var that = this; + + var callback = args[args.length - 1]; + if (typeof callback == 'function') { + args.pop(); + } else { + callback = noop; + } + + async.reduce(fns, args, function (newargs, fn, cb) { + fn.apply(that, newargs.concat([_restParam(function (err, nextargs) { + cb(err, nextargs); + })])); + }, + function (err, results) { + callback.apply(that, [err].concat(results)); + }); + }); + }; + + async.compose = function (/* functions... 
*/) { + return async.seq.apply(null, Array.prototype.reverse.call(arguments)); + }; + + + function _applyEach(eachfn) { + return _restParam(function(fns, args) { + var go = _restParam(function(args) { + var that = this; + var callback = args.pop(); + return eachfn(fns, function (fn, _, cb) { + fn.apply(that, args.concat([cb])); + }, + callback); + }); + if (args.length) { + return go.apply(this, args); + } + else { + return go; + } + }); + } + + async.applyEach = _applyEach(async.eachOf); + async.applyEachSeries = _applyEach(async.eachOfSeries); + + + async.forever = function (fn, callback) { + var done = only_once(callback || noop); + var task = ensureAsync(fn); + function next(err) { + if (err) { + return done(err); + } + task(next); + } + next(); + }; + + function ensureAsync(fn) { + return _restParam(function (args) { + var callback = args.pop(); + args.push(function () { + var innerArgs = arguments; + if (sync) { + async.setImmediate(function () { + callback.apply(null, innerArgs); + }); + } else { + callback.apply(null, innerArgs); + } + }); + var sync = true; + fn.apply(this, args); + sync = false; + }); + } + + async.ensureAsync = ensureAsync; + + async.constant = _restParam(function(values) { + var args = [null].concat(values); + return function (callback) { + return callback.apply(this, args); + }; + }); + + async.wrapSync = + async.asyncify = function asyncify(func) { + return _restParam(function (args) { + var callback = args.pop(); + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (_isObject(result) && typeof result.then === "function") { + result.then(function(value) { + callback(null, value); + })["catch"](function(err) { + callback(err.message ? err : new Error(err)); + }); + } else { + callback(null, result); + } + }); + }; + + // Node.js + if (typeof module === 'object' && module.exports) { + module.exports = async; + } + // AMD / RequireJS + else if (typeof define === 'function' && define.amd) { + define([], function () { + return async; + }); + } + // included directly via '); + expect(encoded).to.equal('\\x3cscript\\x3ealert\\x281\\x29\\x3c\\x2fscript\\x3e'); + done(); + }); + + it('encodes \' characters', function (done) { + + var encoded = Hoek.escapeJavaScript('something(\'param\')'); + expect(encoded).to.equal('something\\x28\\x27param\\x27\\x29'); + done(); + }); + + it('encodes large unicode characters with the correct padding', function (done) { + + var encoded = Hoek.escapeJavaScript(String.fromCharCode(500) + String.fromCharCode(1000)); + expect(encoded).to.equal('\\u0500\\u1000'); + done(); + }); + + it('doesn\'t throw an exception when passed null', function (done) { + + var encoded = Hoek.escapeJavaScript(null); + expect(encoded).to.equal(''); + done(); + }); +}); + +describe('escapeHtml()', function () { + + it('encodes / characters', function (done) { + + var encoded = Hoek.escapeHtml(''); + expect(encoded).to.equal('<script>alert(1)</script>'); + done(); + }); + + it('encodes < and > as named characters', function (done) { + + var encoded = Hoek.escapeHtml(' + + \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/jsbn/example.js b/node_modules/fsevents/node_modules/jsbn/example.js new file mode 100644 index 0000000..664c1b4 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsbn/example.js @@ -0,0 +1,3 @@ +var BigInteger = require('./'); +var a = new BigInteger('91823918239182398123'); +console.log(a.bitLength()); \ No newline at end of file 
diff --git a/node_modules/fsevents/node_modules/jsbn/index.js b/node_modules/fsevents/node_modules/jsbn/index.js new file mode 100644 index 0000000..e32fe13 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsbn/index.js @@ -0,0 +1,1358 @@ +(function(){ + + // Copyright (c) 2005 Tom Wu + // All Rights Reserved. + // See "LICENSE" for details. + + // Basic JavaScript BN library - subset useful for RSA encryption. + + // Bits per digit + var dbits; + + // JavaScript engine analysis + var canary = 0xdeadbeefcafe; + var j_lm = ((canary&0xffffff)==0xefcafe); + + // (public) Constructor + function BigInteger(a,b,c) { + if(a != null) + if("number" == typeof a) this.fromNumber(a,b,c); + else if(b == null && "string" != typeof a) this.fromString(a,256); + else this.fromString(a,b); + } + + // return new, unset BigInteger + function nbi() { return new BigInteger(null); } + + // am: Compute w_j += (x*this_i), propagate carries, + // c is initial carry, returns final carry. + // c < 3*dvalue, x < 2*dvalue, this_i < dvalue + // We need to select the fastest one that works in this environment. + + // am1: use a single mult and divide to get the high bits, + // max digit bits should be 26 because + // max internal value = 2*dvalue^2-2*dvalue (< 2^53) + function am1(i,x,w,j,c,n) { + while(--n >= 0) { + var v = x*this[i++]+w[j]+c; + c = Math.floor(v/0x4000000); + w[j++] = v&0x3ffffff; + } + return c; + } + // am2 avoids a big mult-and-extract completely. + // Max digit bits should be <= 30 because we do bitwise ops + // on values up to 2*hdvalue^2-hdvalue-1 (< 2^31) + function am2(i,x,w,j,c,n) { + var xl = x&0x7fff, xh = x>>15; + while(--n >= 0) { + var l = this[i]&0x7fff; + var h = this[i++]>>15; + var m = xh*l+h*xl; + l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff); + c = (l>>>30)+(m>>>15)+xh*h+(c>>>30); + w[j++] = l&0x3fffffff; + } + return c; + } + // Alternately, set max digit bits to 28 since some + // browsers slow down when dealing with 32-bit numbers. 
+ function am3(i,x,w,j,c,n) { + var xl = x&0x3fff, xh = x>>14; + while(--n >= 0) { + var l = this[i]&0x3fff; + var h = this[i++]>>14; + var m = xh*l+h*xl; + l = xl*l+((m&0x3fff)<<14)+w[j]+c; + c = (l>>28)+(m>>14)+xh*h; + w[j++] = l&0xfffffff; + } + return c; + } + var inBrowser = typeof navigator !== "undefined"; + if(inBrowser && j_lm && (navigator.appName == "Microsoft Internet Explorer")) { + BigInteger.prototype.am = am2; + dbits = 30; + } + else if(inBrowser && j_lm && (navigator.appName != "Netscape")) { + BigInteger.prototype.am = am1; + dbits = 26; + } + else { // Mozilla/Netscape seems to prefer am3 + BigInteger.prototype.am = am3; + dbits = 28; + } + + BigInteger.prototype.DB = dbits; + BigInteger.prototype.DM = ((1<= 0; --i) r[i] = this[i]; + r.t = this.t; + r.s = this.s; + } + + // (protected) set from integer value x, -DV <= x < DV + function bnpFromInt(x) { + this.t = 1; + this.s = (x<0)?-1:0; + if(x > 0) this[0] = x; + else if(x < -1) this[0] = x+this.DV; + else this.t = 0; + } + + // return bigint initialized to value + function nbv(i) { var r = nbi(); r.fromInt(i); return r; } + + // (protected) set from string and radix + function bnpFromString(s,b) { + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 256) k = 8; // byte array + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else { this.fromRadix(s,b); return; } + this.t = 0; + this.s = 0; + var i = s.length, mi = false, sh = 0; + while(--i >= 0) { + var x = (k==8)?s[i]&0xff:intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-") mi = true; + continue; + } + mi = false; + if(sh == 0) + this[this.t++] = x; + else if(sh+k > this.DB) { + this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); + } + else + this[this.t-1] |= x<= this.DB) sh -= this.DB; + } + if(k == 8 && (s[0]&0x80) != 0) { + this.s = -1; + if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this[this.t-1] == c) --this.t; + } + + // (public) return string representation in given radix + function bnToString(b) { + if(this.s < 0) return "-"+this.negate().toString(b); + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else return this.toRadix(b); + var km = (1< 0) { + if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); } + while(i >= 0) { + if(p < k) { + d = (this[i]&((1<>(p+=this.DB-k); + } + else { + d = (this[i]>>(p-=k))&km; + if(p <= 0) { p += this.DB; --i; } + } + if(d > 0) m = true; + if(m) r += int2char(d); + } + } + return m?r:"0"; + } + + // (public) -this + function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + + // (public) |this| + function bnAbs() { return (this.s<0)?this.negate():this; } + + // (public) return + if this > a, - if this < a, 0 if equal + function bnCompareTo(a) { + var r = this.s-a.s; + if(r != 0) return r; + var i = this.t; + r = i-a.t; + if(r != 0) return (this.s<0)?-r:r; + while(--i >= 0) if((r=this[i]-a[i]) != 0) return r; + return 0; + } + + // returns bit length of the integer x + function nbits(x) { + var r = 1, t; + if((t=x>>>16) != 0) { x = t; r += 16; } + if((t=x>>8) != 0) { x = t; r += 8; } + if((t=x>>4) != 0) { x = t; r += 4; } + if((t=x>>2) != 0) { x = t; r += 2; } + if((t=x>>1) != 0) { x = t; r += 1; } + return r; + } + + // (public) return the number of bits in "this" + function bnBitLength() { + if(this.t <= 0) return 0; + return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM)); + } + + // (protected) r = this << n*DB + function 
bnpDLShiftTo(n,r) { + var i; + for(i = this.t-1; i >= 0; --i) r[i+n] = this[i]; + for(i = n-1; i >= 0; --i) r[i] = 0; + r.t = this.t+n; + r.s = this.s; + } + + // (protected) r = this >> n*DB + function bnpDRShiftTo(n,r) { + for(var i = n; i < this.t; ++i) r[i-n] = this[i]; + r.t = Math.max(this.t-n,0); + r.s = this.s; + } + + // (protected) r = this << n + function bnpLShiftTo(n,r) { + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<= 0; --i) { + r[i+ds+1] = (this[i]>>cbs)|c; + c = (this[i]&bm)<= 0; --i) r[i] = 0; + r[ds] = c; + r.t = this.t+ds+1; + r.s = this.s; + r.clamp(); + } + + // (protected) r = this >> n + function bnpRShiftTo(n,r) { + r.s = this.s; + var ds = Math.floor(n/this.DB); + if(ds >= this.t) { r.t = 0; return; } + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<>bs; + for(var i = ds+1; i < this.t; ++i) { + r[i-ds-1] |= (this[i]&bm)<>bs; + } + if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB; + } + if(a.t < this.t) { + c -= a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c -= a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c -= a.s; + } + r.s = (c<0)?-1:0; + if(c < -1) r[i++] = this.DV+c; + else if(c > 0) r[i++] = c; + r.t = i; + r.clamp(); + } + + // (protected) r = this * a, r != this,a (HAC 14.12) + // "this" should be the larger one if appropriate. + function bnpMultiplyTo(a,r) { + var x = this.abs(), y = a.abs(); + var i = x.t; + r.t = i+y.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t); + r.s = 0; + r.clamp(); + if(this.s != a.s) BigInteger.ZERO.subTo(r,r); + } + + // (protected) r = this^2, r != this (HAC 14.16) + function bnpSquareTo(r) { + var x = this.abs(); + var i = r.t = 2*x.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < x.t-1; ++i) { + var c = x.am(i,x[i],r,2*i,0,1); + if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { + r[i+x.t] -= x.DV; + r[i+x.t+1] = 1; + } + } + if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1); + r.s = 0; + r.clamp(); + } + + // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) + // r != q, this != m. q or r may be null. 
+ function bnpDivRemTo(m,q,r) { + var pm = m.abs(); + if(pm.t <= 0) return; + var pt = this.abs(); + if(pt.t < pm.t) { + if(q != null) q.fromInt(0); + if(r != null) this.copyTo(r); + return; + } + if(r == null) r = nbi(); + var y = nbi(), ts = this.s, ms = m.s; + var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus + if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } + else { pm.copyTo(y); pt.copyTo(r); } + var ys = y.t; + var y0 = y[ys-1]; + if(y0 == 0) return; + var yt = y0*(1<1)?y[ys-2]>>this.F2:0); + var d1 = this.FV/yt, d2 = (1<= 0) { + r[r.t++] = 1; + r.subTo(t,r); + } + BigInteger.ONE.dlShiftTo(ys,t); + t.subTo(y,y); // "negative" y so we can replace sub with am later + while(y.t < ys) y[y.t++] = 0; + while(--j >= 0) { + // Estimate quotient digit + var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2); + if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out + y.dlShiftTo(j,t); + r.subTo(t,r); + while(r[i] < --qd) r.subTo(t,r); + } + } + if(q != null) { + r.drShiftTo(ys,q); + if(ts != ms) BigInteger.ZERO.subTo(q,q); + } + r.t = ys; + r.clamp(); + if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder + if(ts < 0) BigInteger.ZERO.subTo(r,r); + } + + // (public) this mod a + function bnMod(a) { + var r = nbi(); + this.abs().divRemTo(a,null,r); + if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r); + return r; + } + + // Modular reduction using "classic" algorithm + function Classic(m) { this.m = m; } + function cConvert(x) { + if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m); + else return x; + } + function cRevert(x) { return x; } + function cReduce(x) { x.divRemTo(this.m,null,x); } + function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + Classic.prototype.convert = cConvert; + Classic.prototype.revert = cRevert; + Classic.prototype.reduce = cReduce; + Classic.prototype.mulTo = cMulTo; + Classic.prototype.sqrTo = cSqrTo; + + // (protected) return "-1/this % 2^DB"; useful for Mont. reduction + // justification: + // xy == 1 (mod m) + // xy = 1+km + // xy(2-xy) = (1+km)(1-km) + // x[y(2-xy)] = 1-k^2m^2 + // x[y(2-xy)] == 1 (mod m^2) + // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2 + // should reduce x and y(2-xy) by m^2 at each step to keep size bounded. + // JS multiply "overflows" differently from C/C++, so care is needed here. 
+ function bnpInvDigit() { + if(this.t < 1) return 0; + var x = this[0]; + if((x&1) == 0) return 0; + var y = x&3; // y == 1/x mod 2^2 + y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 + y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 + y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 + // last step - calculate inverse mod DV directly; + // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints + y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits + // we really want the negative inverse, and -DV < y < DV + return (y>0)?this.DV-y:-y; + } + + // Montgomery reduction + function Montgomery(m) { + this.m = m; + this.mp = m.invDigit(); + this.mpl = this.mp&0x7fff; + this.mph = this.mp>>15; + this.um = (1<<(m.DB-15))-1; + this.mt2 = 2*m.t; + } + + // xR mod m + function montConvert(x) { + var r = nbi(); + x.abs().dlShiftTo(this.m.t,r); + r.divRemTo(this.m,null,r); + if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); + return r; + } + + // x/R mod m + function montRevert(x) { + var r = nbi(); + x.copyTo(r); + this.reduce(r); + return r; + } + + // x = x/R mod m (HAC 14.32) + function montReduce(x) { + while(x.t <= this.mt2) // pad x so am has enough room later + x[x.t++] = 0; + for(var i = 0; i < this.m.t; ++i) { + // faster way of calculating u0 = x[i]*mp mod DV + var j = x[i]&0x7fff; + var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM; + // use am to combine the multiply-shift-add into one call + j = i+this.m.t; + x[j] += this.m.am(0,u0,x,i,0,this.m.t); + // propagate carry + while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; } + } + x.clamp(); + x.drShiftTo(this.m.t,x); + if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); + } + + // r = "x^2/R mod m"; x != r + function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + // r = "xy/R mod m"; x,y != r + function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + + Montgomery.prototype.convert = montConvert; + Montgomery.prototype.revert = montRevert; + Montgomery.prototype.reduce = montReduce; + Montgomery.prototype.mulTo = montMulTo; + Montgomery.prototype.sqrTo = montSqrTo; + + // (protected) true iff this is even + function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; } + + // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) + function bnpExp(e,z) { + if(e > 0xffffffff || e < 1) return BigInteger.ONE; + var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; + g.copyTo(r); + while(--i >= 0) { + z.sqrTo(r,r2); + if((e&(1< 0) z.mulTo(r2,g,r); + else { var t = r; r = r2; r2 = t; } + } + return z.revert(r); + } + + // (public) this^e % m, 0 <= e < 2^32 + function bnModPowInt(e,m) { + var z; + if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); + return this.exp(e,z); + } + + // protected + BigInteger.prototype.copyTo = bnpCopyTo; + BigInteger.prototype.fromInt = bnpFromInt; + BigInteger.prototype.fromString = bnpFromString; + BigInteger.prototype.clamp = bnpClamp; + BigInteger.prototype.dlShiftTo = bnpDLShiftTo; + BigInteger.prototype.drShiftTo = bnpDRShiftTo; + BigInteger.prototype.lShiftTo = bnpLShiftTo; + BigInteger.prototype.rShiftTo = bnpRShiftTo; + BigInteger.prototype.subTo = bnpSubTo; + BigInteger.prototype.multiplyTo = bnpMultiplyTo; + BigInteger.prototype.squareTo = bnpSquareTo; + BigInteger.prototype.divRemTo = bnpDivRemTo; + BigInteger.prototype.invDigit = bnpInvDigit; + BigInteger.prototype.isEven = bnpIsEven; + BigInteger.prototype.exp = bnpExp; + + // public + BigInteger.prototype.toString = 
bnToString; + BigInteger.prototype.negate = bnNegate; + BigInteger.prototype.abs = bnAbs; + BigInteger.prototype.compareTo = bnCompareTo; + BigInteger.prototype.bitLength = bnBitLength; + BigInteger.prototype.mod = bnMod; + BigInteger.prototype.modPowInt = bnModPowInt; + + // "constants" + BigInteger.ZERO = nbv(0); + BigInteger.ONE = nbv(1); + + // Copyright (c) 2005-2009 Tom Wu + // All Rights Reserved. + // See "LICENSE" for details. + + // Extended JavaScript BN functions, required for RSA private ops. + + // Version 1.1: new BigInteger("0", 10) returns "proper" zero + // Version 1.2: square() API, isProbablePrime fix + + // (public) + function bnClone() { var r = nbi(); this.copyTo(r); return r; } + + // (public) return value as integer + function bnIntValue() { + if(this.s < 0) { + if(this.t == 1) return this[0]-this.DV; + else if(this.t == 0) return -1; + } + else if(this.t == 1) return this[0]; + else if(this.t == 0) return 0; + // assumes 16 < DB < 32 + return ((this[1]&((1<<(32-this.DB))-1))<>24; } + + // (public) return value as short (assumes DB>=16) + function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; } + + // (protected) return x s.t. r^x < DV + function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } + + // (public) 0 if this == 0, 1 if this > 0 + function bnSigNum() { + if(this.s < 0) return -1; + else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0; + else return 1; + } + + // (protected) convert to radix string + function bnpToRadix(b) { + if(b == null) b = 10; + if(this.signum() == 0 || b < 2 || b > 36) return "0"; + var cs = this.chunkSize(b); + var a = Math.pow(b,cs); + var d = nbv(a), y = nbi(), z = nbi(), r = ""; + this.divRemTo(d,y,z); + while(y.signum() > 0) { + r = (a+z.intValue()).toString(b).substr(1) + r; + y.divRemTo(d,y,z); + } + return z.intValue().toString(b) + r; + } + + // (protected) convert from radix string + function bnpFromRadix(s,b) { + this.fromInt(0); + if(b == null) b = 10; + var cs = this.chunkSize(b); + var d = Math.pow(b,cs), mi = false, j = 0, w = 0; + for(var i = 0; i < s.length; ++i) { + var x = intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-" && this.signum() == 0) mi = true; + continue; + } + w = b*w+x; + if(++j >= cs) { + this.dMultiply(d); + this.dAddOffset(w,0); + j = 0; + w = 0; + } + } + if(j > 0) { + this.dMultiply(Math.pow(b,j)); + this.dAddOffset(w,0); + } + if(mi) BigInteger.ZERO.subTo(this,this); + } + + // (protected) alternate constructor + function bnpFromNumber(a,b,c) { + if("number" == typeof b) { + // new BigInteger(int,int,RNG) + if(a < 2) this.fromInt(1); + else { + this.fromNumber(a,c); + if(!this.testBit(a-1)) // force MSB set + this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); + if(this.isEven()) this.dAddOffset(1,0); // force odd + while(!this.isProbablePrime(b)) { + this.dAddOffset(2,0); + if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); + } + } + } + else { + // new BigInteger(int,RNG) + var x = new Array(), t = a&7; + x.length = (a>>3)+1; + b.nextBytes(x); + if(t > 0) x[0] &= ((1< 0) { + if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p) + r[k++] = d|(this.s<<(this.DB-p)); + while(i >= 0) { + if(p < 8) { + d = (this[i]&((1<>(p+=this.DB-8); + } + else { + d = (this[i]>>(p-=8))&0xff; + if(p <= 0) { p += this.DB; --i; } + } + if((d&0x80) != 0) d |= -256; + if(k == 0 && (this.s&0x80) != (d&0x80)) ++k; + if(k > 0 || d != this.s) r[k++] = d; + } + } + return r; + } + + function bnEquals(a) { return(this.compareTo(a)==0); } + 
function bnMin(a) { return(this.compareTo(a)<0)?this:a; } + function bnMax(a) { return(this.compareTo(a)>0)?this:a; } + + // (protected) r = this op a (bitwise) + function bnpBitwiseTo(a,op,r) { + var i, f, m = Math.min(a.t,this.t); + for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]); + if(a.t < this.t) { + f = a.s&this.DM; + for(i = m; i < this.t; ++i) r[i] = op(this[i],f); + r.t = this.t; + } + else { + f = this.s&this.DM; + for(i = m; i < a.t; ++i) r[i] = op(f,a[i]); + r.t = a.t; + } + r.s = op(this.s,a.s); + r.clamp(); + } + + // (public) this & a + function op_and(x,y) { return x&y; } + function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + + // (public) this | a + function op_or(x,y) { return x|y; } + function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + + // (public) this ^ a + function op_xor(x,y) { return x^y; } + function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } + + // (public) this & ~a + function op_andnot(x,y) { return x&~y; } + function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } + + // (public) ~this + function bnNot() { + var r = nbi(); + for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i]; + r.t = this.t; + r.s = ~this.s; + return r; + } + + // (public) this << n + function bnShiftLeft(n) { + var r = nbi(); + if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); + return r; + } + + // (public) this >> n + function bnShiftRight(n) { + var r = nbi(); + if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); + return r; + } + + // return index of lowest 1-bit in x, x < 2^31 + function lbit(x) { + if(x == 0) return -1; + var r = 0; + if((x&0xffff) == 0) { x >>= 16; r += 16; } + if((x&0xff) == 0) { x >>= 8; r += 8; } + if((x&0xf) == 0) { x >>= 4; r += 4; } + if((x&3) == 0) { x >>= 2; r += 2; } + if((x&1) == 0) ++r; + return r; + } + + // (public) returns index of lowest 1-bit (or -1 if none) + function bnGetLowestSetBit() { + for(var i = 0; i < this.t; ++i) + if(this[i] != 0) return i*this.DB+lbit(this[i]); + if(this.s < 0) return this.t*this.DB; + return -1; + } + + // return number of 1 bits in x + function cbit(x) { + var r = 0; + while(x != 0) { x &= x-1; ++r; } + return r; + } + + // (public) return number of set bits + function bnBitCount() { + var r = 0, x = this.s&this.DM; + for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x); + return r; + } + + // (public) true iff nth bit is set + function bnTestBit(n) { + var j = Math.floor(n/this.DB); + if(j >= this.t) return(this.s!=0); + return((this[j]&(1<<(n%this.DB)))!=0); + } + + // (protected) this op (1<>= this.DB; + } + if(a.t < this.t) { + c += a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c += a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += a.s; + } + r.s = (c<0)?-1:0; + if(c > 0) r[i++] = c; + else if(c < -1) r[i++] = this.DV+c; + r.t = i; + r.clamp(); + } + + // (public) this + a + function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } + + // (public) this - a + function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } + + // (public) this * a + function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } + + // (public) this^2 + function bnSquare() { var r = nbi(); this.squareTo(r); return r; } + + // (public) this / a + function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } + + // (public) this % a + function bnRemainder(a) { var r = nbi(); 
this.divRemTo(a,null,r); return r; } + + // (public) [this/a,this%a] + function bnDivideAndRemainder(a) { + var q = nbi(), r = nbi(); + this.divRemTo(a,q,r); + return new Array(q,r); + } + + // (protected) this *= n, this >= 0, 1 < n < DV + function bnpDMultiply(n) { + this[this.t] = this.am(0,n-1,this,0,0,this.t); + ++this.t; + this.clamp(); + } + + // (protected) this += n << w words, this >= 0 + function bnpDAddOffset(n,w) { + if(n == 0) return; + while(this.t <= w) this[this.t++] = 0; + this[w] += n; + while(this[w] >= this.DV) { + this[w] -= this.DV; + if(++w >= this.t) this[this.t++] = 0; + ++this[w]; + } + } + + // A "null" reducer + function NullExp() {} + function nNop(x) { return x; } + function nMulTo(x,y,r) { x.multiplyTo(y,r); } + function nSqrTo(x,r) { x.squareTo(r); } + + NullExp.prototype.convert = nNop; + NullExp.prototype.revert = nNop; + NullExp.prototype.mulTo = nMulTo; + NullExp.prototype.sqrTo = nSqrTo; + + // (public) this^e + function bnPow(e) { return this.exp(e,new NullExp()); } + + // (protected) r = lower n words of "this * a", a.t <= n + // "this" should be the larger one if appropriate. + function bnpMultiplyLowerTo(a,n,r) { + var i = Math.min(this.t+a.t,n); + r.s = 0; // assumes a,this >= 0 + r.t = i; + while(i > 0) r[--i] = 0; + var j; + for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t); + for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i); + r.clamp(); + } + + // (protected) r = "this * a" without lower n words, n > 0 + // "this" should be the larger one if appropriate. + function bnpMultiplyUpperTo(a,n,r) { + --n; + var i = r.t = this.t+a.t-n; + r.s = 0; // assumes a,this >= 0 + while(--i >= 0) r[i] = 0; + for(i = Math.max(n-this.t,0); i < a.t; ++i) + r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n); + r.clamp(); + r.drShiftTo(1,r); + } + + // Barrett modular reduction + function Barrett(m) { + // setup Barrett + this.r2 = nbi(); + this.q3 = nbi(); + BigInteger.ONE.dlShiftTo(2*m.t,this.r2); + this.mu = this.r2.divide(m); + this.m = m; + } + + function barrettConvert(x) { + if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); + else if(x.compareTo(this.m) < 0) return x; + else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } + } + + function barrettRevert(x) { return x; } + + // x = x mod m (HAC 14.42) + function barrettReduce(x) { + x.drShiftTo(this.m.t-1,this.r2); + if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } + this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); + this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); + while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); + x.subTo(this.r2,x); + while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); + } + + // r = x^2 mod m; x != r + function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + // r = x*y mod m; x,y != r + function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + + Barrett.prototype.convert = barrettConvert; + Barrett.prototype.revert = barrettRevert; + Barrett.prototype.reduce = barrettReduce; + Barrett.prototype.mulTo = barrettMulTo; + Barrett.prototype.sqrTo = barrettSqrTo; + + // (public) this^e % m (HAC 14.85) + function bnModPow(e,m) { + var i = e.bitLength(), k, r = nbv(1), z; + if(i <= 0) return r; + else if(i < 18) k = 1; + else if(i < 48) k = 3; + else if(i < 144) k = 4; + else if(i < 768) k = 5; + else k = 6; + if(i < 8) + z = new Classic(m); + else if(m.isEven()) + z = new Barrett(m); + else + z = new Montgomery(m); + + // precomputation + var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { + var g2 = 
nbi(); + z.sqrTo(g[1],g2); + while(n <= km) { + g[n] = nbi(); + z.mulTo(g2,g[n-2],g[n]); + n += 2; + } + } + + var j = e.t-1, w, is1 = true, r2 = nbi(), t; + i = nbits(e[j])-1; + while(j >= 0) { + if(i >= k1) w = (e[j]>>(i-k1))&km; + else { + w = (e[j]&((1<<(i+1))-1))<<(k1-i); + if(j > 0) w |= e[j-1]>>(this.DB+i-k1); + } + + n = k; + while((w&1) == 0) { w >>= 1; --n; } + if((i -= n) < 0) { i += this.DB; --j; } + if(is1) { // ret == 1, don't bother squaring or multiplying it + g[w].copyTo(r); + is1 = false; + } + else { + while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } + if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } + z.mulTo(r2,g[w],r); + } + + while(j >= 0 && (e[j]&(1< 0) { + x.rShiftTo(g,x); + y.rShiftTo(g,y); + } + while(x.signum() > 0) { + if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); + if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); + if(x.compareTo(y) >= 0) { + x.subTo(y,x); + x.rShiftTo(1,x); + } + else { + y.subTo(x,y); + y.rShiftTo(1,y); + } + } + if(g > 0) y.lShiftTo(g,y); + return y; + } + + // (protected) this % n, n < 2^26 + function bnpModInt(n) { + if(n <= 0) return 0; + var d = this.DV%n, r = (this.s<0)?n-1:0; + if(this.t > 0) + if(d == 0) r = this[0]%n; + else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n; + return r; + } + + // (public) 1/this % m (HAC 14.61) + function bnModInverse(m) { + var ac = m.isEven(); + if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; + var u = m.clone(), v = this.clone(); + var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); + while(u.signum() != 0) { + while(u.isEven()) { + u.rShiftTo(1,u); + if(ac) { + if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } + a.rShiftTo(1,a); + } + else if(!b.isEven()) b.subTo(m,b); + b.rShiftTo(1,b); + } + while(v.isEven()) { + v.rShiftTo(1,v); + if(ac) { + if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } + c.rShiftTo(1,c); + } + else if(!d.isEven()) d.subTo(m,d); + d.rShiftTo(1,d); + } + if(u.compareTo(v) >= 0) { + u.subTo(v,u); + if(ac) a.subTo(c,a); + b.subTo(d,b); + } + else { + v.subTo(u,v); + if(ac) c.subTo(a,c); + d.subTo(b,d); + } + } + if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; + if(d.compareTo(m) >= 0) return d.subtract(m); + if(d.signum() < 0) d.addTo(m,d); else return d; + if(d.signum() < 0) return d.add(m); else return d; + } + + var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997]; + var lplim = (1<<26)/lowprimes[lowprimes.length-1]; + + // (public) test primality with certainty >= 1-.5^t + function bnIsProbablePrime(t) { + var i, x = this.abs(); + if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) { + for(i = 0; i < lowprimes.length; ++i) + if(x[0] == lowprimes[i]) return true; + return false; + } + if(x.isEven()) return false; + i = 1; + while(i < lowprimes.length) { + var m = lowprimes[i], j = i+1; + while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; + m = x.modInt(m); + while(i < j) 
if(m%lowprimes[i++] == 0) return false; + } + return x.millerRabin(t); + } + + // (protected) true if probably prime (HAC 4.24, Miller-Rabin) + function bnpMillerRabin(t) { + var n1 = this.subtract(BigInteger.ONE); + var k = n1.getLowestSetBit(); + if(k <= 0) return false; + var r = n1.shiftRight(k); + t = (t+1)>>1; + if(t > lowprimes.length) t = lowprimes.length; + var a = nbi(); + for(var i = 0; i < t; ++i) { + //Pick bases at random, instead of starting at 2 + a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]); + var y = a.modPow(r,this); + if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { + var j = 1; + while(j++ < k && y.compareTo(n1) != 0) { + y = y.modPowInt(2,this); + if(y.compareTo(BigInteger.ONE) == 0) return false; + } + if(y.compareTo(n1) != 0) return false; + } + } + return true; + } + + // protected + BigInteger.prototype.chunkSize = bnpChunkSize; + BigInteger.prototype.toRadix = bnpToRadix; + BigInteger.prototype.fromRadix = bnpFromRadix; + BigInteger.prototype.fromNumber = bnpFromNumber; + BigInteger.prototype.bitwiseTo = bnpBitwiseTo; + BigInteger.prototype.changeBit = bnpChangeBit; + BigInteger.prototype.addTo = bnpAddTo; + BigInteger.prototype.dMultiply = bnpDMultiply; + BigInteger.prototype.dAddOffset = bnpDAddOffset; + BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; + BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; + BigInteger.prototype.modInt = bnpModInt; + BigInteger.prototype.millerRabin = bnpMillerRabin; + + // public + BigInteger.prototype.clone = bnClone; + BigInteger.prototype.intValue = bnIntValue; + BigInteger.prototype.byteValue = bnByteValue; + BigInteger.prototype.shortValue = bnShortValue; + BigInteger.prototype.signum = bnSigNum; + BigInteger.prototype.toByteArray = bnToByteArray; + BigInteger.prototype.equals = bnEquals; + BigInteger.prototype.min = bnMin; + BigInteger.prototype.max = bnMax; + BigInteger.prototype.and = bnAnd; + BigInteger.prototype.or = bnOr; + BigInteger.prototype.xor = bnXor; + BigInteger.prototype.andNot = bnAndNot; + BigInteger.prototype.not = bnNot; + BigInteger.prototype.shiftLeft = bnShiftLeft; + BigInteger.prototype.shiftRight = bnShiftRight; + BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; + BigInteger.prototype.bitCount = bnBitCount; + BigInteger.prototype.testBit = bnTestBit; + BigInteger.prototype.setBit = bnSetBit; + BigInteger.prototype.clearBit = bnClearBit; + BigInteger.prototype.flipBit = bnFlipBit; + BigInteger.prototype.add = bnAdd; + BigInteger.prototype.subtract = bnSubtract; + BigInteger.prototype.multiply = bnMultiply; + BigInteger.prototype.divide = bnDivide; + BigInteger.prototype.remainder = bnRemainder; + BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; + BigInteger.prototype.modPow = bnModPow; + BigInteger.prototype.modInverse = bnModInverse; + BigInteger.prototype.pow = bnPow; + BigInteger.prototype.gcd = bnGCD; + BigInteger.prototype.isProbablePrime = bnIsProbablePrime; + + // JSBN-specific extension + BigInteger.prototype.square = bnSquare; + + // Expose the Barrett function + BigInteger.prototype.Barrett = Barrett + + // BigInteger interfaces not implemented in jsbn: + + // BigInteger(int signum, byte[] magnitude) + // double doubleValue() + // float floatValue() + // int hashCode() + // long longValue() + // static BigInteger valueOf(long val) + + // Random number generator - requires a PRNG backend, e.g. prng4.js + + // For best results, put code like + // + // in your main HTML document. 
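For orientation, here is a minimal usage sketch of the public BigInteger API assembled above (and of SecureRandom, defined later in this same file). The module name passed to require() is an assumption, not something this diff states, and the numbers are purely illustrative:

// Sketch only: assumes this vendored copy is loadable as require('jsbn').
var jsbn = require('jsbn');
var BigInteger = jsbn.BigInteger;

// 561 = 3 * 11 * 17 is a Carmichael number, so 7^560 mod 561 == 1.
var base = new BigInteger('7');       // string constructor defaults to radix 10
var exponent = new BigInteger('560');
var modulus = new BigInteger('561');
console.log(base.modPow(exponent, modulus).toString());  // "1" (windowed exponentiation, HAC 14.85)
console.log(base.modPowInt(560, modulus).toString());    // "1" (Montgomery reduction path for odd moduli)
console.log(modulus.isProbablePrime(10));                // false (trial division, then Miller-Rabin)

// new BigInteger(bitLength, certainty, rng) builds a random probable prime.
var rng = new jsbn.SecureRandom();
var p = new jsbn.BigInteger(256, 10, rng);
console.log(p.bitLength(), p.isProbablePrime(10));       // 256 true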
+ + var rng_state; + var rng_pool; + var rng_pptr; + + // Mix in a 32-bit integer into the pool + function rng_seed_int(x) { + rng_pool[rng_pptr++] ^= x & 255; + rng_pool[rng_pptr++] ^= (x >> 8) & 255; + rng_pool[rng_pptr++] ^= (x >> 16) & 255; + rng_pool[rng_pptr++] ^= (x >> 24) & 255; + if(rng_pptr >= rng_psize) rng_pptr -= rng_psize; + } + + // Mix in the current time (w/milliseconds) into the pool + function rng_seed_time() { + rng_seed_int(new Date().getTime()); + } + + // Initialize the pool with junk if needed. + if(rng_pool == null) { + rng_pool = new Array(); + rng_pptr = 0; + var t; + if(typeof window !== "undefined" && window.crypto) { + if (window.crypto.getRandomValues) { + // Use webcrypto if available + var ua = new Uint8Array(32); + window.crypto.getRandomValues(ua); + for(t = 0; t < 32; ++t) + rng_pool[rng_pptr++] = ua[t]; + } + else if(navigator.appName == "Netscape" && navigator.appVersion < "5") { + // Extract entropy (256 bits) from NS4 RNG if available + var z = window.crypto.random(32); + for(t = 0; t < z.length; ++t) + rng_pool[rng_pptr++] = z.charCodeAt(t) & 255; + } + } + while(rng_pptr < rng_psize) { // extract some randomness from Math.random() + t = Math.floor(65536 * Math.random()); + rng_pool[rng_pptr++] = t >>> 8; + rng_pool[rng_pptr++] = t & 255; + } + rng_pptr = 0; + rng_seed_time(); + //rng_seed_int(window.screenX); + //rng_seed_int(window.screenY); + } + + function rng_get_byte() { + if(rng_state == null) { + rng_seed_time(); + rng_state = prng_newstate(); + rng_state.init(rng_pool); + for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr) + rng_pool[rng_pptr] = 0; + rng_pptr = 0; + //rng_pool = null; + } + // TODO: allow reseeding after first request + return rng_state.next(); + } + + function rng_get_bytes(ba) { + var i; + for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte(); + } + + function SecureRandom() {} + + SecureRandom.prototype.nextBytes = rng_get_bytes; + + // prng4.js - uses Arcfour as a PRNG + + function Arcfour() { + this.i = 0; + this.j = 0; + this.S = new Array(); + } + + // Initialize arcfour context from key, an array of ints, each from [0..255] + function ARC4init(key) { + var i, j, t; + for(i = 0; i < 256; ++i) + this.S[i] = i; + j = 0; + for(i = 0; i < 256; ++i) { + j = (j + this.S[i] + key[i % key.length]) & 255; + t = this.S[i]; + this.S[i] = this.S[j]; + this.S[j] = t; + } + this.i = 0; + this.j = 0; + } + + function ARC4next() { + var t; + this.i = (this.i + 1) & 255; + this.j = (this.j + this.S[this.i]) & 255; + t = this.S[this.i]; + this.S[this.i] = this.S[this.j]; + this.S[this.j] = t; + return this.S[(t + this.S[this.i]) & 255]; + } + + Arcfour.prototype.init = ARC4init; + Arcfour.prototype.next = ARC4next; + + // Plug in your RNG constructor here + function prng_newstate() { + return new Arcfour(); + } + + // Pool size must be a multiple of 4 and greater than 32. 
+ // An array of bytes the size of the pool will be passed to init() + var rng_psize = 256; + + if (typeof exports !== 'undefined') { + exports = module.exports = { + BigInteger: BigInteger, + SecureRandom: SecureRandom, + }; + } else { + this.BigInteger = BigInteger; + this.SecureRandom = SecureRandom; + } + +}).call(this); diff --git a/node_modules/fsevents/node_modules/jsbn/package.json b/node_modules/fsevents/node_modules/jsbn/package.json new file mode 100644 index 0000000..92f64ef --- /dev/null +++ b/node_modules/fsevents/node_modules/jsbn/package.json @@ -0,0 +1,79 @@ +{ + "_args": [ + [ + "jsbn@~0.1.0", + "/Users/eshanker/Code/fsevents/node_modules/sshpk" + ] + ], + "_from": "jsbn@>=0.1.0 <0.2.0", + "_id": "jsbn@0.1.0", + "_inCache": true, + "_installable": true, + "_location": "/jsbn", + "_nodeVersion": "0.12.2", + "_npmUser": { + "email": "andyperlitch@gmail.com", + "name": "andyperlitch" + }, + "_npmVersion": "2.7.4", + "_phantomChildren": {}, + "_requested": { + "name": "jsbn", + "raw": "jsbn@~0.1.0", + "rawSpec": "~0.1.0", + "scope": null, + "spec": ">=0.1.0 <0.2.0", + "type": "range" + }, + "_requiredBy": [ + "/ecc-jsbn", + "/jodid25519", + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz", + "_shasum": "650987da0dd74f4ebf5a11377a2aa2d273e97dfd", + "_shrinkwrap": null, + "_spec": "jsbn@~0.1.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/sshpk", + "author": { + "name": "Tom Wu" + }, + "bugs": { + "url": "https://github.com/andyperlitch/jsbn/issues" + }, + "dependencies": {}, + "description": "The jsbn library is a fast, portable implementation of large-number math in pure JavaScript, enabling public-key crypto and other applications on desktop and mobile browsers.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "650987da0dd74f4ebf5a11377a2aa2d273e97dfd", + "tarball": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.0.tgz" + }, + "gitHead": "148a967b112806e63ddeeed78ee7938eef74c84a", + "homepage": "https://github.com/andyperlitch/jsbn", + "keywords": [ + "big", + "biginteger", + "bignumber", + "integer" + ], + "license": "BSD", + "main": "index.js", + "maintainers": [ + { + "name": "andyperlitch", + "email": "andyperlitch@gmail.com" + } + ], + "name": "jsbn", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/andyperlitch/jsbn.git" + }, + "scripts": { + "test": "mocha test.js" + }, + "version": "0.1.0" +} diff --git a/node_modules/fsevents/node_modules/json-schema/README.md b/node_modules/fsevents/node_modules/json-schema/README.md new file mode 100644 index 0000000..ccc591b --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/README.md @@ -0,0 +1,5 @@ +JSON Schema is a repository for the JSON Schema specification, reference schemas and a CommonJS implementation of JSON Schema (not the only JavaScript implementation of JSON Schema, JSV is another excellent JavaScript validator). + +Code is licensed under the AFL or BSD license as part of the Persevere +project which is administered under the Dojo foundation, +and all contributions require a Dojo CLA. 
\ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-00/hyper-schema b/node_modules/fsevents/node_modules/json-schema/draft-00/hyper-schema new file mode 100644 index 0000000..12fe26b --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-00/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-00/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "dot-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-00/schema#"} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-00/json-ref b/node_modules/fsevents/node_modules/json-schema/draft-00/json-ref new file mode 100644 index 0000000..0c825bc --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-00/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-00/links b/node_modules/fsevents/node_modules/json-schema/draft-00/links new file mode 100644 index 0000000..c9b5517 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-00/links @@ -0,0 +1,33 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-00/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-00/schema b/node_modules/fsevents/node_modules/json-schema/draft-00/schema new file mode 100644 index 0000000..a3a2144 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-00/schema @@ -0,0 +1,155 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : 
["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : ["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1 + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "maxDecimal" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-01/hyper-schema b/node_modules/fsevents/node_modules/json-schema/draft-01/hyper-schema new file mode 100644 index 0000000..66e835b --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-01/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-01/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "dot-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + 
} + ], + + "fragmentResolution" : "dot-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-01/schema#"} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-01/json-ref b/node_modules/fsevents/node_modules/json-schema/draft-01/json-ref new file mode 100644 index 0000000..f2ad55b --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-01/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-01/links b/node_modules/fsevents/node_modules/json-schema/draft-01/links new file mode 100644 index 0000000..cb183c4 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-01/links @@ -0,0 +1,33 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-01/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-01/schema b/node_modules/fsevents/node_modules/json-schema/draft-01/schema new file mode 100644 index 0000000..e6b6aea --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-01/schema @@ -0,0 +1,155 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : ["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" 
: "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1 + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "maxDecimal" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-02/hyper-schema b/node_modules/fsevents/node_modules/json-schema/draft-02/hyper-schema new file mode 100644 index 0000000..2d2bc68 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-02/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-02/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "slash-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "slash-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-02/schema#"} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-02/json-ref b/node_modules/fsevents/node_modules/json-schema/draft-02/json-ref new file mode 100644 index 0000000..2b23fcd --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-02/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-02/links b/node_modules/fsevents/node_modules/json-schema/draft-02/links new file mode 100644 index 0000000..ab971b7 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-02/links @@ -0,0 +1,35 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/links#", + "type" : "object", + + "properties" : { 
+ "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-02/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-02/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-02/schema b/node_modules/fsevents/node_modules/json-schema/draft-02/schema new file mode 100644 index 0000000..cc2b669 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-02/schema @@ -0,0 +1,166 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : ["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1, + "uniqueItems" : true + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "minimumCanEqual" : false, + "optional" : true, + "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/examples/address 
b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/address new file mode 100644 index 0000000..401f20f --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/address @@ -0,0 +1,20 @@ +{ + "description" : "An Address following the convention of http://microformats.org/wiki/hcard", + "type" : "object", + "properties" : { + "post-office-box" : { "type" : "string" }, + "extended-address" : { "type" : "string" }, + "street-address" : { "type":"string" }, + "locality" : { "type" : "string", "required" : true }, + "region" : { "type" : "string", "required" : true }, + "postal-code" : { "type" : "string" }, + "country-name" : { "type" : "string", "required" : true } + }, + "dependencies" : { + "post-office-box" : "street-address", + "extended-address" : "street-address", + "street-address" : "region", + "locality" : "region", + "region" : "country-name" + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/examples/calendar b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/calendar new file mode 100644 index 0000000..0ec47c2 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/calendar @@ -0,0 +1,53 @@ +{ + "description" : "A representation of an event", + "type" : "object", + "properties" : { + "dtstart" : { + "format" : "date-time", + "type" : "string", + "description" : "Event starting time", + "required":true + }, + "summary" : { + "type":"string", + "required":true + }, + "location" : { + "type" : "string" + }, + "url" : { + "type" : "string", + "format" : "url" + }, + "dtend" : { + "format" : "date-time", + "type" : "string", + "description" : "Event ending time" + }, + "duration" : { + "format" : "date", + "type" : "string", + "description" : "Event duration" + }, + "rdate" : { + "format" : "date-time", + "type" : "string", + "description" : "Recurrence date" + }, + "rrule" : { + "type" : "string", + "description" : "Recurrence rule" + }, + "category" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "geo" : { "$ref" : "http://json-schema.org/draft-03/geo" } + } +} + + + + diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/examples/card b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/card new file mode 100644 index 0000000..a5667ff --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/card @@ -0,0 +1,105 @@ +{ + "description":"A representation of a person, company, organization, or place", + "type":"object", + "properties":{ + "fn":{ + "description":"Formatted Name", + "type":"string" + }, + "familyName":{ + "type":"string", + "required":true + }, + "givenName":{ + "type":"string", + "required":true + }, + "additionalName":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "honorificPrefix":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "honorificSuffix":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "nickname":{ + "type":"string" + }, + "url":{ + "type":"string", + "format":"url" + }, + "email":{ + "type":"object", + "properties":{ + "type":{ + "type":"string" + }, + "value":{ + "type":"string", + "format":"email" + } + } + }, + "tel":{ + "type":"object", + "properties":{ + "type":{ + "type":"string" + }, + "value":{ + "type":"string", + "format":"phone" + } + } + }, + "adr":{"$ref" : "http://json-schema.org/address"}, + "geo":{"$ref" : "http://json-schema.org/geo"}, + "tz":{ + "type":"string" + }, + "photo":{ + 
"format":"image", + "type":"string" + }, + "logo":{ + "format":"image", + "type":"string" + }, + "sound":{ + "format":"attachment", + "type":"string" + }, + "bday":{ + "type":"string", + "format":"date" + }, + "title":{ + "type":"string" + }, + "role":{ + "type":"string" + }, + "org":{ + "type":"object", + "properties":{ + "organizationName":{ + "type":"string" + }, + "organizationUnit":{ + "type":"string" + } + } + } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/examples/geo b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/geo new file mode 100644 index 0000000..4357a90 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/geo @@ -0,0 +1,8 @@ +{ + "description" : "A geographical coordinate", + "type" : "object", + "properties" : { + "latitude" : { "type" : "number" }, + "longitude" : { "type" : "number" } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/examples/interfaces b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/interfaces new file mode 100644 index 0000000..b8532f2 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/examples/interfaces @@ -0,0 +1,23 @@ +{ + "extends":"http://json-schema.org/hyper-schema", + "description":"A schema for schema interface definitions that describe programmatic class structures using JSON schema syntax", + "properties":{ + "methods":{ + "type":"object", + "description":"This defines the set of methods available to the class instances", + "additionalProperties":{ + "type":"object", + "description":"The definition of the method", + "properties":{ + "parameters":{ + "type":"array", + "description":"The set of parameters that should be passed to the method when it is called", + "items":{"$ref":"#"}, + "required": true + }, + "returns":{"$ref":"#"} + } + } + } + } +} diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/hyper-schema b/node_modules/fsevents/node_modules/json-schema/draft-03/hyper-schema new file mode 100644 index 0000000..38ca2e1 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/hyper-schema @@ -0,0 +1,60 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "extends" : {"$ref" : "http://json-schema.org/draft-03/schema#"}, + "id" : "http://json-schema.org/draft-03/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-03/links#"} + }, + + "fragmentResolution" : { + "type" : "string", + "default" : "slash-delimited" + }, + + "root" : { + "type" : "boolean", + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "default" : false + }, + + "contentEncoding" : { + "type" : "string" + }, + + "pathStart" : { + "type" : "string", + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "format" : "media-type" + } + }, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "slash-delimited" +} diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/json-ref b/node_modules/fsevents/node_modules/json-schema/draft-03/json-ref new file mode 100644 index 0000000..66e08f2 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "id" : 
"http://json-schema.org/draft-03/json-ref#", + + "additionalItems" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/links b/node_modules/fsevents/node_modules/json-schema/draft-03/links new file mode 100644 index 0000000..9fa63f9 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/links @@ -0,0 +1,35 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "id" : "http://json-schema.org/draft-03/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string", + "required" : true, + "format" : "link-description-object-template" + }, + + "rel" : { + "type" : "string", + "required" : true + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET" + }, + + "enctype" : { + "type" : "string", + "requires" : "method" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"} + } + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-03/schema b/node_modules/fsevents/node_modules/json-schema/draft-03/schema new file mode 100644 index 0000000..29d9469 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-03/schema @@ -0,0 +1,174 @@ +{ + "$schema" : "http://json-schema.org/draft-03/schema#", + "id" : "http://json-schema.org/draft-03/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "required" : { + "type" : "boolean", + "default" : false + }, + + "dependencies" : { + "type" : "object", + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { + "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "pattern" : { + "type" : "string", + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "enum" : { + "type" : "array", + "minItems" : 1, + "uniqueItems" : true + }, + + "default" : { + "type" : "any" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + 
}, + + "format" : { + "type" : "string" + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, + "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "id" : { + "type" : "string", + "format" : "uri" + }, + + "$ref" : { + "type" : "string", + "format" : "uri" + }, + + "$schema" : { + "type" : "string", + "format" : "uri" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-04/hyper-schema b/node_modules/fsevents/node_modules/json-schema/draft-04/hyper-schema new file mode 100644 index 0000000..63fb34d --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-04/hyper-schema @@ -0,0 +1,60 @@ +{ + "$schema" : "http://json-schema.org/draft-04/hyper-schema#", + "extends" : {"$ref" : "http://json-schema.org/draft-04/schema#"}, + "id" : "http://json-schema.org/draft-04/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-04/links#"} + }, + + "fragmentResolution" : { + "type" : "string", + "default" : "json-pointer" + }, + + "root" : { + "type" : "boolean", + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "default" : false + }, + + "contentEncoding" : { + "type" : "string" + }, + + "pathStart" : { + "type" : "string", + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "format" : "media-type" + } + }, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "json-pointer" +} diff --git a/node_modules/fsevents/node_modules/json-schema/draft-04/links b/node_modules/fsevents/node_modules/json-schema/draft-04/links new file mode 100644 index 0000000..6c06d29 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-04/links @@ -0,0 +1,41 @@ +{ + "$schema" : "http://json-schema.org/draft-04/hyper-schema#", + "id" : "http://json-schema.org/draft-04/links#", + "type" : "object", + + "properties" : { + "rel" : { + "type" : "string" + }, + + "href" : { + "type" : "string" + }, + + "template" : { + "type" : "string" + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-04/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET" + }, + + "enctype" : { + "type" : "string" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-04/hyper-schema#"} + } + }, + + "required" : ["rel", "href"], + + "dependencies" : { + "enctype" : "method" + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-04/schema b/node_modules/fsevents/node_modules/json-schema/draft-04/schema new file mode 100644 index 0000000..4231b16 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-04/schema @@ -0,0 +1,189 @@ +{ + "$schema" : "http://json-schema.org/draft-04/schema#", + "id" : "http://json-schema.org/draft-04/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : [ + { + "id" : "#simple-type", + "type" : "string", + "enum" : ["object", "array", "string", "number", "boolean", "null", 
"any"] + }, + "array" + ], + "items" : { + "type" : [ + {"$ref" : "#simple-type"}, + {"$ref" : "#"} + ] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "enum" : { + "type" : "array", + "minItems" : 1, + "uniqueItems" : true + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { + "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, + "default" : 1 + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "pattern" : { + "type" : "string" + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "minProperties" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxProperties" : { + "type" : "integer", + "minimum" : 0 + }, + + "required" : { + "type" : "array", + "items" : { + "type" : "string" + } + }, + + "dependencies" : { + "type" : "object", + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "id" : { + "type" : "string" + }, + + "$ref" : { + "type" : "string" + }, + + "$schema" : { + "type" : "string" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + }, + + "default" : { + "type" : "any" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-03.xml b/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-03.xml new file mode 100644 index 0000000..cf60620 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-03.xml @@ -0,0 +1,1120 @@ + + + + + + + + + + + + + + + +]> + + + + + + + + + A JSON Media Type for Describing the Structure and Meaning of JSON Documents + + + SitePen (USA) +
+ + 530 Lytton Avenue + Palo Alto, CA 94301 + USA + + +1 650 968 8787 + kris@sitepen.com +
+
+ + +
+ + + Calgary, AB + Canada + + gary.court@gmail.com +
+
+ + + Internet Engineering Task Force + JSON + Schema + JavaScript + Object + Notation + Hyper Schema + Hypermedia + + + + JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json", + a JSON based format for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + + +
+ + +
+ + JSON (JavaScript Object Notation) Schema is a JSON media type for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + +
+ +
+ + + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", + "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be + interpreted as described in RFC 2119. + +
+ + + +
+ + JSON Schema defines the media type "application/schema+json" for + describing the structure of other + JSON documents. JSON Schema is JSON-based and includes facilities + for describing the structure of JSON documents in terms of + allowable values, descriptions, and interpreting relations with other resources. + + + JSON Schema format is organized into several separate definitions. The first + definition is the core schema specification. This definition is primarily + concerned with describing a JSON structure and specifying valid elements + in the structure. The second definition is the Hyper Schema specification, + which is intended to define elements in a structure that can be interpreted as + hyperlinks. + Hyper Schema builds on JSON Schema to describe the hyperlink structure of + other JSON documents and elements of interaction. This allows user agents to successfully navigate + JSON documents based on their schemas. + + + Cumulatively, JSON Schema acts as a meta-document that can be used to define the required type and constraints on + property values, as well as define the meaning of the property values + for the purpose of describing a resource and determining hyperlinks + within the representation. + 
+ An example JSON Schema that describes products might look like: + + + + + This schema defines the properties of the instance JSON documents, + the required properties (id, name, and price), as well as an optional + property (tags). This also defines the link relations of the instance + JSON documents. + +
+ +
+ + For this specification, schema will be used to denote a JSON Schema + definition, and an instance refers to a JSON value that the schema + will be describing and validating. + +
+ +
+ + The JSON Schema media type does not attempt to dictate the structure of JSON + representations that contain data, but rather provides a separate format + for flexibly communicating how a JSON representation should be + interpreted and validated, such that user agents can properly understand + acceptable structures and extrapolate hyperlink information + with the JSON document. It is acknowledged that JSON documents come + in a variety of structures, and JSON is unique in that the structure + of stored data structures often prescribes a non-ambiguous definite + JSON representation. Attempting to force a specific structure is generally + not viable, and therefore JSON Schema allows for a great flexibility + in the structure of the JSON data that it describes. + + + This specification is protocol agnostic. + The underlying protocol (such as HTTP) should sufficiently define the + semantics of the client-server interface, the retrieval of resource + representations linked to by JSON representations, and modification of + those resources. The goal of this + format is to sufficiently describe JSON structures such that one can + utilize existing information available in existing JSON + representations from a large variety of services that leverage a representational state transfer + architecture using existing protocols. + +
+
+ +
+ + JSON Schema instances are correlated to their schema by the "describedby" + relation, where the schema is defined to be the target of the relation. + Instance representations may be of the "application/json" media type or + any other subtype. Consequently, dictating how an instance + representation should specify the relation to the schema is beyond the normative scope + of this document (since this document specifically defines the JSON + Schema media type, and no other), but it is recommended that instances + specify their schema so that user agents can interpret the instance + representation and messages may retain the self-descriptive + characteristic, avoiding the need for out-of-band information about + instance data. Two approaches are recommended for declaring the + relation to the schema that describes the meaning of a JSON instance's (or collection + of instances) structure. A MIME type parameter named + "profile" or a relation of "describedby" (which could be defined by a Link header) may be used: + +
+ + + +
+ + or if the content is being transferred by a protocol (such as HTTP) that + provides headers, a Link header can be used: + +
+ +; rel="describedby" +]]> + +
+ + Instances MAY specify multiple schemas, to indicate all the schemas that + are applicable to the data, and the data SHOULD be valid by all the schemas. + The instance data MAY have multiple schemas + that it is defined by (the instance data SHOULD be valid for those schemas). + Or if the document is a collection of instances, the collection MAY contain + instances from different schemas. When collections contain heterogeneous + instances, the "pathStart" attribute MAY be specified in the + schema to disambiguate which schema should be applied for each item in the + collection. However, ultimately, the mechanism for referencing a schema is up to the + media type of the instance documents (if they choose to specify that schemas + can be referenced). +
+ +
+ + JSON Schemas can themselves be described using JSON Schemas. + A self-describing JSON Schema for the core JSON Schema can + be found at http://json-schema.org/schema for the latest version or + http://json-schema.org/draft-03/schema for the draft-03 version. The hyper schema + self-description can be found at http://json-schema.org/hyper-schema + or http://json-schema.org/draft-03/hyper-schema. All schemas + used within a protocol with media type definitions + SHOULD include a MIME parameter that refers to the self-descriptive + hyper schema or another schema that extends this hyper schema: + +
+ + + +
+
+
+
+ +
+ + A JSON Schema is a JSON Object that defines various attributes + (including usage and valid values) of a JSON value. JSON + Schema has recursive capabilities; there are a number of elements + in the structure that allow for nested JSON Schemas. + + +
+ An example JSON Schema definition could look like: + + + +
+ + + A JSON Schema object may have any of the following properties, called schema + attributes (all attributes are optional): + + +
+ + This attribute defines what the primitive type or the schema of the instance MUST be in order to validate. + This attribute can take one of two forms: + + + + A string indicating a primitive or simple type. The following are acceptable string values: + + + Value MUST be a string. + Value MUST be a number, floating point numbers are allowed. + Value MUST be an integer, no floating point numbers are allowed. This is a subset of the number type. + Value MUST be a boolean. + Value MUST be an object. + Value MUST be an array. + Value MUST be null. Note this is mainly for purpose of being able use union types to define nullability. If this type is not included in a union, null values are not allowed (the primitives listed above do not allow nulls on their own). + Value MAY be of any type including null. + + + If the property is not defined or is not in this list, then any type of value is acceptable. + Other type values MAY be used for custom purposes, but minimal validators of the specification + implementation can allow any instance value on unknown type values. + + + + An array of two or more simple type definitions. Each item in the array MUST be a simple type definition or a schema. + The instance value is valid if it is of the same type as one of the simple type definitions, or valid by one of the schemas, in the array. + + + + +
+ For example, a schema that defines if an instance can be a string or a number would be: + + +
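The draft's example figure was stripped from this vendored copy; a minimal schema matching the description above (an editor-supplied reconstruction, not the draft's verbatim figure) would be:
{ "type": ["string", "number"] }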
+
+ +
+ This attribute is an object with property definitions that define the valid values of instance object property values. When the instance value is an object, the property values of the instance object MUST conform to the property definitions in this object. In this object, each property definition's value MUST be a schema, and the property's name MUST be the name of the instance property that it defines. The instance property value MUST be valid according to the schema from the property definition. Properties are considered unordered; the instance properties MAY appear in any order.
+ +
+ This attribute is an object that defines the schema for a set of property names of an object instance. The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema. If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value. +
+ +
+ This attribute defines a schema for all properties that are not explicitly defined in an object type definition. If specified, the value MUST be a schema or a boolean. If false is provided, no additional properties are allowed beyond the properties defined in the schema. The default value is an empty schema which allows any value for additional properties. +
+ +
+ This attribute defines the allowed items in an instance array, and MUST be a schema or an array of schemas. The default value is an empty schema which allows any value for items in the instance array. + When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema. + When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST conform to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute using the same rules as "additionalProperties" for objects.
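For illustration (an editor-supplied sketch, not text from the draft), a tuple-typed array of a string followed by a number, with no additional items allowed, could be described as:
{
  "type": "array",
  "items": [
    { "type": "string" },
    { "type": "number" }
  ],
  "additionalItems": false
}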
+ +
+ This provides a definition for additional items in an array instance when tuple definitions of the items is provided. This can be false to indicate additional items in the array are not allowed, or it can be a schema that defines the schema of the additional items. +
+ +
+ This attribute indicates if the instance must have a value, and not be undefined. This is false by default, making the instance optional. +
+ +
+ This attribute is an object that defines the requirements of a property on an instance object. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value"). + + The dependency value can take one of two forms: + + + + If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value. + If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array. + + + If the dependency value is a schema, then the instance object MUST be valid against the schema. + + + +
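As an illustrative sketch (the property names are hypothetical, not taken from the draft), a schema requiring that any instance carrying a "creditCard" property also carries a "billingAddress" property could use:
{
  "type": "object",
  "dependencies": {
    "creditCard": "billingAddress"
  }
}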
+ +
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+ +
+ This attribute defines the minimum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the maximum number of values in an array when the array is the instance value. +
+ +
+ This attribute indicates that all items in an array instance MUST be unique (contains no two identical values). + + Two instances are considered equal if they are both of the same type and: + + + are null; or + are booleans/numbers/strings and have the same value; or + are arrays, contain the same number of items, and each item in the array is equal to the corresponding item in the other array; or + are objects, contain the same property names, and each property in the object is equal to the corresponding property in the other object. + +
+ +
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5 +
+ +
+ When the instance value is a string, this defines the minimum length of the string. +
+ +
+ When the instance value is a string, this defines the maximum length of the string. +
+ +
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems". +
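A hypothetical example (editor-supplied) of an enum constraint limiting an instance value to one of three strings:
{ "enum": ["red", "green", "blue"] }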
+ +
+ This attribute defines the default value of the instance when the instance is undefined. +
+ +
+ This attribute is a string that provides a short description of the instance property. +
+ +
+ This attribute is a string that provides a full description of the purpose of the instance property.
+ +
+ This property defines the type of data, content type, or microformat to be expected in the instance property values. A format attribute MAY be one of the values listed below, and if so, SHOULD adhere to the semantics describing for the format. A format SHOULD only be used to give meaning to primitive types (string, integer, number, or boolean). Validators MAY (but are not required to) validate that the instance values conform to a format. + + + The following formats are predefined: + + + This SHOULD be a date in ISO 8601 format of YYYY-MM-DDThh:mm:ssZ in UTC time. This is the recommended form of date/timestamp. + This SHOULD be a date in the format of YYYY-MM-DD. It is recommended that you use the "date-time" format instead of "date" unless you need to transfer only the date part. + This SHOULD be a time in the format of hh:mm:ss. It is recommended that you use the "date-time" format instead of "time" unless you need to transfer only the time part. + This SHOULD be the difference, measured in milliseconds, between the specified time and midnight, 00:00 of January 1, 1970 UTC. The value SHOULD be a number (integer or float). + A regular expression, following the regular expression specification from ECMA 262/Perl 5. + This is a CSS color (like "#FF0000" or "red"), based on CSS 2.1. + This is a CSS style definition (like "color: red; background-color:#FFF"), based on CSS 2.1. + This SHOULD be a phone number (format MAY follow E.123). + This value SHOULD be a URI. + This SHOULD be an email address. + This SHOULD be an ip version 4 address. + This SHOULD be an ip version 6 address. + This SHOULD be a host-name. + + + + Additional custom formats MAY be created. These custom formats MAY be expressed as an URI, and this URI MAY reference a schema of that format. +
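An illustrative, non-normative sketch (editor-supplied) of the "format" attribute applied to a string property:
{
  "type": "string",
  "format": "date-time"
}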
+ +
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0. +
+ +
+ This attribute takes the same values as the "type" attribute, however if the instance matches the type or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid. +
+ +
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints. + + Conceptually, the behavior of extends can be seen as validating an + instance against all constraints in the extending schema as well as + the extended schema(s). More optimized implementations that merge + schemas are possible, but are not required. Some examples of using "extends": + +
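The draft's own "extends" figures were stripped from this vendored copy; a reconstructed sketch of the idea (the "person" reference is hypothetical) is a schema that narrows a base schema it extends:
{
  "description": "An adult",
  "extends": { "$ref": "person" },
  "properties": {
    "age": { "minimum": 21 }
  }
}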
+ + + +
+ +
+ + + +
+
+
+ +
+ + This attribute defines the current URI of this schema (this attribute is + effectively a "self" link). This URI MAY be relative or absolute. If + the URI is relative it is resolved against the current URI of the parent + schema it is contained in. If this schema is not contained in any + parent schema, the current URI of the parent schema is held to be the + URI under which this schema was addressed. If id is missing, the current URI of a schema is + defined to be that of the parent schema. The current URI of the schema + is also used to construct relative references such as for $ref. + +
+ +
+ + This attribute defines a URI of a schema that contains the full representation of this schema. + When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance. + This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema. + +
+ +
+ + This attribute defines a URI of a JSON Schema that is the schema of the current schema. + When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links. + + + + A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior. + Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes.
+
+ +
+ + The following attributes are specified in addition to those + attributes that already provided by the core schema with the specific + purpose of informing user agents of relations between resources based + on JSON data. Just as with JSON + schema attributes, all the attributes in hyper schemas are optional. + Therefore, an empty object is a valid (non-informative) schema, and + essentially describes plain JSON (no constraints on the structures). + Addition of attributes provides additive information for user agents. + + +
+ + The value of the links property MUST be an array, where each item + in the array is a link description object which describes the link + relations of the instances. + + +
+ + A link description object is used to describe link relations. In + the context of a schema, it defines the link relations of the + instances of the schema, and can be parameterized by the instance + values. The link description format can be used on its own in + regular (non-schema documents), and use of this format can + be declared by referencing the normative link description + schema as the the schema for the data structure that uses the + links. The URI of the normative link description schema is: + http://json-schema.org/links (latest version) or + http://json-schema.org/draft-03/links (draft-03 version). + + +
+ + The value of the "href" link description property + indicates the target URI of the related resource. The value + of the instance property SHOULD be resolved as a URI-Reference per RFC 3986 + and MAY be a relative URI. The base URI to be used for relative resolution + SHOULD be the URI used to retrieve the instance object (not the schema) + when used within a schema. Also, when links are used within a schema, the URI + SHOULD be parametrized by the property values of the instance + object, if property values exist for the corresponding variables + in the template (otherwise they MAY be provided from alternate sources, like user input). + + + + Instance property values SHOULD be substituted into the URIs where + matching braces ('{', '}') are found surrounding zero or more characters, + creating an expanded URI. Instance property value substitutions are resolved + by using the text between the braces to denote the property name + from the instance to get the value to substitute. + +
+ For example, if an href value is defined: + + + + Then it would be resolved by substituting the value of the "id" property from the instance object.
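The draft's figure is missing from this copy; a reconstructed sketch of such a link template (the relation and template values are assumptions, not the draft's verbatim example) is:
{ "links": [ { "rel": "self", "href": "{id}" } ] }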
+ +
+ If the value of the "id" property was "45", the expanded URI would be: + + + +
+ + If matching braces are found with the string "@" (no quotes) between the braces, then the + actual instance value SHOULD be used to replace the braces, rather than a property value. + This should only be used in situations where the instance is a scalar (string, + boolean, or number), and not for objects or arrays. +
+
+ +
+ + The value of the "rel" property indicates the name of the + relation to the target resource. The relation to the target SHOULD be interpreted as specifically from the instance object that the schema (or sub-schema) applies to, not just the top level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to target from the top level resource MUST be indicated with the schema describing the top level JSON representation. + + + + Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define these relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations: + + + + If the relation value is "self", when this property is encountered in + the instance object, the object represents a resource and the instance object is + treated as a full representation of the target resource identified by + the specified URI. + + + + This indicates that the target of the link is the full representation for the instance object. The object that contains this link possibly may not be the full representation. + + + + This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures. + + + + This relation indicates that the target of the link + SHOULD be treated as the root or the body of the representation for the + purposes of user agent interaction or fragment resolution. All other + properties of the instance objects can be regarded as meta-data + descriptions for the data. + + + + + + The following relations are applicable for schemas (the schema as the "from" resource in the relation): + + + This indicates the target resource that represents collection of instances of a schema. + This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST). + + + + +
+ For example, if a schema is defined: + + + +
+ +
+ And if a collection of instance resource's JSON representation was retrieved: + + + +
+ + This would indicate that for the first item in the collection, its own + (self) URI would resolve to "/Resource/thing" and the first item's "up" + relation SHOULD be resolved to the resource at "/Resource/parent". + The "children" collection would be located at "/Resource/?upId=thing". +
+
+ +
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link. +
+ +
+ + The following properties also apply to link definition objects, and + provide functionality analogous to HTML forms, in providing a + means for submitting extra (often user supplied) information to send to a server. + + +
+ + This attribute defines which method can be used to access the target resource. + In an HTTP environment, this would be "GET" or "POST" (other HTTP methods + such as "PUT" and "DELETE" have semantics that are clearly implied by + accessed resources, and do not need to be defined here). + This defaults to "GET". + +
+ +
+ + If present, this property indicates a query media type format that the server + supports for querying or posting to the collection of instances at the target + resource. The query can be + suffixed to the target URI to query the collection with + property-based constraints on the resources that SHOULD be returned from + the server or used to post data to the resource (depending on the method). + +
+ For example, with the following schema: + + + + This indicates that the client can query the server for instances that have a specific name. +
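A reconstructed sketch of such a link definition (the "/Product/" target and "name" property are hypothetical, not the draft's verbatim figure):
{
  "links": [ {
    "rel": "instances",
    "href": "/Product/",
    "method": "GET",
    "enctype": "application/x-www-form-urlencoded",
    "schema": {
      "properties": {
        "name": { "type": "string" }
      }
    }
  } ]
}
Under these assumptions, a query for instances named "Pen" could be issued against a URI such as /Product/?name=Pen.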
+ +
+ For example: + + + +
+ + If no enctype or method is specified, only the single URI specified by + the href property is defined. If the method is POST, "application/json" is + the default media type. +
+
+ +
+ + This attribute contains a schema which defines the acceptable structure of the submitted + request (for a GET request, this schema would define the properties for the query string + and for a POST request, this would define the body). + +
+
+
+
+ +
+ + This property indicates the fragment resolution protocol to use for + resolving fragment identifiers in URIs within the instance + representations. This applies to the instance object URIs and all + children of the instance object's URIs. The default fragment resolution + protocol is "slash-delimited", which is defined below. Other fragment + resolution protocols MAY be used, but are not defined in this document. + + + + The fragment identifier is based on RFC 2396, Sec 5, and defines the + mechanism for resolving references to entities within a document. + + +
+ + With the slash-delimited fragment resolution protocol, the fragment + identifier is interpreted as a series of property reference tokens that start with and + are delimited by the "/" character (\x2F). Each property reference token + is a series of unreserved or escaped URI characters. Each property + reference token SHOULD be interpreted, starting from the beginning of + the fragment identifier, as a path reference in the target JSON + structure. The final target value of the fragment can be determined by + starting with the root of the JSON structure from the representation of + the resource identified by the pre-fragment URI. If the target is a JSON + object, then the new target is the value of the property with the name + identified by the next property reference token in the fragment. If the + target is a JSON array, then the target is determined by finding the + item in the array with the index defined by the next property + reference token (which MUST be a number). The target is successively + updated for each property reference token, until the entire fragment has + been traversed. + + + + Property names SHOULD be URI-encoded. In particular, any "/" in a + property name MUST be encoded to avoid being interpreted as a property + delimiter. + + +
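An editor-supplied illustration (the draft's figures are missing from this copy): given the representation
{ "foo": ["bar", "baz"], "qux": { "corge": "grault" } }
the fragment "#/foo/0" would resolve to "bar" and "#/qux/corge" would resolve to "grault".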
+ For example, for the following JSON representation: + + + +
+ +
+ The following fragment identifiers would be resolved: + + + +
+
+
+ +
+ + The dot-delimited fragment resolution protocol is the same as the + slash-delimited fragment resolution protocol except that the "." character + (\x2E) is used as the delimiter between property names (instead of "/") and + the path does not need to start with a ".". For example, #.foo and #foo are valid fragment + identifiers for referencing the value of the foo property. +
+
+ +
+ This attribute indicates that the instance property SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server. +
+ +
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property. +
+ +
+ + This attribute is a URI that defines what the instance's URI MUST start with in order to validate. + The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5, + and is relative to the instance's URI. + + + + When multiple schemas have been referenced for an instance, the user agent + can determine if this schema is applicable for a particular instance by + determining if the URI of the instance begins with the the value of the "pathStart" + attribute. If the URI of the instance does not start with this URI, + or if another schema specifies a starting URI that is longer and also matches the + instance, this schema SHOULD NOT be applied to the instance. Any schema + that does not have a pathStart attribute SHOULD be considered applicable + to all the instances for which it is referenced. + +
+ +
+ This attribute defines the media type of the instance representations that this schema is defining. +
+
+ +
+ + This specification is a sub-type of the JSON format, and + consequently the security considerations are generally the same as RFC 4627. + However, an additional issue is that when the link relation "self" + is used to denote a full representation of an object, the user agent + SHOULD NOT consider the representation to be the authoritative representation + of the resource denoted by the target URI if the target URI is not + equivalent to or a sub-path of the URI used to request the resource + representation which contains the target URI with the "self" link. +
+ For example, if a hyper schema was defined: + + + +
+ +
+ And a resource was requested from somesite.com: + + + +
+ +
+ With a response of: + + + +
+
+
+ +
+ The proposed MIME media type for JSON Schema is "application/schema+json". + Type name: application + Subtype name: schema+json + Required parameters: profile + + The value of the profile parameter SHOULD be a URI (relative or absolute) that + refers to the schema used to define the structure of this structure (the + meta-schema). Normally the value would be http://json-schema.org/draft-03/hyper-schema, + but it is allowable to use other schemas that extend the hyper schema's meta- + schema. + + Optional parameters: pretty + The value of the pretty parameter MAY be true or false to indicate if additional whitespace has been included to make the JSON representation easier to read. + +
+ + This registry is maintained by IANA per RFC 4287 and this specification adds + four values: "full", "create", "instances", "root". New + assignments are subject to IESG Approval, as outlined in RFC 5226. + Requests should be made by email to IANA, which will then forward the + request to the IESG, requesting approval. + +
+
+
+ + + + + &rfc2045; + &rfc2119; + &rfc2396; + &rfc3339; + &rfc3986; + &rfc4287; + + + &rfc2616; + &rfc4627; + &rfc5226; + &iddiscovery; + &uritemplate; + &linkheader; + &html401; + &css21; + + +
+ + + + + Added example and verbiage to "extends" attribute. + Defined slash-delimited to use a leading slash. + Made "root" a relation instead of an attribute. + Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types). + Added more explanation of nullability. + Removed "alternate" attribute. + Upper cased many normative usages of must, may, and should. + Replaced the link submission "properties" attribute to "schema" attribute. + Replaced "optional" attribute with "required" attribute. + Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute. + Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute. + Replaced "requires" attribute with "dependencies" attribute. + Moved "contentEncoding" attribute to hyper schema. + Added "additionalItems" attribute. + Added "id" attribute. + Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template. + Added "patternProperties" attribute. + Schema URIs are now namespace versioned. + Added "$ref" and "$schema" attributes. + + + + + + Replaced "maxDecimal" attribute with "divisibleBy" attribute. + Added slash-delimited fragment resolution protocol and made it the default. + Added language about using links outside of schemas by referencing its normative URI. + Added "uniqueItems" attribute. + Added "targetSchema" attribute to link description object. + + + + + + Fixed category and updates from template. + + + + + + Initial draft. + + + + +
+ +
+ + + Should we give a preference to MIME headers over Link headers (or only use one)? + Should "root" be a MIME parameter? + Should "format" be renamed to "mediaType" or "contentType" to reflect the usage MIME media types that are allowed? + How should dates be handled? + + +
+
+
diff --git a/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-04.xml b/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-04.xml new file mode 100644 index 0000000..8ede6bf --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/draft-zyp-json-schema-04.xml @@ -0,0 +1,1072 @@ + + + + + + + + + + + + + + +]> + + + + + + + + + A JSON Media Type for Describing the Structure and Meaning of JSON Documents + + + SitePen (USA) +
+ + 530 Lytton Avenue + Palo Alto, CA 94301 + USA + + +1 650 968 8787 + kris@sitepen.com +
+
+ + +
+ + + Calgary, AB + Canada + + gary.court@gmail.com +
+
+ + + Internet Engineering Task Force + JSON + Schema + JavaScript + Object + Notation + Hyper Schema + Hypermedia + + + + JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json", + a JSON based format for defining the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + + +
+ + +
+ + JSON (JavaScript Object Notation) Schema is a JSON media type for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + +
+ +
+ + + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", + "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be + interpreted as described in RFC 2119. + + + + The terms "JSON", "JSON text", "JSON value", "member", "element", "object", + "array", "number", "string", "boolean", "true", "false", and "null" in this + document are to be interpreted as defined in RFC 4627. + + + + This specification also uses the following defined terms: + + + A JSON Schema object. + Equivalent to "JSON value" as defined in RFC 4627. + Equivalent to "member" as defined in RFC 4627. + Equivalent to "element" as defined in RFC 4627. + A property of a JSON Schema object. + + +
+ +
+ + JSON Schema defines the media type "application/schema+json" for + describing the structure of JSON text. JSON Schemas are also written in JSON and include facilities + for describing the structure of JSON in terms of + allowable values, descriptions, and interpreting relations with other resources. + + + This document is organized into several separate definitions. The first + definition is the core schema specification. This definition is primarily + concerned with describing a JSON structure and specifying valid elements + in the structure. The second definition is the Hyper Schema specification, + which is intended to define elements in a structure that can be interpreted as + hyperlinks. + Hyper Schema builds on JSON Schema to describe the hyperlink structure of + JSON values. This allows user agents to successfully navigate + documents containing JSON based on their schemas. + + + Cumulatively JSON Schema acts as meta-JSON that can be used to define the + required type and constraints on JSON values, as well as define the meaning + of the JSON values for the purpose of describing a resource and determining + hyperlinks within the representation. +
+ An example JSON Schema that describes products might look like: + + + + + This schema defines the properties of the instance, + the required properties (id, name, and price), as well as an optional + property (tags). This also defines the link relations of the instance. + +
+ +
+ + The JSON Schema media type does not attempt to dictate the structure of JSON + values that contain data, but rather provides a separate format + for flexibly communicating how a JSON value should be + interpreted and validated, such that user agents can properly understand + acceptable structures and extrapolate hyperlink information + from the JSON. It is acknowledged that JSON values come + in a variety of structures, and JSON is unique in that the structure + of stored data structures often prescribes a non-ambiguous definite + JSON representation. Attempting to force a specific structure is generally + not viable, and therefore JSON Schema allows for a great flexibility + in the structure of the JSON data that it describes. + + + This specification is protocol agnostic. + The underlying protocol (such as HTTP) should sufficiently define the + semantics of the client-server interface, the retrieval of resource + representations linked to by JSON representations, and modification of + those resources. The goal of this + format is to sufficiently describe JSON structures such that one can + utilize existing information available in existing JSON + representations from a large variety of services that leverage a representational state transfer + architecture using existing protocols. + +
+
+ +
+ + JSON values are correlated to their schema by the "describedby" + relation, where the schema is the target of the relation. + JSON values MUST be of the "application/json" media type or + any other subtype. Consequently, dictating how a JSON value should + specify the relation to the schema is beyond the normative scope + of this document since this document specifically defines the JSON + Schema media type, and no other. It is RECOMMENDED that JSON values + specify their schema so that user agents can interpret the instance + and retain the self-descriptive characteristics. This avoids the need for out-of-band information about + instance data. Two approaches are recommended for declaring the + relation to the schema that describes the meaning of a JSON instance's (or collection + of instances) structure. A MIME type parameter named + "profile" or a relation of "describedby" (which could be specified by a Link header) may be used: +
+ + + +
+ + or if the content is being transferred by a protocol (such as HTTP) that + provides headers, a Link header can be used: + +
+ +; rel="describedby" +]]> + +
+ + Instances MAY specify multiple schemas, to indicate all the schemas that + are applicable to the data, and the data SHOULD be valid by all the schemas. + The instance data MAY have multiple schemas + that it is described by (the instance data SHOULD be valid for those schemas). + Or if the document is a collection of instances, the collection MAY contain + instances from different schemas. The mechanism for referencing a schema is + determined by the media type of the instance (if it provides a method for + referencing schemas). +
+ +
+ + JSON Schemas can themselves be described using JSON Schemas. + A self-describing JSON Schema for the core JSON Schema can + be found at http://json-schema.org/schema for the latest version or + http://json-schema.org/draft-04/schema for the draft-04 version. The hyper schema + self-description can be found at http://json-schema.org/hyper-schema + or http://json-schema.org/draft-04/hyper-schema. All schemas + used within a protocol with a media type specified SHOULD include a MIME parameter that refers to the self-descriptive + hyper schema or another schema that extends this hyper schema: + +
+ + + +
+
+
+
+ +
+ + A JSON Schema is a JSON object that defines various attributes + (including usage and valid values) of a JSON value. JSON + Schema has recursive capabilities; there are a number of elements + in the structure that allow for nested JSON Schemas. + + +
+ An example JSON Schema could look like: + + + +
+ + + A JSON Schema object MAY have any of the following optional properties: + + + + + +
+ + This attribute defines what the primitive type or the schema of the instance MUST be in order to validate. + This attribute can take one of two forms: + + + + A string indicating a primitive or simple type. The string MUST be one of the following values: + + + Instance MUST be an object. + Instance MUST be an array. + Instance MUST be a string. + Instance MUST be a number, including floating point numbers. + Instance MUST be the JSON literal "true" or "false". + Instance MUST be the JSON literal "null". Note that without this type, null values are not allowed. + Instance MAY be of any type, including null. + + + + + An array of one or more simple or schema types. + The instance value is valid if it is of the same type as one of the simple types, or valid by one of the schemas, in the array. + + + + If this attribute is not specified, then all value types are accepted. + + +
+ For example, a schema that defines if an instance can be a string or a number would be: + + +
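A reconstructed example (the figure is missing from this copy): { "type": ["string", "number"] }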
+
+ +
+ + This attribute is an object with properties that specify the schemas for the properties of the instance object. + In this attribute's object, each property value MUST be a schema. + When the instance value is an object, the value of the instance's properties MUST be valid according to the schemas with the same property names specified in this attribute. + Objects are unordered, so the order of the instance properties or attribute properties MUST NOT determine validation success. +
+ +
+ + This attribute is an object that defines the schema for a set of property names of an object instance. + The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema. + If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value. + +
+ +
+ This attribute specifies how any instance property that is not explicitly defined by either the "properties" or "patternProperties" attributes (hereafter referred to as "additional properties") is handled. If specified, the value MUST be a schema or a boolean. + If a schema is provided, then all additional properties MUST be valid according to the schema. + If false is provided, then no additional properties are allowed. + The default value is an empty schema, which allows any value for additional properties. +
+ +
+ This attribute provides the allowed items in an array instance. If specified, this attribute MUST be a schema or an array of schemas. + When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema. + When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST be valid according to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute in the same way as "additionalProperties" is for objects.
+ +
+ This attribute specifies how any item in the array instance that is not explicitly defined by "items" (hereafter referred to as "additional items") is handled. If specified, the value MUST be a schema or a boolean. + If a schema is provided: + + If the "items" attribute is unspecified, then all items in the array instance must be valid against this schema. + If the "items" attribute is a schema, then this attribute is ignored. + If the "items" attribute is an array (during tuple typing), then any additional items MUST be valid against this schema. + + + If false is provided, then any additional items in the array are not allowed. + The default value is an empty schema, which allows any value for additional items. +
+ +
+ This attribute is an array of strings that defines all the property names that must exist on the object instance. +
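An illustrative sketch (editor-supplied; the property names are hypothetical): an object instance that must carry "id" and "name" properties could be described with:
{
  "type": "object",
  "required": ["id", "name"]
}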
+ +
+ This attribute is an object that specifies the requirements of a property on an object instance. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value"). + + The dependency value can take one of two forms: + + + + If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value. + If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array. + + + If the dependency value is a schema, then the instance object MUST be valid against the schema. + + + +
+ +
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+ +
+ This attribute defines the minimum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the maximum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the minimum number of properties required on an object instance. +
+ +
+ This attribute defines the maximum number of properties the object instance can have. +
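A hypothetical example (not from the draft) constraining an object instance to between one and three properties:
{
  "type": "object",
  "minProperties": 1,
  "maxProperties": 3
}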
+ +
+ This attribute indicates that all items in an array instance MUST be unique (contains no two identical values). + + Two instances are considered equal if they are both of the same type and: + + + are null; or + are booleans/numbers/strings and have the same value; or + are arrays, contain the same number of items, and each item in the array is equal to the item at the corresponding index in the other array; or + are objects, contain the same property names, and each property in the object is equal to the corresponding property in the other object. + +
+ +
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5 +
+ +
+ When the instance value is a string, this defines the minimum length of the string. +
+ +
+ When the instance value is a string, this defines the maximum length of the string. +
+ +
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems". +
+ +
+ This attribute defines the default value of the instance when the instance is undefined. +
+ +
+ This attribute is a string that provides a short description of the instance property. +
+ +
+ This attribute is a string that provides a full description of the purpose of the instance property.
+ +
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0. +
+ +
+ This attribute takes the same values as the "type" attribute, however if the instance matches the type or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid. +
+ +
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints. + + Conceptually, the behavior of extends can be seen as validating an + instance against all constraints in the extending schema as well as + the extended schema(s). More optimized implementations that merge + schemas are possible, but are not required. Some examples of using "extends": + +
+ + + +
+ +
+ + + +
+
+
+ +
+ + This attribute defines the current URI of this schema (this attribute is + effectively a "self" link). This URI MAY be relative or absolute. If + the URI is relative it is resolved against the current URI of the parent + schema it is contained in. If this schema is not contained in any + parent schema, the current URI of the parent schema is held to be the + URI under which this schema was addressed. If id is missing, the current URI of a schema is + defined to be that of the parent schema. The current URI of the schema + is also used to construct relative references such as for $ref. + +
+ +
+ + This attribute defines a URI of a schema that contains the full representation of this schema. + When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance. + This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema. + +
+ +
+ + This attribute defines a URI of a JSON Schema that is the schema of the current schema. + When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links. + + + + A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior. + Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes.
+
+ +
+ + The following attributes are specified in addition to those + attributes that already provided by the core schema with the specific + purpose of informing user agents of relations between resources based + on JSON data. Just as with JSON + schema attributes, all the attributes in hyper schemas are optional. + Therefore, an empty object is a valid (non-informative) schema, and + essentially describes plain JSON (no constraints on the structures). + Addition of attributes provides additive information for user agents. + + +
+ + The value of the links property MUST be an array, where each item + in the array is a link description object which describes the link + relations of the instances. + + + + +
+ + A link description object is used to describe link relations. In + the context of a schema, it defines the link relations of the + instances of the schema, and can be parameterized by the instance + values. The link description format can be used without JSON Schema, + and use of this format can + be declared by referencing the normative link description + schema as the the schema for the data structure that uses the + links. The URI of the normative link description schema is: + http://json-schema.org/links (latest version) or + http://json-schema.org/draft-04/links (draft-04 version). + + +
+ + The value of the "href" link description property + indicates the target URI of the related resource. The value + of the instance property SHOULD be resolved as a URI-Reference per RFC 3986 + and MAY be a relative URI. The base URI to be used for relative resolution + SHOULD be the URI used to retrieve the instance object (not the schema) + when used within a schema. Also, when links are used within a schema, the URI + SHOULD be parametrized by the property values of the instance + object, if property values exist for the corresponding variables + in the template (otherwise they MAY be provided from alternate sources, like user input). + + + + Instance property values SHOULD be substituted into the URIs where + matching braces ('{', '}') are found surrounding zero or more characters, + creating an expanded URI. Instance property value substitutions are resolved + by using the text between the braces to denote the property name + from the instance to get the value to substitute. + +
+ For example, if an href value is defined: + + + + Then it would be resolved by substituting the value of the "id" property from the instance object.
+ +
+ If the value of the "id" property was "45", the expanded URI would be: + + + +
+ + If matching braces are found with the string "@" (no quotes) between the braces, then the + actual instance value SHOULD be used to replace the braces, rather than a property value. + This should only be used in situations where the instance is a scalar (string, + boolean, or number), and not for objects or arrays. +
+
+ +
+ + The value of the "rel" property indicates the name of the + relation to the target resource. The relation to the target SHOULD be interpreted as specifically from the instance object that the schema (or sub-schema) applies to, not just the top level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to target from the top level resource MUST be indicated with the schema describing the top level JSON representation. + + + + Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define these relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations: + + + + If the relation value is "self", when this property is encountered in + the instance object, the object represents a resource and the instance object is + treated as a full representation of the target resource identified by + the specified URI. + + + + This indicates that the target of the link is the full representation for the instance object. The object that contains this link possibly may not be the full representation. + + + + This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures. + + + + This relation indicates that the target of the link + SHOULD be treated as the root or the body of the representation for the + purposes of user agent interaction or fragment resolution. All other + properties of the instance objects can be regarded as meta-data + descriptions for the data. + + + + + + The following relations are applicable for schemas (the schema as the "from" resource in the relation): + + + This indicates the target resource that represents collection of instances of a schema. + This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST). + + + + +
+ For example, if a schema is defined: + + + +
+ +
+ And if a collection of instance resource's JSON representation was retrieved: + + + +
+ + This would indicate that for the first item in the collection, its own + (self) URI would resolve to "/Resource/thing" and the first item's "up" + relation SHOULD be resolved to the resource at "/Resource/parent". + The "children" collection would be located at "/Resource/?upId=thing". +
+
+ +
+ This property value is a string that defines the templating language used in the "href" attribute. If no templating language is defined, then the default Link Description Object templating language is used.
+ +
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link. +
+ +
+ + The following properties also apply to link definition objects, and + provide functionality analogous to HTML forms, in providing a + means for submitting extra (often user supplied) information to send to a server. + + +
+ + This attribute defines which method can be used to access the target resource. + In an HTTP environment, this would be "GET" or "POST" (other HTTP methods + such as "PUT" and "DELETE" have semantics that are clearly implied by + accessed resources, and do not need to be defined here). + This defaults to "GET". + +
+ +
+ + If present, this property indicates a query media type format that the server + supports for querying or posting to the collection of instances at the target + resource. The query can be + suffixed to the target URI to query the collection with + property-based constraints on the resources that SHOULD be returned from + the server or used to post data to the resource (depending on the method). + +
+ For example, with the following schema: + + + + This indicates that the client can query the server for instances that have a specific name. +
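+ A sketch of the kind of schema referenced above; the path "/Product/" and the "name" property are assumptions for illustration:
+
+     {
+         "links": [
+             {
+                 "rel": "instances",
+                 "href": "/Product/",
+                 "method": "GET",
+                 "enctype": "application/x-www-form-urlencoded",
+                 "schema": {
+                     "properties": {
+                         "name": {"type": "string", "description": "name of the instances to find"}
+                     }
+                 }
+             }
+         ]
+     }
+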
+ +
+ For example: + + + +
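+ Under those assumptions, a user agent might build the query by suffixing the URL-encoded constraint to the target URI, for example:
+
+     var href = "/Product/";                              // target from the link definition above (assumed)
+     var query = "name=" + encodeURIComponent("Slinky");  // client-supplied constraint (assumed value)
+     var target = href + "?" + query;                     // "/Product/?name=Slinky"
+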
+ + If no enctype or method is specified, only the single URI specified by + the href property is defined. If the method is POST, "application/json" is + the default media type. +
+
+ +
+ + This attribute contains a schema which defines the acceptable structure of the submitted + request (for a GET request, this schema would define the properties for the query string + and for a POST request, this would define the body). + +
+
+
+
+ +
+ + This property indicates the fragment resolution protocol to use for + resolving fragment identifiers in URIs within the instance + representations. This applies to the instance object URIs and all + children of the instance object's URIs. The default fragment resolution + protocol is "json-pointer", which is defined below. Other fragment + resolution protocols MAY be used, but are not defined in this document. + + + + The fragment identifier is based on RFC 3986, Sec 5, and defines the + mechanism for resolving references to entities within a document. + + +
+ The "json-pointer" fragment resolution protocol uses a JSON Pointer to resolve fragment identifiers in URIs within instance representations. +
+
+ + + +
+ This attribute indicates that the instance value SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server. +
+ +
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property. +
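+ For instance, a string instance declared as base64-encoded might be decoded like this in Node (the schema and value are assumptions for illustration):
+
+     var schema = {"type": "string", "contentEncoding": "base64"};
+     var instance = "aGVsbG8=";
+     var decoded = Buffer.from(instance, schema.contentEncoding).toString();   // "hello"
+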
+ +
+ + This attribute is a URI that defines what the instance's URI MUST start with in order to validate. + The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5, + and is relative to the instance's URI. + + + + When multiple schemas have been referenced for an instance, the user agent + can determine if this schema is applicable for a particular instance by + determining if the URI of the instance begins with the value of the "pathStart" + attribute. If the URI of the instance does not start with this URI, + or if another schema specifies a starting URI that is longer and also matches the + instance, this schema SHOULD NOT be applied to the instance. Any schema + that does not have a pathStart attribute SHOULD be considered applicable + to all the instances for which it is referenced. +
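+ A minimal sketch of that applicability check, with assumed URIs and with "pathStart" taken to be already resolved per RFC 3986:
+
+     var instanceUri = "http://example.com/api/products/42";
+     var pathStart = "http://example.com/api/";
+     var applicable = instanceUri.indexOf(pathStart) === 0;   // true, so this schema MAY apply
+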
+ +
+ This attribute defines the media type of the instance representations that this schema is defining. +
+
+ +
+ + This specification is a sub-type of the JSON format, and + consequently the security considerations are generally the same as RFC 4627. + However, an additional issue is that when the link relation of "self" + is used to denote a full representation of an object, the user agent + SHOULD NOT consider the representation to be the authoritative representation + of the resource denoted by the target URI if the target URI is not + equivalent to or a sub-path of the URI used to request the resource + representation which contains the target URI with the "self" link. +
+ For example, if a hyper schema was defined: + + + +
+ +
+ And a resource was requested from somesite.com: + + + +
+ +
+ With a response of: + + + +
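+ A combined, illustrative sketch of this rule; the schema, request URI, and property values are all assumptions:
+
+     // Hypothetical hyper schema:
+     var hyperSchema = {"links": [{"rel": "self", "href": "{id}"}]};
+
+     // Suppose the collection below was requested from "http://somesite.com/foo/".
+     // The first item's "self" URI resolves to "http://somesite.com/foo/bar", a sub-path of
+     // the request URI, so it may be treated as authoritative; the second resolves to another
+     // site and SHOULD NOT be treated as authoritative without fetching it from its own URI.
+     var response = [
+         {"id": "bar"},
+         {"id": "http://othersite.com/baz"}
+     ];
+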
+
+
+ +
+ The proposed MIME media type for JSON Schema is "application/schema+json". + Type name: application + Subtype name: schema+json + Required parameters: profile + + The value of the profile parameter SHOULD be a URI (relative or absolute) that + refers to the schema used to define the structure of this representation (the + meta-schema). Normally the value would be http://json-schema.org/draft-04/hyper-schema, + but it is allowable to use other schemas that extend the hyper schema's + meta-schema. + + Optional parameters: pretty + The value of the pretty parameter MAY be true or false to indicate if additional whitespace has been included to make the JSON representation easier to read. +
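+ For example, a response carrying a hyper schema might declare the proposed media type and its required profile parameter like this (a sketch of the header only):
+
+     Content-Type: application/schema+json; profile="http://json-schema.org/draft-04/hyper-schema"
+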
+ + This registry is maintained by IANA per RFC 4287 and this specification adds + four values: "full", "create", "instances", "root". New + assignments are subject to IESG Approval, as outlined in RFC 5226. + Requests should be made by email to IANA, which will then forward the + request to the IESG, requesting approval. + +
+
+
+ + + + + &rfc2045; + &rfc2119; + &rfc3339; + &rfc3986; + &rfc4287; + + + JSON Pointer + + ForgeRock US, Inc. + + + SitePen (USA) + + + + + + + &rfc2616; + &rfc4627; + &rfc5226; + &iddiscovery; + &uritemplate; + &linkheader; + &html401; + &css21; + + +
+ + + + + Changed "required" attribute to an array of strings. + Removed "format" attribute. + Added "minProperties" and "maxProperties" attributes. + Replaced "slash-delimited" fragment resolution with "json-pointer". + Added "template" LDO attribute. + Removed irrelevant "Open Issues" section. + Merged Conventions and Terminology sections. + Defined terms used in specification. + Removed "integer" type in favor of {"type":"number", "divisibleBy":1}. + Restricted "type" to only the core JSON types. + Improved wording of many sections. + + + + + + Added example and verbiage to "extends" attribute. + Defined slash-delimited to use a leading slash. + Made "root" a relation instead of an attribute. + Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types). + Added more explanation of nullability. + Removed "alternate" attribute. + Upper cased many normative usages of must, may, and should. + Replaced the link submission "properties" attribute to "schema" attribute. + Replaced "optional" attribute with "required" attribute. + Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute. + Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute. + Replaced "requires" attribute with "dependencies" attribute. + Moved "contentEncoding" attribute to hyper schema. + Added "additionalItems" attribute. + Added "id" attribute. + Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template. + Added "patternProperties" attribute. + Schema URIs are now namespace versioned. + Added "$ref" and "$schema" attributes. + + + + + + Replaced "maxDecimal" attribute with "divisibleBy" attribute. + Added slash-delimited fragment resolution protocol and made it the default. + Added language about using links outside of schemas by referencing its normative URI. + Added "uniqueItems" attribute. + Added "targetSchema" attribute to link description object. + + + + + + Fixed category and updates from template. + + + + + + Initial draft. + + + + +
+
+
diff --git a/node_modules/fsevents/node_modules/json-schema/lib/links.js b/node_modules/fsevents/node_modules/json-schema/lib/links.js new file mode 100644 index 0000000..1bea0aa --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/lib/links.js @@ -0,0 +1,52 @@ +/** + * JSON Schema link handler + * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com) + * Licensed under the MIT (MIT-LICENSE.txt) license. + */ +({define:typeof define!="undefined"?define:function(deps, factory){module.exports = factory();}}). +define([], function(){ +var exports = {}; +exports.cacheLinks = true; +exports.getLink = function(relation, instance, schema){ + // gets the URI of the link for the given relation based on the instance and schema + // for example: + // getLink( + // "brother", + // {"brother_id":33}, + // {links:[{rel:"brother", href:"Brother/{brother_id}"}]}) -> + // "Brother/33" + var links = schema.__linkTemplates; + if(!links){ + links = {}; + var schemaLinks = schema.links; + if(schemaLinks && schemaLinks instanceof Array){ + schemaLinks.forEach(function(link){ + /* // TODO: allow for multiple same-name relations + if(links[link.rel]){ + if(!(links[link.rel] instanceof Array)){ + links[link.rel] = [links[link.rel]]; + } + }*/ + links[link.rel] = link.href; + }); + } + if(exports.cacheLinks){ + schema.__linkTemplates = links; + } + } + var linkTemplate = links[relation]; + return linkTemplate && exports.substitute(linkTemplate, instance); +}; + +exports.substitute = function(linkTemplate, instance){ + return linkTemplate.replace(/\{([^\}]*)\}/g, function(t, property){ + var value = instance[decodeURIComponent(property)]; + if(value instanceof Array){ + // the value is an array, it should produce a URI like /Table/(4,5,8) and store.get() should handle that as an array of values + return '(' + value.join(',') + ')'; + } + return value; + }); +}; +return exports; +}); \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/json-schema/lib/validate.js b/node_modules/fsevents/node_modules/json-schema/lib/validate.js new file mode 100644 index 0000000..55d4c3a --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/lib/validate.js @@ -0,0 +1,260 @@ +/** + * JSONSchema Validator - Validates JavaScript objects using JSON Schemas + * (http://www.json.com/json-schema-proposal/) + * + * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com) + * Licensed under the MIT (MIT-LICENSE.txt) license. +To use the validator call the validate function with an instance object and an optional schema object. +If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating), +that schema will be used to validate and the schema parameter is not necessary (if both exist, +both validations will occur). +The validate method will return an array of validation errors. If there are no errors, then an +empty list will be returned. A validation error will have two properties: +"property" which indicates which property had the error +"message" which indicates what the error was + */ +({define:typeof define!="undefined"?define:function(deps, factory){module.exports = factory();}}). 
+define([], function(){ +var exports = validate; +// setup primitive classes to be JSON Schema types +exports.Integer = {type:"integer"}; +var primitiveConstructors = { + String: String, + Boolean: Boolean, + Number: Number, + Object: Object, + Array: Array, + Date: Date +} +exports.validate = validate; +function validate(/*Any*/instance,/*Object*/schema) { + // Summary: + // To use the validator call JSONSchema.validate with an instance object and an optional schema object. + // If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating), + // that schema will be used to validate and the schema parameter is not necessary (if both exist, + // both validations will occur). + // The validate method will return an object with two properties: + // valid: A boolean indicating if the instance is valid by the schema + // errors: An array of validation errors. If there are no errors, then an + // empty list will be returned. A validation error will have two properties: + // property: which indicates which property had the error + // message: which indicates what the error was + // + return validate(instance, schema, {changing: false});//, coerce: false, existingOnly: false}); + }; +exports.checkPropertyChange = function(/*Any*/value,/*Object*/schema, /*String*/property) { + // Summary: + // The checkPropertyChange method will check to see if an value can legally be in property with the given schema + // This is slightly different than the validate method in that it will fail if the schema is readonly and it will + // not check for self-validation, it is assumed that the passed in value is already internally valid. + // The checkPropertyChange method will return the same object type as validate, see JSONSchema.validate for + // information. + // + return validate(value, schema, {changing: property || "property"}); + }; +var validate = exports._validate = function(/*Any*/instance,/*Object*/schema,/*Object*/options) { + + if (!options) options = {}; + var _changing = options.changing; + + function getType(schema){ + return schema.type || (primitiveConstructors[schema.name] == schema && schema.name.toLowerCase()); + } + var errors = []; + // validate a value against a property definition + function checkProp(value, schema, path,i){ + + var l; + path += path ? typeof i == 'number' ? '[' + i + ']' : typeof i == 'undefined' ? '' : '.' + i : i; + function addError(message){ + errors.push({property:path,message:message}); + } + + if((typeof schema != 'object' || schema instanceof Array) && (path || typeof schema != 'function') && !(schema && getType(schema))){ + if(typeof schema == 'function'){ + if(!(value instanceof schema)){ + addError("is not an instance of the class/constructor " + schema.name); + } + }else if(schema){ + addError("Invalid schema/property definition " + schema); + } + return null; + } + if(_changing && schema.readonly){ + addError("is a readonly field, it can not be changed"); + } + if(schema['extends']){ // if it extends another schema, it must pass that schema as well + checkProp(value,schema['extends'],path,i); + } + // validate a value against a type definition + function checkType(type,value){ + if(type){ + if(typeof type == 'string' && type != 'any' && + (type == 'null' ? 
value !== null : typeof value != type) && + !(value instanceof Array && type == 'array') && + !(value instanceof Date && type == 'date') && + !(type == 'integer' && value%1===0)){ + return [{property:path,message:(typeof value) + " value found, but a " + type + " is required"}]; + } + if(type instanceof Array){ + var unionErrors=[]; + for(var j = 0; j < type.length; j++){ // a union type + if(!(unionErrors=checkType(type[j],value)).length){ + break; + } + } + if(unionErrors.length){ + return unionErrors; + } + }else if(typeof type == 'object'){ + var priorErrors = errors; + errors = []; + checkProp(value,type,path); + var theseErrors = errors; + errors = priorErrors; + return theseErrors; + } + } + return []; + } + if(value === undefined){ + if(schema.required){ + addError("is missing and it is required"); + } + }else{ + errors = errors.concat(checkType(getType(schema),value)); + if(schema.disallow && !checkType(schema.disallow,value).length){ + addError(" disallowed value was matched"); + } + if(value !== null){ + if(value instanceof Array){ + if(schema.items){ + var itemsIsArray = schema.items instanceof Array; + var propDef = schema.items; + for (i = 0, l = value.length; i < l; i += 1) { + if (itemsIsArray) + propDef = schema.items[i]; + if (options.coerce) + value[i] = options.coerce(value[i], propDef); + errors.concat(checkProp(value[i],propDef,path,i)); + } + } + if(schema.minItems && value.length < schema.minItems){ + addError("There must be a minimum of " + schema.minItems + " in the array"); + } + if(schema.maxItems && value.length > schema.maxItems){ + addError("There must be a maximum of " + schema.maxItems + " in the array"); + } + }else if(schema.properties || schema.additionalProperties){ + errors.concat(checkObj(value, schema.properties, path, schema.additionalProperties)); + } + if(schema.pattern && typeof value == 'string' && !value.match(schema.pattern)){ + addError("does not match the regex pattern " + schema.pattern); + } + if(schema.maxLength && typeof value == 'string' && value.length > schema.maxLength){ + addError("may only be " + schema.maxLength + " characters long"); + } + if(schema.minLength && typeof value == 'string' && value.length < schema.minLength){ + addError("must be at least " + schema.minLength + " characters long"); + } + if(typeof schema.minimum !== undefined && typeof value == typeof schema.minimum && + schema.minimum > value){ + addError("must have a minimum value of " + schema.minimum); + } + if(typeof schema.maximum !== undefined && typeof value == typeof schema.maximum && + schema.maximum < value){ + addError("must have a maximum value of " + schema.maximum); + } + if(schema['enum']){ + var enumer = schema['enum']; + l = enumer.length; + var found; + for(var j = 0; j < l; j++){ + if(enumer[j]===value){ + found=1; + break; + } + } + if(!found){ + addError("does not have a value in the enumeration " + enumer.join(", ")); + } + } + if(typeof schema.maxDecimal == 'number' && + (value.toString().match(new RegExp("\\.[0-9]{" + (schema.maxDecimal + 1) + ",}")))){ + addError("may only have " + schema.maxDecimal + " digits of decimal places"); + } + } + } + return null; + } + // validate an object against a schema + function checkObj(instance,objTypeDef,path,additionalProp){ + + if(typeof objTypeDef =='object'){ + if(typeof instance != 'object' || instance instanceof Array){ + errors.push({property:path,message:"an object is required"}); + } + + for(var i in objTypeDef){ + if(objTypeDef.hasOwnProperty(i)){ + var value = instance[i]; + // skip _not_ 
specified properties + if (value === undefined && options.existingOnly) continue; + var propDef = objTypeDef[i]; + // set default + if(value === undefined && propDef["default"]){ + value = instance[i] = propDef["default"]; + } + if(options.coerce && i in instance){ + value = instance[i] = options.coerce(value, propDef); + } + checkProp(value,propDef,path,i); + } + } + } + for(i in instance){ + if(instance.hasOwnProperty(i) && !(i.charAt(0) == '_' && i.charAt(1) == '_') && objTypeDef && !objTypeDef[i] && additionalProp===false){ + if (options.filter) { + delete instance[i]; + continue; + } else { + errors.push({property:path,message:(typeof value) + "The property " + i + + " is not defined in the schema and the schema does not allow additional properties"}); + } + } + var requires = objTypeDef && objTypeDef[i] && objTypeDef[i].requires; + if(requires && !(requires in instance)){ + errors.push({property:path,message:"the presence of the property " + i + " requires that " + requires + " also be present"}); + } + value = instance[i]; + if(additionalProp && (!(objTypeDef && typeof objTypeDef == 'object') || !(i in objTypeDef))){ + if(options.coerce){ + value = instance[i] = options.coerce(value, additionalProp); + } + checkProp(value,additionalProp,path,i); + } + if(!_changing && value && value.$schema){ + errors = errors.concat(checkProp(value,value.$schema,path,i)); + } + } + return errors; + } + if(schema){ + checkProp(instance,schema,'',_changing || ''); + } + if(!_changing && instance && instance.$schema){ + checkProp(instance,instance.$schema,'',''); + } + return {valid:!errors.length,errors:errors}; +}; +exports.mustBeValid = function(result){ + // summary: + // This checks to ensure that the result is valid and will throw an appropriate error message if it is not + // result: the result returned from checkPropertyChange or validate + if(!result.valid){ + throw new TypeError(result.errors.map(function(error){return "for property " + error.property + ': ' + error.message;}).join(", \n")); + } +} + +return exports; +}); diff --git a/node_modules/fsevents/node_modules/json-schema/package.json b/node_modules/fsevents/node_modules/json-schema/package.json new file mode 100644 index 0000000..274fd62 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/package.json @@ -0,0 +1,86 @@ +{ + "_args": [ + [ + "json-schema@0.2.2", + "/Users/eshanker/Code/fsevents/node_modules/jsprim" + ] + ], + "_from": "json-schema@0.2.2", + "_id": "json-schema@0.2.2", + "_inCache": true, + "_installable": true, + "_location": "/json-schema", + "_npmUser": { + "email": "kriszyp@gmail.com", + "name": "kriszyp" + }, + "_npmVersion": "1.1.59", + "_phantomChildren": {}, + "_requested": { + "name": "json-schema", + "raw": "json-schema@0.2.2", + "rawSpec": "0.2.2", + "scope": null, + "spec": "0.2.2", + "type": "version" + }, + "_requiredBy": [ + "/jsprim" + ], + "_resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz", + "_shasum": "50354f19f603917c695f70b85afa77c3b0f23506", + "_shrinkwrap": null, + "_spec": "json-schema@0.2.2", + "_where": "/Users/eshanker/Code/fsevents/node_modules/jsprim", + "author": { + "name": "Kris Zyp" + }, + "bugs": { + "url": "https://github.com/kriszyp/json-schema/issues" + }, + "dependencies": {}, + "description": "JSON Schema validation and specifications", + "devDependencies": { + "vows": "*" + }, + "directories": { + "lib": "./lib" + }, + "dist": { + "shasum": "50354f19f603917c695f70b85afa77c3b0f23506", + "tarball": 
"https://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz" + }, + "homepage": "https://github.com/kriszyp/json-schema#readme", + "keywords": [ + "json", + "schema" + ], + "licenses": [ + { + "type": "AFLv2.1", + "url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L43" + }, + { + "type": "BSD", + "url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L13" + } + ], + "main": "./lib/validate.js", + "maintainers": [ + { + "name": "kriszyp", + "email": "kriszyp@gmail.com" + } + ], + "name": "json-schema", + "optionalDependencies": {}, + "readme": "JSON Schema is a repository for the JSON Schema specification, reference schemas and a CommonJS implementation of JSON Schema (not the only JavaScript implementation of JSON Schema, JSV is another excellent JavaScript validator).\r\n\r\nCode is licensed under the AFL or BSD license as part of the Persevere \r\nproject which is administered under the Dojo foundation,\r\nand all contributions require a Dojo CLA.", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/kriszyp/json-schema.git" + }, + "scripts": { + "test": "echo TESTS DISABLED vows --spec test/*.js" + }, + "version": "0.2.2" +} diff --git a/node_modules/fsevents/node_modules/json-schema/test/tests.js b/node_modules/fsevents/node_modules/json-schema/test/tests.js new file mode 100644 index 0000000..2938aea --- /dev/null +++ b/node_modules/fsevents/node_modules/json-schema/test/tests.js @@ -0,0 +1,95 @@ +var assert = require('assert'); +var vows = require('vows'); +var path = require('path'); +var fs = require('fs'); + +var validate = require('../lib/validate').validate; + + +var revision = 'draft-03'; +var schemaRoot = path.join(__dirname, '..', revision); +var schemaNames = ['schema', 'hyper-schema', 'links', 'json-ref' ]; +var schemas = {}; + +schemaNames.forEach(function(name) { + var file = path.join(schemaRoot, name); + schemas[name] = loadSchema(file); +}); + +schemaNames.forEach(function(name) { + var s, n = name+'-nsd', f = path.join(schemaRoot, name); + schemas[n] = loadSchema(f); + s = schemas[n]; + delete s['$schema']; +}); + +function loadSchema(path) { + var data = fs.readFileSync(path, 'utf-8'); + var schema = JSON.parse(data); + return schema; +} + +function resultIsValid() { + return function(result) { + assert.isObject(result); + //assert.isBoolean(result.valid); + assert.equal(typeof(result.valid), 'boolean'); + assert.isArray(result.errors); + for (var i = 0; i < result.errors.length; i++) { + assert.notEqual(result.errors[i], null, 'errors['+i+'] is null'); + } + } +} + +function assertValidates(doc, schema) { + var context = {}; + + context[': validate('+doc+', '+schema+')'] = { + topic: validate(schemas[doc], schemas[schema]), + 'returns valid result': resultIsValid(), + 'with valid=true': function(result) { assert.equal(result.valid, true); }, + 'and no errors': function(result) { + // XXX work-around for bug in vows: [null] chokes it + if (result.errors[0] == null) assert.fail('(errors contains null)'); + assert.length(result.errors, 0); + } + }; + + return context; +} + +function assertSelfValidates(doc) { + var context = {}; + + context[': validate('+doc+')'] = { + topic: validate(schemas[doc]), + 'returns valid result': resultIsValid(), + 'with valid=true': function(result) { assert.equal(result.valid, true); }, + 'and no errors': function(result) { assert.length(result.errors, 0); } + }; + + return context; +} + +var suite = vows.describe('JSON Schema').addBatch({ + 'Core-NSD self-validates': 
assertSelfValidates('schema-nsd'), + 'Core-NSD/Core-NSD': assertValidates('schema-nsd', 'schema-nsd'), + 'Core-NSD/Core': assertValidates('schema-nsd', 'schema'), + + 'Core self-validates': assertSelfValidates('schema'), + 'Core/Core': assertValidates('schema', 'schema'), + + 'Hyper-NSD self-validates': assertSelfValidates('hyper-schema-nsd'), + 'Hyper self-validates': assertSelfValidates('hyper-schema'), + 'Hyper/Hyper': assertValidates('hyper-schema', 'hyper-schema'), + 'Hyper/Core': assertValidates('hyper-schema', 'schema'), + + 'Links-NSD self-validates': assertSelfValidates('links-nsd'), + 'Links self-validates': assertSelfValidates('links'), + 'Links/Hyper': assertValidates('links', 'hyper-schema'), + 'Links/Core': assertValidates('links', 'schema'), + + 'Json-Ref self-validates': assertSelfValidates('json-ref'), + 'Json-Ref/Hyper': assertValidates('json-ref', 'hyper-schema'), + 'Json-Ref/Core': assertValidates('json-ref', 'schema') +}).export(module); diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/.npmignore b/node_modules/fsevents/node_modules/json-stringify-safe/.npmignore new file mode 100644 index 0000000..17d6b36 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/.npmignore @@ -0,0 +1 @@ +/*.tgz diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/CHANGELOG.md b/node_modules/fsevents/node_modules/json-stringify-safe/CHANGELOG.md new file mode 100644 index 0000000..42bcb60 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/CHANGELOG.md @@ -0,0 +1,14 @@ +## Unreleased +- Fixes stringify to only take ancestors into account when checking + circularity. + It previously assumed every visited object was circular which led to [false + positives][issue9]. + Uses the tiny serializer I wrote for [Must.js][must] a year and a half ago. +- Fixes calling the `replacer` function in the proper context (`thisArg`). +- Fixes calling the `cycleReplacer` function in the proper context (`thisArg`). +- Speeds serializing by a factor of + Big-O(h-my-god-it-linearly-searched-every-object) it had ever seen. Searching + only the ancestors for a circular references speeds up things considerably. + +[must]: https://github.com/moll/js-must +[issue9]: https://github.com/isaacs/json-stringify-safe/issues/9 diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/LICENSE b/node_modules/fsevents/node_modules/json-stringify-safe/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/Makefile b/node_modules/fsevents/node_modules/json-stringify-safe/Makefile new file mode 100644 index 0000000..36088c7 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/Makefile @@ -0,0 +1,35 @@ +NODE_OPTS = +TEST_OPTS = + +love: + @echo "Feel like makin' love." + +test: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot $(TEST_OPTS) + +spec: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec $(TEST_OPTS) + +autotest: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot --watch $(TEST_OPTS) + +autospec: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec --watch $(TEST_OPTS) + +pack: + @file=$$(npm pack); echo "$$file"; tar tf "$$file" + +publish: + npm publish + +tag: + git tag "v$$(node -e 'console.log(require("./package").version)')" + +clean: + rm -f *.tgz + npm prune --production + +.PHONY: love +.PHONY: test spec autotest autospec +.PHONY: pack publish tag +.PHONY: clean diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/README.md b/node_modules/fsevents/node_modules/json-stringify-safe/README.md new file mode 100644 index 0000000..a11f302 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/README.md @@ -0,0 +1,52 @@ +# json-stringify-safe + +Like JSON.stringify, but doesn't throw on circular references. + +## Usage + +Takes the same arguments as `JSON.stringify`. + +```javascript +var stringify = require('json-stringify-safe'); +var circularObj = {}; +circularObj.circularRef = circularObj; +circularObj.list = [ circularObj, circularObj ]; +console.log(stringify(circularObj, null, 2)); +``` + +Output: + +```json +{ + "circularRef": "[Circular]", + "list": [ + "[Circular]", + "[Circular]" + ] +} +``` + +## Details + +``` +stringify(obj, serializer, indent, decycler) +``` + +The first three arguments are the same as to JSON.stringify. The last +is an argument that's only used when the object has been seen already. + +The default `decycler` function returns the string `'[Circular]'`. +If, for example, you pass in `function(k,v){}` (return nothing) then it +will prune cycles. If you pass in `function(k,v){ return {foo: 'bar'}}`, +then cyclical objects will always be represented as `{"foo":"bar"}` in +the result. + +``` +stringify.getSerialize(serializer, decycler) +``` + +Returns a serializer that can be used elsewhere. This is the actual +function that's passed to JSON.stringify. + +**Note** that the function returned from `getSerialize` is stateful for now, so +do **not** use it more than once. 
diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/package.json b/node_modules/fsevents/node_modules/json-stringify-safe/package.json new file mode 100644 index 0000000..3b0f91c --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/package.json @@ -0,0 +1,94 @@ +{ + "_args": [ + [ + "json-stringify-safe@~5.0.1", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "json-stringify-safe@>=5.0.1 <5.1.0", + "_id": "json-stringify-safe@5.0.1", + "_inCache": true, + "_installable": true, + "_location": "/json-stringify-safe", + "_nodeVersion": "2.0.1", + "_npmUser": { + "email": "isaacs@npmjs.com", + "name": "isaacs" + }, + "_npmVersion": "2.10.0", + "_phantomChildren": {}, + "_requested": { + "name": "json-stringify-safe", + "raw": "json-stringify-safe@~5.0.1", + "rawSpec": "~5.0.1", + "scope": null, + "spec": ">=5.0.1 <5.1.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "_shasum": "1296a2d58fd45f19a0f6ce01d65701e2c735b6eb", + "_shrinkwrap": null, + "_spec": "json-stringify-safe@~5.0.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me" + }, + "bugs": { + "url": "https://github.com/isaacs/json-stringify-safe/issues" + }, + "contributors": [ + { + "name": "Andri Möll", + "email": "andri@dot.ee", + "url": "http://themoll.com" + } + ], + "dependencies": {}, + "description": "Like JSON.stringify, but doesn't blow up on circular refs.", + "devDependencies": { + "mocha": ">= 2.1.0 < 3", + "must": ">= 0.12 < 0.13", + "sinon": ">= 1.12.2 < 2" + }, + "directories": {}, + "dist": { + "shasum": "1296a2d58fd45f19a0f6ce01d65701e2c735b6eb", + "tarball": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" + }, + "gitHead": "3890dceab3ad14f8701e38ca74f38276abc76de5", + "homepage": "https://github.com/isaacs/json-stringify-safe", + "keywords": [ + "circular", + "json", + "safe", + "stringify" + ], + "license": "ISC", + "main": "stringify.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + }, + { + "name": "moll", + "email": "andri@dot.ee" + } + ], + "name": "json-stringify-safe", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/json-stringify-safe.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "5.0.1" +} diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/stringify.js b/node_modules/fsevents/node_modules/json-stringify-safe/stringify.js new file mode 100644 index 0000000..124a452 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/stringify.js @@ -0,0 +1,27 @@ +exports = module.exports = stringify +exports.getSerialize = serializer + +function stringify(obj, replacer, spaces, cycleReplacer) { + return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces) +} + +function serializer(replacer, cycleReplacer) { + var stack = [], keys = [] + + if (cycleReplacer == null) cycleReplacer = function(key, value) { + if (stack[0] === value) return "[Circular ~]" + return "[Circular ~." + keys.slice(0, stack.indexOf(value)).join(".") + "]" + } + + return function(key, value) { + if (stack.length > 0) { + var thisPos = stack.indexOf(this) + ~thisPos ? stack.splice(thisPos + 1) : stack.push(this) + ~thisPos ? 
keys.splice(thisPos, Infinity, key) : keys.push(key) + if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value) + } + else stack.push(value) + + return replacer == null ? value : replacer.call(this, key, value) + } +} diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/test/mocha.opts b/node_modules/fsevents/node_modules/json-stringify-safe/test/mocha.opts new file mode 100644 index 0000000..2544e58 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/test/mocha.opts @@ -0,0 +1,2 @@ +--recursive +--require must diff --git a/node_modules/fsevents/node_modules/json-stringify-safe/test/stringify_test.js b/node_modules/fsevents/node_modules/json-stringify-safe/test/stringify_test.js new file mode 100644 index 0000000..5b32583 --- /dev/null +++ b/node_modules/fsevents/node_modules/json-stringify-safe/test/stringify_test.js @@ -0,0 +1,246 @@ +var Sinon = require("sinon") +var stringify = require("..") +function jsonify(obj) { return JSON.stringify(obj, null, 2) } + +describe("Stringify", function() { + it("must stringify circular objects", function() { + var obj = {name: "Alice"} + obj.self = obj + var json = stringify(obj, null, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + }) + + it("must stringify circular objects with intermediaries", function() { + var obj = {name: "Alice"} + obj.identity = {self: obj} + var json = stringify(obj, null, 2) + json.must.eql(jsonify({name: "Alice", identity: {self: "[Circular ~]"}})) + }) + + it("must stringify circular objects deeper", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + obj.child.self = obj.child + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + child: {name: "Bob", self: "[Circular ~.child]"} + })) + }) + + it("must stringify circular objects deeper with intermediaries", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + obj.child.identity = {self: obj.child} + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + child: {name: "Bob", identity: {self: "[Circular ~.child]"}} + })) + }) + + it("must stringify circular objects in an array", function() { + var obj = {name: "Alice"} + obj.self = [obj, obj] + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", self: ["[Circular ~]", "[Circular ~]"] + })) + }) + + it("must stringify circular objects deeper in an array", function() { + var obj = {name: "Alice", children: [{name: "Bob"}, {name: "Eve"}]} + obj.children[0].self = obj.children[0] + obj.children[1].self = obj.children[1] + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + children: [ + {name: "Bob", self: "[Circular ~.children.0]"}, + {name: "Eve", self: "[Circular ~.children.1]"} + ] + })) + }) + + it("must stringify circular arrays", function() { + var obj = [] + obj.push(obj) + obj.push(obj) + var json = stringify(obj, null, 2) + json.must.eql(jsonify(["[Circular ~]", "[Circular ~]"])) + }) + + it("must stringify circular arrays with intermediaries", function() { + var obj = [] + obj.push({name: "Alice", self: obj}) + obj.push({name: "Bob", self: obj}) + + stringify(obj, null, 2).must.eql(jsonify([ + {name: "Alice", self: "[Circular ~]"}, + {name: "Bob", self: "[Circular ~]"} + ])) + }) + + it("must stringify repeated objects in objects", function() { + var obj = {} + var alice = {name: "Alice"} + obj.alice1 = alice + obj.alice2 = alice + + stringify(obj, null, 2).must.eql(jsonify({ + alice1: {name: "Alice"}, + alice2: {name: "Alice"} + })) + }) + + it("must stringify repeated objects 
in arrays", function() { + var alice = {name: "Alice"} + var obj = [alice, alice] + var json = stringify(obj, null, 2) + json.must.eql(jsonify([{name: "Alice"}, {name: "Alice"}])) + }) + + it("must call given decycler and use its output", function() { + var obj = {} + obj.a = obj + obj.b = obj + + var decycle = Sinon.spy(function() { return decycle.callCount }) + var json = stringify(obj, null, 2, decycle) + json.must.eql(jsonify({a: 1, b: 2}, null, 2)) + + decycle.callCount.must.equal(2) + decycle.thisValues[0].must.equal(obj) + decycle.args[0][0].must.equal("a") + decycle.args[0][1].must.equal(obj) + decycle.thisValues[1].must.equal(obj) + decycle.args[1][0].must.equal("b") + decycle.args[1][1].must.equal(obj) + }) + + it("must call replacer and use its output", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + + var replacer = Sinon.spy(bangString) + var json = stringify(obj, replacer, 2) + json.must.eql(jsonify({name: "Alice!", child: {name: "Bob!"}})) + + replacer.callCount.must.equal(4) + replacer.args[0][0].must.equal("") + replacer.args[0][1].must.equal(obj) + replacer.thisValues[1].must.equal(obj) + replacer.args[1][0].must.equal("name") + replacer.args[1][1].must.equal("Alice") + replacer.thisValues[2].must.equal(obj) + replacer.args[2][0].must.equal("child") + replacer.args[2][1].must.equal(obj.child) + replacer.thisValues[3].must.equal(obj.child) + replacer.args[3][0].must.equal("name") + replacer.args[3][1].must.equal("Bob") + }) + + it("must call replacer after describing circular references", function() { + var obj = {name: "Alice"} + obj.self = obj + + var replacer = Sinon.spy(bangString) + var json = stringify(obj, replacer, 2) + json.must.eql(jsonify({name: "Alice!", self: "[Circular ~]!"})) + + replacer.callCount.must.equal(3) + replacer.args[0][0].must.equal("") + replacer.args[0][1].must.equal(obj) + replacer.thisValues[1].must.equal(obj) + replacer.args[1][0].must.equal("name") + replacer.args[1][1].must.equal("Alice") + replacer.thisValues[2].must.equal(obj) + replacer.args[2][0].must.equal("self") + replacer.args[2][1].must.equal("[Circular ~]") + }) + + it("must call given decycler and use its output for nested objects", + function() { + var obj = {} + obj.a = obj + obj.b = {self: obj} + + var decycle = Sinon.spy(function() { return decycle.callCount }) + var json = stringify(obj, null, 2, decycle) + json.must.eql(jsonify({a: 1, b: {self: 2}})) + + decycle.callCount.must.equal(2) + decycle.args[0][0].must.equal("a") + decycle.args[0][1].must.equal(obj) + decycle.args[1][0].must.equal("self") + decycle.args[1][1].must.equal(obj) + }) + + it("must use decycler's output when it returned null", function() { + var obj = {a: "b"} + obj.self = obj + obj.selves = [obj, obj] + + function decycle() { return null } + stringify(obj, null, 2, decycle).must.eql(jsonify({ + a: "b", + self: null, + selves: [null, null] + })) + }) + + it("must use decycler's output when it returned undefined", function() { + var obj = {a: "b"} + obj.self = obj + obj.selves = [obj, obj] + + function decycle() {} + stringify(obj, null, 2, decycle).must.eql(jsonify({ + a: "b", + selves: [null, null] + })) + }) + + it("must throw given a decycler that returns a cycle", function() { + var obj = {} + obj.self = obj + var err + function identity(key, value) { return value } + try { stringify(obj, null, 2, identity) } catch (ex) { err = ex } + err.must.be.an.instanceof(TypeError) + }) + + describe(".getSerialize", function() { + it("must stringify circular objects", function() { + var obj = 
{a: "b"} + obj.circularRef = obj + obj.list = [obj, obj] + + var json = JSON.stringify(obj, stringify.getSerialize(), 2) + json.must.eql(jsonify({ + "a": "b", + "circularRef": "[Circular ~]", + "list": ["[Circular ~]", "[Circular ~]"] + })) + }) + + // This is the behavior as of Mar 3, 2015. + // The serializer function keeps state inside the returned function and + // so far I'm not sure how to not do that. JSON.stringify's replacer is not + // called _after_ serialization. + xit("must return a function that could be called twice", function() { + var obj = {name: "Alice"} + obj.self = obj + + var json + var serializer = stringify.getSerialize() + + json = JSON.stringify(obj, serializer, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + + json = JSON.stringify(obj, serializer, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + }) + }) +}) + +function bangString(key, value) { + return typeof value == "string" ? value + "!" : value +} diff --git a/node_modules/fsevents/node_modules/jsonpointer/.travis.yml b/node_modules/fsevents/node_modules/jsonpointer/.travis.yml new file mode 100644 index 0000000..9338bf1 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsonpointer/.travis.yml @@ -0,0 +1,10 @@ +language: "node_js" +node_js: + - 0.6 + - 0.8 + - 0.10 + - 0.11 + - 0.12 + - iojs-v1.0 + - iojs-v2.0 + - iojs diff --git a/node_modules/fsevents/node_modules/jsonpointer/README.md b/node_modules/fsevents/node_modules/jsonpointer/README.md new file mode 100644 index 0000000..e096dfa --- /dev/null +++ b/node_modules/fsevents/node_modules/jsonpointer/README.md @@ -0,0 +1,32 @@ +# JSON Pointer for nodejs + +This is an implementation of [JSON Pointer](http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-08). + +## Usage + + var jsonpointer = require("jsonpointer"); + var obj = { foo: 1, bar: { baz: 2}, qux: [3, 4, 5]}; + var one = jsonpointer.get(obj, "/foo"); + var two = jsonpointer.get(obj, "/bar/baz"); + var three = jsonpointer.get(obj, "/qux/0"); + var four = jsonpointer.get(obj, "/qux/1"); + var five = jsonpointer.get(obj, "/qux/2"); + var notfound = jsonpointer.get(obj, "/quo"); // returns null + + jsonpointer.set(obj, "/foo", 6); // obj.foo = 6; + +## Testing + + $ node test.js + All tests pass. + $ + +[![Build Status](https://travis-ci.org/janl/node-jsonpointer.png?branch=master)](https://travis-ci.org/janl/node-jsonpointer) + +## Author + +(c) 2011 Jan Lehnardt + +## License + +MIT License. diff --git a/node_modules/fsevents/node_modules/jsonpointer/jsonpointer.js b/node_modules/fsevents/node_modules/jsonpointer/jsonpointer.js new file mode 100644 index 0000000..006f85e --- /dev/null +++ b/node_modules/fsevents/node_modules/jsonpointer/jsonpointer.js @@ -0,0 +1,76 @@ +var untilde = function(str) { + return str.replace(/~./g, function(m) { + switch (m) { + case "~0": + return "~"; + case "~1": + return "/"; + } + throw new Error("Invalid tilde escape: " + m); + }); +} + +var traverse = function(obj, pointer, value) { + // assert(isArray(pointer)) + var part = untilde(pointer.shift()); + if(!obj.hasOwnProperty(part)) { + return null; + } + if(pointer.length !== 0) { // keep traversin! 
+ return traverse(obj[part], pointer, value); + } + // we're done + if(typeof value === "undefined") { + // just reading + return obj[part]; + } + // set new value, return old value + var old_value = obj[part]; + if(value === null) { + delete obj[part]; + } else { + obj[part] = value; + } + return old_value; +} + +var validate_input = function(obj, pointer) { + if(typeof obj !== "object") { + throw new Error("Invalid input object."); + } + + if(pointer === "") { + return []; + } + + if(!pointer) { + throw new Error("Invalid JSON pointer."); + } + + pointer = pointer.split("/"); + var first = pointer.shift(); + if (first !== "") { + throw new Error("Invalid JSON pointer."); + } + + return pointer; +} + +var get = function(obj, pointer) { + pointer = validate_input(obj, pointer); + if (pointer.length === 0) { + return obj; + } + return traverse(obj, pointer); +} + +var set = function(obj, pointer, value) { + pointer = validate_input(obj, pointer); + if (pointer.length === 0) { + throw new Error("Invalid JSON pointer for set.") + } + return traverse(obj, pointer, value); +} + +exports.get = get +exports.set = set diff --git a/node_modules/fsevents/node_modules/jsonpointer/package.json b/node_modules/fsevents/node_modules/jsonpointer/package.json new file mode 100644 index 0000000..a15f066 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsonpointer/package.json @@ -0,0 +1,91 @@ +{ + "_args": [ + [ + "jsonpointer@2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/is-my-json-valid" + ] + ], + "_from": "jsonpointer@2.0.0", + "_id": "jsonpointer@2.0.0", + "_inCache": true, + "_installable": true, + "_location": "/jsonpointer", + "_nodeVersion": "0.10.36", + "_npmUser": { + "email": "marc.brookman@gmail.com", + "name": "marcbachmann" + }, + "_npmVersion": "2.10.1", + "_phantomChildren": {}, + "_requested": { + "name": "jsonpointer", + "raw": "jsonpointer@2.0.0", + "rawSpec": "2.0.0", + "scope": null, + "spec": "2.0.0", + "type": "version" + }, + "_requiredBy": [ + "/is-my-json-valid" + ], + "_resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz", + "_shasum": "3af1dd20fe85463910d469a385e33017d2a030d9", + "_shrinkwrap": null, + "_spec": "jsonpointer@2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/is-my-json-valid", + "author": { + "email": "jan@apache.org", + "name": "Jan Lehnardt" + }, + "bugs": { + "url": "http://github.com/janl/node-jsonpointer/issues" + }, + "contributors": [ + { + "name": "Joe Hildebrand", + "email": "joe-github@cursive.net" + } + ], + "dependencies": {}, + "description": "Simple JSON Addressing.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "3af1dd20fe85463910d469a385e33017d2a030d9", + "tarball": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz" + }, + "engines": { + "node": ">=0.6.0" + }, + "gitHead": "26ea4a5c0fcb6d9a2e87f733403791dd05637af8", + "homepage": "https://github.com/janl/node-jsonpointer#readme", + "license": "MIT", + "main": "./jsonpointer", + "maintainers": [ + { + "name": "jan", + "email": "jan@apache.org" + }, + { + "name": "marcbachmann", + "email": "marc.brookman@gmail.com" + } + ], + "name": "jsonpointer", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/janl/node-jsonpointer.git" + }, + "scripts": { + "test": "node test.js" + }, + "tags": [ + "simple", + "util", + "util", + "utility" + ], + "version": "2.0.0" +} diff --git 
a/node_modules/fsevents/node_modules/jsonpointer/test.js b/node_modules/fsevents/node_modules/jsonpointer/test.js new file mode 100644 index 0000000..1c67d7f --- /dev/null +++ b/node_modules/fsevents/node_modules/jsonpointer/test.js @@ -0,0 +1,98 @@ +var assert = require("assert"); +var jsonpointer = require("./jsonpointer"); + +var obj = { + a: 1, + b: { + c: 2 + }, + d: { + e: [{a:3}, {b:4}, {c:5}] + } +}; + +assert.equal(jsonpointer.get(obj, "/a"), 1); +assert.equal(jsonpointer.get(obj, "/b/c"), 2); +assert.equal(jsonpointer.get(obj, "/d/e/0/a"), 3); +assert.equal(jsonpointer.get(obj, "/d/e/1/b"), 4); +assert.equal(jsonpointer.get(obj, "/d/e/2/c"), 5); + +// set returns old value +assert.equal(jsonpointer.set(obj, "/a", 2), 1); +assert.equal(jsonpointer.set(obj, "/b/c", 3), 2); +assert.equal(jsonpointer.set(obj, "/d/e/0/a", 4), 3); +assert.equal(jsonpointer.set(obj, "/d/e/1/b", 5), 4); +assert.equal(jsonpointer.set(obj, "/d/e/2/c", 6), 5); + +assert.equal(jsonpointer.get(obj, "/a"), 2); +assert.equal(jsonpointer.get(obj, "/b/c"), 3); +assert.equal(jsonpointer.get(obj, "/d/e/0/a"), 4); +assert.equal(jsonpointer.get(obj, "/d/e/1/b"), 5); +assert.equal(jsonpointer.get(obj, "/d/e/2/c"), 6); + +assert.equal(jsonpointer.get(obj, ""), obj); +assert.throws(function(){ jsonpointer.get(obj, "a"); }, validateError); +assert.throws(function(){ jsonpointer.get(obj, "a/"); }, validateError); + +function validateError(err) { + if ( (err instanceof Error) && /Invalid JSON pointer/.test(err.message) ) { + return true; + } +} + +var complexKeys = { + "a/b": { + c: 1 + }, + d: { + "e/f": 2 + }, + "~1": 3, + "01": 4 +} + +assert.equal(jsonpointer.get(complexKeys, "/a~1b/c"), 1); +assert.equal(jsonpointer.get(complexKeys, "/d/e~1f"), 2); +assert.equal(jsonpointer.get(complexKeys, "/~01"), 3); +assert.equal(jsonpointer.get(complexKeys, "/01"), 4); +assert.equal(jsonpointer.get(complexKeys, "/a/b/c"), null); +assert.equal(jsonpointer.get(complexKeys, "/~1"), null); + +// draft-ietf-appsawg-json-pointer-08 has special array rules +var ary = [ "zero", "one", "two" ]; +assert.equal(jsonpointer.get(ary, "/01"), null); + +//assert.equal(jsonpointer.set(ary, "/-", "three"), null); +//assert.equal(ary[3], "three"); + +// Examples from the draft: +var example = { + "foo": ["bar", "baz"], + "": 0, + "a/b": 1, + "c%d": 2, + "e^f": 3, + "g|h": 4, + "i\\j": 5, + "k\"l": 6, + " ": 7, + "m~n": 8 +}; + +assert.equal(jsonpointer.get(example, ""), example); +var ans = jsonpointer.get(example, "/foo"); +assert.equal(ans.length, 2); +assert.equal(ans[0], "bar"); +assert.equal(ans[1], "baz"); +assert.equal(jsonpointer.get(example, "/foo/0"), "bar"); +assert.equal(jsonpointer.get(example, "/"), 0); +assert.equal(jsonpointer.get(example, "/a~1b"), 1); +assert.equal(jsonpointer.get(example, "/c%d"), 2); +assert.equal(jsonpointer.get(example, "/e^f"), 3); +assert.equal(jsonpointer.get(example, "/g|h"), 4); +assert.equal(jsonpointer.get(example, "/i\\j"), 5); +assert.equal(jsonpointer.get(example, "/k\"l"), 6); +assert.equal(jsonpointer.get(example, "/ "), 7); +assert.equal(jsonpointer.get(example, "/m~0n"), 8); + +console.log("All tests pass."); diff --git a/node_modules/fsevents/node_modules/jsprim/CHANGES.md b/node_modules/fsevents/node_modules/jsprim/CHANGES.md new file mode 100644 index 0000000..ab3a664 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsprim/CHANGES.md @@ -0,0 +1,35 @@ +# Changelog + +## not yet released + +None yet. 
+ +## v1.3.0 (2016-06-22) + +* #14 add safer version of hasOwnProperty() +* #15 forEachKey() should ignore inherited properties + +## v1.2.2 (2015-10-15) + +* #11 NPM package shouldn't include any code that does `require('JSV')` +* #12 jsl.node.conf missing definition for "module" + +## v1.2.1 (2015-10-14) + +* #8 odd date parsing behaviour + +## v1.2.0 (2015-10-13) + +* #9 want function for returning RFC1123 dates + +## v1.1.0 (2015-09-02) + +* #6 a new suite of hrtime manipulation routines: `hrtimeAdd()`, + `hrtimeAccum()`, `hrtimeNanosec()`, `hrtimeMicrosec()` and + `hrtimeMillisec()`. + +## v1.0.0 (2015-09-01) + +First tracked release. Includes everything in previous releases, plus: + +* #4 want function for merging objects diff --git a/node_modules/fsevents/node_modules/jsprim/LICENSE b/node_modules/fsevents/node_modules/jsprim/LICENSE new file mode 100644 index 0000000..cbc0bb3 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsprim/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Joyent, Inc. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/fsevents/node_modules/jsprim/README.md b/node_modules/fsevents/node_modules/jsprim/README.md new file mode 100644 index 0000000..7303642 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsprim/README.md @@ -0,0 +1,237 @@ +# jsprim: utilities for primitive JavaScript types + +This module provides miscellaneous facilities for working with strings, +numbers, dates, and objects and arrays of these basic types. + + +### deepCopy(obj) + +Creates a deep copy of a primitive type, object, or array of primitive types. + + +### deepEqual(obj1, obj2) + +Returns whether two objects are equal. + + +### isEmpty(obj) + +Returns true if the given object has no properties and false otherwise. This +is O(1) (unlike `Object.keys(obj).length === 0`, which is O(N)). + +### hasKey(obj, key) + +Returns true if the given object has an enumerable, non-inherited property +called `key`. [For information on enumerability and ownership of properties, see +the MDN +documentation.](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Enumerability_and_ownership_of_properties) + +### forEachKey(obj, callback) + +Like Array.forEach, but iterates enumerable, owned properties of an object +rather than elements of an array. 
Equivalent to: + + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + callback(key, obj[key]); + } + } + + +### flattenObject(obj, depth) + +Flattens an object up to a given level of nesting, returning an array of arrays +of length "depth + 1", where the first "depth" elements correspond to flattened +columns and the last element contains the remaining object . For example: + + flattenObject({ + 'I': { + 'A': { + 'i': { + 'datum1': [ 1, 2 ], + 'datum2': [ 3, 4 ] + }, + 'ii': { + 'datum1': [ 3, 4 ] + } + }, + 'B': { + 'i': { + 'datum1': [ 5, 6 ] + }, + 'ii': { + 'datum1': [ 7, 8 ], + 'datum2': [ 3, 4 ], + }, + 'iii': { + } + } + }, + 'II': { + 'A': { + 'i': { + 'datum1': [ 1, 2 ], + 'datum2': [ 3, 4 ] + } + } + } + }, 3) + +becomes: + + [ + [ 'I', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ], + [ 'I', 'A', 'ii', { 'datum1': [ 3, 4 ] } ], + [ 'I', 'B', 'i', { 'datum1': [ 5, 6 ] } ], + [ 'I', 'B', 'ii', { 'datum1': [ 7, 8 ], 'datum2': [ 3, 4 ] } ], + [ 'I', 'B', 'iii', {} ], + [ 'II', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ] + ] + +This function is strict: "depth" must be a non-negative integer and "obj" must +be a non-null object with at least "depth" levels of nesting under all keys. + + +### flattenIter(obj, depth, func) + +This is similar to `flattenObject` except that instead of returning an array, +this function invokes `func(entry)` for each `entry` in the array that +`flattenObject` would return. `flattenIter(obj, depth, func)` is logically +equivalent to `flattenObject(obj, depth).forEach(func)`. Importantly, this +version never constructs the full array. Its memory usage is O(depth) rather +than O(n) (where `n` is the number of flattened elements). + +There's another difference between `flattenObject` and `flattenIter` that's +related to the special case where `depth === 0`. In this case, `flattenObject` +omits the array wrapping `obj` (which is regrettable). + + +### pluck(obj, key) + +Fetch nested property "key" from object "obj", traversing objects as needed. +For example, `pluck(obj, "foo.bar.baz")` is roughly equivalent to +`obj.foo.bar.baz`, except that: + +1. If traversal fails, the resulting value is undefined, and no error is + thrown. For example, `pluck({}, "foo.bar")` is just undefined. +2. If "obj" has property "key" directly (without traversing), the + corresponding property is returned. For example, + `pluck({ 'foo.bar': 1 }, 'foo.bar')` is 1, not undefined. This is also + true recursively, so `pluck({ 'a': { 'foo.bar': 1 } }, 'a.foo.bar')` is + also 1, not undefined. + + +### randElt(array) + +Returns an element from "array" selected uniformly at random. If "array" is +empty, throws an Error. + + +### startsWith(str, prefix) + +Returns true if the given string starts with the given prefix and false +otherwise. + + +### endsWith(str, suffix) + +Returns true if the given string ends with the given suffix and false +otherwise. + + +### iso8601(date) + +Converts a Date object to an ISO8601 date string of the form +"YYYY-MM-DDTHH:MM:SS.sssZ". This format is not customizable. + + +### parseDateTime(str) + +Parses a date expressed as a string, as either a number of milliseconds since +the epoch or any string format that Date accepts, giving preference to the +former where these two sets overlap (e.g., strings containing small numbers). + + +### hrtimeDiff(timeA, timeB) + +Given two hrtime readings (as from Node's `process.hrtime()`), where timeA is +later than timeB, compute the difference and return that as an hrtime. 
It is +illegal to invoke this for a pair of times where timeB is newer than timeA. + +### hrtimeAdd(timeA, timeB) + +Add two hrtime intervals (as from Node's `process.hrtime()`), returning a new +hrtime interval array. This function does not modify either input argument. + + +### hrtimeAccum(timeA, timeB) + +Add two hrtime intervals (as from Node's `process.hrtime()`), storing the +result in `timeA`. This function overwrites (and returns) the first argument +passed in. + + +### hrtimeNanosec(timeA), hrtimeMicrosec(timeA), hrtimeMillisec(timeA) + +This suite of functions converts a hrtime interval (as from Node's +`process.hrtime()`) into a scalar number of nanoseconds, microseconds or +milliseconds. Results are truncated, as with `Math.floor()`. + + +### validateJsonObject(schema, object) + +Uses JSON validation (via JSV) to validate the given object against the given +schema. On success, returns null. On failure, *returns* (does not throw) a +useful Error object. + + +### extraProperties(object, allowed) + +Check an object for unexpected properties. Accepts the object to check, and an +array of allowed property name strings. If extra properties are detected, an +array of extra property names is returned. If no properties other than those +in the allowed list are present on the object, the returned array will be of +zero length. + +### mergeObjects(provided, overrides, defaults) + +Merge properties from objects "provided", "overrides", and "defaults". The +intended use case is for functions that accept named arguments in an "args" +object, but want to provide some default values and override other values. In +that case, "provided" is what the caller specified, "overrides" are what the +function wants to override, and "defaults" contains default values. + +The function starts with the values in "defaults", overrides them with the +values in "provided", and then overrides those with the values in "overrides". +For convenience, any of these objects may be falsey, in which case they will be +ignored. The input objects are never modified, but properties in the returned +object are not deep-copied. + +For example: + + mergeObjects(undefined, { 'objectMode': true }, { 'highWaterMark': 0 }) + +returns: + + { 'objectMode': true, 'highWaterMark': 0 } + +For another example: + + mergeObjects( + { 'highWaterMark': 16, 'objectMode': 7 }, /* from caller */ + { 'objectMode': true }, /* overrides */ + { 'highWaterMark': 0 }); /* default */ + +returns: + + { 'objectMode': true, 'highWaterMark': 16 } + + +# Contributing + +Code should be "make check" clean. This target assumes that +[jsl](http://github.com/davepacheco/javascriptlint) and +[jsstyle](http://github.com/davepacheco/jsstyle) are on your path. + +New tests should generally accompany new functions and bug fixes. The tests +should pass cleanly (run tests/basic.js). 
diff --git a/node_modules/fsevents/node_modules/jsprim/lib/jsprim.js b/node_modules/fsevents/node_modules/jsprim/lib/jsprim.js new file mode 100644 index 0000000..26c3ba1 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsprim/lib/jsprim.js @@ -0,0 +1,488 @@ +/* + * lib/jsprim.js: utilities for primitive JavaScript types + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_extsprintf = require('extsprintf'); +var mod_verror = require('verror'); +var mod_jsonschema = require('json-schema'); + +/* + * Public interface + */ +exports.deepCopy = deepCopy; +exports.deepEqual = deepEqual; +exports.isEmpty = isEmpty; +exports.hasKey = hasKey; +exports.forEachKey = forEachKey; +exports.pluck = pluck; +exports.flattenObject = flattenObject; +exports.flattenIter = flattenIter; +exports.validateJsonObject = validateJsonObjectJS; +exports.validateJsonObjectJS = validateJsonObjectJS; +exports.randElt = randElt; +exports.extraProperties = extraProperties; +exports.mergeObjects = mergeObjects; + +exports.startsWith = startsWith; +exports.endsWith = endsWith; + +exports.iso8601 = iso8601; +exports.rfc1123 = rfc1123; +exports.parseDateTime = parseDateTime; + +exports.hrtimediff = hrtimeDiff; +exports.hrtimeDiff = hrtimeDiff; +exports.hrtimeAccum = hrtimeAccum; +exports.hrtimeAdd = hrtimeAdd; +exports.hrtimeNanosec = hrtimeNanosec; +exports.hrtimeMicrosec = hrtimeMicrosec; +exports.hrtimeMillisec = hrtimeMillisec; + + +/* + * Deep copy an acyclic *basic* Javascript object. This only handles basic + * scalars (strings, numbers, booleans) and arbitrarily deep arrays and objects + * containing these. This does *not* handle instances of other classes. + */ +function deepCopy(obj) +{ + var ret, key; + var marker = '__deepCopy'; + + if (obj && obj[marker]) + throw (new Error('attempted deep copy of cyclic object')); + + if (obj && obj.constructor == Object) { + ret = {}; + obj[marker] = true; + + for (key in obj) { + if (key == marker) + continue; + + ret[key] = deepCopy(obj[key]); + } + + delete (obj[marker]); + return (ret); + } + + if (obj && obj.constructor == Array) { + ret = []; + obj[marker] = true; + + for (key = 0; key < obj.length; key++) + ret.push(deepCopy(obj[key])); + + delete (obj[marker]); + return (ret); + } + + /* + * It must be a primitive type -- just return it. 
+ */ + return (obj); +} + +function deepEqual(obj1, obj2) +{ + if (typeof (obj1) != typeof (obj2)) + return (false); + + if (obj1 === null || obj2 === null || typeof (obj1) != 'object') + return (obj1 === obj2); + + if (obj1.constructor != obj2.constructor) + return (false); + + var k; + for (k in obj1) { + if (!obj2.hasOwnProperty(k)) + return (false); + + if (!deepEqual(obj1[k], obj2[k])) + return (false); + } + + for (k in obj2) { + if (!obj1.hasOwnProperty(k)) + return (false); + } + + return (true); +} + +function isEmpty(obj) +{ + var key; + for (key in obj) + return (false); + return (true); +} + +function hasKey(obj, key) +{ + mod_assert.equal(typeof (key), 'string'); + return (Object.prototype.hasOwnProperty.call(obj, key)); +} + +function forEachKey(obj, callback) +{ + for (var key in obj) { + if (hasKey(obj, key)) { + callback(key, obj[key]); + } + } +} + +function pluck(obj, key) +{ + mod_assert.equal(typeof (key), 'string'); + return (pluckv(obj, key)); +} + +function pluckv(obj, key) +{ + if (obj === null || typeof (obj) !== 'object') + return (undefined); + + if (obj.hasOwnProperty(key)) + return (obj[key]); + + var i = key.indexOf('.'); + if (i == -1) + return (undefined); + + var key1 = key.substr(0, i); + if (!obj.hasOwnProperty(key1)) + return (undefined); + + return (pluckv(obj[key1], key.substr(i + 1))); +} + +/* + * Invoke callback(row) for each entry in the array that would be returned by + * flattenObject(data, depth). This is just like flattenObject(data, + * depth).forEach(callback), except that the intermediate array is never + * created. + */ +function flattenIter(data, depth, callback) +{ + doFlattenIter(data, depth, [], callback); +} + +function doFlattenIter(data, depth, accum, callback) +{ + var each; + var key; + + if (depth === 0) { + each = accum.slice(0); + each.push(data); + callback(each); + return; + } + + mod_assert.ok(data !== null); + mod_assert.equal(typeof (data), 'object'); + mod_assert.equal(typeof (depth), 'number'); + mod_assert.ok(depth >= 0); + + for (key in data) { + each = accum.slice(0); + each.push(key); + doFlattenIter(data[key], depth - 1, each, callback); + } +} + +function flattenObject(data, depth) +{ + if (depth === 0) + return ([ data ]); + + mod_assert.ok(data !== null); + mod_assert.equal(typeof (data), 'object'); + mod_assert.equal(typeof (depth), 'number'); + mod_assert.ok(depth >= 0); + + var rv = []; + var key; + + for (key in data) { + flattenObject(data[key], depth - 1).forEach(function (p) { + rv.push([ key ].concat(p)); + }); + } + + return (rv); +} + +function startsWith(str, prefix) +{ + return (str.substr(0, prefix.length) == prefix); +} + +function endsWith(str, suffix) +{ + return (str.substr( + str.length - suffix.length, suffix.length) == suffix); +} + +function iso8601(d) +{ + if (typeof (d) == 'number') + d = new Date(d); + mod_assert.ok(d.constructor === Date); + return (mod_extsprintf.sprintf('%4d-%02d-%02dT%02d:%02d:%02d.%03dZ', + d.getUTCFullYear(), d.getUTCMonth() + 1, d.getUTCDate(), + d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(), + d.getUTCMilliseconds())); +} + +var RFC1123_MONTHS = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']; +var RFC1123_DAYS = [ + 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; + +function rfc1123(date) { + return (mod_extsprintf.sprintf('%s, %02d %s %04d %02d:%02d:%02d GMT', + RFC1123_DAYS[date.getUTCDay()], date.getUTCDate(), + RFC1123_MONTHS[date.getUTCMonth()], date.getUTCFullYear(), + date.getUTCHours(), 
date.getUTCMinutes(), + date.getUTCSeconds())); +} + +/* + * Parses a date expressed as a string, as either a number of milliseconds since + * the epoch or any string format that Date accepts, giving preference to the + * former where these two sets overlap (e.g., small numbers). + */ +function parseDateTime(str) +{ + /* + * This is irritatingly implicit, but significantly more concise than + * alternatives. The "+str" will convert a string containing only a + * number directly to a Number, or NaN for other strings. Thus, if the + * conversion succeeds, we use it (this is the milliseconds-since-epoch + * case). Otherwise, we pass the string directly to the Date + * constructor to parse. + */ + var numeric = +str; + if (!isNaN(numeric)) { + return (new Date(numeric)); + } else { + return (new Date(str)); + } +} + +function validateJsonObjectJS(schema, input) +{ + var report = mod_jsonschema.validate(input, schema); + + if (report.errors.length === 0) + return (null); + + /* Currently, we only do anything useful with the first error. */ + var error = report.errors[0]; + + /* The failed property is given by a URI with an irrelevant prefix. */ + var propname = error['property']; + var reason = error['message'].toLowerCase(); + var i, j; + + /* + * There's at least one case where the property error message is + * confusing at best. We work around this here. + */ + if ((i = reason.indexOf('the property ')) != -1 && + (j = reason.indexOf(' is not defined in the schema and the ' + + 'schema does not allow additional properties')) != -1) { + i += 'the property '.length; + if (propname === '') + propname = reason.substr(i, j - i); + else + propname = propname + '.' + reason.substr(i, j - i); + + reason = 'unsupported property'; + } + + var rv = new mod_verror.VError('property "%s": %s', propname, reason); + rv.jsv_details = error; + return (rv); +} + +function randElt(arr) +{ + mod_assert.ok(Array.isArray(arr) && arr.length > 0, + 'randElt argument must be a non-empty array'); + + return (arr[Math.floor(Math.random() * arr.length)]); +} + +function assertHrtime(a) +{ + mod_assert.ok(a[0] >= 0 && a[1] >= 0, + 'negative numbers not allowed in hrtimes'); + mod_assert.ok(a[1] < 1e9, 'nanoseconds column overflow'); +} + +/* + * Compute the time elapsed between hrtime readings A and B, where A is later + * than B. hrtime readings come from Node's process.hrtime(). There is no + * defined way to represent negative deltas, so it's illegal to diff B from A + * where the time denoted by B is later than the time denoted by A. If this + * becomes valuable, we can define a representation and extend the + * implementation to support it. + */ +function hrtimeDiff(a, b) +{ + assertHrtime(a); + assertHrtime(b); + mod_assert.ok(a[0] > b[0] || (a[0] == b[0] && a[1] >= b[1]), + 'negative differences not allowed'); + + var rv = [ a[0] - b[0], 0 ]; + + if (a[1] >= b[1]) { + rv[1] = a[1] - b[1]; + } else { + rv[0]--; + rv[1] = 1e9 - (b[1] - a[1]); + } + + return (rv); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of nanoseconds. + */ +function hrtimeNanosec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e9 + a[1])); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of microseconds. 
+ */ +function hrtimeMicrosec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e6 + a[1] / 1e3)); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of milliseconds. + */ +function hrtimeMillisec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e3 + a[1] / 1e6)); +} + +/* + * Add two hrtime readings A and B, overwriting A with the result of the + * addition. This function is useful for accumulating several hrtime intervals + * into a counter. Returns A. + */ +function hrtimeAccum(a, b) +{ + assertHrtime(a); + assertHrtime(b); + + /* + * Accumulate the nanosecond component. + */ + a[1] += b[1]; + if (a[1] >= 1e9) { + /* + * The nanosecond component overflowed, so carry to the seconds + * field. + */ + a[0]++; + a[1] -= 1e9; + } + + /* + * Accumulate the seconds component. + */ + a[0] += b[0]; + + return (a); +} + +/* + * Add two hrtime readings A and B, returning the result as a new hrtime array. + * Does not modify either input argument. + */ +function hrtimeAdd(a, b) +{ + assertHrtime(a); + + var rv = [ a[0], a[1] ]; + + return (hrtimeAccum(rv, b)); +} + + +/* + * Check an object for unexpected properties. Accepts the object to check, and + * an array of allowed property names (strings). Returns an array of key names + * that were found on the object, but did not appear in the list of allowed + * properties. If no properties were found, the returned array will be of + * zero length. + */ +function extraProperties(obj, allowed) +{ + mod_assert.ok(typeof (obj) === 'object' && obj !== null, + 'obj argument must be a non-null object'); + mod_assert.ok(Array.isArray(allowed), + 'allowed argument must be an array of strings'); + for (var i = 0; i < allowed.length; i++) { + mod_assert.ok(typeof (allowed[i]) === 'string', + 'allowed argument must be an array of strings'); + } + + return (Object.keys(obj).filter(function (key) { + return (allowed.indexOf(key) === -1); + })); +} + +/* + * Given three sets of properties "provided" (may be undefined), "overrides" + * (required), and "defaults" (may be undefined), construct an object containing + * the union of these sets with "overrides" overriding "provided", and + * "provided" overriding "defaults". None of the input objects are modified. 
+ */ +function mergeObjects(provided, overrides, defaults) +{ + var rv, k; + + rv = {}; + if (defaults) { + for (k in defaults) + rv[k] = defaults[k]; + } + + if (provided) { + for (k in provided) + rv[k] = provided[k]; + } + + if (overrides) { + for (k in overrides) + rv[k] = overrides[k]; + } + + return (rv); +} diff --git a/node_modules/fsevents/node_modules/jsprim/package.json b/node_modules/fsevents/node_modules/jsprim/package.json new file mode 100644 index 0000000..ff7f550 --- /dev/null +++ b/node_modules/fsevents/node_modules/jsprim/package.json @@ -0,0 +1,77 @@ +{ + "_args": [ + [ + "jsprim@^1.2.2", + "/Users/eshanker/Code/fsevents/node_modules/http-signature" + ] + ], + "_from": "jsprim@>=1.2.2 <2.0.0", + "_id": "jsprim@1.3.0", + "_inCache": true, + "_installable": true, + "_location": "/jsprim", + "_nodeVersion": "0.12.7", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/jsprim-1.3.0.tgz_1466708163640_0.5282344303559512" + }, + "_npmUser": { + "email": "dap@cs.brown.edu", + "name": "dap" + }, + "_npmVersion": "2.11.3", + "_phantomChildren": {}, + "_requested": { + "name": "jsprim", + "raw": "jsprim@^1.2.2", + "rawSpec": "^1.2.2", + "scope": null, + "spec": ">=1.2.2 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/http-signature" + ], + "_resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.0.tgz", + "_shasum": "ce2e1bef835204b4f3099928c602f8b6ae615650", + "_shrinkwrap": null, + "_spec": "jsprim@^1.2.2", + "_where": "/Users/eshanker/Code/fsevents/node_modules/http-signature", + "bugs": { + "url": "https://github.com/davepacheco/node-jsprim/issues" + }, + "dependencies": { + "extsprintf": "1.0.2", + "json-schema": "0.2.2", + "verror": "1.3.6" + }, + "description": "utilities for primitive JavaScript types", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "ce2e1bef835204b4f3099928c602f8b6ae615650", + "tarball": "https://registry.npmjs.org/jsprim/-/jsprim-1.3.0.tgz" + }, + "engines": [ + "node >=0.6.0" + ], + "gitHead": "694edcb22e2291c21f6c2a23907bf02e1edbfdf4", + "homepage": "https://github.com/davepacheco/node-jsprim#readme", + "license": "MIT", + "main": "./lib/jsprim.js", + "maintainers": [ + { + "name": "dap", + "email": "dap@cs.brown.edu" + } + ], + "name": "jsprim", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/davepacheco/node-jsprim.git" + }, + "scripts": {}, + "version": "1.3.0" +} diff --git a/node_modules/fsevents/node_modules/mime-db/HISTORY.md b/node_modules/fsevents/node_modules/mime-db/HISTORY.md new file mode 100644 index 0000000..d6705ac --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/HISTORY.md @@ -0,0 +1,341 @@ +1.23.0 / 2016-05-01 +=================== + + * Add `application/efi` + * Add `application/vnd.3gpp.sms+xml` + * Add `application/vnd.3lightssoftware.imagescal` + * Add `application/vnd.coreos.ignition+json` + * Add `application/vnd.desmume.movie` + * Add `application/vnd.onepager` + * Add `application/vnd.vel+json` + * Add `text/prs.prop.logic` + * Add `video/encaprtp` + * Add `video/h265` + * Add `video/iso.segment` + * Add `video/raptorfec` + * Add `video/rtploopback` + * Add `video/vnd.radgamettools.bink` + * Add `video/vnd.radgamettools.smacker` + * Add `video/vp8` + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `application/ppsp-tracker+json` + * Add `application/problem+json` + * Add `application/problem+xml` + * Add 
`application/vnd.hdt` + * Add `application/vnd.ms-printschematicket+xml` + * Add `model/vnd.rosette.annotated-data-model` + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add `application/emergencycalldata.comment+xml` + * Add `application/emergencycalldata.deviceinfo+xml` + * Add `application/emergencycalldata.providerinfo+xml` + * Add `application/emergencycalldata.serviceinfo+xml` + * Add `application/emergencycalldata.subscriberinfo+xml` + * Add `application/vnd.filmit.zfc` + * Add `application/vnd.google-apps.document` + * Add `application/vnd.google-apps.presentation` + * Add `application/vnd.google-apps.spreadsheet` + * Add `application/vnd.mapbox-vector-tile` + * Add `application/vnd.ms-printdevicecapabilities+xml` + * Add `application/vnd.ms-windows.devicepairing` + * Add `application/vnd.ms-windows.nwprinting.oob` + * Add `application/vnd.tml` + * Add `audio/evs` + +1.20.0 / 2015-11-10 +=================== + + * Add `application/cdni` + * Add `application/csvm+json` + * Add `application/rfc+xml` + * Add `application/vnd.3gpp.access-transfer-events+xml` + * Add `application/vnd.3gpp.srvcc-ext+xml` + * Add `application/vnd.ms-windows.wsd.oob` + * Add `application/vnd.oxli.countgraph` + * Add `application/vnd.pagerduty+json` + * Add `text/x-suse-ymp` + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.3gpp-prose-pc3ch+xml` + * Add `application/vnd.3gpp.srvcc-info+xml` + * Add `application/vnd.apple.pkpass` + * Add `application/vnd.drive+json` + +1.18.0 / 2015-09-03 +=================== + + * Add `application/pkcs12` + * Add `application/vnd.3gpp-prose+xml` + * Add `application/vnd.3gpp.mid-call+xml` + * Add `application/vnd.3gpp.state-and-event-info+xml` + * Add `application/vnd.anki` + * Add `application/vnd.firemonkeys.cloudcell` + * Add `application/vnd.openblox.game+xml` + * Add `application/vnd.openblox.game-binary` + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension `.exe` to `application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add `application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add 
`application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add `application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add `application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - `application/vnd.sealedmedia.softseal-html` + - `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 +================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + 
* Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark `application/x-www-form-urlencoded` as compressible. + +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/fsevents/node_modules/mime-db/LICENSE b/node_modules/fsevents/node_modules/mime-db/LICENSE new file mode 100644 index 0000000..a7ae8ee --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/mime-db/README.md b/node_modules/fsevents/node_modules/mime-db/README.md new file mode 100644 index 0000000..7662440 --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/README.md @@ -0,0 +1,82 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a database of all mime types. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. 
+It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [RawGit](https://rawgit.com/). It is recommended to replace +`master` with [a release tag](https://github.com/jshttp/mime-db/tags) as the +JSON format may change in the future. + +``` +https://cdn.rawgit.com/jshttp/mime-db/master/db.json +``` + +## Usage + +```js +var db = require('mime-db'); + +// grab data on .js files +var data = db['application/javascript']; +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom.json` or +`src/custom-suffix.json`. + +To update the build, run `npm run build`. + +## Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. 
+ +[npm-version-image]: https://img.shields.io/npm/v/mime-db.svg +[npm-downloads-image]: https://img.shields.io/npm/dm/mime-db.svg +[npm-url]: https://npmjs.org/package/mime-db +[travis-image]: https://img.shields.io/travis/jshttp/mime-db/master.svg +[travis-url]: https://travis-ci.org/jshttp/mime-db +[coveralls-image]: https://img.shields.io/coveralls/jshttp/mime-db/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://img.shields.io/node/v/mime-db.svg +[node-url]: http://nodejs.org/download/ diff --git a/node_modules/fsevents/node_modules/mime-db/db.json b/node_modules/fsevents/node_modules/mime-db/db.json new file mode 100644 index 0000000..0a5a8a7 --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/db.json @@ -0,0 +1,6627 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana" + }, + "application/3gpp-ims+xml": { + "source": "iana" + }, + "application/a2l": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + "application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana" + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "extensions": ["atomsvc"] + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + "source": "iana" + }, + "application/bacnet-xdd+zip": { + "source": "iana" + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana" + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana" + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/cbor": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana" + }, + "application/ccxml+xml": { + "source": "iana", + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": 
"iana" + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana" + }, + "application/cellml+xml": { + "source": "iana" + }, + "application/cfw": { + "source": "iana" + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana" + }, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana" + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana" + }, + "application/cstadata+xml": { + "source": "iana" + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", + "extensions": ["cu"] + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "extensions": ["mpd"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana" + }, + "application/dicom": { + "source": "iana" + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "extensions": ["dbk"] + }, + "application/dskpp+xml": { + "source": "iana" + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["ecma"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/emergencycalldata.comment+xml": { + "source": "iana" + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana" + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana" + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana" + }, + "application/emma+xml": { + "source": "iana", + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana" + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana" + }, + "application/epub+zip": { + "source": "iana", + "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", 
+ "extensions": ["exi"] + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana" + }, + "application/fits": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false, + "extensions": ["woff"] + }, + "application/font-woff2": { + "compressible": false, + "extensions": ["woff2"] + }, + "application/framework-attributes+xml": { + "source": "iana" + }, + "application/gml+xml": { + "source": "apache", + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana" + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana" + }, + "application/ibe-pkg-reply+xml": { + "source": "iana" + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana" + }, + "application/index": { + "source": "iana" + }, + "application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + }, + "application/its+xml": { + "source": "iana" + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + "application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js"] + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana" + }, + "application/kpml-response+xml": { + "source": "iana" + }, + "application/ld+json": { + "source": "iana", + "compressible": 
true, + "extensions": ["jsonld"] + }, + "application/link-format": { + "source": "iana" + }, + "application/load-control+xml": { + "source": "iana" + }, + "application/lost+xml": { + "source": "iana", + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana" + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "extensions": ["mads"] + }, + "application/manifest+json": { + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "extensions": ["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana" + }, + "application/mathml-presentation+xml": { + "source": "iana" + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana" + }, + "application/mbms-deregister+xml": { + "source": "iana" + }, + "application/mbms-envelope+xml": { + "source": "iana" + }, + "application/mbms-msk+xml": { + "source": "iana" + }, + "application/mbms-msk-response+xml": { + "source": "iana" + }, + "application/mbms-protection-description+xml": { + "source": "iana" + }, + "application/mbms-reception-report+xml": { + "source": "iana" + }, + "application/mbms-register+xml": { + "source": "iana" + }, + "application/mbms-register-response+xml": { + "source": "iana" + }, + "application/mbms-schedule+xml": { + "source": "iana" + }, + "application/mbms-user-service-description+xml": { + "source": "iana" + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana" + }, + "application/media_control+xml": { + "source": "iana" + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mods+xml": { + "source": "iana", + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + "source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana" + }, + "application/mrb-publish+xml": { + "source": "iana" + }, + "application/msc-ivr+xml": { + "source": "iana" + }, + "application/msc-mixer+xml": { + "source": "iana" + }, + 
"application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana" + }, + "application/news-groupinfo": { + "source": "iana" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana" + }, + "application/nss": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p2p-overlay+xml": { + "source": "iana" + }, + "application/parityfec": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana" + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": { + "source": "iana" + }, + "application/pidf-diff+xml": { + "source": "iana" + }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": "iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana" + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana" + }, + "application/provenance+xml": { + "source": "iana" + }, + "application/prs.alvestrand.titrax-sheet": { + 
"source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.hpub+zip": { + "source": "iana" + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana" + }, + "application/pskc+xml": { + "source": "iana", + "extensions": ["pskcxml"] + }, + "application/qsig": { + "source": "iana" + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf"] + }, + "application/reginfo+xml": { + "source": "iana", + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana" + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana" + }, + "application/rls-services+xml": { + "source": "iana", + "extensions": ["rs"] + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + "application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana" + }, + "application/samlmetadata+xml": { + "source": "iana" + }, + "application/sbml+xml": { + "source": "iana", + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana" + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/sep+xml": { + "source": "iana" + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "extensions": ["shf"] + }, + "application/sieve": 
{ + "source": "iana" + }, + "application/simple-filter+xml": { + "source": "iana" + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "extensions": ["smi","smil"] + }, + "application/smpte336m": { + "source": "iana" + }, + "application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "extensions": ["srx"] + }, + "application/spirits-event+xml": { + "source": "iana" + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "extensions": ["ssml"] + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "extensions": ["tei","teicorpus"] + }, + "application/thraud+xml": { + "source": "iana", + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/ttml+xml": { + "source": "iana" + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana" + }, + "application/urc-ressheet+xml": { + "source": "iana" + }, + "application/urc-targetdesc+xml": { + "source": "iana" + }, + "application/urc-uisocketdesc+xml": { + "source": "iana" + }, + "application/vcard+json": { + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana" + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana" + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana" + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana" + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana" + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + 
"application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.sms": { + "source": "iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana" + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana" + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana" + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana" + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana" + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana" + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, + "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana" + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": "iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "extensions": ["mpkg"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": 
["pkpass"] + }, + "application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + "application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avistar+xml": { + "source": "iana" + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana" + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "extensions": ["cdxml"] + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana" + }, + "application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + 
"source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "extensions": ["wbs"] + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana" + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana" + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + }, + "application/vnd.data-vision.rdz": { + "source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + "application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] + }, + "application/vnd.desmume-movie": { + "source": "iana" + }, + "application/vnd.desmume.movie": { + "source": "apache" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana" + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + 
"source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana" + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana" + }, + "application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + "application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + "application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + "application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana" + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana" + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana" + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana" + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana" + }, + "application/vnd.etsi.cug+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana" + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana" + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana" + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana" + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana" + }, + "application/vnd.etsi.sci+xml": { + "source": "iana" + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana" + }, + 
"application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana" + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + "application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + "source": "iana" + }, + "application/vnd.geogebra.file": { + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + 
"application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana" + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana" + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana" + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, + "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + "application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", + "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + 
"extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana" + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana" + }, + "application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + "application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana" + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana" + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": 
"iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + "extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": "iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las.las+xml": { + "source": "iana", + "extensions": ["lasxml"] + }, + "application/vnd.liberty-request+xml": { + "source": "iana" + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + "application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "extensions": ["lbe"] + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana" + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana" + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana" + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana" + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + 
"source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + "application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.miele+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + "application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": 
{ + "source": "iana" + }, + "application/vnd.ms-officetheme": { + "source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + "extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana" + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + "extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana" + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache" + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana" + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": "iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + "application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", 
+ "extensions": ["nlu"] + }, + "application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + "extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana" + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana" + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana" + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana" + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana" + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] + }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana" + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + "application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + "application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": 
"iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + "application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana" + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana" + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana" + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana" + }, + "application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana" + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana" + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana" + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana" + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana" + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + "application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana" + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana" + }, + "application/vnd.oma.cab-feature-handler+xml": { + "source": "iana" + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana" + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana" + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana" + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + "application/vnd.oma.dd2+xml": { + "source": "iana", + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana" + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana" + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana" + }, + 
"application/vnd.oma.poc.final-report+xml": { + "source": "iana" + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana" + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana" + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana" + }, + "application/vnd.oma.push": { + "source": "iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana" + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana" + }, + "application/vnd.omads-email+xml": { + "source": "iana" + }, + "application/vnd.omads-file+xml": { + "source": "iana" + }, + "application/vnd.omads-folder+xml": { + "source": "iana" + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana" + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml-template": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": 
"iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "apache", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml-template": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "apache", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana" + 
}, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml-template": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "apache", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana" + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana" + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana" + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos+xml": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "apache" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + 
"extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": "iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana" + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + "application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana" + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana" + }, + "application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana" + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + "application/vnd.recordare.musicxml+xml": { + "source": "iana", + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + "application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + 
"application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana" + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + "source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + 
"application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana" + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": "apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.symbian.install": { + "source": "apache", + "extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana" + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana" + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + 
"application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": "iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + "source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "extensions": ["wbxml"] + }, + "application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana" + }, + "application/vnd.wv.ssp+xml": { + "source": "iana" + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana" + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + 
"application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + "application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, + "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "extensions": ["vxml"] + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + "application/watcherinfo+xml": { + "source": "iana" + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": "apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + 
"source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": "apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] + }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-otf": { + "source": "apache", + "compressible": true, + "extensions": ["otf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-ttf": { + "source": "apache", + "compressible": true, + "extensions": ["ttf","ttc"] + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + "source": 
"apache", + "extensions": ["iso"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + "application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": "apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": "nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + "compressible": 
true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", + "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": "apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "apache", + "extensions": ["der","crt","pem"] + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana" + }, + "application/xaml+xml": { + "source": "apache", + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana" + }, + "application/xcap-caps+xml": { + "source": "iana" + }, + "application/xcap-diff+xml": { + "source": "iana", + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana" + }, + "application/xcap-error+xml": { + "source": "iana" + }, + "application/xcap-ns+xml": { + "source": "iana" + }, + "application/xcon-conference-info+xml": { + "source": "iana" + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana" + }, + "application/xenc+xml": { + "source": "iana", + "extensions": ["xenc"] + }, + "application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache" + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + 
"application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana" + }, + "application/xmpp+xml": { + "source": "iana" + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "extensions": ["xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yin+xml": { + "source": "iana", + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana" + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, + "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": "iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + }, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": "iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + 
"source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana" + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + "audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + "audio/qcelp": { + "source": "iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + }, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { 
+ "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + "extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", + "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + "audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/opentype": { + "compressible": true, + "extensions": ["otf"] + }, + "image/bmp": { + "source": "apache", + 
"compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/fits": { + "source": "iana" + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] + }, + "image/jp2": { + "source": "iana" + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jpm": { + "source": "iana" + }, + "image/jpx": { + "source": "iana" + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana" + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana" + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tiff","tif"] + }, + "image/tiff-fx": { + "source": "iana" + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana" + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": "iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": "iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana" + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + "extensions": ["npx"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana" + }, + "image/vnd.valve.source.texture": { + "source": "iana" + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana" + }, + "image/webp": { + "source": "apache", + "extensions": ["webp"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": 
["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": { + "source": "iana" + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana" + }, + "message/global-delivery-status": { + "source": "iana" + }, + "message/global-disposition-notification": { + "source": "iana" + }, + "message/global-headers": { + "source": "iana" + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" + }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana" + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/vnd.collada+xml": { + "source": "iana", + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana" + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana" + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana" + }, + 
"model/vnd.parasolid.transmit.text": { + "source": "iana" + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana" + }, + "model/vnd.vtu": { + "source": "iana", + "extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana" + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + "extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana" + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + "multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana", + "compressible": false + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/css": { + "source": "iana", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + "text/enriched": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/hjson": { + "extensions": ["hjson"] + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana" + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": 
"iana" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + "extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + "text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + "text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + "extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + 
"source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["markdown","md","mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "apache" + }, + "video/3gpp": { + "source": "apache", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "apache" + }, + "video/3gpp2": { + "source": "apache", + "extensions": ["3g2"] + }, + "video/bmpeg": { + "source": "apache" + }, + "video/bt656": { + "source": "apache" + }, + "video/celb": { + "source": "apache" + }, + "video/dv": { + "source": "apache" + }, + "video/encaprtp": { + "source": "apache" + }, + "video/h261": { + "source": "apache", + "extensions": ["h261"] + }, + "video/h263": { + "source": "apache", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "apache" + }, + "video/h263-2000": { + "source": "apache" + }, + "video/h264": { + "source": "apache", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "apache" + }, + "video/h264-svc": { + "source": "apache" + }, + "video/h265": { + "source": "apache" + }, + "video/iso.segment": { + "source": "apache" + }, + "video/jpeg": { + "source": "apache", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "apache" + }, + "video/jpm": { + "source": "apache", + "extensions": ["jpm","jpgm"] + }, + "video/mj2": { + "source": "apache", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "apache" + }, + "video/mp2p": { + "source": "apache" + }, + "video/mp2t": { + "source": "apache", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "apache", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "apache" + }, + "video/mpeg": { + "source": "apache", + "compressible": false, + "extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "apache" + }, + "video/mpv": { + "source": "apache" + }, + "video/nv": { + "source": "apache" + }, + "video/ogg": { + "source": "apache", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "apache" + }, + "video/pointer": { + "source": "apache" 
+ }, + "video/quicktime": { + "source": "apache", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "apache" + }, + "video/raw": { + "source": "apache" + }, + "video/rtp-enc-aescm128": { + "source": "apache" + }, + "video/rtploopback": { + "source": "apache" + }, + "video/rtx": { + "source": "apache" + }, + "video/smpte292m": { + "source": "apache" + }, + "video/ulpfec": { + "source": "apache" + }, + "video/vc1": { + "source": "apache" + }, + "video/vnd.cctv": { + "source": "apache" + }, + "video/vnd.dece.hd": { + "source": "apache", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "apache", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "apache" + }, + "video/vnd.dece.pd": { + "source": "apache", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "apache", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "apache", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "apache" + }, + "video/vnd.directv.mpeg-tts": { + "source": "apache" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "apache" + }, + "video/vnd.dvb.file": { + "source": "apache", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "apache", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "apache" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "apache" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "apache" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "apache" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "apache" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "apache" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "apache" + }, + "video/vnd.motorola.video": { + "source": "apache" + }, + "video/vnd.motorola.videop": { + "source": "apache" + }, + "video/vnd.mpegurl": { + "source": "apache", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + "source": "apache", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + "source": "apache" + }, + "video/vnd.nokia.videovoip": { + "source": "apache" + }, + "video/vnd.objectvideo": { + "source": "apache" + }, + "video/vnd.radgamettools.bink": { + "source": "apache" + }, + "video/vnd.radgamettools.smacker": { + "source": "apache" + }, + "video/vnd.sealed.mpeg1": { + "source": "apache" + }, + "video/vnd.sealed.mpeg4": { + "source": "apache" + }, + "video/vnd.sealed.swf": { + "source": "apache" + }, + "video/vnd.sealedmedia.softseal.mov": { + "source": "apache" + }, + "video/vnd.uvvu.mp4": { + "source": "apache", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "apache", + "extensions": ["viv"] + }, + "video/vp8": { + "source": "apache" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + "video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] 
+ }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/fsevents/node_modules/mime-db/index.js b/node_modules/fsevents/node_modules/mime-db/index.js new file mode 100644 index 0000000..551031f --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/index.js @@ -0,0 +1,11 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = require('./db.json') diff --git a/node_modules/fsevents/node_modules/mime-db/package.json b/node_modules/fsevents/node_modules/mime-db/package.json new file mode 100644 index 0000000..193655a --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-db/package.json @@ -0,0 +1,125 @@ +{ + "_args": [ + [ + "mime-db@~1.23.0", + "/Users/eshanker/Code/fsevents/node_modules/mime-types" + ] + ], + "_from": "mime-db@>=1.23.0 <1.24.0", + "_id": "mime-db@1.23.0", + "_inCache": true, + "_installable": true, + "_location": "/mime-db", + "_nodeVersion": "4.4.3", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/mime-db-1.23.0.tgz_1462163798086_0.43938886746764183" + }, + "_npmUser": { + "email": "doug@somethingdoug.com", + "name": "dougwilson" + }, + "_npmVersion": "2.15.1", + "_phantomChildren": {}, + "_requested": { + "name": "mime-db", + "raw": "mime-db@~1.23.0", + "rawSpec": "~1.23.0", + "scope": null, + "spec": ">=1.23.0 <1.24.0", + "type": "range" + }, + "_requiredBy": [ + "/mime-types" + ], + "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.23.0.tgz", + "_shasum": "a31b4070adaea27d732ea333740a64d0ec9a6659", + "_shrinkwrap": null, + "_spec": "mime-db@~1.23.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/mime-types", + "bugs": { + "url": "https://github.com/jshttp/mime-db/issues" + }, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + }, + { + "name": "Robert Kieffer", + "email": "robert@broofa.com", + "url": "http://github.com/broofa" + } + ], + "dependencies": {}, + "description": "Media Type Database", + "devDependencies": { + "bluebird": "3.3.5", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "1.1.0", + "gnode": "0.1.2", + "istanbul": "0.4.3", + "mocha": "1.21.5", + "raw-body": "2.1.6", + "stream-to-array": "2.3.0" + }, + "directories": {}, + "dist": { + "shasum": "a31b4070adaea27d732ea333740a64d0ec9a6659", + "tarball": "https://registry.npmjs.org/mime-db/-/mime-db-1.23.0.tgz" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "gitHead": "ba0d99fd05b3bfdc2ebcd78f858c25cb7db6af41", + "homepage": "https://github.com/jshttp/mime-db#readme", + "keywords": [ + "charset", + "charsets", + "database", + 
"db", + "mime", + "type", + "types" + ], + "license": "MIT", + "maintainers": [ + { + "name": "dougwilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "jongleberry", + "email": "jonathanrichardong@gmail.com" + } + ], + "name": "mime-db", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/mime-db.git" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "gnode scripts/fetch-apache && gnode scripts/fetch-iana && gnode scripts/fetch-nginx", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "update": "npm run fetch && npm run build" + }, + "version": "1.23.0" +} diff --git a/node_modules/fsevents/node_modules/mime-types/HISTORY.md b/node_modules/fsevents/node_modules/mime-types/HISTORY.md new file mode 100644 index 0000000..63bd4ea --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-types/HISTORY.md @@ -0,0 +1,197 @@ +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. 
`'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Add additional compressible + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for `application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/fsevents/node_modules/mime-types/LICENSE b/node_modules/fsevents/node_modules/mime-types/LICENSE new file mode 100644 index 0000000..0616607 --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/mime-types/README.md b/node_modules/fsevents/node_modules/mime-types/README.md new file mode 100644 index 0000000..e77d615 --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-types/README.md @@ -0,0 +1,103 @@ +# mime-types + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [node-mime](https://github.com/broofa/node-mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, `mime-types` simply returns `false`, + so do `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- Additional mime types are added such as jade and stylus via [mime-db](https://github.com/jshttp/mime-db) +- No `.define()` functionality + +Otherwise, the API is compatible. + +## Install + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://github.com/jshttp/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/x-markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/x-markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. 
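+As a quick illustration of these two maps (a minimal sketch; the actual arrays come from `mime-db`, so treat the results as examples):
+
+```js
+var mime = require('mime-types')
+
+// extension -> content-type
+mime.types['json'] // 'application/json'
+
+// content-type -> extensions (the first entry is what `.extension()` returns)
+mime.extensions['application/json'] // e.g. [ 'json', ... ]
+```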
+ +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/mime-types.svg +[npm-url]: https://npmjs.org/package/mime-types +[node-version-image]: https://img.shields.io/node/v/mime-types.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/mime-types/master.svg +[travis-url]: https://travis-ci.org/jshttp/mime-types +[coveralls-image]: https://img.shields.io/coveralls/jshttp/mime-types/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types +[downloads-image]: https://img.shields.io/npm/dm/mime-types.svg +[downloads-url]: https://npmjs.org/package/mime-types diff --git a/node_modules/fsevents/node_modules/mime-types/index.js b/node_modules/fsevents/node_modules/mime-types/index.js new file mode 100644 index 0000000..f7008b2 --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var extractTypeRegExp = /^\s*([^;\s]*)(?:;|\s|$)/ +var textTypeRegExp = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset(type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = extractTypeRegExp.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && textTypeRegExp.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType(str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension(type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = extractTypeRegExp.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup(path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' 
+ path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps(extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType(type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' + && from > to || (from === to && types[extension].substr(0, 12) === 'application/')) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/fsevents/node_modules/mime-types/package.json b/node_modules/fsevents/node_modules/mime-types/package.json new file mode 100644 index 0000000..e406b6c --- /dev/null +++ b/node_modules/fsevents/node_modules/mime-types/package.json @@ -0,0 +1,115 @@ +{ + "_args": [ + [ + "mime-types@~2.1.7", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "mime-types@>=2.1.7 <2.2.0", + "_id": "mime-types@2.1.11", + "_inCache": true, + "_installable": true, + "_location": "/mime-types", + "_nodeVersion": "4.4.3", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/mime-types-2.1.11.tgz_1462165365027_0.7217204745393246" + }, + "_npmUser": { + "email": "doug@somethingdoug.com", + "name": "dougwilson" + }, + "_npmVersion": "2.15.1", + "_phantomChildren": {}, + "_requested": { + "name": "mime-types", + "raw": "mime-types@~2.1.7", + "rawSpec": "~2.1.7", + "scope": null, + "spec": ">=2.1.7 <2.2.0", + "type": "range" + }, + "_requiredBy": [ + "/form-data", + "/request" + ], + "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.11.tgz", + "_shasum": "c259c471bda808a85d6cd193b430a5fae4473b3c", + "_shrinkwrap": null, + "_spec": "mime-types@~2.1.7", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "bugs": { + "url": "https://github.com/jshttp/mime-types/issues" + }, + "contributors": [ + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Jeremiah Senkpiel", + "email": "fishrock123@rocketmail.com", + "url": "https://searchbeam.jit.su" + }, + { + "name": "Jonathan Ong", + "email": "me@jongleberry.com", + "url": "http://jongleberry.com" + } + ], + "dependencies": { + "mime-db": "~1.23.0" + }, + "description": "The ultimate javascript content-type utility.", + "devDependencies": { + "istanbul": "0.4.3", + "mocha": "1.21.5" + }, + "directories": {}, + "dist": { + "shasum": "c259c471bda808a85d6cd193b430a5fae4473b3c", + "tarball": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.11.tgz" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "gitHead": "298ffcf490a5d6e60edea7bf7a69036df04846b1", + "homepage": "https://github.com/jshttp/mime-types#readme", + "keywords": [ + "mime", + "types" + ], + "license": "MIT", + "maintainers": [ + { + "name": "dougwilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "fishrock123", + "email": "fishrock123@rocketmail.com" + }, + { + 
"name": "jongleberry", + "email": "jonathanrichardong@gmail.com" + } + ], + "name": "mime-types", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/jshttp/mime-types.git" + }, + "scripts": { + "test": "mocha --reporter spec test/test.js", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/test.js", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot test/test.js" + }, + "version": "2.1.11" +} diff --git a/node_modules/fsevents/node_modules/minimatch/LICENSE b/node_modules/fsevents/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/minimatch/README.md b/node_modules/fsevents/node_modules/minimatch/README.md new file mode 100644 index 0000000..ad72b81 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimatch/README.md @@ -0,0 +1,209 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. 
This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. 
Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/fsevents/node_modules/minimatch/minimatch.js b/node_modules/fsevents/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..830a272 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimatch/minimatch.js @@ -0,0 +1,924 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = require('path') +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var plType + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. 
+ if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + plType = stateChar + patternListStack.push({ + type: plType, + start: i - 1, + reStart: re.length + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + re += ')' + var pl = patternListStack.pop() + plType = pl.type + // negation is (?:(?!js)[^/]*) + // The others are (?:) + switch (plType) { + case '!': + negativeLists.push(pl) + re += ')[^/]*?)' + pl.reEnd = re.length + break + case '?': + case '+': + case '*': + re += plType + break + case '@': break // the default anyway + } + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + 3) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. 
+ var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. 
+ f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' 
|| + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. 
+ throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/fsevents/node_modules/minimatch/package.json b/node_modules/fsevents/node_modules/minimatch/package.json new file mode 100644 index 0000000..12bf9ad --- /dev/null +++ b/node_modules/fsevents/node_modules/minimatch/package.json @@ -0,0 +1,90 @@ +{ + "_args": [ + [ + "minimatch@^3.0.2", + "/Users/eshanker/Code/fsevents/node_modules/glob" + ] + ], + "_from": "minimatch@>=3.0.2 <4.0.0", + "_id": "minimatch@3.0.2", + "_inCache": true, + "_installable": true, + "_location": "/minimatch", + "_nodeVersion": "4.4.4", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/minimatch-3.0.2.tgz_1466194379770_0.11417287751100957" + }, + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "3.9.1", + "_phantomChildren": {}, + "_requested": { + "name": "minimatch", + "raw": "minimatch@^3.0.2", + "rawSpec": "^3.0.2", + "scope": null, + "spec": ">=3.0.2 <4.0.0", + "type": "range" + }, + "_requiredBy": [ + "/fstream-ignore", + "/glob" + ], + "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz", + "_shasum": "0f398a7300ea441e9c348c83d98ab8c9dbf9c40a", + "_shrinkwrap": null, + "_spec": "minimatch@^3.0.2", + "_where": "/Users/eshanker/Code/fsevents/node_modules/glob", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me" + }, + "bugs": { + "url": "https://github.com/isaacs/minimatch/issues" + }, + "dependencies": { + "brace-expansion": "^1.0.0" + }, + "description": "a glob matcher in javascript", + "devDependencies": { + "standard": "^3.7.2", + "tap": "^5.6.0" + }, + "directories": {}, + "dist": { + "shasum": "0f398a7300ea441e9c348c83d98ab8c9dbf9c40a", + "tarball": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.2.tgz" + }, + "engines": { + "node": "*" + }, + "files": [ + "minimatch.js" + ], + "gitHead": "81edb7c763abd31ba981c87ec5e835f178786be0", + "homepage": "https://github.com/isaacs/minimatch#readme", + "license": "ISC", + "main": "minimatch.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "minimatch", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "scripts": { + "posttest": "standard minimatch.js test/*.js", + "test": "tap test/*.js" + }, + "version": "3.0.2" +} diff --git a/node_modules/fsevents/node_modules/minimist/.travis.yml b/node_modules/fsevents/node_modules/minimist/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/fsevents/node_modules/minimist/LICENSE b/node_modules/fsevents/node_modules/minimist/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell 
copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/minimist/example/parse.js b/node_modules/fsevents/node_modules/minimist/example/parse.js new file mode 100644 index 0000000..abff3e8 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/fsevents/node_modules/minimist/index.js b/node_modules/fsevents/node_modules/minimist/index.js new file mode 100644 index 0000000..584f551 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/index.js @@ -0,0 +1,187 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {} }; + + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + }); + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function setArg (key, val) { + var value = !flags.strings[key] && isNumber(val) + ? Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + setArg(m[1], m[2]); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && (aliases[key] ? !flags.bools[aliases[key]] : true)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true'); + i++; + } + else { + setArg(key, flags.strings[key] ? 
'' : true); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2)); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !flags.bools[aliases[key]] : true)) { + setArg(key, args[i+1]); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true'); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true); + } + } + } + else { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? arg : Number(arg) + ); + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + notFlags.forEach(function(key) { + argv._.push(key); + }); + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + +function longest (xs) { + return Math.max.apply(null, xs.map(function (x) { return x.length })); +} diff --git a/node_modules/fsevents/node_modules/minimist/package.json b/node_modules/fsevents/node_modules/minimist/package.json new file mode 100644 index 0000000..20232c4 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/package.json @@ -0,0 +1,93 @@ +{ + "_args": [ + [ + "minimist@0.0.8", + "/Users/eshanker/Code/fsevents/node_modules/mkdirp" + ] + ], + "_from": "minimist@0.0.8", + "_id": "minimist@0.0.8", + "_inCache": true, + "_installable": true, + "_location": "/minimist", + "_npmUser": { + "email": "mail@substack.net", + "name": "substack" + }, + "_npmVersion": "1.4.3", + "_phantomChildren": {}, + "_requested": { + "name": "minimist", + "raw": "minimist@0.0.8", + "rawSpec": "0.0.8", + "scope": null, + "spec": "0.0.8", + "type": "version" + }, + "_requiredBy": [ + "/mkdirp" + ], + "_resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "_shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d", + "_shrinkwrap": null, + "_spec": "minimist@0.0.8", + "_where": "/Users/eshanker/Code/fsevents/node_modules/mkdirp", + "author": { + "email": "mail@substack.net", + "name": "James Halliday", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/substack/minimist/issues" + }, + "dependencies": {}, + 
"description": "parse argument options", + "devDependencies": { + "tap": "~0.4.0", + "tape": "~1.0.4" + }, + "directories": {}, + "dist": { + "shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d", + "tarball": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz" + }, + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "optimist", + "parser" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "substack", + "email": "mail@substack.net" + } + ], + "name": "minimist", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "testling": { + "browsers": [ + "chrome/10", + "chrome/latest", + "ff/5", + "firefox/latest", + "ie/6..latest", + "opera/12", + "safari/5.1", + "safari/latest" + ], + "files": "test/*.js" + }, + "version": "0.0.8" +} diff --git a/node_modules/fsevents/node_modules/minimist/readme.markdown b/node_modules/fsevents/node_modules/minimist/readme.markdown new file mode 100644 index 0000000..c256353 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/readme.markdown @@ -0,0 +1,73 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. 
+ +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a string or array of strings to always treat as booleans +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/fsevents/node_modules/minimist/test/dash.js b/node_modules/fsevents/node_modules/minimist/test/dash.js new file mode 100644 index 0000000..8b034b9 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/dash.js @@ -0,0 +1,24 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/default_bool.js b/node_modules/fsevents/node_modules/minimist/test/default_bool.js new file mode 100644 index 0000000..f0041ee --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/default_bool.js @@ -0,0 +1,20 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/dotted.js b/node_modules/fsevents/node_modules/minimist/test/dotted.js new file mode 100644 index 0000000..ef0ae34 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/dotted.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/long.js b/node_modules/fsevents/node_modules/minimist/test/long.js new file mode 100644 index 0000000..5d3a1e0 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', 
'--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/parse.js b/node_modules/fsevents/node_modules/minimist/test/parse.js new file mode 100644 index 0000000..8a90646 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/parse.js @@ -0,0 +1,318 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + 
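+    // '56' would otherwise be coerced to the number 56; listing 'x' in opts.string keeps it a string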
var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); + +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 
'false'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/parse_modified.js b/node_modules/fsevents/node_modules/minimist/test/parse_modified.js new file mode 100644 index 0000000..21851b0 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: ['123'] }); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/short.js b/node_modules/fsevents/node_modules/minimist/test/short.js new file mode 100644 index 0000000..d513a1c --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/minimist/test/whitespace.js b/node_modules/fsevents/node_modules/minimist/test/whitespace.js new file mode 100644 index 0000000..8a52a58 --- /dev/null +++ b/node_modules/fsevents/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/.travis.yml b/node_modules/fsevents/node_modules/mkdirp/.travis.yml new file mode 100644 index 0000000..74c57bf --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/fsevents/node_modules/mkdirp/LICENSE b/node_modules/fsevents/node_modules/mkdirp/LICENSE new file mode 100644 index 0000000..432d1ae --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright 2010 James Halliday (mail@substack.net) + +This project is free software released under the MIT/X11 license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the 
"Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/mkdirp/bin/cmd.js b/node_modules/fsevents/node_modules/mkdirp/bin/cmd.js new file mode 100755 index 0000000..d95de15 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/bin/cmd.js @@ -0,0 +1,33 @@ +#!/usr/bin/env node + +var mkdirp = require('../'); +var minimist = require('minimist'); +var fs = require('fs'); + +var argv = minimist(process.argv.slice(2), { + alias: { m: 'mode', h: 'help' }, + string: [ 'mode' ] +}); +if (argv.help) { + fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout); + return; +} + +var paths = argv._.slice(); +var mode = argv.mode ? parseInt(argv.mode, 8) : undefined; + +(function next () { + if (paths.length === 0) return; + var p = paths.shift(); + + if (mode === undefined) mkdirp(p, cb) + else mkdirp(p, mode, cb) + + function cb (err) { + if (err) { + console.error(err.message); + process.exit(1); + } + else next(); + } +})(); diff --git a/node_modules/fsevents/node_modules/mkdirp/bin/usage.txt b/node_modules/fsevents/node_modules/mkdirp/bin/usage.txt new file mode 100644 index 0000000..f952aa2 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/bin/usage.txt @@ -0,0 +1,12 @@ +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. 
+ diff --git a/node_modules/fsevents/node_modules/mkdirp/examples/pow.js b/node_modules/fsevents/node_modules/mkdirp/examples/pow.js new file mode 100644 index 0000000..e692421 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/examples/pow.js @@ -0,0 +1,6 @@ +var mkdirp = require('mkdirp'); + +mkdirp('/tmp/foo/bar/baz', function (err) { + if (err) console.error(err) + else console.log('pow!') +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/index.js b/node_modules/fsevents/node_modules/mkdirp/index.js new file mode 100644 index 0000000..6ce241b --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/index.js @@ -0,0 +1,98 @@ +var path = require('path'); +var fs = require('fs'); +var _0777 = parseInt('0777', 8); + +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + var cb = f || function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + mkdirP(path.dirname(p), opts, function (er, made) { + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. 
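+      // (e.g. an EEXIST lands here: if the existing path is already a directory, treat it as success)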
+ default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) { + throw err0; + } + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; diff --git a/node_modules/fsevents/node_modules/mkdirp/package.json b/node_modules/fsevents/node_modules/mkdirp/package.json new file mode 100644 index 0000000..3602532 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/package.json @@ -0,0 +1,87 @@ +{ + "_args": [ + [ + "mkdirp@~0.5.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "mkdirp@>=0.5.0 <0.6.0", + "_id": "mkdirp@0.5.1", + "_inCache": true, + "_installable": true, + "_location": "/mkdirp", + "_nodeVersion": "2.0.0", + "_npmUser": { + "email": "substack@gmail.com", + "name": "substack" + }, + "_npmVersion": "2.9.0", + "_phantomChildren": {}, + "_requested": { + "name": "mkdirp", + "raw": "mkdirp@~0.5.0", + "rawSpec": "~0.5.0", + "scope": null, + "spec": ">=0.5.0 <0.6.0", + "type": "range" + }, + "_requiredBy": [ + "/fstream", + "/node-pre-gyp", + "/tap" + ], + "_resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "_shasum": "30057438eac6cf7f8c4767f38648d6697d75c903", + "_shrinkwrap": null, + "_spec": "mkdirp@~0.5.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "mail@substack.net", + "name": "James Halliday", + "url": "http://substack.net" + }, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "bugs": { + "url": "https://github.com/substack/node-mkdirp/issues" + }, + "dependencies": { + "minimist": "0.0.8" + }, + "description": "Recursively mkdir, like `mkdir -p`", + "devDependencies": { + "mock-fs": "2 >=2.7.0", + "tap": "1" + }, + "directories": {}, + "dist": { + "shasum": "30057438eac6cf7f8c4767f38648d6697d75c903", + "tarball": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz" + }, + "gitHead": "d4eff0f06093aed4f387e88e9fc301cb76beedc7", + "homepage": "https://github.com/substack/node-mkdirp#readme", + "keywords": [ + "directory", + "mkdir" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "substack", + "email": "mail@substack.net" + } + ], + "name": "mkdirp", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/substack/node-mkdirp.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "0.5.1" +} diff --git a/node_modules/fsevents/node_modules/mkdirp/readme.markdown b/node_modules/fsevents/node_modules/mkdirp/readme.markdown new file mode 100644 index 0000000..3cc1315 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/readme.markdown @@ -0,0 +1,100 @@ +# mkdirp + +Like `mkdir -p`, but in node.js! + +[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp) + +# example + +## pow.js + +```js +var mkdirp = require('mkdirp'); + +mkdirp('/tmp/foo/bar/baz', function (err) { + if (err) console.error(err) + else console.log('pow!') +}); +``` + +Output + +``` +pow! +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +var mkdirp = require('mkdirp'); +``` + +## mkdirp(dir, opts, cb) + +Create a new directory and any necessary subdirectories at `dir` with octal +permission string `opts.mode`. If `opts` is a non-object, it will be treated as +the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`. + +`cb(err, made)` fires with the error or the first directory `made` +that had to be created, if any. 
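+
+A minimal sketch of a call that passes an explicit octal mode and reads the
+`made` argument (the path and mode below are arbitrary example values):
+
+```js
+var mkdirp = require('mkdirp');
+
+// example path and mode
+mkdirp('/tmp/some/deep/path', { mode: parseInt('0755', 8) }, function (err, made) {
+    if (err) console.error(err)
+    else console.log('first new directory: ' + made)
+});
+```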
+ +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdir(path, mode, cb)` and +`opts.fs.stat(path, cb)`. + +## mkdirp.sync(dir, opts) + +Synchronously create a new directory and any necessary subdirectories at `dir` +with octal permission string `opts.mode`. If `opts` is a non-object, it will be +treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777 & (~process.umask())`. + +Returns the first directory that had to be created, if any. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)` and +`opts.fs.statSync(path)`. + +# usage + +This package also ships with a `mkdirp` command. + +``` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. + +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library, or + +``` +npm install -g mkdirp +``` + +to get the command. + +# license + +MIT diff --git a/node_modules/fsevents/node_modules/mkdirp/test/chmod.js b/node_modules/fsevents/node_modules/mkdirp/test/chmod.js new file mode 100644 index 0000000..6a404b9 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/chmod.js @@ -0,0 +1,41 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); +var _0744 = parseInt('0744', 8); + +var ps = [ '', 'tmp' ]; + +for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); +} + +var file = ps.join('/'); + +test('chmod-pre', function (t) { + var mode = _0744 + mkdirp(file, mode, function (er) { + t.ifError(er, 'should not error'); + fs.stat(file, function (er, stat) { + t.ifError(er, 'should exist'); + t.ok(stat && stat.isDirectory(), 'should be directory'); + t.equal(stat && stat.mode & _0777, mode, 'should be 0744'); + t.end(); + }); + }); +}); + +test('chmod', function (t) { + var mode = _0755 + mkdirp(file, mode, function (er) { + t.ifError(er, 'should not error'); + fs.stat(file, function (er, stat) { + t.ifError(er, 'should exist'); + t.ok(stat && stat.isDirectory(), 'should be directory'); + t.end(); + }); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/clobber.js b/node_modules/fsevents/node_modules/mkdirp/test/clobber.js new file mode 100644 index 0000000..2433b9a --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/clobber.js @@ -0,0 +1,38 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0755 = parseInt('0755', 8); + +var ps = [ '', 'tmp' ]; + +for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); +} + +var file = ps.join('/'); + +// a file in the way +var itw = ps.slice(0, 3).join('/'); + + +test('clobber-pre', function (t) { + console.error("about to write to "+itw) + fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.'); + + fs.stat(itw, function (er, stat) { + t.ifError(er) + t.ok(stat && stat.isFile(), 'should be file') + t.end() + }) +}) + +test('clobber', 
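+// a regular file written by clobber-pre sits in the path, so this call is expected to fail with ENOTDIR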
function (t) { + t.plan(2); + mkdirp(file, _0755, function (err) { + t.ok(err); + t.equal(err.code, 'ENOTDIR'); + t.end(); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/mkdirp.js b/node_modules/fsevents/node_modules/mkdirp/test/mkdirp.js new file mode 100644 index 0000000..eaa8921 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/mkdirp.js @@ -0,0 +1,28 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('woo', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/opts_fs.js b/node_modules/fsevents/node_modules/mkdirp/test/opts_fs.js new file mode 100644 index 0000000..97186b6 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/opts_fs.js @@ -0,0 +1,29 @@ +var mkdirp = require('../'); +var path = require('path'); +var test = require('tap').test; +var mockfs = require('mock-fs'); +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('opts.fs', function (t) { + t.plan(5); + + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/beep/boop/' + [x,y,z].join('/'); + var xfs = mockfs.fs(); + + mkdirp(file, { fs: xfs, mode: _0755 }, function (err) { + t.ifError(err); + xfs.exists(file, function (ex) { + t.ok(ex, 'created file'); + xfs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/opts_fs_sync.js b/node_modules/fsevents/node_modules/mkdirp/test/opts_fs_sync.js new file mode 100644 index 0000000..6c370aa --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/opts_fs_sync.js @@ -0,0 +1,27 @@ +var mkdirp = require('../'); +var path = require('path'); +var test = require('tap').test; +var mockfs = require('mock-fs'); +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('opts.fs sync', function (t) { + t.plan(4); + + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/beep/boop/' + [x,y,z].join('/'); + var xfs = mockfs.fs(); + + mkdirp.sync(file, { fs: xfs, mode: _0755 }); + xfs.exists(file, function (ex) { + t.ok(ex, 'created file'); + xfs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/perm.js b/node_modules/fsevents/node_modules/mkdirp/test/perm.js new file mode 100644 index 
0000000..fbce44b --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/perm.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('async perm', function (t) { + t.plan(5); + var file = '/tmp/' + (Math.random() * (1<<30)).toString(16); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); + +test('async root perm', function (t) { + mkdirp('/tmp', _0755, function (err) { + if (err) t.fail(err); + t.end(); + }); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/perm_sync.js b/node_modules/fsevents/node_modules/mkdirp/test/perm_sync.js new file mode 100644 index 0000000..398229f --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/perm_sync.js @@ -0,0 +1,36 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('sync perm', function (t) { + t.plan(4); + var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json'; + + mkdirp.sync(file, _0755); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); + +test('sync root perm', function (t) { + t.plan(3); + + var file = '/tmp'; + mkdirp.sync(file, _0755); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/race.js b/node_modules/fsevents/node_modules/mkdirp/test/race.js new file mode 100644 index 0000000..b0b9e18 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/race.js @@ -0,0 +1,37 @@ +var mkdirp = require('../').mkdirp; +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('race', function (t) { + t.plan(10); + var ps = [ '', 'tmp' ]; + + for (var i = 0; i < 25; i++) { + var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + ps.push(dir); + } + var file = ps.join('/'); + + var res = 2; + mk(file); + + mk(file); + + function mk (file, cb) { + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }) + }); + } +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/rel.js b/node_modules/fsevents/node_modules/mkdirp/test/rel.js new file mode 100644 index 0000000..4ddb342 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/rel.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = 
parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('rel', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var cwd = process.cwd(); + process.chdir('/tmp'); + + var file = [x,y,z].join('/'); + + mkdirp(file, _0755, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + process.chdir(cwd); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }) + }) + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/return.js b/node_modules/fsevents/node_modules/mkdirp/test/return.js new file mode 100644 index 0000000..bce68e5 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/return.js @@ -0,0 +1,25 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; + +test('return value', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + // should return the first dir created. + // By this point, it would be profoundly surprising if /tmp didn't + // already exist, since every other test makes things in there. + mkdirp(file, function (err, made) { + t.ifError(err); + t.equal(made, '/tmp/' + x); + mkdirp(file, function (err, made) { + t.ifError(err); + t.equal(made, null); + }); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/return_sync.js b/node_modules/fsevents/node_modules/mkdirp/test/return_sync.js new file mode 100644 index 0000000..7c222d3 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/return_sync.js @@ -0,0 +1,24 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; + +test('return value', function (t) { + t.plan(2); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + // should return the first dir created. + // By this point, it would be profoundly surprising if /tmp didn't + // already exist, since every other test makes things in there. + // Note that this will throw on failure, which will fail the test. + var made = mkdirp.sync(file); + t.equal(made, '/tmp/' + x); + + // making the same file again should have no effect. + made = mkdirp.sync(file); + t.equal(made, null); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/root.js b/node_modules/fsevents/node_modules/mkdirp/test/root.js new file mode 100644 index 0000000..9e7d079 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/root.js @@ -0,0 +1,19 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var test = require('tap').test; +var _0755 = parseInt('0755', 8); + +test('root', function (t) { + // '/' on unix, 'c:/' on windows. 
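+  // the root directory already exists, so mkdirp should call back without error and create nothing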
+ var file = path.resolve('/'); + + mkdirp(file, _0755, function (err) { + if (err) throw err + fs.stat(file, function (er, stat) { + if (er) throw er + t.ok(stat.isDirectory(), 'target is a directory'); + t.end(); + }) + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/sync.js b/node_modules/fsevents/node_modules/mkdirp/test/sync.js new file mode 100644 index 0000000..8c8dc93 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/sync.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('sync', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + try { + mkdirp.sync(file, _0755); + } catch (err) { + t.fail(err); + return t.end(); + } + + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0755); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/umask.js b/node_modules/fsevents/node_modules/mkdirp/test/umask.js new file mode 100644 index 0000000..2033c63 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/umask.js @@ -0,0 +1,28 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('implicit mode from umask', function (t) { + t.plan(5); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + mkdirp(file, function (err) { + t.ifError(err); + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, _0777 & (~process.umask())); + t.ok(stat.isDirectory(), 'target not a directory'); + }); + }) + }); +}); diff --git a/node_modules/fsevents/node_modules/mkdirp/test/umask_sync.js b/node_modules/fsevents/node_modules/mkdirp/test/umask_sync.js new file mode 100644 index 0000000..11a7614 --- /dev/null +++ b/node_modules/fsevents/node_modules/mkdirp/test/umask_sync.js @@ -0,0 +1,32 @@ +var mkdirp = require('../'); +var path = require('path'); +var fs = require('fs'); +var exists = fs.exists || path.exists; +var test = require('tap').test; +var _0777 = parseInt('0777', 8); +var _0755 = parseInt('0755', 8); + +test('umask sync modes', function (t) { + t.plan(4); + var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16); + + var file = '/tmp/' + [x,y,z].join('/'); + + try { + mkdirp.sync(file); + } catch (err) { + t.fail(err); + return t.end(); + } + + exists(file, function (ex) { + t.ok(ex, 'file created'); + fs.stat(file, function (err, stat) { + t.ifError(err); + t.equal(stat.mode & _0777, (_0777 & (~process.umask()))); + t.ok(stat.isDirectory(), 'target not a directory'); + }); 
+ }); +}); diff --git a/node_modules/fsevents/node_modules/ms/.npmignore b/node_modules/fsevents/node_modules/ms/.npmignore new file mode 100644 index 0000000..d1aa0ce --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/.npmignore @@ -0,0 +1,5 @@ +node_modules +test +History.md +Makefile +component.json diff --git a/node_modules/fsevents/node_modules/ms/History.md b/node_modules/fsevents/node_modules/ms/History.md new file mode 100644 index 0000000..32fdfc1 --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/History.md @@ -0,0 +1,66 @@ + +0.7.1 / 2015-04-20 +================== + + * prevent extraordinary long inputs (@evilpacket) + * Fixed broken readme link + +0.7.0 / 2014-11-24 +================== + + * add time abbreviations, updated tests and readme for the new units + * fix example in the readme. + * add LICENSE file + +0.6.2 / 2013-12-05 +================== + + * Adding repository section to package.json to suppress warning from NPM. + +0.6.1 / 2013-05-10 +================== + + * fix singularization [visionmedia] + +0.6.0 / 2013-03-15 +================== + + * fix minutes + +0.5.1 / 2013-02-24 +================== + + * add component namespace + +0.5.0 / 2012-11-09 +================== + + * add short formatting as default and .long option + * add .license property to component.json + * add version to component.json + +0.4.0 / 2012-10-22 +================== + + * add rounding to fix crazy decimals + +0.3.0 / 2012-09-07 +================== + + * fix `ms()` [visionmedia] + +0.2.0 / 2012-09-03 +================== + + * add component.json [visionmedia] + * add days support [visionmedia] + * add hours support [visionmedia] + * add minutes support [visionmedia] + * add seconds support [visionmedia] + * add ms string support [visionmedia] + * refactor tests to facilitate ms(number) [visionmedia] + +0.1.0 / 2012-03-07 +================== + + * Initial release diff --git a/node_modules/fsevents/node_modules/ms/LICENSE b/node_modules/fsevents/node_modules/ms/LICENSE new file mode 100644 index 0000000..6c07561 --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014 Guillermo Rauch + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/ms/README.md b/node_modules/fsevents/node_modules/ms/README.md new file mode 100644 index 0000000..9b4fd03 --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/README.md @@ -0,0 +1,35 @@ +# ms.js: miliseconds conversion utility + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('100') // 100 +``` + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +- Node/Browser compatible. Published as [`ms`](https://www.npmjs.org/package/ms) in [NPM](http://nodejs.org/download). +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as +a number (e.g: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of +equivalent ms is returned. + +## License + +MIT diff --git a/node_modules/fsevents/node_modules/ms/index.js b/node_modules/fsevents/node_modules/ms/index.js new file mode 100644 index 0000000..4f92771 --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/index.js @@ -0,0 +1,125 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} options + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options){ + options = options || {}; + if ('string' == typeof val) return parse(val); + return options.long + ? long(val) + : short(val); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = '' + str; + if (str.length > 10000) return; + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(str); + if (!match) return; + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function short(ms) { + if (ms >= d) return Math.round(ms / d) + 'd'; + if (ms >= h) return Math.round(ms / h) + 'h'; + if (ms >= m) return Math.round(ms / m) + 'm'; + if (ms >= s) return Math.round(ms / s) + 's'; + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function long(ms) { + return plural(ms, d, 'day') + || plural(ms, h, 'hour') + || plural(ms, m, 'minute') + || plural(ms, s, 'second') + || ms + ' ms'; +} + +/** + * Pluralization helper. 
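+ *
+ * e.g. plural(3600000, h, 'hour') => '1 hour', plural(7200000, h, 'hour') => '2 hours'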
+ */ + +function plural(ms, n, name) { + if (ms < n) return; + if (ms < n * 1.5) return Math.floor(ms / n) + ' ' + name; + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/node_modules/fsevents/node_modules/ms/package.json b/node_modules/fsevents/node_modules/ms/package.json new file mode 100644 index 0000000..42b9034 --- /dev/null +++ b/node_modules/fsevents/node_modules/ms/package.json @@ -0,0 +1,74 @@ +{ + "_args": [ + [ + "ms@0.7.1", + "/Users/eshanker/Code/fsevents/node_modules/debug" + ] + ], + "_from": "ms@0.7.1", + "_id": "ms@0.7.1", + "_inCache": true, + "_installable": true, + "_location": "/ms", + "_nodeVersion": "0.12.2", + "_npmUser": { + "email": "rauchg@gmail.com", + "name": "rauchg" + }, + "_npmVersion": "2.7.5", + "_phantomChildren": {}, + "_requested": { + "name": "ms", + "raw": "ms@0.7.1", + "rawSpec": "0.7.1", + "scope": null, + "spec": "0.7.1", + "type": "version" + }, + "_requiredBy": [ + "/debug" + ], + "_resolved": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz", + "_shasum": "9cd13c03adbff25b65effde7ce864ee952017098", + "_shrinkwrap": null, + "_spec": "ms@0.7.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/debug", + "bugs": { + "url": "https://github.com/guille/ms.js/issues" + }, + "component": { + "scripts": { + "ms/index.js": "index.js" + } + }, + "dependencies": {}, + "description": "Tiny ms conversion utility", + "devDependencies": { + "expect.js": "*", + "mocha": "*", + "serve": "*" + }, + "directories": {}, + "dist": { + "shasum": "9cd13c03adbff25b65effde7ce864ee952017098", + "tarball": "https://registry.npmjs.org/ms/-/ms-0.7.1.tgz" + }, + "gitHead": "713dcf26d9e6fd9dbc95affe7eff9783b7f1b909", + "homepage": "https://github.com/guille/ms.js", + "main": "./index", + "maintainers": [ + { + "name": "rauchg", + "email": "rauchg@gmail.com" + } + ], + "name": "ms", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/guille/ms.js.git" + }, + "scripts": {}, + "version": "0.7.1" +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/CHANGELOG.md b/node_modules/fsevents/node_modules/node-pre-gyp/CHANGELOG.md new file mode 100644 index 0000000..17e467c --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/CHANGELOG.md @@ -0,0 +1,323 @@ +# node-pre-gyp changelog + +## 0.6.29 + + - Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0 + +## 0.6.28 + + - Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default + and users need to know why builds are falling back to source compiles that might then error out. + +## 0.6.27 + + - Add known node version for node v6 + - Stopped bundling dependencies + - Documented method for module authors to avoid bundling node-pre-gyp + - See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details + +## 0.6.26 + + - Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg + +## 0.6.25 + + - Improved support for auto-detection of electron runtime in `node-pre-gyp.find()` + - Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187 + - Add known node version for 4.4.1 and 5.9.1 + +## 0.6.24 + + - Add known node version for 5.8.0, 5.9.0, and 4.4.0. + +## 0.6.23 + + - Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1. + +## 0.6.22 + + - Add known node version for 4.3.1, and 5.7.0. 
+ +## 0.6.21 + + - Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0. + +## 0.6.20 + + - Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0. + +## 0.6.19 + + - Add known node version for 4.2.4 + +## 0.6.18 + + - Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x + +## 0.6.17 + + - Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'` + +## 0.6.16 + + - Added known version in crosswalk for 5.1.0. + +## 0.6.15 + + - Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182) + - Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170) + - Added known version in crosswalk for 4.2.2. + +## 0.6.14 + + - Added node 5.x version + +## 0.6.13 + + - Added more known node 4.x versions + +## 0.6.12 + + - Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz + +## 0.6.11 + + - Added known node and io.js versions including more 3.x and 4.x versions + +## 0.6.10 + + - Added known node and io.js versions including 3.x and 4.x versions + - Upgraded `tar` dep + +## 0.6.9 + + - Upgraded `rc` dep + - Updated known io.js version: v2.4.0 + +## 0.6.8 + + - Upgraded `semver` and `rimraf` deps + - Updated known node and io.js versions + +## 0.6.7 + + - Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94) + +## 0.6.6 + + - Updated with known io.js 2.0.0 version + +## 0.6.5 + + - Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887) + - Updated to semver@4.3.2 + - Updated known node v0.12.x versions and io.js 1.x versions. + +## 0.6.4 + + - Improved support for `io.js` (@fengmk2) + - Test coverage improvements (@mikemorris) + - Fixed support for `--dist-url` that regressed in 0.6.3 + +## 0.6.3 + + - Added support for passing raw options to node-gyp using `--` separator. Flags passed after + the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed + after the `--` will be passed directly to make/visual studio. + - Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly + - Fix issue with require validation not working on windows 7 (@edgarsilva) + +## 0.6.2 + + - Support for io.js >= v1.0.2 + - Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`. + +## 0.6.1 + + - Fixed bundled `tar` version + +## 0.6.0 + + - BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124) + - Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`. + - Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module leaves it in place. + - Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped. 
+ - Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version` + +## 0.5.31 + + - Added support for deducing node_abi for node.js runtime from previous release if the series is even + - Added support for --target=0.10.33 + +## 0.5.30 + + - Repackaged with latest bundled deps + +## 0.5.29 + + - Added support for semver `build`. + - Fixed support for downloading from urls that include `+`. + +## 0.5.28 + + - Now reporting unix style paths only in reveal command + +## 0.5.27 + + - Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo + - Fixed support for installing when path contains a `'` - @halfdan + - Ported tests to mocha + +## 0.5.26 + + - Fix node-webkit support when `--target` option is not provided + +## 0.5.25 + + - Fix bundling of deps + +## 0.5.24 + + - Updated ABI crosswalk to incldue node v0.10.30 and v0.10.31 + +## 0.5.23 + + - Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value + - Added support for `--silent` (shortcut for `--loglevel=silent`) + +## 0.5.22 + + - Fixed node-webkit versioning name (NOTE: node-webkit support still experimental) + +## 0.5.21 + + - New package to fix `shasum check failed` error with v0.5.20 + +## 0.5.20 + + - Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90) + +## 0.5.19 + + - Updated to know about more node-webkit releases + +## 0.5.18 + + - Updated to know about more node-webkit releases + +## 0.5.17 + + - Updated to know about node v0.10.29 release + +## 0.5.16 + + - Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86) + +## 0.5.15 + + - Fixed installation of windows packages sub directories on unix systems (#84) + +## 0.5.14 + + - Finished support for cross building using `--target_platform` option (#82) + - Now skipping binary validation on install if target arch/platform do not match the host. + - Removed multi-arch validing for OS X since it required a FAT node.js binary + +## 0.5.13 + + - Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver where bundled. + +## 0.5.12 + + - Improved support for node-webkit (@Mithgol) + +## 0.5.11 + + - Updated target versions listing + +## 0.5.10 + + - Fixed handling of `-debug` flag passed directory to node-pre-gyp (#72) + - Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary + - Failed install due to `testbinary` check failure no longer leaves behind binary (#70) + +## 0.5.9 + + - Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60) + +## 0.5.8 + + - Started bundling deps + +## 0.5.7 + + - Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63) + - Exposed the internal `testbinary` command in node-pre-gyp command line tool + - Fixed minor bug so that `fallback_to_build` option is always respected + +## 0.5.6 + + - Added support for versioning on the `name` value in `package.json` (#57). 
+ - Moved to using streams for reading tarball when publishing (#52) + +## 0.5.5 + + - Improved binary validation that also now works with node-webkit (@Mithgol) + - Upgraded test apps to work with node v0.11.x + - Improved test coverage + +## 0.5.4 + + - No longer depends on external install of node-gyp for compiling builds. + +## 0.5.3 + + - Reverted fix for debian/nodejs since it broke windows (#45) + +## 0.5.2 + + - Support for debian systems where the node binary is named `nodejs` (#45) + - Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates an .npm automatically when globally installed) + - Updated abi-crosswalk with node v0.10.26 entry. + +## 0.5.1 + + - Various minor bug fixes, several improving windows support for publishing. + +## 0.5.0 + + - Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`. + - Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18). + - Added `remote_path` which also supports versioning. + - Changed `remote_uri` to `host`. + +## 0.4.2 + + - Added support for `--target` flag to request cross-compile against a specific node/node-webkit version. + - Added preliminary support for node-webkit + - Fixed support for `--target_arch` option being respected in all cases. + +## 0.4.1 + + - Fixed exception when only stderr is available in binary test (@bendi / #31) + +## 0.4.0 + + - Enforce only `https:` based remote publishing access. + - Added `node-pre-gyp info` command to display listing of published binaries + - Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option. + - Added support for S3 prefixes. + +## 0.3.1 + + - Added `unpublish` command. + - Fixed module path construction in tests. + - Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides setting in a depedencies package.json `install` target. + +## 0.3.0 + + - Support for packaging all files in `module_path` directory - see `app4` for example + - Added `testpackage` command. + - Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that. + - `.node` modules must be in a folder of there own since tar-pack will remove everything when it unpacks. + diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/LICENSE b/node_modules/fsevents/node_modules/node-pre-gyp/LICENSE new file mode 100644 index 0000000..8f5fce9 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/LICENSE @@ -0,0 +1,27 @@ +Copyright (c), Mapbox + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of node-pre-gyp nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/appveyor.yml b/node_modules/fsevents/node_modules/node-pre-gyp/appveyor.yml new file mode 100644 index 0000000..bdfaaaf --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/appveyor.yml @@ -0,0 +1,30 @@ +environment: + matrix: + - nodejs_version: 0.10.36 + - nodejs_version: 4 + - nodejs_version: 5 + +platform: + - x64 + - x86 + +shallow_clone: true + +install: + - ps: Install-Product node $env:nodejs_version $env:Platform + - node --version + - npm --version + - SET PATH=%APPDATA%\npm;%PATH% + - npm update -g npm + - npm --version + - node -e "console.log(process.arch);" + - SET PATH=C:\Program Files (x86)\MSBuild\12.0\bin\;%PATH% + - if "%PLATFORM%" == "x64" set PATH=C:\Python27-x64;%PATH% + - if "%PLATFORM%" == "x86" SET PATH=C:\python27;%PATH% + - npm install + - npm test + - .\scripts\test.bat + +build: off +test: off +deploy: off diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp b/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp new file mode 100755 index 0000000..4f0e1d0 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp @@ -0,0 +1,131 @@ +#!/usr/bin/env node + +"use strict"; + +/** + * Set the title. + */ + +process.title = 'node-pre-gyp'; + +/** + * Module dependencies. + */ + +var node_pre_gyp = require('../'); +var log = require('npmlog'); + +/** + * Process and execute the selected commands. + */ + +var prog = new node_pre_gyp.Run(); +var completed = false; +prog.parseArgv(process.argv); + +if (prog.todo.length === 0) { + if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { + console.log('v%s', prog.version); + } else { + console.log('%s', prog.usage()); + } + return process.exit(0); +} + +// if --no-color is passed +if (prog.opts && prog.opts.hasOwnProperty('color') && !prog.opts.color) { + log.disableColor(); +} + +log.info('it worked if it ends with', 'ok'); +log.verbose('cli', process.argv); +log.info('using', process.title + '@%s', prog.version); +log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch); + + +/** + * Change dir if -C/--directory was passed. + */ + +var dir = prog.opts.directory; +if (dir) { + var fs = require('fs'); + try { + var stat = fs.statSync(dir); + if (stat.isDirectory()) { + log.info('chdir', dir); + process.chdir(dir); + } else { + log.warn('chdir', dir + ' is not a directory'); + } + } catch (e) { + if (e.code === 'ENOENT') { + log.warn('chdir', dir + ' is not a directory'); + } else { + log.warn('chdir', 'error during chdir() "%s"', e.message); + } + } +} + +function run () { + var command = prog.todo.shift(); + if (!command) { + // done! 
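+    // mark success so the 'exit' handler below does not report a missing completion callback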
+ completed = true; + log.info('ok'); + return; + } + + prog.commands[command.name](command.args, function (err) { + if (err) { + log.error(command.name + ' error'); + log.error('stack', err.stack); + errorMessage(); + log.error('not ok'); + console.log(err.message); + return process.exit(1); + } + var args_array = [].slice.call(arguments, 1); + if (args_array.length) { + console.log.apply(console, args_array); + } + // now run the next command in the queue + process.nextTick(run); + }); +} + +process.on('exit', function (code) { + if (!completed && !code) { + log.error('Completion callback never invoked!'); + issueMessage(); + process.exit(6); + } +}); + +process.on('uncaughtException', function (err) { + log.error('UNCAUGHT EXCEPTION'); + log.error('stack', err.stack); + issueMessage(); + process.exit(7); +}); + +function errorMessage () { + // copied from npm's lib/util/error-handler.js + var os = require('os'); + log.error('System', os.type() + ' ' + os.release()); + log.error('command', process.argv.map(JSON.stringify).join(' ')); + log.error('cwd', process.cwd()); + log.error('node -v', process.version); + log.error(process.title+' -v', 'v' + prog.package.version); +} + +function issueMessage () { + errorMessage(); + log.error('', [ 'This is a bug in `'+process.title+'`.', + 'Try to update '+process.title+' and file an issue if it does not help:', + ' ', + ].join('\n')); +} + +// start running the given commands! +run(); diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd b/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd new file mode 100644 index 0000000..46e14b5 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/bin/node-pre-gyp.cmd @@ -0,0 +1,2 @@ +@echo off +node "%~dp0\node-pre-gyp" %* diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/build.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/build.js new file mode 100644 index 0000000..1074fa3 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/build.js @@ -0,0 +1,43 @@ +"use strict"; + +module.exports = exports = build; + +exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp'; + +var compile = require('./util/compile.js'); +var handle_gyp_opts = require('./util/handle_gyp_opts.js'); +var configure = require('./configure.js'); + +function do_build(gyp,argv,callback) { + handle_gyp_opts(gyp,argv,function(err,result) { + var final_args = ['build'].concat(result.gyp).concat(result.pre); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). 
+ concat(result.unparsed); + } + compile.run_gyp(final_args,result.opts,function(err) { + return callback(err); + }); + }); +} + +function build(gyp, argv, callback) { + + // Form up commands to pass to node-gyp: + // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not + // trigger a clean and therefore do not pay the penalty of a full recompile + if (argv.length && (argv.indexOf('rebuild') > -1)) { + // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means + // "clean + configure + build" and triggers a full recompile + compile.run_gyp(['clean'],{},function(err) { + if (err) return callback(err); + configure(gyp,argv,function(err) { + if (err) return callback(err); + return do_build(gyp,argv,callback); + }); + }); + } else { + return do_build(gyp,argv,callback); + } +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/clean.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/clean.js new file mode 100644 index 0000000..a1289f6 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/clean.js @@ -0,0 +1,23 @@ +"use strict"; + +module.exports = exports = clean; + +exports.usage = 'Removes the entire folder containing the compiled .node module'; + +var fs = require('fs'); +var rm = require('rimraf'); +var exists = require('fs').exists || require('path').exists; +var versioning = require('./util/versioning.js'); + +function clean (gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + var to_delete = opts.module_path; + exists(to_delete, function(found) { + if (found) { + if (!gyp.opts.silent_clean) console.log('['+package_json.name+'] Removing "%s"', to_delete); + return rm(to_delete, callback); + } + return callback(); + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/configure.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/configure.js new file mode 100644 index 0000000..1ea5664 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/configure.js @@ -0,0 +1,48 @@ +"use strict"; + +module.exports = exports = configure; + +exports.usage = 'Attempts to configure node-gyp or nw-gyp build'; + +var compile = require('./util/compile.js'); +var handle_gyp_opts = require('./util/handle_gyp_opts.js'); + +function configure(gyp, argv, callback) { + handle_gyp_opts(gyp,argv,function(err,result) { + var final_args = result.gyp.concat(result.pre); + // pull select node-gyp configure options out of the npm environ + var known_gyp_args = ['dist-url','python','nodedir','msvs_version']; + known_gyp_args.forEach(function(key) { + var val = gyp.opts[key] || gyp.opts[key.replace('-','_')]; + if (val) { + final_args.push('--'+key+'='+val); + } + }); + // --ensure=false tell node-gyp to re-install node development headers + // but it is only respected by node-gyp install, so we have to call install + // as a separate step if the user passes it + if (gyp.opts.ensure === false) { + var install_args = final_args.concat(['install','--ensure=false']); + compile.run_gyp(install_args,result.opts,function(err) { + if (err) return callback(err); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) { + return callback(err); + }); + }); + } else { + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). 
+ concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) { + return callback(err); + }); + } + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/info.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/info.js new file mode 100644 index 0000000..aff9bf8 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/info.js @@ -0,0 +1,40 @@ +"use strict"; + +module.exports = exports = unpublish; + +exports.usage = 'Lists all published binaries (requires aws-sdk)'; + +var fs = require('fs'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var s3_setup = require('./util/s3_setup.js'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function unpublish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + s3_setup.detect(opts.hosted_path,config); + AWS.config.update(config); + var s3 = new AWS.S3(); + var s3_opts = { Bucket: config.bucket, + Prefix: config.prefix + }; + s3.listObjects(s3_opts, function(err, meta){ + if (err && err.code == 'NotFound') { + return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix)); + } else if(err) { + return callback(err); + } else { + log.verbose(JSON.stringify(meta,null,1)); + if (meta && meta.Contents) { + meta.Contents.forEach(function(obj) { + console.log(obj.Key); + }); + } else { + console.error('['+package_json.name+'] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix ); + } + return callback(); + } + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/install.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/install.js new file mode 100644 index 0000000..ce2d56f --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/install.js @@ -0,0 +1,215 @@ +"use strict"; + +module.exports = exports = install; + +exports.usage = 'Attempts to install pre-built binary for module'; + +var fs = require('fs'); +var path = require('path'); +var zlib = require('zlib'); +var log = require('npmlog'); +var existsAsync = fs.exists || path.exists; +var versioning = require('./util/versioning.js'); +var testbinary = require('./testbinary.js'); +var clean = require('./clean.js'); + +function download(uri,opts,callback) { + log.http('GET', uri); + + var req = null; + var requestOpts = { + uri: uri.replace('+','%2B'), + headers: { + 'User-Agent': 'node-pre-gyp (node ' + process.version + ')' + } + }; + + var proxyUrl = opts.proxy || + process.env.http_proxy || + process.env.HTTP_PROXY || + process.env.npm_config_proxy; + if (proxyUrl) { + if (/^https?:\/\//i.test(proxyUrl)) { + log.verbose('download', 'using proxy url: "%s"', proxyUrl); + requestOpts.proxy = proxyUrl; + } else { + log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl); + } + } + try { + req = require('request')(requestOpts); + } catch (e) { + return callback(e); + } + if (req) { + req.on('response', function (res) { + log.http(res.statusCode, uri); + }); + } + return callback(null,req); +} + +function place_binary(from,to,opts,callback) { + download(from,opts,function(err,req) { + if (err) return callback(err); + if (!req) return callback(new Error("empty req")); + var badDownload = false; + var extractCount = 0; + var gunzip = zlib.createGunzip(); + var extracter = 
require('tar').Extract({ path: to, strip: 1}); + + function afterTarball(err) { + if (err) return callback(err); + if (badDownload) return callback(new Error("bad download")); + if (extractCount === 0) { + return callback(new Error('There was a fatal problem while downloading/extracting the tarball')); + } + log.info('tarball', 'done parsing tarball'); + callback(); + } + + function filter_func(entry) { + // ensure directories are +x + // https://github.com/mapnik/node-mapnik/issues/262 + entry.props.mode |= (entry.props.mode >>> 2) & parseInt('0111',8); + log.info('install','unpacking ' + entry.path); + extractCount++; + } + + gunzip.on('error', callback); + extracter.on('entry', filter_func); + extracter.on('error', callback); + extracter.on('end', afterTarball); + + req.on('error', function(err) { + badDownload = true; + return callback(err); + }); + + req.on('close', function () { + if (extractCount === 0) { + return callback(new Error('Connection closed while downloading tarball file')); + } + }); + + req.on('response', function(res) { + if (res.statusCode !== 200) { + badDownload = true; + if (res.statusCode == 404) { + return callback(new Error('Pre-built binary not available for your system, looked for ' + from)); + } else { + return callback(new Error(res.statusCode + ' status code downloading tarball ' + from)); + } + } + // start unzipping and untaring + req.pipe(gunzip).pipe(extracter); + }); + }); +} + +function do_build(gyp,argv,callback) { + gyp.todo.push( { name: 'build', args: ['rebuild'] } ); + process.nextTick(callback); +} + +function print_fallback_error(err,opts,package_json) { + var fallback_message = ' (falling back to source compile with node-gyp)'; + var full_message = "Pre-built binaries not found for " + package_json.name + "@" + package_json.version; + full_message += " and " + opts.runtime + "@" + (opts.target || process.versions.node) + " (" + opts.node_abi + " ABI)"; + full_message += fallback_message; + log.error("Tried to download: " + opts.hosted_tarball); + log.error(full_message); + log.http(err.message); +} + +function install(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source; + var update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary; + var should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true'); + var no_rollback = gyp.opts.hasOwnProperty('rollback') && gyp.opts.rollback === false; + if (should_do_source_build) { + log.info('build','requesting source compile'); + return do_build(gyp,argv,callback); + } else { + var fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build; + var should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true'); + // but allow override from npm + if (process.env.npm_config_argv) { + var cooked = JSON.parse(process.env.npm_config_argv).cooked; + var match = cooked.indexOf("--fallback-to-build"); + if (match > -1 && cooked.length > match && cooked[match+1] == "false") { + should_do_fallback_build = false; + log.info('install','Build fallback disabled via npm flag: --fallback-to-build=false'); + } + } + var opts; + try { + opts = versioning.evaluate(package_json, gyp.opts); + } catch (err) { + return callback(err); + } + var from = opts.hosted_tarball; + var to = opts.module_path; + var binary_module = path.join(to,opts.module_name + '.node'); 
+ if (existsAsync(binary_module,function(found) { + if (found && !update_binary) { + testbinary(gyp, argv, function(err) { + if (err) { + console.error('['+package_json.name+'] ' + err.message); + log.error("Testing local pre-built binary failed, attempting to re-download"); + place_binary(from,to,opts,function(err) { + if (err) { + if (should_do_fallback_build) { + print_fallback_error(err,opts,package_json); + return do_build(gyp,argv,callback); + } else { + return callback(err); + } + } else { + console.log('['+package_json.name+'] Success: "' + binary_module + '" is reinstalled via remote'); + return callback(); + } + }); + } else { + console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed'); + console.log('Pass --update-binary to reinstall or --build-from-source to recompile'); + return callback(); + } + }); + } else { + if (!update_binary) log.info('check','checked for "' + binary_module + '" (not found)'); + place_binary(from,to,opts,function(err) { + if (err && should_do_fallback_build) { + print_fallback_error(err,opts,package_json); + return do_build(gyp,argv,callback); + } else if (err) { + return callback(err); + } else { + testbinary(gyp, argv, function(err) { + if (err) { + if (no_rollback) { + return callback(err); + } + gyp.opts.silent_clean = true; + clean(gyp, argv, function(error) { + if (error) console.log(error); + if (should_do_fallback_build) { + console.error('['+package_json.name+'] ' + err.message); + log.error("Testing pre-built binary failed, attempting to source compile"); + return do_build(gyp,argv,callback); + } else { + return callback(err); + } + }); + } else { + console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed via remote'); + return callback(); + } + }); + } + }); + } + })); + } +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/node-pre-gyp.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/node-pre-gyp.js new file mode 100644 index 0000000..f0fff78 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/node-pre-gyp.js @@ -0,0 +1,192 @@ +"use strict"; + +/** + * Module exports. + */ + +module.exports = exports; + +/** + * Module dependencies. + */ + +var path = require('path'); +var nopt = require('nopt'); +var log = require('npmlog'); +var EE = require('events').EventEmitter; +var inherits = require('util').inherits; +var commands = [ + 'clean', + 'install', + 'reinstall', + 'build', + 'rebuild', + 'package', + 'testpackage', + 'publish', + 'unpublish', + 'info', + 'testbinary', + 'reveal', + 'configure' + ]; +var aliases = {}; + +// differentiate node-pre-gyp's logs from npm's +log.heading = 'node-pre-gyp'; + +exports.find = require('./pre-binding').find; + +function Run() { + var self = this; + + this.commands = {}; + + commands.forEach(function (command) { + self.commands[command] = function (argv, callback) { + log.verbose('command', command, argv); + return require('./' + command)(self, argv, callback); + }; + }); +} +inherits(Run, EE); +exports.Run = Run; +var proto = Run.prototype; + +/** + * Export the contents of the package.json. 
+ */ + +proto.package = require('../package'); + +/** + * nopt configuration definitions + */ + +proto.configDefs = { + help: Boolean, // everywhere + arch: String, // 'configure' + debug: Boolean, // 'build' + directory: String, // bin + proxy: String, // 'install' + loglevel: String, // everywhere +}; + +/** + * nopt shorthands + */ + +proto.shorthands = { + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silent: '--loglevel=silent', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose', +}; + +/** + * expose the command aliases for the bin file to use. + */ + +proto.aliases = aliases; + +/** + * Parses the given argv array and sets the 'opts', + * 'argv' and 'command' properties. + */ + +proto.parseArgv = function parseOpts (argv) { + this.opts = nopt(this.configDefs, this.shorthands, argv); + this.argv = this.opts.argv.remain.slice(); + var commands = this.todo = []; + + // create a copy of the argv array with aliases mapped + argv = this.argv.map(function (arg) { + // is this an alias? + if (arg in this.aliases) { + arg = this.aliases[arg]; + } + return arg; + }, this); + + // process the mapped args into "command" objects ("name" and "args" props) + argv.slice().forEach(function (arg) { + if (arg in this.commands) { + var args = argv.splice(0, argv.indexOf(arg)); + argv.shift(); + if (commands.length > 0) { + commands[commands.length - 1].args = args; + } + commands.push({ name: arg, args: [] }); + } + }, this); + if (commands.length > 0) { + commands[commands.length - 1].args = argv.splice(0); + } + + // support for inheriting config env variables from npm + var npm_config_prefix = 'npm_config_'; + Object.keys(process.env).forEach(function (name) { + if (name.indexOf(npm_config_prefix) !== 0) return; + var val = process.env[name]; + if (name === npm_config_prefix + 'loglevel') { + log.level = val; + } else { + // add the user-defined options to the config + name = name.substring(npm_config_prefix.length); + // avoid npm argv clobber already present args + // which avoids problem of 'npm test' calling + // script that runs unique npm install commands + if (name === 'argv') { + if (this.opts.argv && + this.opts.argv.remain && + this.opts.argv.remain.length) { + // do nothing + } else { + this.opts[name] = val; + } + } else { + this.opts[name] = val; + } + } + }, this); + + if (this.opts.loglevel) { + log.level = this.opts.loglevel; + } + log.resume(); +}; + +/** + * Returns the usage instructions for node-pre-gyp. + */ + +proto.usage = function usage () { + var str = [ + '', + ' Usage: node-pre-gyp <command> [options]', + '', + ' where <command> is one of:', + commands.map(function (c) { + return ' - ' + c + ' - ' + require('./' + c).usage; + }).join('\n'), + '', + 'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), + 'node@' + process.versions.node + ].join('\n'); + return str; +}; + +/** + * Version number getter. 
+ */ + +Object.defineProperty(proto, 'version', { + get: function () { + return this.package.version; + }, + enumerable: true +}); + diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/package.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/package.js new file mode 100644 index 0000000..3a35f65 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/package.js @@ -0,0 +1,46 @@ +"use strict"; + +module.exports = exports = _package; + +exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var write = require('fs').createWriteStream; +var existsAsync = fs.exists || path.exists; +var mkdirp = require('mkdirp'); + +function _package(gyp, argv, callback) { + var pack = require('tar-pack').pack; + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + var from = opts.module_path; + var binary_module = path.join(from,opts.module_name + '.node'); + existsAsync(binary_module,function(found) { + if (!found) { + return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first")); + } + var tarball = opts.staged_tarball; + var filter_func = function(entry) { + // ensure directories are +x + // https://github.com/mapnik/node-mapnik/issues/262 + log.info('package','packing ' + entry.path); + return true; + }; + mkdirp(path.dirname(tarball),function(err) { + if (err) throw err; + pack(from, { filter: filter_func }) + .pipe(write(tarball)) + .on('error', function(err) { + if (err) console.error('['+package_json.name+'] ' + err.message); + return callback(err); + }) + .on('close', function() { + log.info('package','Binary staged at "' + tarball + '"'); + return callback(); + }); + }); + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/pre-binding.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/pre-binding.js new file mode 100644 index 0000000..326a486 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/pre-binding.js @@ -0,0 +1,25 @@ +"use strict"; + +var versioning = require('../lib/util/versioning.js'); +var existsSync = require('fs').existsSync || require('path').existsSync; +var path = require('path'); + +module.exports = exports; + +exports.usage = 'Finds the require path for the node-pre-gyp installed module'; + +exports.validate = function(package_json) { + versioning.validate_config(package_json); +}; + +exports.find = function(package_json_path,opts) { + if (!existsSync(package_json_path)) { + throw new Error("package.json does not exist at " + package_json_path); + } + var package_json = require(package_json_path); + versioning.validate_config(package_json); + opts = opts || {}; + if (!opts.module_root) opts.module_root = path.dirname(package_json_path); + var meta = versioning.evaluate(package_json,opts); + return meta.module; +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/publish.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/publish.js new file mode 100644 index 0000000..d666b01 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/publish.js @@ -0,0 +1,77 @@ +"use strict"; + +module.exports = exports = publish; + +exports.usage = 'Publishes pre-built binary (requires aws-sdk)'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var 
versioning = require('./util/versioning.js'); +var s3_setup = require('./util/s3_setup.js'); +var existsAsync = fs.exists || path.exists; +var url = require('url'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function publish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + var tarball = opts.staged_tarball; + existsAsync(tarball,function(found) { + if (!found) { + return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp package` first")); + } + log.info('publish', 'Detecting s3 credentials'); + s3_setup.detect(opts.hosted_path,config); + var key_name = url.resolve(config.prefix,opts.package_name); + log.info('publish', 'Authenticating with s3'); + AWS.config.update(config); + var s3 = new AWS.S3(); + var s3_opts = { Bucket: config.bucket, + Key: key_name + }; + var remote_package = 'https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key; + log.info('publish', 'Checking for existing binary at ' + remote_package); + s3.headObject(s3_opts, function(err, meta){ + if (meta) log.info('publish', JSON.stringify(meta)); + if (err && err.code == 'NotFound') { + // we are safe to publish because + // the object does not already exist + log.info('publish', 'Preparing to put object'); + var s3_put = new AWS.S3(); + var s3_put_opts = { ACL: config.acl, + Body: fs.createReadStream(tarball), + Bucket: config.bucket, + Key: key_name + }; + log.info('publish', 'Putting object'); + try { + s3_put.putObject(s3_put_opts, function(err, resp){ + log.info('publish', 'returned from putting object'); + if(err) { + log.info('publish', 's3 putObject error: "' + err + '"'); + return callback(err); + } + if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"'); + log.info('publish', 'successfully put object'); + console.log('['+package_json.name+'] published to ' + remote_package); + return callback(); + }); + } catch (err) { + log.info('publish', 's3 putObject error: "' + err + '"'); + return callback(err); + } + } else if (err) { + log.info('publish', 's3 headObject error: "' + err + '"'); + return callback(err); + } else { + log.error('publish','Cannot publish over existing version'); + log.error('publish',"Update the 'version' field in package.json and try again"); + log.error('publish','If the previous version was published in error see:'); + log.error('publish','\t node-pre-gyp unpublish'); + return callback(new Error('Failed publishing to ' + remote_package)); + } + }); + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/rebuild.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/rebuild.js new file mode 100644 index 0000000..48a7b74 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/rebuild.js @@ -0,0 +1,13 @@ +"use strict"; + +module.exports = exports = rebuild; + +exports.usage = 'Runs "clean" and "build" at once'; + +function rebuild (gyp, argv, callback) { + gyp.todo.unshift( + { name: 'clean', args: [] }, + { name: 'build', args: ['rebuild'] } + ); + process.nextTick(callback); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/reinstall.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/reinstall.js new file mode 100644 index 0000000..ed65d54 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/reinstall.js @@ -0,0 +1,13 @@ +"use strict"; + +module.exports = exports = rebuild; + 
+exports.usage = 'Runs "clean" and "install" at once'; + +function rebuild (gyp, argv, callback) { + gyp.todo.unshift( + { name: 'clean', args: [] }, + { name: 'install', args: [] } + ); + process.nextTick(callback); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/reveal.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/reveal.js new file mode 100644 index 0000000..e6d00eb --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/reveal.js @@ -0,0 +1,31 @@ +"use strict"; + +module.exports = exports = reveal; + +exports.usage = 'Reveals data on the versioned binary'; + +var fs = require('fs'); +var versioning = require('./util/versioning.js'); + +function unix_paths(key, val) { + return val && val.replace ? val.replace(/\\/g, '/') : val; +} + +function reveal(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + var hit = false; + // if a second arg is passed look to see + // if it is a known option + //console.log(JSON.stringify(gyp.opts,null,1)) + var remain = gyp.opts.argv.remain.pop(); + if (remain && opts.hasOwnProperty(remain)) { + console.log(opts[remain].replace(/\\/g, '/')); + hit = true; + } + // otherwise return all options as json + if (!hit) { + console.log(JSON.stringify(opts,unix_paths,2)); + } + return callback(); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/testbinary.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/testbinary.js new file mode 100644 index 0000000..fbdfca4 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/testbinary.js @@ -0,0 +1,73 @@ +"use strict"; + +module.exports = exports = testbinary; + +exports.usage = 'Tests that the binary.node can be required'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var cp = require('child_process'); +var versioning = require('./util/versioning.js'); +var path = require('path'); + +function testbinary(gyp, argv, callback) { + var args = []; + var options = {}; + var shell_cmd = process.execPath; + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + // ensure on windows that / are used for require path + var binary_module = opts.module.replace(/\\/g, '/'); + var nw = (opts.runtime && opts.runtime === 'node-webkit'); + if ((process.arch != opts.target_arch) || + (process.platform != opts.target_platform)) { + var msg = "skipping validation since host platform/arch ("; + msg += process.platform+'/'+process.arch+")"; + msg += " does not match target ("; + msg += opts.target_platform+'/'+opts.target_arch+")"; + log.info('validate', msg); + return callback(); + } + if (nw) { + options.timeout = 5000; + if (process.platform === 'darwin') { + shell_cmd = 'node-webkit'; + } else if (process.platform === 'win32') { + shell_cmd = 'nw.exe'; + } else { + shell_cmd = 'nw'; + } + var modulePath = path.resolve(binary_module); + var appDir = path.join(__dirname, 'util', 'nw-pre-gyp'); + args.push(appDir); + args.push(modulePath); + log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) { + // check for normal timeout for node-webkit + if (err) { + if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) { + return callback(); + } + var stderrLog = stderr.toString(); + log.info('stderr', stderrLog); + if( 
/^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog) ){ + log.info('RANDR', 'stderr contains only RANDR error, ignored'); + return callback(); + } + return callback(err); + } + return callback(); + }); + return; + } + args.push('--eval'); + args.push("'require(\\'" + binary_module.replace(/\'/g, '\\\'') +"\\')'"); + log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) { + if (err) { + return callback(err, { stdout:stdout, stderr:stderr}); + } + return callback(); + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/testpackage.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/testpackage.js new file mode 100644 index 0000000..1d4cc60 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/testpackage.js @@ -0,0 +1,49 @@ +"use strict"; + +module.exports = exports = testpackage; + +exports.usage = 'Tests that the staged package is valid'; + +var fs = require('fs'); +var path = require('path'); +var log = require('npmlog'); +var existsAsync = fs.exists || path.exists; +var versioning = require('./util/versioning.js'); +var testbinary = require('./testbinary.js'); +var read = require('fs').createReadStream; +var zlib = require('zlib'); + +function testpackage(gyp, argv, callback) { + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + var tarball = opts.staged_tarball; + existsAsync(tarball, function(found) { + if (!found) { + return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first")); + } + var to = opts.module_path; + var gunzip = zlib.createGunzip(); + var extracter = require('tar').Extract({ path: to, strip: 1 }); + function filter_func(entry) { + // ensure directories are +x + // https://github.com/mapnik/node-mapnik/issues/262 + entry.props.mode |= (entry.props.mode >>> 2) & parseInt('0111',8); + log.info('install','unpacking ' + entry.path); + } + gunzip.on('error', callback); + extracter.on('error', callback); + extracter.on('entry', filter_func); + extracter.on('end', function(err) { + if (err) return callback(err); + testbinary(gyp,argv,function(err) { + if (err) { + return callback(err); + } else { + console.log('['+package_json.name+'] Package appears valid'); + return callback(); + } + }); + }); + read(tarball).pipe(gunzip).pipe(extracter); + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/unpublish.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/unpublish.js new file mode 100644 index 0000000..43f8e66 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/unpublish.js @@ -0,0 +1,41 @@ +"use strict"; + +module.exports = exports = unpublish; + +exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)'; + +var fs = require('fs'); +var log = require('npmlog'); +var versioning = require('./util/versioning.js'); +var s3_setup = require('./util/s3_setup.js'); +var url = require('url'); +var config = require('rc')("node_pre_gyp",{acl:"public-read"}); + +function unpublish(gyp, argv, callback) { + var AWS = require("aws-sdk"); + var package_json = JSON.parse(fs.readFileSync('./package.json')); + var opts = versioning.evaluate(package_json, gyp.opts); + s3_setup.detect(opts.hosted_path,config); + AWS.config.update(config); + var key_name = url.resolve(config.prefix,opts.package_name); + var s3 = new AWS.S3(); + var 
s3_opts = { Bucket: config.bucket, + Key: key_name + }; + s3.headObject(s3_opts, function(err, meta) { + if (err && err.code == 'NotFound') { + console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + } else if(err) { + return callback(err); + } else { + log.info('unpublish', JSON.stringify(meta)); + s3.deleteObject(s3_opts, function(err, resp) { + if (err) return callback(err); + log.info(JSON.stringify(resp)); + console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + }); + } + }); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json new file mode 100644 index 0000000..e1f84e5 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json @@ -0,0 +1,1274 @@ +{ + "0.1.14": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.15": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.16": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.17": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.18": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.19": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.20": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.21": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.22": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.23": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.24": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.25": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.26": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.27": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.28": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.29": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.30": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.31": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.32": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.33": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.90": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.91": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.92": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.93": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.94": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.95": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.96": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.97": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.98": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.99": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.100": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.101": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.102": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.103": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.104": { + "node_abi": null, + "v8": "2.3" + }, + "0.2.0": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.1": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.2": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.3": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.4": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.5": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.6": { + "node_abi": 1, + "v8": "2.3" + }, + "0.3.0": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.1": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.2": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.3": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.4": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.5": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.6": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.7": { + 
"node_abi": 1, + "v8": "3.0" + }, + "0.3.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.1": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.2": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.3": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.4": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.5": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.6": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.7": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.9": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.10": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.11": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.12": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.1": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.2": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.3": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.4": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.5": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.10": { + "node_abi": 1, + "v8": "3.7" + }, + "0.6.0": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.1": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.2": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.3": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.4": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.5": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.10": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.11": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.12": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.13": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.14": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.15": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.16": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.17": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.18": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.19": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.20": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.21": { + "node_abi": 1, + "v8": "3.6" + }, + "0.7.0": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.1": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.2": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.3": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.4": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.5": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.6": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.7": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.8": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.10": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.1": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.2": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.3": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.4": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.5": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.6": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.7": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.8": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.10": { + "node_abi": 1, + "v8": "3.11" + }, + 
"0.8.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.13": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.14": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.15": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.16": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.17": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.18": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.19": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.20": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.21": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.22": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.23": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.24": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.25": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.26": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.27": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.28": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.1": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.2": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.3": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.4": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.5": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.6": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.7": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.8": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.9": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.10": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.9.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.0": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.1": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.2": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.3": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.4": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.5": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.6": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.7": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.8": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.9": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.10": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.13": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.14": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.15": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.16": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.17": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.18": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.19": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.20": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.21": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.22": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.23": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.24": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.25": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.26": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.27": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.28": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.29": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.30": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.31": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.32": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.33": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.34": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.35": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.36": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.37": { + 
"node_abi": 11, + "v8": "3.14" + }, + "0.10.38": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.39": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.40": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.41": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.42": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.43": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.44": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.45": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.46": { + "node_abi": 11, + "v8": "3.14" + }, + "0.11.0": { + "node_abi": 12, + "v8": "3.17" + }, + "0.11.1": { + "node_abi": 12, + "v8": "3.18" + }, + "0.11.2": { + "node_abi": 12, + "v8": "3.19" + }, + "0.11.3": { + "node_abi": 12, + "v8": "3.19" + }, + "0.11.4": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.5": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.6": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.7": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.8": { + "node_abi": 13, + "v8": "3.21" + }, + "0.11.9": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.10": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.11": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.12": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.13": { + "node_abi": 14, + "v8": "3.25" + }, + "0.11.14": { + "node_abi": 14, + "v8": "3.26" + }, + "0.11.15": { + "node_abi": 14, + "v8": "3.28" + }, + "0.11.16": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.0": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.1": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.2": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.3": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.4": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.5": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.6": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.7": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.8": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.9": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.10": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.11": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.12": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.13": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.14": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.15": { + "node_abi": 14, + "v8": "3.28" + }, + "1.0.0": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.1": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.2": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.3": { + "node_abi": 42, + "v8": "4.1" + }, + "1.0.4": { + "node_abi": 42, + "v8": "4.1" + }, + "1.1.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.2.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.3.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.4": { + "node_abi": 43, + "v8": "4.1" + }, + "1.7.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.4": { + "node_abi": 43, + "v8": "4.1" + }, + "2.0.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.2": { + "node_abi": 44, + "v8": "4.2" + }, + 
"2.1.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.2": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.3": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.4": { + "node_abi": 44, + "v8": "4.2" + }, + "2.4.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.5.0": { + "node_abi": 44, + "v8": "4.2" + }, + "3.0.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.1.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.2.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.1": { + "node_abi": 45, + "v8": "4.4" + }, + "4.0.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.6": { + "node_abi": 46, + "v8": "4.5" + }, + "5.0.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.2.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.3.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.4.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.4.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.5.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.6.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.8.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.12.0": { + "node_abi": 47, + "v8": "4.6" + }, + "6.0.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.1.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.1": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.2": { + "node_abi": 48, + "v8": "5.0" + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/compile.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/compile.js new file mode 100644 index 0000000..0dc460c --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/compile.js @@ -0,0 +1,87 @@ +"use strict"; + +module.exports = exports; + +var fs = require('fs'); +var path = require('path'); +var win = process.platform == 'win32'; +var existsSync = fs.existsSync || path.existsSync; +var cp = 
require('child_process'); + +// try to build up the complete path to node-gyp +/* priority: + - node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887) + - node-gyp on NODE_PATH + - node-gyp inside npm on NODE_PATH (ignore on iojs) + - node-gyp inside npm beside node exe +*/ +function which_node_gyp() { + var node_gyp_bin; + if (process.env.npm_config_node_gyp) { + try { + node_gyp_bin = process.env.npm_config_node_gyp; + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { } + } + try { + var node_gyp_main = require.resolve('node-gyp'); + node_gyp_bin = path.join(path.dirname( + path.dirname(node_gyp_main)), + 'bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { } + if (process.execPath.indexOf('iojs') === -1) { + try { + var npm_main = require.resolve('npm'); + node_gyp_bin = path.join(path.dirname( + path.dirname(npm_main)), + 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { } + } + var npm_base = path.join(path.dirname( + path.dirname(process.execPath)), + 'lib/node_modules/npm/'); + node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } +} + +module.exports.run_gyp = function(args,opts,callback) { + var shell_cmd = ''; + var cmd_args = []; + if (opts.runtime && opts.runtime == 'node-webkit') { + shell_cmd = 'nw-gyp'; + if (win) shell_cmd += '.cmd'; + } else { + var node_gyp_path = which_node_gyp(); + if (node_gyp_path) { + shell_cmd = process.execPath; + cmd_args.push(node_gyp_path); + } else { + shell_cmd = 'node-gyp'; + if (win) shell_cmd += '.cmd'; + } + } + var final_args = cmd_args.concat(args); + var cmd = cp.spawn(shell_cmd, final_args, {cwd: undefined, env: process.env, stdio: [ 0, 1, 2]}); + cmd.on('error', function (err) { + if (err) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ")")); + } + callback(null,opts); + }); + cmd.on('close', function (code) { + if (code && code !== 0) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ")")); + } + callback(null,opts); + }); +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js new file mode 100644 index 0000000..39fe1a2 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/handle_gyp_opts.js @@ -0,0 +1,94 @@ +"use strict"; + +module.exports = exports = handle_gyp_opts; + +var fs = require('fs'); +var versioning = require('./versioning.js'); + +/* + +Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp. + +We massage the args and options slightly to account for differences in what commands mean between +node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below) + +Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether +node-pre-gyp is called directory, or if it is called in a `run-script` phase of npm. + +We also try to preserve any command line options that might have been passed to npm or node-pre-gyp. +But this is fairly difficult without passing way to much through. For example `gyp.opts` contains all +the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. 
So we have +to be very selective about what we pass through. + +For example: + +`npm install --build-from-source` will give: + +argv == [ 'rebuild' ] +gyp.opts.argv == { remain: [ 'install' ], + cooked: [ 'install', '--fallback-to-build' ], + original: [ 'install', '--fallback-to-build' ] } + +`./bin/node-pre-gyp build` will give: + +argv == [] +gyp.opts.argv == { remain: [ 'build' ], + cooked: [ 'build' ], + original: [ '-C', 'test/app1', 'build' ] } + +*/ + +// select set of node-pre-gyp versioning info +// to share with node-gyp +var share_with_node_gyp = [ + 'module', + 'module_name', + 'module_path', +]; + +function handle_gyp_opts(gyp, argv, callback) { + + // Collect node-pre-gyp specific variables to pass to node-gyp + var node_pre_gyp_options = []; + // generate custom node-pre-gyp versioning info + var opts = versioning.evaluate(JSON.parse(fs.readFileSync('./package.json')), gyp.opts); + share_with_node_gyp.forEach(function(key) { + var val = opts[key]; + if (val) { + node_pre_gyp_options.push('--' + key + '=' + val); + } else { + return callback(new Error("Option " + key + " required but not found by node-pre-gyp")); + } + }); + + // Collect options that follow the special -- which disables nopt parsing + var unparsed_options = []; + var double_hyphen_found = false; + gyp.opts.argv.original.forEach(function(opt) { + if (double_hyphen_found) { + unparsed_options.push(opt); + } + if (opt == '--') { + double_hyphen_found = true; + } + }); + + // We try respect and pass through remaining command + // line options (like --foo=bar) to node-gyp + var cooked = gyp.opts.argv.cooked; + var node_gyp_options = []; + cooked.forEach(function(value) { + if (value.length > 2 && value.slice(0,2) == '--') { + var key = value.slice(2); + var val = cooked[cooked.indexOf(value)+1]; + if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar'] + node_gyp_options.push('--' + key + '=' + val); + } else { // pass through --foo + node_gyp_options.push(value); + } + } + }); + + var result = {'opts':opts,'gyp':node_gyp_options,'pre':node_pre_gyp_options,'unparsed':unparsed_options}; + return callback(null,result); +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html new file mode 100644 index 0000000..244466c --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html @@ -0,0 +1,26 @@ + + + + +Node-webkit-based module test + + + +

Node-webkit-based module test

+ + diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json new file mode 100644 index 0000000..71d03f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json @@ -0,0 +1,9 @@ +{ +"main": "index.html", +"name": "nw-pre-gyp-module-test", +"description": "Node-webkit-based module test.", +"version": "0.0.1", +"window": { + "show": false +} +} diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js new file mode 100644 index 0000000..5bc42e9 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/s3_setup.js @@ -0,0 +1,27 @@ +"use strict"; + +module.exports = exports; + +var url = require('url'); + +var URI_REGEX="^(.*)\.(s3(?:-.*)?)\.amazonaws\.com$"; + +module.exports.detect = function(to,config) { + var uri = url.parse(to); + var hostname_matches = uri.hostname.match(URI_REGEX); + config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/',''); + if(!hostname_matches) { + return; + } + if (!config.bucket) { + config.bucket = hostname_matches[1]; + } + if (!config.region) { + var s3_domain = hostname_matches[2]; + if (s3_domain.slice(0,3) == 's3-' && + s3_domain.length >= 3) { + // it appears the region is explicit in the url + config.region = s3_domain.replace('s3-',''); + } + } +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js new file mode 100644 index 0000000..8ebbe3a --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/lib/util/versioning.js @@ -0,0 +1,308 @@ +"use strict"; + +module.exports = exports; + +var path = require('path'); +var semver = require('semver'); +var url = require('url'); + +var abi_crosswalk; + +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = require('./abi_crosswalk.json'); +} + +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_electron_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if electron is the target."); + } + // Electron guarantees that patch version update won't break native modules. + var sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' 
+ sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; + +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_node_webkit_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if node-webkit is the target."); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; + +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error("get_node_abi requires valid runtime arg"); + } + if (!versions) { + throw new Error("get_node_abi requires valid process.versions object"); + } + var sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime+'-v'+versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? runtime+'-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0,2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; + +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_runtime_abi requires valid runtime arg"); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime != 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime,process.versions); + } else { + var cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + var target_parts = target_version.split('.').map(function(i) { return +i; }); + if (target_parts.length != 3) { // parse failed + throw new Error("Unknown target version: " + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: + + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. 
+ - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. + + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). + + TODO: use semver module instead of custom version parsing + */ + var major = target_parts[0]; + var minor = target_parts[1]; + var patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + var new_iojs_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + var new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error("Unsupported target version: " + target_version); + } + // emulate process.versions + var versions_obj = { + node: target_version, + v8: cross_obj.v8+'.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; + +var required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; + +function validate_config(package_json) { + var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + var missing = []; + if (!package_json.main) { + missing.push('main'); + } + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + var o = package_json.binary; + required_parameters.forEach(function(p) { + if (missing.indexOf('binary') > -1) { + missing.pop('binary'); + } + if (!o || o[p] === undefined) { + missing.push('binary.' 
+ p); + } + }); + if (missing.length >= 1) { + throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); + } + if (o) { + // enforce https over http + var protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); + } + } +} + +module.exports.validate_config = validate_config; + +function eval_template(template,opts) { + Object.keys(opts).forEach(function(key) { + var pattern = '{'+key+'}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern,opts[key]); + } + }); + return template; +} + +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) != '/') { + return pathname + '/'; + } + return pathname; +} + +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g,'/'); +} + +function get_process_runtime(versions) { + var runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; +} + +module.exports.get_process_runtime = get_process_runtime; + +var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +var default_remote_path = ''; + +module.exports.evaluate = function(package_json,options) { + options = options || {}; + validate_config(package_json); + var v = package_json.version; + var module_version = semver.parse(v); + var runtime = options.runtime || get_process_runtime(process.versions); + var opts = { + name: package_json.name, + configuration: Boolean(options.debug) ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? 
module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime,options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + module_main: package_json.main, + toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host,opts)); + opts.module_path = eval_template(package_json.binary.module_path,opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root,opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path,opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; + var package_name = package_json.binary.package_name ? 
package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name,opts); + opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); + opts.hosted_path = url.resolve(opts.host,opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); + return opts; +}; diff --git a/node_modules/fsevents/node_modules/node-pre-gyp/package.json b/node_modules/fsevents/node_modules/node-pre-gyp/package.json new file mode 100644 index 0000000..78ac30b --- /dev/null +++ b/node_modules/fsevents/node_modules/node-pre-gyp/package.json @@ -0,0 +1,129 @@ +{ + "_args": [ + [ + "node-pre-gyp@^0.6.29", + "/Users/eshanker/Code/fsevents" + ] + ], + "_from": "node-pre-gyp@>=0.6.29 <0.7.0", + "_id": "node-pre-gyp@0.6.29", + "_inCache": true, + "_installable": true, + "_location": "/node-pre-gyp", + "_nodeVersion": "4.4.5", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/node-pre-gyp-0.6.29.tgz_1467142616046_0.945712324930355" + }, + "_npmUser": { + "email": "dane@mapbox.com", + "name": "springmeyer" + }, + "_npmVersion": "2.15.5", + "_phantomChildren": {}, + "_requested": { + "name": "node-pre-gyp", + "raw": "node-pre-gyp@^0.6.29", + "rawSpec": "^0.6.29", + "scope": null, + "spec": ">=0.6.29 <0.7.0", + "type": "range" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.29.tgz", + "_shasum": "b0bd13635baf7d1be7ae233c16fbcf3309acd37c", + "_shrinkwrap": null, + "_spec": "node-pre-gyp@^0.6.29", + "_where": "/Users/eshanker/Code/fsevents", + "author": { + "email": "dane@mapbox.com", + "name": "Dane Springmeyer" + }, + "bin": { + "node-pre-gyp": "./bin/node-pre-gyp" + }, + "bugs": { + "url": "https://github.com/mapbox/node-pre-gyp/issues" + }, + "dependencies": { + "mkdirp": "~0.5.0", + "nopt": "~3.0.1", + "npmlog": "~3.1.2", + "rc": "~1.1.0", + "request": "2.x", + "rimraf": "~2.5.0", + "semver": "~5.2.0", + "tar": "~2.2.0", + "tar-pack": "~3.1.0" + }, + "description": "Node.js native addon binary install tool", + "devDependencies": { + "aws-sdk": "2.x", + "jshint": "2.x", + "mocha": "2.x", + "retire": "1.x" + }, + "directories": {}, + "dist": { + "shasum": "b0bd13635baf7d1be7ae233c16fbcf3309acd37c", + "tarball": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.29.tgz" + }, + "gitHead": "3733536bf8947ee9dd18c8d46b0f964f0e41db8e", + "homepage": "https://github.com/mapbox/node-pre-gyp#readme", + "jshintConfig": { + "globalstrict": true, + "mocha": true, + "noarg": true, + "node": true, + "undef": true, + "unused": true + }, + "keywords": [ + "addon", + "binary", + "bindings", + "c", + "c++", + "module", + "native" + ], + "license": "BSD-3-Clause", + "main": "./lib/node-pre-gyp.js", + "maintainers": [ + { + "name": "springmeyer", + "email": "dane@dbsgeo.com" + }, + { + "name": "bergwerkgis", + "email": "wb@bergwerk-gis.at" + }, + { + "name": "mikemorris", + "email": "michael.patrick.morris@gmail.com" + }, + { + "name": "kkaefer", + "email": "kkaefer@gmail.com" + }, + { + "name": "yhahn", + "email": "young@developmentseed.org" + } + ], + "name": "node-pre-gyp", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/mapbox/node-pre-gyp.git" + }, + "scripts": { + "prepublish": "retire -n && npm ls && jshint test/build.test.js test/s3_setup.test.js test/versioning.test.js", + "test": "jshint lib lib/util scripts bin/node-pre-gyp && mocha -R spec 
--timeout 500000", + "update-crosswalk": "node scripts/abi_crosswalk.js" + }, + "version": "0.6.29" +} diff --git a/node_modules/fsevents/node_modules/node-uuid/.npmignore b/node_modules/fsevents/node_modules/node-uuid/.npmignore new file mode 100644 index 0000000..8886139 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/.npmignore @@ -0,0 +1,4 @@ +node_modules +.DS_Store +.nyc_output +coverage diff --git a/node_modules/fsevents/node_modules/node-uuid/LICENSE.md b/node_modules/fsevents/node_modules/node-uuid/LICENSE.md new file mode 100644 index 0000000..652609b --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010-2012 Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/fsevents/node_modules/node-uuid/README.md b/node_modules/fsevents/node_modules/node-uuid/README.md new file mode 100644 index 0000000..5cd8555 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/README.md @@ -0,0 +1,254 @@ +# node-uuid + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Generate RFC4122 version 1 or version 4 UUIDs +* Runs in node.js and all browsers. +* Registered as a [ComponentJS](https://github.com/component/component) [component](https://github.com/component/component/wiki/Components) ('broofa/node-uuid'). +* Cryptographically strong random # generation + * `crypto.randomBytes(n)` in node.js + * `window.crypto.getRandomValues(ta)` in [supported browsers](https://developer.mozilla.org/en-US/docs/Web/API/RandomSource/getRandomValues#Browser_Compatibility) +* 1.1K minified and gzip'ed (Want something smaller? Check this [crazy shit](https://gist.github.com/982883) out! ) +* [Annotated source code](http://broofa.github.com/node-uuid/docs/uuid.html) +* Comes with a Command Line Interface for generating uuids on the command line + +## Getting Started + +Install it in your browser: + +```html + +``` + +Or in node.js: + +``` +npm install node-uuid +``` + +```javascript +var uuid = require('node-uuid'); +``` + +Then create some ids ... + +```javascript +// Generate a v1 (time-based) id +uuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' + +// Generate a v4 (random) id +uuid.v4(); // -> '110ec58a-a0f2-4ac4-8393-c866d813b8d1' +``` + +## API + +### uuid.v1([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. 
Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. + * `msecs` - (Number | Date) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Notes: + +1. The randomly generated node id is only guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.) + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v1({ + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}); // -> "710b962e-041c-11e1-9234-0123456789ab" +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +var arr = new Array(32); // -> [] +uuid.v1(null, arr, 0); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15] +uuid.v1(null, arr, 16); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15 02 a3 1c b0 14 32 11 e1 85 58 0b 48 8e 4f c1 15] + +// Optionally use uuid.unparse() to get stringify the ids +uuid.unparse(buffer); // -> '02a2ce90-1432-11e1-8558-0b488e4fc115' +uuid.unparse(buffer, 16) // -> '02a31cb0-1432-11e1-8558-0b488e4fc115' +``` + +### uuid.v4([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator to use. Set to one of the built-in generators - `uuid.mathRNG` (all platforms), `uuid.nodeRNG` (node.js only), `uuid.whatwgRNG` (WebKit only) - or a custom function that returns an array[16] of byte values. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v4({ + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}); +// -> "109156be-c4fb-41ea-b1b4-efe1671c5836" +``` + +Example: Generate two IDs in a single buffer + +```javascript +var buffer = new Array(32); // (or 'new Buffer' in node.js) +uuid.v4(null, buffer, 0); +uuid.v4(null, buffer, 16); +``` + +### uuid.parse(id[, buffer[, offset]]) +### uuid.unparse(buffer[, offset]) + +Parse and unparse UUIDs + + * `id` - (String) UUID(-like) string + * `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. Default: A new Array or Buffer is used + * `offset` - (Number) Starting index in `buffer` at which to begin writing. 
Default: 0 + +Example parsing and unparsing a UUID string + +```javascript +var bytes = uuid.parse('797ff043-11eb-11e1-80d6-510998755d10'); // -> +var string = uuid.unparse(bytes); // -> '797ff043-11eb-11e1-80d6-510998755d10' +``` + +### uuid.noConflict() + +(Browsers only) Set `uuid` property back to it's previous value. + +Returns the node-uuid object. + +Example: + +```javascript +var myUuid = uuid.noConflict(); +myUuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' +``` + +## Deprecated APIs + +Support for the following v1.2 APIs is available in v1.3, but is deprecated and will be removed in the next major version. + +### uuid([format [, buffer [, offset]]]) + +uuid() has become uuid.v4(), and the `format` argument is now implicit in the `buffer` argument. (i.e. if you specify a buffer, the format is assumed to be binary). + +### uuid.BufferClass + +The class of container created when generating binary uuid data if no buffer argument is specified. This is expected to go away, with no replacement API. + +## Command Line Interface + +To use the executable, it's probably best to install this library globally. + +`npm install -g node-uuid` + +Usage: + +``` +USAGE: uuid [version] [options] + + +options: + +--help Display this message and exit +``` + +`version` must be an RFC4122 version that is supported by this library, which is currently version 1 and version 4 (denoted by "v1" and "v4", respectively). `version` defaults to version 4 when not supplied. + +### Examples + +``` +> uuid +3a91f950-dec8-4688-ba14-5b7bbfc7a563 +``` + +``` +> uuid v1 +9d0b43e0-7696-11e3-964b-250efa37a98e +``` + +``` +> uuid v4 +6790ac7c-24ac-4f98-8464-42f6d98a53ae +``` + +## Testing + +In node.js + +``` +npm test +``` + +In Browser + +``` +open test/test.html +``` + +### Benchmarking + +Requires node.js + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +For a more complete discussion of node-uuid performance, please see the `benchmark/README.md` file, and the [benchmark wiki](https://github.com/broofa/node-uuid/wiki/Benchmark) + +For browser performance [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance). + +## Release notes + +### 1.4.6 + +* Properly detect node crypto and whatwg crypto +* Workaround phantomjs/browserify bug +* Explicit check for `window` rather implicit this-global +* Issue warning if Math.random() is being used +* "use strict"; +* A few jshint / stylistic updates (=== and such) + +### 1.4.0 + +* Improved module context detection +* Removed public RNG functions + +### 1.3.2 + +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + +### 1.3.0 + +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/fsevents/node_modules/node-uuid/benchmark/README.md b/node_modules/fsevents/node_modules/node-uuid/benchmark/README.md new file mode 100644 index 0000000..aaeb2ea --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/benchmark/README.md @@ -0,0 +1,53 @@ +# node-uuid Benchmarks + +### Results + +To see the results of our benchmarks visit https://github.com/broofa/node-uuid/wiki/Benchmark + +### Run them yourself + +node-uuid comes with some benchmarks to measure performance of generating UUIDs. These can be run using node.js. 
node-uuid is being benchmarked against some other uuid modules, that are available through npm namely `uuid` and `uuid-js`. + +To prepare and run the benchmark issue; + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +You'll see an output like this one: + +``` +# v4 +nodeuuid.v4(): 854700 uuids/second +nodeuuid.v4('binary'): 788643 uuids/second +nodeuuid.v4('binary', buffer): 1336898 uuids/second +uuid(): 479386 uuids/second +uuid('binary'): 582072 uuids/second +uuidjs.create(4): 312304 uuids/second + +# v1 +nodeuuid.v1(): 938086 uuids/second +nodeuuid.v1('binary'): 683060 uuids/second +nodeuuid.v1('binary', buffer): 1644736 uuids/second +uuidjs.create(1): 190621 uuids/second +``` + +* The `uuid()` entries are for Nikhil Marathe's [uuid module](https://bitbucket.org/nikhilm/uuidjs) which is a wrapper around the native libuuid library. +* The `uuidjs()` entries are for Patrick Negri's [uuid-js module](https://github.com/pnegri/uuid-js) which is a pure javascript implementation based on [UUID.js](https://github.com/LiosK/UUID.js) by LiosK. + +If you want to get more reliable results you can run the benchmark multiple times and write the output into a log file: + +``` +for i in {0..9}; do node benchmark/benchmark.js >> benchmark/bench_0.4.12.log; done; +``` + +If you're interested in how performance varies between different node versions, you can issue the above command multiple times. + +You can then use the shell script `bench.sh` provided in this directory to calculate the averages over all benchmark runs and draw a nice plot: + +``` +(cd benchmark/ && ./bench.sh) +``` + +This assumes you have [gnuplot](http://www.gnuplot.info/) and [ImageMagick](http://www.imagemagick.org/) installed. You'll find a nice `bench.png` graph in the `benchmark/` directory then. 
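For reference, a minimal sketch of the throughput measurement that the bundled `benchmark/benchmark.js` performs: a tight loop of N generations timed with `Date.now()` and reported as uuids/second. The `N` value, labels, and helper name below are illustrative, not part of the benchmark files in this diff:

```javascript
// Hedged sketch of the benchmark's rate calculation (assumed harness).
var uuid = require('node-uuid');

var N = 5e5; // iteration count, same order of magnitude as benchmark.js uses

function rate(label, startedAt) {
  // integer uuids/second, truncated with | 0 the way benchmark.js reports it
  console.log(label + ': ' + (N / (Date.now() - startedAt) * 1e3 | 0) + ' uuids/second');
}

var t = Date.now();
for (var i = 0; i < N; i++) uuid.v4();
rate('uuid.v4()', t);
```

Averaging several such runs (as the log-file loop above suggests) smooths out GC pauses and timer jitter before plotting.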
diff --git a/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.gnu b/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.gnu new file mode 100644 index 0000000..a342fbb --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.gnu @@ -0,0 +1,174 @@ +#!/opt/local/bin/gnuplot -persist +# +# +# G N U P L O T +# Version 4.4 patchlevel 3 +# last modified March 2011 +# System: Darwin 10.8.0 +# +# Copyright (C) 1986-1993, 1998, 2004, 2007-2010 +# Thomas Williams, Colin Kelley and many others +# +# gnuplot home: http://www.gnuplot.info +# faq, bugs, etc: type "help seeking-assistance" +# immediate help: type "help" +# plot window: hit 'h' +set terminal postscript eps noenhanced defaultplex \ + leveldefault color colortext \ + solid linewidth 1.2 butt noclip \ + palfuncparam 2000,0.003 \ + "Helvetica" 14 +set output 'bench.eps' +unset clip points +set clip one +unset clip two +set bar 1.000000 front +set border 31 front linetype -1 linewidth 1.000 +set xdata +set ydata +set zdata +set x2data +set y2data +set timefmt x "%d/%m/%y,%H:%M" +set timefmt y "%d/%m/%y,%H:%M" +set timefmt z "%d/%m/%y,%H:%M" +set timefmt x2 "%d/%m/%y,%H:%M" +set timefmt y2 "%d/%m/%y,%H:%M" +set timefmt cb "%d/%m/%y,%H:%M" +set boxwidth +set style fill empty border +set style rectangle back fc lt -3 fillstyle solid 1.00 border lt -1 +set style circle radius graph 0.02, first 0, 0 +set dummy x,y +set format x "% g" +set format y "% g" +set format x2 "% g" +set format y2 "% g" +set format z "% g" +set format cb "% g" +set angles radians +unset grid +set key title "" +set key outside left top horizontal Right noreverse enhanced autotitles columnhead nobox +set key noinvert samplen 4 spacing 1 width 0 height 0 +set key maxcolumns 2 maxrows 0 +unset label +unset arrow +set style increment default +unset style line +set style line 1 linetype 1 linewidth 2.000 pointtype 1 pointsize default pointinterval 0 +unset style arrow +set style histogram clustered gap 2 title offset character 0, 0, 0 +unset logscale +set offsets graph 0.05, 0.15, 0, 0 +set pointsize 1.5 +set pointintervalbox 1 +set encoding default +unset polar +unset parametric +unset decimalsign +set view 60, 30, 1, 1 +set samples 100, 100 +set isosamples 10, 10 +set surface +unset contour +set clabel '%8.3g' +set mapping cartesian +set datafile separator whitespace +unset hidden3d +set cntrparam order 4 +set cntrparam linear +set cntrparam levels auto 5 +set cntrparam points 5 +set size ratio 0 1,1 +set origin 0,0 +set style data points +set style function lines +set xzeroaxis linetype -2 linewidth 1.000 +set yzeroaxis linetype -2 linewidth 1.000 +set zzeroaxis linetype -2 linewidth 1.000 +set x2zeroaxis linetype -2 linewidth 1.000 +set y2zeroaxis linetype -2 linewidth 1.000 +set ticslevel 0.5 +set mxtics default +set mytics default +set mztics default +set mx2tics default +set my2tics default +set mcbtics default +set xtics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set xtics norangelimit +set xtics () +set ytics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set ytics autofreq norangelimit +set ztics border in scale 1,0.5 nomirror norotate offset character 0, 0, 0 +set ztics autofreq norangelimit +set nox2tics +set noy2tics +set cbtics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set cbtics autofreq norangelimit +set title "" +set title offset character 0, 0, 0 font "" norotate +set timestamp bottom +set timestamp "" +set timestamp offset character 0, 0, 0 font "" norotate 
+set rrange [ * : * ] noreverse nowriteback # (currently [8.98847e+307:-8.98847e+307] ) +set autoscale rfixmin +set autoscale rfixmax +set trange [ * : * ] noreverse nowriteback # (currently [-5.00000:5.00000] ) +set autoscale tfixmin +set autoscale tfixmax +set urange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale ufixmin +set autoscale ufixmax +set vrange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale vfixmin +set autoscale vfixmax +set xlabel "" +set xlabel offset character 0, 0, 0 font "" textcolor lt -1 norotate +set x2label "" +set x2label offset character 0, 0, 0 font "" textcolor lt -1 norotate +set xrange [ * : * ] noreverse nowriteback # (currently [-0.150000:3.15000] ) +set autoscale xfixmin +set autoscale xfixmax +set x2range [ * : * ] noreverse nowriteback # (currently [0.00000:3.00000] ) +set autoscale x2fixmin +set autoscale x2fixmax +set ylabel "" +set ylabel offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set y2label "" +set y2label offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set yrange [ 0.00000 : 1.90000e+06 ] noreverse nowriteback # (currently [:] ) +set autoscale yfixmin +set autoscale yfixmax +set y2range [ * : * ] noreverse nowriteback # (currently [0.00000:1.90000e+06] ) +set autoscale y2fixmin +set autoscale y2fixmax +set zlabel "" +set zlabel offset character 0, 0, 0 font "" textcolor lt -1 norotate +set zrange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale zfixmin +set autoscale zfixmax +set cblabel "" +set cblabel offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set cbrange [ * : * ] noreverse nowriteback # (currently [8.98847e+307:-8.98847e+307] ) +set autoscale cbfixmin +set autoscale cbfixmax +set zero 1e-08 +set lmargin -1 +set bmargin -1 +set rmargin -1 +set tmargin -1 +set pm3d explicit at s +set pm3d scansautomatic +set pm3d interpolate 1,1 flush begin noftriangles nohidden3d corners2color mean +set palette positive nops_allcF maxcolors 0 gamma 1.5 color model RGB +set palette rgbformulae 7, 5, 15 +set colorbox default +set colorbox vertical origin screen 0.9, 0.2, 0 size screen 0.05, 0.6, 0 front bdefault +set loadpath +set fontpath +set fit noerrorvariables +GNUTERM = "aqua" +plot 'bench_results.txt' using 2:xticlabel(1) w lp lw 2, '' using 3:xticlabel(1) w lp lw 2, '' using 4:xticlabel(1) w lp lw 2, '' using 5:xticlabel(1) w lp lw 2, '' using 6:xticlabel(1) w lp lw 2, '' using 7:xticlabel(1) w lp lw 2, '' using 8:xticlabel(1) w lp lw 2, '' using 9:xticlabel(1) w lp lw 2 +# EOF diff --git a/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.sh b/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.sh new file mode 100755 index 0000000..d870a0c --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/benchmark/bench.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# for a given node version run: +# for i in {0..9}; do node benchmark.js >> bench_0.6.2.log; done; + +PATTERNS=('nodeuuid.v1()' "nodeuuid.v1('binary'," 'nodeuuid.v4()' "nodeuuid.v4('binary'," "uuid()" "uuid('binary')" 'uuidjs.create(1)' 'uuidjs.create(4)' '140byte') +FILES=(node_uuid_v1_string node_uuid_v1_buf node_uuid_v4_string node_uuid_v4_buf libuuid_v4_string libuuid_v4_binary uuidjs_v1_string uuidjs_v4_string 140byte_es) +INDICES=(2 3 2 3 2 2 2 2 2) +VERSIONS=$( ls bench_*.log | sed -e 's/^bench_\([0-9\.]*\)\.log/\1/' | tr "\\n" " " ) +TMPJOIN="tmp_join" +OUTPUT="bench_results.txt" + +for I in ${!FILES[*]}; do + 
F=${FILES[$I]} + P=${PATTERNS[$I]} + INDEX=${INDICES[$I]} + echo "version $F" > $F + for V in $VERSIONS; do + (VAL=$( grep "$P" bench_$V.log | LC_ALL=en_US awk '{ sum += $'$INDEX' } END { print sum/NR }' ); echo $V $VAL) >> $F + done + if [ $I == 0 ]; then + cat $F > $TMPJOIN + else + join $TMPJOIN $F > $OUTPUT + cp $OUTPUT $TMPJOIN + fi + rm $F +done + +rm $TMPJOIN + +gnuplot bench.gnu +convert -density 200 -resize 800x560 -flatten bench.eps bench.png +rm bench.eps diff --git a/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark-native.c b/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark-native.c new file mode 100644 index 0000000..dbfc75f --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark-native.c @@ -0,0 +1,34 @@ +/* +Test performance of native C UUID generation + +To Compile: cc -luuid benchmark-native.c -o benchmark-native +*/ + +#include +#include +#include +#include + +int main() { + uuid_t myid; + char buf[36+1]; + int i; + struct timeval t; + double start, finish; + + gettimeofday(&t, NULL); + start = t.tv_sec + t.tv_usec/1e6; + + int n = 2e5; + for (i = 0; i < n; i++) { + uuid_generate(myid); + uuid_unparse(myid, buf); + } + + gettimeofday(&t, NULL); + finish = t.tv_sec + t.tv_usec/1e6; + double dur = finish - start; + + printf("%d uuids/sec", (int)(n/dur)); + return 0; +} diff --git a/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark.js b/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark.js new file mode 100644 index 0000000..40e6efb --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/benchmark/benchmark.js @@ -0,0 +1,84 @@ +try { + var nodeuuid = require('../uuid'); +} catch (e) { + console.error('node-uuid require failed - skipping tests'); +} + +try { + var uuid = require('uuid'); +} catch (e) { + console.error('uuid require failed - skipping tests'); +} + +try { + var uuidjs = require('uuid-js'); +} catch (e) { + console.error('uuid-js require failed - skipping tests'); +} + +var N = 5e5; + +function rate(msg, t) { + console.log(msg + ': ' + + (N / (Date.now() - t) * 1e3 | 0) + + ' uuids/second'); +} + +console.log('# v4'); + +// node-uuid - string form +if (nodeuuid) { + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4(); + rate('nodeuuid.v4() - using node.js crypto RNG', t); + + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4({rng: nodeuuid.mathRNG}); + rate('nodeuuid.v4() - using Math.random() RNG', t); + + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4('binary'); + rate('nodeuuid.v4(\'binary\')', t); + + var buffer = new nodeuuid.BufferClass(16); + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4('binary', buffer); + rate('nodeuuid.v4(\'binary\', buffer)', t); +} + +// libuuid - string form +if (uuid) { + for (var i = 0, t = Date.now(); i < N; i++) uuid(); + rate('uuid()', t); + + for (var i = 0, t = Date.now(); i < N; i++) uuid('binary'); + rate('uuid(\'binary\')', t); +} + +// uuid-js - string form +if (uuidjs) { + for (var i = 0, t = Date.now(); i < N; i++) uuidjs.create(4); + rate('uuidjs.create(4)', t); +} + +// 140byte.es +for (var i = 0, t = Date.now(); i < N; i++) 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g,function(s,r){r=Math.random()*16|0;return (s=='x'?r:r&0x3|0x8).toString(16)}); +rate('140byte.es_v4', t); + +console.log(''); +console.log('# v1'); + +// node-uuid - v1 string form +if (nodeuuid) { + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v1(); + rate('nodeuuid.v1()', t); + + for (var i = 0, t = 
Date.now(); i < N; i++) nodeuuid.v1('binary'); + rate('nodeuuid.v1(\'binary\')', t); + + var buffer = new nodeuuid.BufferClass(16); + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v1('binary', buffer); + rate('nodeuuid.v1(\'binary\', buffer)', t); +} + +// uuid-js - v1 string form +if (uuidjs) { + for (var i = 0, t = Date.now(); i < N; i++) uuidjs.create(1); + rate('uuidjs.create(1)', t); +} diff --git a/node_modules/fsevents/node_modules/node-uuid/bin/uuid b/node_modules/fsevents/node_modules/node-uuid/bin/uuid new file mode 100755 index 0000000..f732e99 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/bin/uuid @@ -0,0 +1,26 @@ +#!/usr/bin/env node + +var path = require('path'); +var uuid = require(path.join(__dirname, '..')); + +var arg = process.argv[2]; + +if ('--help' === arg) { + console.log('\n USAGE: uuid [version] [options]\n\n'); + console.log(' options:\n'); + console.log(' --help Display this message and exit\n'); + process.exit(0); +} + +if (null == arg) { + console.log(uuid()); + process.exit(0); +} + +if ('v1' !== arg && 'v4' !== arg) { + console.error('Version must be RFC4122 version 1 or version 4, denoted as "v1" or "v4"'); + process.exit(1); +} + +console.log(uuid[arg]()); +process.exit(0); diff --git a/node_modules/fsevents/node_modules/node-uuid/bower.json b/node_modules/fsevents/node_modules/node-uuid/bower.json new file mode 100644 index 0000000..c0925e1 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/bower.json @@ -0,0 +1,23 @@ +{ + "name": "node-uuid", + "version": "1.4.7", + "homepage": "https://github.com/broofa/node-uuid", + "authors": [ + "Robert Kieffer " + ], + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "main": "uuid.js", + "keywords": [ + "uuid", + "gid", + "rfc4122" + ], + "license": "MIT", + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ] +} diff --git a/node_modules/fsevents/node_modules/node-uuid/component.json b/node_modules/fsevents/node_modules/node-uuid/component.json new file mode 100644 index 0000000..3ff4633 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/component.json @@ -0,0 +1,25 @@ +{ + "name": "node-uuid", + "repo": "broofa/node-uuid", + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "version": "1.4.7", + "author": "Robert Kieffer ", + "contributors": [ + { + "name": "Christoph Tavan ", + "github": "https://github.com/ctavan" + } + ], + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "dependencies": {}, + "development": {}, + "main": "uuid.js", + "scripts": [ + "uuid.js" + ], + "license": "MIT" +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/node-uuid/package.json b/node_modules/fsevents/node_modules/node-uuid/package.json new file mode 100644 index 0000000..c1954ae --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/package.json @@ -0,0 +1,105 @@ +{ + "_args": [ + [ + "node-uuid@~1.4.7", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "node-uuid@>=1.4.7 <1.5.0", + "_id": "node-uuid@1.4.7", + "_inCache": true, + "_installable": true, + "_location": "/node-uuid", + "_nodeVersion": "5.0.0", + "_npmUser": { + "email": "coolaj86@gmail.com", + "name": "coolaj86" + }, + "_npmVersion": "3.3.6", + "_phantomChildren": {}, + "_requested": { + "name": "node-uuid", + "raw": "node-uuid@~1.4.7", + "rawSpec": "~1.4.7", + "scope": null, + "spec": ">=1.4.7 <1.5.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + 
"_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz", + "_shasum": "6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f", + "_shrinkwrap": null, + "_spec": "node-uuid@~1.4.7", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "robert@broofa.com", + "name": "Robert Kieffer" + }, + "bin": { + "uuid": "./bin/uuid" + }, + "bugs": { + "url": "https://github.com/broofa/node-uuid/issues" + }, + "contributors": [ + { + "name": "AJ ONeal", + "email": "coolaj86@gmail.com" + }, + { + "name": "Christoph Tavan", + "email": "dev@tavan.de" + } + ], + "dependencies": {}, + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "devDependencies": { + "nyc": "^2.2.0" + }, + "directories": {}, + "dist": { + "shasum": "6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f", + "tarball": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz" + }, + "gitHead": "309512573ec1c60143c257157479a20f7f1f51cd", + "homepage": "https://github.com/broofa/node-uuid", + "installable": true, + "keywords": [ + "guid", + "rfc4122", + "uuid" + ], + "lib": ".", + "licenses": [ + { + "type": "MIT", + "url": "https://raw.github.com/broofa/node-uuid/master/LICENSE.md" + } + ], + "main": "./uuid.js", + "maintainers": [ + { + "name": "broofa", + "email": "robert@broofa.com" + }, + { + "name": "coolaj86", + "email": "coolaj86@gmail.com" + } + ], + "name": "node-uuid", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/broofa/node-uuid.git" + }, + "scripts": { + "coverage": "nyc npm test && nyc report", + "test": "node test/test.js" + }, + "url": "http://github.com/broofa/node-uuid", + "version": "1.4.7" +} diff --git a/node_modules/fsevents/node_modules/node-uuid/test/compare_v1.js b/node_modules/fsevents/node_modules/node-uuid/test/compare_v1.js new file mode 100644 index 0000000..05af822 --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/test/compare_v1.js @@ -0,0 +1,63 @@ +var assert = require('assert'), + nodeuuid = require('../uuid'), + uuidjs = require('uuid-js'), + libuuid = require('uuid').generate, + util = require('util'), + exec = require('child_process').exec, + os = require('os'); + +// On Mac Os X / macports there's only the ossp-uuid package that provides uuid +// On Linux there's uuid-runtime which provides uuidgen +var uuidCmd = os.type() === 'Darwin' ? 
'uuid -1' : 'uuidgen -t'; + +function compare(ids) { + console.log(ids); + for (var i = 0; i < ids.length; i++) { + var id = ids[i].split('-'); + id = [id[2], id[1], id[0]].join(''); + ids[i] = id; + } + var sorted = ([].concat(ids)).sort(); + + if (sorted.toString() !== ids.toString()) { + console.log('Warning: sorted !== ids'); + } else { + console.log('everything in order!'); + } +} + +// Test time order of v1 uuids +var ids = []; +while (ids.length < 10e3) ids.push(nodeuuid.v1()); + +var max = 10; +console.log('node-uuid:'); +ids = []; +for (var i = 0; i < max; i++) ids.push(nodeuuid.v1()); +compare(ids); + +console.log(''); +console.log('uuidjs:'); +ids = []; +for (var i = 0; i < max; i++) ids.push(uuidjs.create(1).toString()); +compare(ids); + +console.log(''); +console.log('libuuid:'); +ids = []; +var count = 0; +var last = function() { + compare(ids); +} +var cb = function(err, stdout, stderr) { + ids.push(stdout.substring(0, stdout.length-1)); + count++; + if (count < max) { + return next(); + } + last(); +}; +var next = function() { + exec(uuidCmd, cb); +}; +next(); diff --git a/node_modules/fsevents/node_modules/node-uuid/test/test.html b/node_modules/fsevents/node_modules/node-uuid/test/test.html new file mode 100644 index 0000000..d80326e --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/test/test.html @@ -0,0 +1,17 @@ + + + + + + + + + diff --git a/node_modules/fsevents/node_modules/node-uuid/test/test.js b/node_modules/fsevents/node_modules/node-uuid/test/test.js new file mode 100644 index 0000000..5f1113d --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/test/test.js @@ -0,0 +1,231 @@ +if (!this.uuid) { + // node.js + uuid = require('../uuid'); + if (!/_rb/.test(uuid._rng.toString())) { + throw new Error("should use crypto for node.js"); + } +} + +// +// x-platform log/assert shims +// + +function _log(msg, type) { + type = type || 'log'; + + if (typeof(document) != 'undefined') { + document.write('
<div class="' + type + '">' + msg.replace(/\n/g, '<br />') + '</div>
'); + } + if (typeof(console) != 'undefined') { + var color = { + log: '\033[39m', + warn: '\033[33m', + error: '\033[31m' + }; + console[type](color[type] + msg + color.log); + } +} + +function log(msg) {_log(msg, 'log');} +function warn(msg) {_log(msg, 'warn');} +function error(msg) {_log(msg, 'error');} + +function assert(res, msg) { + if (!res) { + error('FAIL: ' + msg); + } else { + log('Pass: ' + msg); + } +} + +// +// Unit tests +// + +// Verify ordering of v1 ids created with explicit times +var TIME = 1321644961388; // 2011-11-18 11:36:01.388-08:00 + +function compare(name, ids) { + ids = ids.map(function(id) { + return id.split('-').reverse().join('-'); + }).sort(); + var sorted = ([].concat(ids)).sort(); + + assert(sorted.toString() == ids.toString(), name + ' have expected order'); +} + +// Verify ordering of v1 ids created using default behavior +compare('uuids with current time', [ + uuid.v1(), + uuid.v1(), + uuid.v1(), + uuid.v1(), + uuid.v1() +]); + +// Verify ordering of v1 ids created with explicit times +compare('uuids with time option', [ + uuid.v1({msecs: TIME - 10*3600*1000}), + uuid.v1({msecs: TIME - 1}), + uuid.v1({msecs: TIME}), + uuid.v1({msecs: TIME + 1}), + uuid.v1({msecs: TIME + 28*24*3600*1000}) +]); + +assert( + uuid.v1({msecs: TIME}) != uuid.v1({msecs: TIME}), + 'IDs created at same msec are different' +); + +// Verify throw if too many ids created +var thrown = false; +try { + uuid.v1({msecs: TIME, nsecs: 10000}); +} catch (e) { + thrown = true; +} +assert(thrown, 'Exception thrown when > 10K ids created in 1 ms'); + +// Verify clock regression bumps clockseq +var uidt = uuid.v1({msecs: TIME}); +var uidtb = uuid.v1({msecs: TIME - 1}); +assert( + parseInt(uidtb.split('-')[3], 16) - parseInt(uidt.split('-')[3], 16) === 1, + 'Clock regression by msec increments the clockseq' +); + +// Verify clock regression bumps clockseq +var uidtn = uuid.v1({msecs: TIME, nsecs: 10}); +var uidtnb = uuid.v1({msecs: TIME, nsecs: 9}); +assert( + parseInt(uidtnb.split('-')[3], 16) - parseInt(uidtn.split('-')[3], 16) === 1, + 'Clock regression by nsec increments the clockseq' +); + +// Verify explicit options produce expected id +var id = uuid.v1({ + msecs: 1321651533573, + nsecs: 5432, + clockseq: 0x385c, + node: [ 0x61, 0xcd, 0x3c, 0xbb, 0x32, 0x10 ] +}); +assert(id == 'd9428888-122b-11e1-b85c-61cd3cbb3210', 'Explicit options produce expected id'); + +// Verify adjacent ids across a msec boundary are 1 time unit apart +var u0 = uuid.v1({msecs: TIME, nsecs: 9999}); +var u1 = uuid.v1({msecs: TIME + 1, nsecs: 0}); + +var before = u0.split('-')[0], after = u1.split('-')[0]; +var dt = parseInt(after, 16) - parseInt(before, 16); +assert(dt === 1, 'Ids spanning 1ms boundary are 100ns apart'); + +// +// Test parse/unparse +// + +id = '00112233445566778899aabbccddeeff'; +assert(uuid.unparse(uuid.parse(id.substr(0,10))) == + '00112233-4400-0000-0000-000000000000', 'Short parse'); +assert(uuid.unparse(uuid.parse('(this is the uuid -> ' + id + id)) == + '00112233-4455-6677-8899-aabbccddeeff', 'Dirty parse'); + +// +// Perf tests +// + +var generators = { + v1: uuid.v1, + v4: uuid.v4 +}; + +var UUID_FORMAT = { + v1: /[0-9a-f]{8}-[0-9a-f]{4}-1[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i, + v4: /[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i +}; + +var N = 1e4; + +// Get %'age an actual value differs from the ideal value +function divergence(actual, ideal) { + return Math.round(100*100*(actual - ideal)/ideal)/100; +} + +function rate(msg, t) { + log(msg + ': ' + (N / 
(Date.now() - t) * 1e3 | 0) + ' uuids\/second'); +} + +for (var version in generators) { + var counts = {}, max = 0; + var generator = generators[version]; + var format = UUID_FORMAT[version]; + + log('\nSanity check ' + N + ' ' + version + ' uuids'); + for (var i = 0, ok = 0; i < N; i++) { + id = generator(); + if (!format.test(id)) { + throw Error(id + ' is not a valid UUID string'); + } + + if (id != uuid.unparse(uuid.parse(id))) { + assert(fail, id + ' is not a valid id'); + } + + // Count digits for our randomness check + if (version == 'v4') { + var digits = id.replace(/-/g, '').split(''); + for (var j = digits.length-1; j >= 0; j--) { + var c = digits[j]; + max = Math.max(max, counts[c] = (counts[c] || 0) + 1); + } + } + } + + // Check randomness for v4 UUIDs + if (version == 'v4') { + // Limit that we get worried about randomness. (Purely empirical choice, this!) + var limit = 2*100*Math.sqrt(1/N); + + log('\nChecking v4 randomness. Distribution of Hex Digits (% deviation from ideal)'); + + for (var i = 0; i < 16; i++) { + var c = i.toString(16); + var bar = '', n = counts[c], p = Math.round(n/max*100|0); + + // 1-3,5-8, and D-F: 1:16 odds over 30 digits + var ideal = N*30/16; + if (i == 4) { + // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1 + 30/16); + } else if (i >= 8 && i <= 11) { + // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1/4 + 30/16); + } else { + // Otherwise: 1:16 odds on 30 digits + ideal = N*30/16; + } + var d = divergence(n, ideal); + + // Draw bar using UTF squares (just for grins) + var s = n/max*50 | 0; + while (s--) bar += '='; + + assert(Math.abs(d) < limit, c + ' |' + bar + '| ' + counts[c] + ' (' + d + '% < ' + limit + '%)'); + } + } +} + +// Perf tests +for (var version in generators) { + log('\nPerformance testing ' + version + ' UUIDs'); + var generator = generators[version]; + var buf = new uuid.BufferClass(16); + + for (var i = 0, t = Date.now(); i < N; i++) generator(); + rate('uuid.' + version + '()', t); + + for (var i = 0, t = Date.now(); i < N; i++) generator('binary'); + rate('uuid.' + version + '(\'binary\')', t); + + for (var i = 0, t = Date.now(); i < N; i++) generator('binary', buf); + rate('uuid.' + version + '(\'binary\', buffer)', t); +} diff --git a/node_modules/fsevents/node_modules/node-uuid/uuid.js b/node_modules/fsevents/node_modules/node-uuid/uuid.js new file mode 100644 index 0000000..89c5b8f --- /dev/null +++ b/node_modules/fsevents/node_modules/node-uuid/uuid.js @@ -0,0 +1,272 @@ +// uuid.js +// +// Copyright (c) 2010-2012 Robert Kieffer +// MIT License - http://opensource.org/licenses/mit-license.php + +/*global window, require, define */ +(function(_window) { + 'use strict'; + + // Unique ID creation requires a high quality random # generator. 
We feature + // detect to determine the best RNG source, normalizing to a function that + // returns 128-bits of randomness, since that's what's usually required + var _rng, _mathRNG, _nodeRNG, _whatwgRNG, _previousRoot; + + function setupBrowser() { + // Allow for MSIE11 msCrypto + var _crypto = _window.crypto || _window.msCrypto; + + if (!_rng && _crypto && _crypto.getRandomValues) { + // WHATWG crypto-based RNG - http://wiki.whatwg.org/wiki/Crypto + // + // Moderately fast, high quality + try { + var _rnds8 = new Uint8Array(16); + _whatwgRNG = _rng = function whatwgRNG() { + _crypto.getRandomValues(_rnds8); + return _rnds8; + }; + _rng(); + } catch(e) {} + } + + if (!_rng) { + // Math.random()-based (RNG) + // + // If all else fails, use Math.random(). It's fast, but is of unspecified + // quality. + var _rnds = new Array(16); + _mathRNG = _rng = function() { + for (var i = 0, r; i < 16; i++) { + if ((i & 0x03) === 0) { r = Math.random() * 0x100000000; } + _rnds[i] = r >>> ((i & 0x03) << 3) & 0xff; + } + + return _rnds; + }; + if ('undefined' !== typeof console && console.warn) { + console.warn("[SECURITY] node-uuid: crypto not usable, falling back to insecure Math.random()"); + } + } + } + + function setupNode() { + // Node.js crypto-based RNG - http://nodejs.org/docs/v0.6.2/api/crypto.html + // + // Moderately fast, high quality + if ('function' === typeof require) { + try { + var _rb = require('crypto').randomBytes; + _nodeRNG = _rng = _rb && function() {return _rb(16);}; + _rng(); + } catch(e) {} + } + } + + if (_window) { + setupBrowser(); + } else { + setupNode(); + } + + // Buffer class to use + var BufferClass = ('function' === typeof Buffer) ? Buffer : Array; + + // Maps for number <-> hex string conversion + var _byteToHex = []; + var _hexToByte = {}; + for (var i = 0; i < 256; i++) { + _byteToHex[i] = (i + 0x100).toString(16).substr(1); + _hexToByte[_byteToHex[i]] = i; + } + + // **`parse()` - Parse a UUID into it's component bytes** + function parse(s, buf, offset) { + var i = (buf && offset) || 0, ii = 0; + + buf = buf || []; + s.toLowerCase().replace(/[0-9a-f]{2}/g, function(oct) { + if (ii < 16) { // Don't overflow! 
+ buf[i + ii++] = _hexToByte[oct]; + } + }); + + // Zero out remaining bytes if string was short + while (ii < 16) { + buf[i + ii++] = 0; + } + + return buf; + } + + // **`unparse()` - Convert UUID byte array (ala parse()) into a string** + function unparse(buf, offset) { + var i = offset || 0, bth = _byteToHex; + return bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]]; + } + + // **`v1()` - Generate time-based UUID** + // + // Inspired by https://github.com/LiosK/UUID.js + // and http://docs.python.org/library/uuid.html + + // random #'s we need to init node and clockseq + var _seedBytes = _rng(); + + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + var _nodeId = [ + _seedBytes[0] | 0x01, + _seedBytes[1], _seedBytes[2], _seedBytes[3], _seedBytes[4], _seedBytes[5] + ]; + + // Per 4.2.2, randomize (14 bit) clockseq + var _clockseq = (_seedBytes[6] << 8 | _seedBytes[7]) & 0x3fff; + + // Previous uuid creation time + var _lastMSecs = 0, _lastNSecs = 0; + + // See https://github.com/broofa/node-uuid for API details + function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + + var clockseq = (options.clockseq != null) ? options.clockseq : _clockseq; + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = (options.msecs != null) ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = (options.nsecs != null) ? options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq == null) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs == null) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + var node = options.node || _nodeId; + for (var n = 0; n < 6; n++) { + b[i + n] = node[n]; + } + + return buf ? 
buf : unparse(b); + } + + // **`v4()` - Generate random UUID** + + // See https://github.com/broofa/node-uuid for API details + function v4(options, buf, offset) { + // Deprecated - 'format' argument, as supported in v1.2 + var i = buf && offset || 0; + + if (typeof(options) === 'string') { + buf = (options === 'binary') ? new BufferClass(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || _rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ii++) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || unparse(rnds); + } + + // Export public API + var uuid = v4; + uuid.v1 = v1; + uuid.v4 = v4; + uuid.parse = parse; + uuid.unparse = unparse; + uuid.BufferClass = BufferClass; + uuid._rng = _rng; + uuid._mathRNG = _mathRNG; + uuid._nodeRNG = _nodeRNG; + uuid._whatwgRNG = _whatwgRNG; + + if (('undefined' !== typeof module) && module.exports) { + // Publish as node.js module + module.exports = uuid; + } else if (typeof define === 'function' && define.amd) { + // Publish as AMD module + define(function() {return uuid;}); + + + } else { + // Publish as global (in browsers) + _previousRoot = _window.uuid; + + // **`noConflict()` - (browser only) to reset global 'uuid' var** + uuid.noConflict = function() { + _window.uuid = _previousRoot; + return uuid; + }; + + _window.uuid = uuid; + } +})('undefined' !== typeof window ? window : null); diff --git a/node_modules/fsevents/node_modules/nopt/.npmignore b/node_modules/fsevents/node_modules/nopt/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/fsevents/node_modules/nopt/.travis.yml b/node_modules/fsevents/node_modules/nopt/.travis.yml new file mode 100644 index 0000000..99f2bbf --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +language: node_js +node_js: + - '0.8' + - '0.10' + - '0.12' + - 'iojs' +before_install: + - npm install -g npm@latest diff --git a/node_modules/fsevents/node_modules/nopt/LICENSE b/node_modules/fsevents/node_modules/nopt/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
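The module above publishes a small API (`uuid()`, `uuid.v1()`, `uuid.v4()`, `uuid.parse()`, `uuid.unparse()`, plus `noConflict()` in browsers). A minimal usage sketch, assuming the file is consumed as the vendored `node-uuid` package (the require path is an assumption; it is not shown in this hunk):

```javascript
// Usage sketch for the UUID module above. The require path is an assumption;
// in a browser the same API is exposed as the global `uuid` instead.
var uuid = require('node-uuid');

// The default export is the v4 (random) generator.
var a = uuid();      // e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1'
var b = uuid.v4();   // same generator, called explicitly

// v1() produces time-based UUIDs; msecs/nsecs/clockseq/node can be
// overridden through the options object handled in the source above.
var c = uuid.v1();

// parse()/unparse() round-trip between the string form and a 16-byte array.
var bytes = uuid.parse(a);
var same = uuid.unparse(bytes); // === a

// v4() can also fill a caller-supplied buffer starting at an offset.
var buf = new Array(16);
uuid.v4(null, buf, 0);

console.log(a, b, c, same, buf.length);
```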
diff --git a/node_modules/fsevents/node_modules/nopt/README.md b/node_modules/fsevents/node_modules/nopt/README.md new file mode 100644 index 0000000..f21a4b3 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/README.md @@ -0,0 +1,211 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + + // my-program.js + var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + console.log(parsed) + +This would give you support for any of the following: + +```bash +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. 
+* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. 
For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/node_modules/fsevents/node_modules/nopt/bin/nopt.js b/node_modules/fsevents/node_modules/nopt/bin/nopt.js new file mode 100755 index 0000000..3232d4c --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/fsevents/node_modules/nopt/examples/my-program.js b/node_modules/fsevents/node_modules/nopt/examples/my-program.js new file mode 100755 index 0000000..142447e --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. 
+ // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/fsevents/node_modules/nopt/lib/nopt.js b/node_modules/fsevents/node_modules/nopt/lib/nopt.js new file mode 100644 index 0000000..97707e7 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/lib/nopt.js @@ -0,0 +1,415 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , remain = [] + , cooked = args + , original = args.slice(0) + + parse(args, data, remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = {remain:remain,cooked:cooked,original:original} + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if 
(exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + var homePattern = process.platform === 'win32' ? /^~(\/|\\)/ : /^~\// + if (val.match(homePattern) && process.env.HOME) { + val = path.resolve(process.env.HOME, val.substr(2)) + } + data[k] = path.resolve(String(val)) + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + debug("validate Date %j %j %j", k, val, Date.parse(val)) + var s = Date.parse(val) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. 
+ remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + if (arg.indexOf("=") !== -1) { + hadEq = true + var v = arg.split("=") + arg = v.shift() + v = v.join("=") + args.splice.apply(args, [i, 1].concat([arg, v])) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var isArray = types[arg] === Array || + Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + types[arg] === Boolean || + Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || + (typeof types[arg] === 'undefined' && !hadEq) || + (la === "false" && + (types[arg] === null || + Array.isArray(types[arg]) && ~types[arg].indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (Array.isArray(types[arg]) && la) { + if (~types[arg].indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~types[arg].indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~types[arg].indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (types[arg] === String && la === undefined) + la = "" + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} diff --git a/node_modules/fsevents/node_modules/nopt/package.json b/node_modules/fsevents/node_modules/nopt/package.json new file mode 100644 index 0000000..d8d9d34 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/package.json @@ -0,0 +1,88 @@ +{ + "_args": [ + [ + "nopt@~3.0.1", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "nopt@>=3.0.1 <3.1.0", + "_id": "nopt@3.0.6", + "_inCache": true, + "_installable": true, + "_location": "/nopt", + "_nodeVersion": "4.2.1", + "_npmUser": { + "email": "ogd@aoaioxxysz.net", + "name": "othiym23" + }, + "_npmVersion": "2.14.10", + "_phantomChildren": {}, + "_requested": { + "name": "nopt", + "raw": "nopt@~3.0.1", + "rawSpec": "~3.0.1", + "scope": null, + "spec": ">=3.0.1 <3.1.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", + "_shasum": "c6465dbf08abcd4db359317f79ac68a646b28ff9", + "_shrinkwrap": null, + "_spec": "nopt@~3.0.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me/" + }, + "bin": { + "nopt": "./bin/nopt.js" + }, + "bugs": { + "url": "https://github.com/npm/nopt/issues" + }, + "dependencies": { + "abbrev": "1" + }, + "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", + "devDependencies": { + "tap": "^1.2.0" + }, + "directories": {}, + "dist": { + "shasum": "c6465dbf08abcd4db359317f79ac68a646b28ff9", + "tarball": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz" + }, + "gitHead": "10a750c9bb99c1950160353459e733ac2aa18cb6", + "homepage": "https://github.com/npm/nopt#readme", + "license": "ISC", + "main": "lib/nopt.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + }, + { + "name": "othiym23", + "email": "ogd@aoaioxxysz.net" + }, + { + "name": "zkat", + "email": "kat@sykosomatic.org" + } + ], + "name": "nopt", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/nopt.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "3.0.6" +} diff --git a/node_modules/fsevents/node_modules/nopt/test/basic.js b/node_modules/fsevents/node_modules/nopt/test/basic.js new file mode 100644 index 0000000..d399de9 --- /dev/null +++ b/node_modules/fsevents/node_modules/nopt/test/basic.js @@ -0,0 +1,273 @@ +var nopt = require("../") + , test = require('tap').test + + +test("passing a string results in a string", function (t) { + var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0) + t.same(parsed.key, "myvalue") + t.end() +}) + +// https://github.com/npm/nopt/issues/31 +test("Empty String results in empty string, not true", function (t) { + var parsed = nopt({ empty: String }, {}, ["--empty"], 0) + t.same(parsed.empty, "") + t.end() +}) + +test("~ path is resolved to $HOME", function (t) { + var path = require("path") + if (!process.env.HOME) process.env.HOME = "/tmp" + var parsed = nopt({key: path}, {}, ["--key=~/val"], 0) + t.same(parsed.key, path.resolve(process.env.HOME, "val")) + t.end() +}) + +// https://github.com/npm/nopt/issues/24 +test("Unknown options are not parsed as numbers", function (t) { + var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0) + t.equal(parsed['leave-as-is'], '1.20') + t.equal(parsed['parse-me'], 1.2) + t.end() +}); + +// https://github.com/npm/nopt/issues/48 +test("Check types based on name of type", function (t) { + var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0) + t.equal(parsed['parse-me'], 1.2) + t.end() +}) + + +test("Missing types are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("Types passed without a name are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("other tests", function (t) { + + var util = require("util") + , Stream = require("stream") + , path = require("path") + , url = require("url") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" 
: ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + , path: path + } + + ; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know=the-rules --and=so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + "--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, '100']} + ,[]] + ,["-str 100" + 
,{str:"100"} + ,[]] + ,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate=2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ,["-cl 1" + ,{config: true, length: 1} + ,[] + ,{config: Boolean, length: Number, clear: Boolean} + ,{c: "--config", l: "--length"}] + ,["--acount bla" + ,{"acount":true} + ,["bla"] + ,{account: Boolean, credentials: Boolean, options: String} + ,{a:"--account", c:"--credentials",o:"--options"}] + ,["--clear" + ,{clear:true} + ,[] + ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} + ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] + ,["--file -" + ,{"file":"-"} + ,[] + ,{file:String} + ,{}] + ,["--file -" + ,{"file":true} + ,["-"] + ,{file:Boolean} + ,{}] + ,["--path" + ,{"path":null} + ,[]] + ,["--path ." + ,{"path":process.cwd()} + ,[]] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) + if (e && typeof e === "object") { + t.deepEqual(e, a) + } else { + t.equal(e, a) + } + } + t.deepEqual(rem, parsed.remain) + }) + t.end() +}) diff --git a/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md new file mode 100644 index 0000000..f549a1f --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md @@ -0,0 +1,36 @@ +### v3.1.2 + +* Update to `gauge@1.6.0` adding support for default values for template + items. + +### v3.1.1 + +* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to + when a progress bar is enabled. In `1.x` if you didn't have a TTY the + progress bar was never shown. In `2.x` it merely defaults to disabled, + but you can enable it explicitly if you still want progress updates. + +### v3.1.0 + +* Update to `gauge@2.5.2`: + * Updates the `signal-exit` dependency which fixes an incompatibility with + the node profiler. + * Uses externalizes its ansi code generation in `console-control-strings` +* Make the default progress bar include the last line printed, colored as it + would be when printing to a tty. + +### v3.0.0 + +* Switch to `gauge@2.0.0`, for better performance, better look. +* Set stderr/stdout blocking if they're tty's, so that we can hide a + progress bar going to stderr and then safely print to stdout. Without + this the two can end up overlapping producing confusing and sometimes + corrupted output. + +### v2.0.0 + +* Make the `error` event non-fatal so that folks can use it as a prefix. + +### v1.0.0 + +* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md~ b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md~ new file mode 100644 index 0000000..a373c6a --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/CHANGELOG.md~ @@ -0,0 +1,31 @@ +### v3.1.1 + +* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to + when a progress bar is enabled. In `1.x` if you didn't have a TTY the + progress bar was never shown. In `2.x` it merely defaults to disabled, + but you can enable it explicitly if you still want progress updates. 
+ +### v3.1.0 + +* Update to `gauge@2.5.2`: + * Updates the `signal-exit` dependency which fixes an incompatibility with + the node profiler. + * Uses externalizes its ansi code generation in `console-control-strings` +* Make the default progress bar include the last line printed, colored as it + would be when printing to a tty. + +### v3.0.0 + +* Switch to `gauge@2.0.0`, for better performance, better look. +* Set stderr/stdout blocking if they're tty's, so that we can hide a + progress bar going to stderr and then safely print to stdout. Without + this the two can end up overlapping producing confusing and sometimes + corrupted output. + +### v2.0.0 + +* Make the `error` event non-fatal so that folks can use it as a prefix. + +### v1.0.0 + +* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/fsevents/node_modules/npmlog/LICENSE b/node_modules/fsevents/node_modules/npmlog/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/npmlog/README.md b/node_modules/fsevents/node_modules/npmlog/README.md new file mode 100644 index 0000000..dba3550 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/README.md @@ -0,0 +1,210 @@ +# npmlog + +The logger util that npm uses. + +This logger is very basic. It does the logging for npm. It supports +custom levels and colored output. + +By default, logs are written to stderr. If you want to send log messages +to outputs other than streams, then you can change the `log.stream` +member, or you can just listen to the events that it emits, and do +whatever you want with them. + +# Basic Usage + +``` +var log = require('npmlog') + +// additional stuff ---------------------------+ +// message ----------+ | +// prefix ----+ | | +// level -+ | | | +// v v v v + log.info('fyi', 'I have a kitty cat: %j', myKittyCat) +``` + +## log.level + +* {String} + +The level to display logs at. Any logs at or above this level will be +displayed. The special level `silent` will prevent anything from being +displayed ever. + +## log.record + +* {Array} + +An array of all the log messages that have been entered. + +## log.maxRecordSize + +* {Number} + +The maximum number of records to keep. If log.record gets bigger than +10% over this value, then it is sliced down to 90% of this value. + +The reason for the 10% window is so that it doesn't have to resize a +large array on every log entry. + +## log.prefixStyle + +* {Object} + +A style object that specifies how prefixes are styled. (See below) + +## log.headingStyle + +* {Object} + +A style object that specifies how the heading is styled. 
(See below) + +## log.heading + +* {String} Default: "" + +If set, a heading that is printed at the start of every line. + +## log.stream + +* {Stream} Default: `process.stderr` + +The stream where output is written. + +## log.enableColor() + +Force colors to be used on all messages, regardless of the output +stream. + +## log.disableColor() + +Disable colors on all messages. + +## log.enableProgress() + +Enable the display of log activity spinner and progress bar + +## log.disableProgress() + +Disable the display of a progress bar + +## log.enableUnicode() + +Force the unicode theme to be used for the progress bar. + +## log.disableUnicode() + +Disable the use of unicode in the progress bar. + +## log.setGaugeTemplate(template) + +Set a template for outputting the progress bar. See the [gauge documentation] for details. + +[gauge documentation]: https://npmjs.com/package/gauge + +## log.setGaugeThemeset(themes) + +Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. + +## log.pause() + +Stop emitting messages to the stream, but do not drop them. + +## log.resume() + +Emit all buffered messages that were written while paused. + +## log.log(level, prefix, message, ...) + +* `level` {String} The level to emit the message at +* `prefix` {String} A string prefix. Set to "" to skip. +* `message...` Arguments to `util.format` + +Emit a log message at the specified level. + +## log\[level](prefix, message, ...) + +For example, + +* log.silly(prefix, message, ...) +* log.verbose(prefix, message, ...) +* log.info(prefix, message, ...) +* log.http(prefix, message, ...) +* log.warn(prefix, message, ...) +* log.error(prefix, message, ...) + +Like `log.log(level, prefix, message, ...)`. In this way, each level is +given a shorthand, so you can do `log.info(prefix, message)`. + +## log.addLevel(level, n, style, disp) + +* `level` {String} Level indicator +* `n` {Number} The numeric level +* `style` {Object} Object with fg, bg, inverse, etc. +* `disp` {String} Optional replacement for `level` in the output. + +Sets up a new level with a shorthand function and so forth. + +Note that if the number is `Infinity`, then setting the level to that +will cause all log messages to be suppressed. If the number is +`-Infinity`, then the only way to show it is to enable all log messages. + +## log.newItem(name, todo, weight) + +* `name` {String} Optional; progress item name. +* `todo` {Number} Optional; total amount of work to be done. Default 0. +* `weight` {Number} Optional; the weight of this item relative to others. Default 1. + +This adds a new `are-we-there-yet` item tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `Tracker` object. + +## log.newStream(name, todo, weight) + +This adds a new `are-we-there-yet` stream tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerStream` object. + +## log.newGroup(name, weight) + +This adds a new `are-we-there-yet` tracker group to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerGroup` object. + +# Events + +Events are all emitted with the message object. + +* `log` Emitted for all messages +* `log.<level>` Emitted for all messages with the `<level>` level. +* `<prefix>` Messages with prefixes also emit their prefix as an event.
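To make the event names above concrete, here is a short sketch of logging and listening with npmlog (the `fetch` prefix and the URL are illustrative values, not something npmlog defines):

```javascript
var log = require('npmlog')

// 'log' fires for every message, 'log.<level>' for a single level,
// and a prefix ('fetch' here) fires as its own event name.
log.on('log', function (m) { /* every message object */ })
log.on('log.error', function (m) { /* only error-level messages */ })
log.on('fetch', function (m) { /* only messages logged with the 'fetch' prefix */ })

log.level = 'verbose'
log.verbose('fetch', 'GET %s', 'https://registry.npmjs.org/npmlog')
log.error('fetch', 'request failed')

// Every listener receives the message object described under
// "Message Objects" below: { id, level, prefix, message, messageRaw }
```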
+ +# Style Objects + +Style objects can have the following fields: + +* `fg` {String} Color for the foreground text +* `bg` {String} Color for the background +* `bold`, `inverse`, `underline` {Boolean} Set the associated property +* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) + +# Message Objects + +Every log event is emitted with a message object, and the `log.record` +list contains all of them that have been created. They have the +following fields: + +* `id` {Number} +* `level` {String} +* `prefix` {String} +* `message` {String} Result of `util.format()` +* `messageRaw` {Array} Arguments to `util.format()` + +# Blocking TTYs + +We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set +stderr and stdout blocking if they are tty's and have the setBlocking call. +This is a work around for an issue in early versions of Node.js 6.x, which +made stderr and stdout non-blocking on OSX. (They are always blocking +Windows and were never blocking on Linux.) `npmlog` needs them to be blocking +so that it can allow output to stdout and stderr to be interlaced. diff --git a/node_modules/fsevents/node_modules/npmlog/log.js b/node_modules/fsevents/node_modules/npmlog/log.js new file mode 100644 index 0000000..e5b70f1 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/log.js @@ -0,0 +1,298 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) + +log.tracker = new Progress.TrackerGroup() + +// no progress bars unless asked +log.progressEnabled = false + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + if (this._pause) return + this.tracker.on('change', this.showProgress) + this.gauge.enable() + this.showProgress() +} + +log.disableProgress = function () { + if (!this.progressEnabled) return + this.clearProgress() + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true +} + 
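// Editorial note, not part of the upstream npmlog source: pause() above only
// flips the _paused flag; emitLog() further down checks that flag and pushes
// messages onto log._buffer instead of writing them, and resume() below
// replays that buffer in order. A hedged usage sketch:
//
//   log.pause()
//   log.warn('net', 'queued while paused') // recorded and buffered, not written
//   log.resume()                           // flushes the buffered message to the stream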
+log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.enableProgress() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + arg.stack = stack = arg.stack + '' + } + } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) + + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + var disp = log.disp[m.level] || m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} + +log.write = function (msg, style) { + if (!stream) return + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + if (!disp) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 
'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/fsevents/node_modules/npmlog/package.json b/node_modules/fsevents/node_modules/npmlog/package.json new file mode 100644 index 0000000..5dbf2f0 --- /dev/null +++ b/node_modules/fsevents/node_modules/npmlog/package.json @@ -0,0 +1,100 @@ +{ + "_args": [ + [ + "npmlog@~3.1.2", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "npmlog@>=3.1.2 <3.2.0", + "_id": "npmlog@3.1.2", + "_inCache": true, + "_installable": true, + "_location": "/npmlog", + "_nodeVersion": "4.4.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/npmlog-3.1.2.tgz_1466073052641_0.36111341998912394" + }, + "_npmUser": { + "email": "me@re-becca.org", + "name": "iarna" + }, + "_npmVersion": "3.9.6", + "_phantomChildren": {}, + "_requested": { + "name": "npmlog", + "raw": "npmlog@~3.1.2", + "rawSpec": "~3.1.2", + "scope": null, + "spec": ">=3.1.2 <3.2.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/npmlog/-/npmlog-3.1.2.tgz", + "_shasum": "2d46fa874337af9498a2f12bb43d8d0be4a36873", + "_shrinkwrap": null, + "_spec": "npmlog@~3.1.2", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/npmlog/issues" + }, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.6.0", + "set-blocking": "~2.0.0" + }, + "description": "logger for npm", + "devDependencies": { + "standard": "~7.1.2", + "tap": "~5.7.0" + }, + "directories": {}, + "dist": { + "shasum": "2d46fa874337af9498a2f12bb43d8d0be4a36873", + "tarball": "https://registry.npmjs.org/npmlog/-/npmlog-3.1.2.tgz" + }, + "files": [ + "log.js" + ], + "gitHead": "444e237743fa1339cb91bea2d3f16f710be56984", + "homepage": "https://github.com/npm/npmlog#readme", + "license": "ISC", + "main": "log.js", + "maintainers": [ + { + "name": "iarna", + "email": "me@re-becca.org" + }, + { + "name": "isaacs", + "email": "i@izs.me" + }, + { + "name": "othiym23", + "email": "ogd@aoaioxxysz.net" + }, + { + "name": "zkat", + "email": "kat@sykosomatic.org" + } + ], + "name": "npmlog", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/npmlog.git" + }, + "scripts": { + "test": "standard && tap test/*.js" + }, + "version": "3.1.2" +} diff --git a/node_modules/fsevents/node_modules/number-is-nan/index.js b/node_modules/fsevents/node_modules/number-is-nan/index.js new file mode 100644 index 0000000..79be4b9 --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = Number.isNaN || function (x) { + return x !== x; +}; diff --git a/node_modules/fsevents/node_modules/number-is-nan/license b/node_modules/fsevents/node_modules/number-is-nan/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to 
deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/number-is-nan/package.json b/node_modules/fsevents/node_modules/number-is-nan/package.json new file mode 100644 index 0000000..f9a40df --- /dev/null +++ b/node_modules/fsevents/node_modules/number-is-nan/package.json @@ -0,0 +1,94 @@ +{ + "_args": [ + [ + "number-is-nan@^1.0.0", + "/Users/eshanker/Code/fsevents/node_modules/code-point-at" + ] + ], + "_from": "number-is-nan@>=1.0.0 <2.0.0", + "_id": "number-is-nan@1.0.0", + "_inCache": true, + "_installable": true, + "_location": "/number-is-nan", + "_nodeVersion": "0.12.3", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.10.0", + "_phantomChildren": {}, + "_requested": { + "name": "number-is-nan", + "raw": "number-is-nan@^1.0.0", + "rawSpec": "^1.0.0", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/code-point-at", + "/is-fullwidth-code-point" + ], + "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz", + "_shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", + "_shrinkwrap": null, + "_spec": "number-is-nan@^1.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/code-point-at", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/number-is-nan/issues" + }, + "dependencies": {}, + "description": "ES6 Number.isNaN() ponyfill", + "devDependencies": { + "ava": "0.0.4" + }, + "directories": {}, + "dist": { + "shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", + "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "0f394b1bc33185c40304363b209e3f0588dbeeb3", + "homepage": "https://github.com/sindresorhus/number-is-nan#readme", + "keywords": [ + "ecmascript", + "es2015", + "es6", + "harmony", + "is", + "nan", + "not", + "number", + "polyfill", + "ponyfill", + "shim" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "number-is-nan", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/number-is-nan.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "1.0.0" +} diff --git a/node_modules/fsevents/node_modules/number-is-nan/readme.md b/node_modules/fsevents/node_modules/number-is-nan/readme.md new file mode 100644 index 0000000..93d851a --- /dev/null +++ 
b/node_modules/fsevents/node_modules/number-is-nan/readme.md @@ -0,0 +1,30 @@ +# number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) + +> ES6 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) ponyfill + +> Ponyfill: A polyfill that doesn't overwrite the native method + + +## Install + +``` +$ npm install --save number-is-nan +``` + + +## Usage + +```js +var numberIsNan = require('number-is-nan'); + +numberIsNan(NaN); +//=> true + +numberIsNan('unicorn'); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/oauth-sign/LICENSE b/node_modules/fsevents/node_modules/oauth-sign/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/fsevents/node_modules/oauth-sign/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/oauth-sign/README.md b/node_modules/fsevents/node_modules/oauth-sign/README.md new file mode 100644 index 0000000..34c4a85 --- /dev/null +++ b/node_modules/fsevents/node_modules/oauth-sign/README.md @@ -0,0 +1,4 @@ +oauth-sign +========== + +OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module. diff --git a/node_modules/fsevents/node_modules/oauth-sign/index.js b/node_modules/fsevents/node_modules/oauth-sign/index.js new file mode 100644 index 0000000..dadcba9 --- /dev/null +++ b/node_modules/fsevents/node_modules/oauth-sign/index.js @@ -0,0 +1,136 @@ +var crypto = require('crypto') + , qs = require('querystring') + ; + +function sha1 (key, body) { + return crypto.createHmac('sha1', key).update(body).digest('base64') +} + +function rsa (key, body) { + return crypto.createSign("RSA-SHA1").update(body).sign(key, 'base64'); +} + +function rfc3986 (str) { + return encodeURIComponent(str) + .replace(/!/g,'%21') + .replace(/\*/g,'%2A') + .replace(/\(/g,'%28') + .replace(/\)/g,'%29') + .replace(/'/g,'%27') + ; +} + +// Maps object to bi-dimensional array +// Converts { foo: 'A', bar: [ 'b', 'B' ]} to +// [ ['foo', 'A'], ['bar', 'b'], ['bar', 'B'] ] +function map (obj) { + var key, val, arr = [] + for (key in obj) { + val = obj[key] + if (Array.isArray(val)) + for (var i = 0; i < val.length; i++) + arr.push([key, val[i]]) + else if (typeof val === "object") + for (var prop in val) + arr.push([key + '[' + prop + ']', val[prop]]); + else + arr.push([key, val]) + } + return arr +} + +// Compare function for sort +function compare (a, b) { + return a > b ? 1 : a < b ? -1 : 0 +} + +function generateBase (httpMethod, base_uri, params) { + // adapted from https://dev.twitter.com/docs/auth/oauth and + // https://dev.twitter.com/docs/auth/creating-signature + + // Parameter normalization + // http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + var normalized = map(params) + // 1. First, the name and value of each parameter are encoded + .map(function (p) { + return [ rfc3986(p[0]), rfc3986(p[1] || '') ] + }) + // 2. The parameters are sorted by name, using ascending byte value + // ordering. If two or more parameters share the same name, they + // are sorted by their value. + .sort(function (a, b) { + return compare(a[0], b[0]) || compare(a[1], b[1]) + }) + // 3. The name of each parameter is concatenated to its corresponding + // value using an "=" character (ASCII code 61) as a separator, even + // if the value is empty. + .map(function (p) { return p.join('=') }) + // 4. The sorted name/value pairs are concatenated together into a + // single string by using an "&" character (ASCII code 38) as + // separator. + .join('&') + + var base = [ + rfc3986(httpMethod ? 
httpMethod.toUpperCase() : 'GET'), + rfc3986(base_uri), + rfc3986(normalized) + ].join('&') + + return base +} + +function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') + + return sha1(key, base) +} + +function rsasign (httpMethod, base_uri, params, private_key, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = private_key || '' + + return rsa(key, base) +} + +function plaintext (consumer_secret, token_secret) { + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') + + return key +} + +function sign (signMethod, httpMethod, base_uri, params, consumer_secret, token_secret) { + var method + var skipArgs = 1 + + switch (signMethod) { + case 'RSA-SHA1': + method = rsasign + break + case 'HMAC-SHA1': + method = hmacsign + break + case 'PLAINTEXT': + method = plaintext + skipArgs = 4 + break + default: + throw new Error("Signature method not supported: " + signMethod) + } + + return method.apply(null, [].slice.call(arguments, skipArgs)) +} + +exports.hmacsign = hmacsign +exports.rsasign = rsasign +exports.plaintext = plaintext +exports.sign = sign +exports.rfc3986 = rfc3986 +exports.generateBase = generateBase + diff --git a/node_modules/fsevents/node_modules/oauth-sign/package.json b/node_modules/fsevents/node_modules/oauth-sign/package.json new file mode 100644 index 0000000..e6afec1 --- /dev/null +++ b/node_modules/fsevents/node_modules/oauth-sign/package.json @@ -0,0 +1,90 @@ +{ + "_args": [ + [ + "oauth-sign@~0.8.1", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "oauth-sign@>=0.8.1 <0.9.0", + "_id": "oauth-sign@0.8.2", + "_inCache": true, + "_installable": true, + "_location": "/oauth-sign", + "_nodeVersion": "5.9.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/oauth-sign-0.8.2.tgz_1462396399020_0.8175400267355144" + }, + "_npmUser": { + "email": "simeonvelichkov@gmail.com", + "name": "simov" + }, + "_npmVersion": "2.15.3", + "_phantomChildren": {}, + "_requested": { + "name": "oauth-sign", + "raw": "oauth-sign@~0.8.1", + "rawSpec": "~0.8.1", + "scope": null, + "spec": ">=0.8.1 <0.9.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", + "_shasum": "46a6ab7f0aead8deae9ec0565780b7d4efeb9d43", + "_shrinkwrap": null, + "_spec": "oauth-sign@~0.8.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "mikeal.rogers@gmail.com", + "name": "Mikeal Rogers", + "url": "http://www.futurealoof.com" + }, + "bugs": { + "url": "https://github.com/mikeal/oauth-sign/issues" + }, + "dependencies": {}, + "description": "OAuth 1 signing. 
Formerly a vendor lib in mikeal/request, now a standalone module.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "46a6ab7f0aead8deae9ec0565780b7d4efeb9d43", + "tarball": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz" + }, + "engines": { + "node": "*" + }, + "files": [ + "index.js" + ], + "gitHead": "0b034206316132f57e26970152c2fb18e71bddd5", + "homepage": "https://github.com/mikeal/oauth-sign#readme", + "license": "Apache-2.0", + "main": "index.js", + "maintainers": [ + { + "name": "mikeal", + "email": "mikeal.rogers@gmail.com" + }, + { + "name": "nylen", + "email": "jnylen@gmail.com" + }, + { + "name": "simov", + "email": "simeonvelichkov@gmail.com" + } + ], + "name": "oauth-sign", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "url": "git+https://github.com/mikeal/oauth-sign.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "0.8.2" +} diff --git a/node_modules/fsevents/node_modules/object-assign/index.js b/node_modules/fsevents/node_modules/object-assign/index.js new file mode 100644 index 0000000..5085048 --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/index.js @@ -0,0 +1,83 @@ +'use strict'; +/* eslint-disable no-unused-vars */ +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or undefined'); + } + + return Object(val); +} + +function shouldUseNative() { + try { + if (!Object.assign) { + return false; + } + + // Detect buggy property enumeration order in older V8 versions. + + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; + } + + return true; + } catch (e) { + // We don't expect any of the above to throw, but better to be safe. + return false; + } +} + +module.exports = shouldUseNative() ? 
Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; + + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); + + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } + + if (Object.getOwnPropertySymbols) { + symbols = Object.getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } + } + } + } + + return to; +}; diff --git a/node_modules/fsevents/node_modules/object-assign/license b/node_modules/fsevents/node_modules/object-assign/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
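The object-assign ponyfill committed above either hands back the native `Object.assign` (when the V8 enumeration-order checks pass) or falls back to a manual loop that copies enumerable own string keys and, where `Object.getOwnPropertySymbols` exists, enumerable own symbol keys as well. A minimal usage sketch of that symbol behaviour (the names and values here are illustrative, and a runtime with `Symbol` support is assumed):

```js
var objectAssign = require('object-assign');

// Both string keys and enumerable own symbol keys are copied to the target.
var tag = Symbol('tag');          // illustrative symbol key
var source = { foo: 1 };
source[tag] = 'hidden';

var target = objectAssign({}, source);
console.log(target.foo);   //=> 1
console.log(target[tag]);  //=> 'hidden'
```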
diff --git a/node_modules/fsevents/node_modules/object-assign/package.json b/node_modules/fsevents/node_modules/object-assign/package.json new file mode 100644 index 0000000..1f2706b --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/package.json @@ -0,0 +1,106 @@ +{ + "_args": [ + [ + "object-assign@^4.1.0", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "object-assign@>=4.1.0 <5.0.0", + "_id": "object-assign@4.1.0", + "_inCache": true, + "_installable": true, + "_location": "/object-assign", + "_nodeVersion": "4.1.0", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/object-assign-4.1.0.tgz_1462212593641_0.3332549517508596" + }, + "_npmUser": { + "email": "ben@benalpert.com", + "name": "spicyj" + }, + "_npmVersion": "2.14.19", + "_phantomChildren": {}, + "_requested": { + "name": "object-assign", + "raw": "object-assign@^4.1.0", + "rawSpec": "^4.1.0", + "scope": null, + "spec": ">=4.1.0 <5.0.0", + "type": "range" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.0.tgz", + "_shasum": "7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0", + "_shrinkwrap": null, + "_spec": "object-assign@^4.1.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/object-assign/issues" + }, + "dependencies": {}, + "description": "ES2015 Object.assign() ponyfill", + "devDependencies": { + "lodash": "^4.8.2", + "matcha": "^0.7.0", + "mocha": "*", + "xo": "*" + }, + "directories": {}, + "dist": { + "shasum": "7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0", + "tarball": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.0.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "72fe21c86911758f3342fdf41c2a57860d5829bc", + "homepage": "https://github.com/sindresorhus/object-assign#readme", + "keywords": [ + "assign", + "browser", + "ecmascript", + "es2015", + "extend", + "harmony", + "object", + "polyfill", + "ponyfill", + "prollyfill", + "properties", + "shim" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + }, + { + "name": "spicyj", + "email": "ben@benalpert.com" + } + ], + "name": "object-assign", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/object-assign.git" + }, + "scripts": { + "bench": "matcha bench.js", + "test": "xo && mocha" + }, + "version": "4.1.0" +} diff --git a/node_modules/fsevents/node_modules/object-assign/readme.md b/node_modules/fsevents/node_modules/object-assign/readme.md new file mode 100644 index 0000000..13c0977 --- /dev/null +++ b/node_modules/fsevents/node_modules/object-assign/readme.md @@ -0,0 +1,56 @@ +# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign) + +> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) ponyfill + +> Ponyfill: A polyfill that doesn't overwrite the native method + + +## Install + +``` +$ npm install --save object-assign +``` + + +## Usage + +```js +const objectAssign = require('object-assign'); + +objectAssign({foo: 0}, {bar: 1}); +//=> {foo: 0, bar: 1} + +// multiple sources +objectAssign({foo: 0}, {bar: 
1}, {baz: 2}); +//=> {foo: 0, bar: 1, baz: 2} + +// overwrites equal keys +objectAssign({foo: 0}, {foo: 1}, {foo: 2}); +//=> {foo: 2} + +// ignores null and undefined sources +objectAssign({foo: 0}, null, {bar: 1}, undefined); +//=> {foo: 0, bar: 1} +``` + + +## API + +### objectAssign(target, source, [source, ...]) + +Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones. + + +## Resources + +- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign) + + +## Related + +- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/once/LICENSE b/node_modules/fsevents/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/once/README.md b/node_modules/fsevents/node_modules/once/README.md new file mode 100644 index 0000000..a2981ea --- /dev/null +++ b/node_modules/fsevents/node_modules/once/README.md @@ -0,0 +1,51 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. 
+ +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` diff --git a/node_modules/fsevents/node_modules/once/once.js b/node_modules/fsevents/node_modules/once/once.js new file mode 100644 index 0000000..2e1e721 --- /dev/null +++ b/node_modules/fsevents/node_modules/once/once.js @@ -0,0 +1,21 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} diff --git a/node_modules/fsevents/node_modules/once/package.json b/node_modules/fsevents/node_modules/once/package.json new file mode 100644 index 0000000..100aafb --- /dev/null +++ b/node_modules/fsevents/node_modules/once/package.json @@ -0,0 +1,90 @@ +{ + "_args": [ + [ + "once@^1.3.0", + "/Users/eshanker/Code/fsevents/node_modules/glob" + ] + ], + "_from": "once@>=1.3.0 <2.0.0", + "_id": "once@1.3.3", + "_inCache": true, + "_installable": true, + "_location": "/once", + "_nodeVersion": "4.0.0", + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "3.3.2", + "_phantomChildren": {}, + "_requested": { + "name": "once", + "raw": "once@^1.3.0", + "rawSpec": "^1.3.0", + "scope": null, + "spec": ">=1.3.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/glob", + "/inflight", + "/tar-pack" + ], + "_resolved": "https://registry.npmjs.org/once/-/once-1.3.3.tgz", + "_shasum": "b2e261557ce4c314ec8304f3fa82663e4297ca20", + "_shrinkwrap": null, + "_spec": "once@^1.3.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/glob", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. 
Schlueter", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/once/issues" + }, + "dependencies": { + "wrappy": "1" + }, + "description": "Run a function exactly one time", + "devDependencies": { + "tap": "^1.2.0" + }, + "directories": { + "test": "test" + }, + "dist": { + "shasum": "b2e261557ce4c314ec8304f3fa82663e4297ca20", + "tarball": "https://registry.npmjs.org/once/-/once-1.3.3.tgz" + }, + "files": [ + "once.js" + ], + "gitHead": "2ad558657e17fafd24803217ba854762842e4178", + "homepage": "https://github.com/isaacs/once#readme", + "keywords": [ + "function", + "once", + "one", + "single" + ], + "license": "ISC", + "main": "once.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "once", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "1.3.3" +} diff --git a/node_modules/fsevents/node_modules/path-is-absolute/index.js b/node_modules/fsevents/node_modules/path-is-absolute/index.js new file mode 100644 index 0000000..19f103f --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +}; + +function win32(path) { + // https://github.com/joyent/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = !!device && device.charAt(1) !== ':'; + + // UNC paths are always absolute + return !!result[2] || isUnc; +}; + +module.exports = process.platform === 'win32' ? win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/fsevents/node_modules/path-is-absolute/license b/node_modules/fsevents/node_modules/path-is-absolute/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/path-is-absolute/package.json b/node_modules/fsevents/node_modules/path-is-absolute/package.json new file mode 100644 index 0000000..2b89d64 --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/package.json @@ -0,0 +1,97 @@ +{ + "_args": [ + [ + "path-is-absolute@^1.0.0", + "/Users/eshanker/Code/fsevents/node_modules/glob" + ] + ], + "_from": "path-is-absolute@>=1.0.0 <2.0.0", + "_id": "path-is-absolute@1.0.0", + "_inCache": true, + "_installable": true, + "_location": "/path-is-absolute", + "_nodeVersion": "0.12.0", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.5.1", + "_phantomChildren": {}, + "_requested": { + "name": "path-is-absolute", + "raw": "path-is-absolute@^1.0.0", + "rawSpec": "^1.0.0", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/glob" + ], + "_resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz", + "_shasum": "263dada66ab3f2fb10bf7f9d24dd8f3e570ef912", + "_shrinkwrap": null, + "_spec": "path-is-absolute@^1.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/glob", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/path-is-absolute/issues" + }, + "dependencies": {}, + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "263dada66ab3f2fb10bf7f9d24dd8f3e570ef912", + "tarball": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.0.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "7a76a0c9f2263192beedbe0a820e4d0baee5b7a1", + "homepage": "https://github.com/sindresorhus/path-is-absolute", + "keywords": [ + "absolute", + "built-in", + "check", + "core", + "detect", + "dir", + "file", + "is", + "is-absolute", + "isabsolute", + "path", + "paths", + "polyfill", + "ponyfill", + "shim", + "util", + "utils" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "path-is-absolute", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/path-is-absolute.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "1.0.0" +} diff --git a/node_modules/fsevents/node_modules/path-is-absolute/readme.md b/node_modules/fsevents/node_modules/path-is-absolute/readme.md new file mode 100644 index 0000000..cdf94f4 --- /dev/null +++ b/node_modules/fsevents/node_modules/path-is-absolute/readme.md @@ -0,0 +1,51 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) ponyfill + +> Ponyfill: A polyfill that doesn't overwrite the native method + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +var pathIsAbsolute = require('path-is-absolute'); + +// Linux +pathIsAbsolute('/home/foo'); +//=> true + +// Windows +pathIsAbsolute('C:/Users/'); +//=> true + +// Any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). 
+ +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +The Posix specific version. + +### pathIsAbsolute.win32(path) + +The Windows specific version. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/pinkie-promise/index.js b/node_modules/fsevents/node_modules/pinkie-promise/index.js new file mode 100644 index 0000000..777377a --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie-promise/index.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = typeof Promise === 'function' ? Promise : require('pinkie'); diff --git a/node_modules/fsevents/node_modules/pinkie-promise/license b/node_modules/fsevents/node_modules/pinkie-promise/license new file mode 100644 index 0000000..1aeb74f --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie-promise/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
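pinkie-promise's one-line index.js above simply re-exports the global `Promise` when the runtime defines one, and only then falls back to requiring `pinkie`. A quick, purely illustrative way to see which constructor you were handed (assumes a Node runtime where `global.Promise` is meaningful):

```js
var Promise = require('pinkie-promise');

// On runtimes with a native Promise this logs true; on very old runtimes
// the module resolves to the pinkie polyfill via the fallback branch.
console.log(Promise === global.Promise);
```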
diff --git a/node_modules/fsevents/node_modules/pinkie-promise/package.json b/node_modules/fsevents/node_modules/pinkie-promise/package.json new file mode 100644 index 0000000..e20de2c --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie-promise/package.json @@ -0,0 +1,94 @@ +{ + "_args": [ + [ + "pinkie-promise@^2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/har-validator" + ] + ], + "_from": "pinkie-promise@>=2.0.0 <3.0.0", + "_id": "pinkie-promise@2.0.1", + "_inCache": true, + "_installable": true, + "_location": "/pinkie-promise", + "_nodeVersion": "4.4.1", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/pinkie-promise-2.0.1.tgz_1460309839126_0.3422858319245279" + }, + "_npmUser": { + "email": "floatdrop@gmail.com", + "name": "floatdrop" + }, + "_npmVersion": "2.14.20", + "_phantomChildren": {}, + "_requested": { + "name": "pinkie-promise", + "raw": "pinkie-promise@^2.0.0", + "rawSpec": "^2.0.0", + "scope": null, + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/har-validator" + ], + "_resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "_shasum": "2135d6dfa7a358c069ac9b178776288228450ffa", + "_shrinkwrap": null, + "_spec": "pinkie-promise@^2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/har-validator", + "author": { + "email": "floatdrop@gmail.com", + "name": "Vsevolod Strukchinsky", + "url": "github.com/floatdrop" + }, + "bugs": { + "url": "https://github.com/floatdrop/pinkie-promise/issues" + }, + "dependencies": { + "pinkie": "^2.0.0" + }, + "description": "ES2015 Promise ponyfill", + "devDependencies": { + "mocha": "*" + }, + "directories": {}, + "dist": { + "shasum": "2135d6dfa7a358c069ac9b178776288228450ffa", + "tarball": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "4a936c09c34ad591a25db93f1216d242de0d6184", + "homepage": "https://github.com/floatdrop/pinkie-promise", + "keywords": [ + "es2015", + "es6", + "polyfill", + "ponyfill", + "promise", + "promises" + ], + "license": "MIT", + "maintainers": [ + { + "name": "floatdrop", + "email": "floatdrop@gmail.com" + } + ], + "name": "pinkie-promise", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/floatdrop/pinkie-promise.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "2.0.1" +} diff --git a/node_modules/fsevents/node_modules/pinkie-promise/readme.md b/node_modules/fsevents/node_modules/pinkie-promise/readme.md new file mode 100644 index 0000000..78477f4 --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie-promise/readme.md @@ -0,0 +1,28 @@ +# pinkie-promise [![Build Status](https://travis-ci.org/floatdrop/pinkie-promise.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie-promise) + +> [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) ponyfill + +Module exports global Promise object (if available) or [`pinkie`](http://github.com/floatdrop/pinkie) Promise polyfill. 
+ +## Install + +``` +$ npm install --save pinkie-promise +``` + +## Usage + +```js +var Promise = require('pinkie-promise'); + +new Promise(function (resolve) { resolve('unicorns'); }); +//=> Promise { 'unicorns' } +``` + +## Related + +- [pify](https://github.com/sindresorhus/pify) - Promisify a callback-style function + +## License + +MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop) diff --git a/node_modules/fsevents/node_modules/pinkie/index.js b/node_modules/fsevents/node_modules/pinkie/index.js new file mode 100644 index 0000000..14ce1bf --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie/index.js @@ -0,0 +1,292 @@ +'use strict'; + +var PENDING = 'pending'; +var SETTLED = 'settled'; +var FULFILLED = 'fulfilled'; +var REJECTED = 'rejected'; +var NOOP = function () {}; +var isNode = typeof global !== 'undefined' && typeof global.process !== 'undefined' && typeof global.process.emit === 'function'; + +var asyncSetTimer = typeof setImmediate === 'undefined' ? setTimeout : setImmediate; +var asyncQueue = []; +var asyncTimer; + +function asyncFlush() { + // run promise callbacks + for (var i = 0; i < asyncQueue.length; i++) { + asyncQueue[i][0](asyncQueue[i][1]); + } + + // reset async asyncQueue + asyncQueue = []; + asyncTimer = false; +} + +function asyncCall(callback, arg) { + asyncQueue.push([callback, arg]); + + if (!asyncTimer) { + asyncTimer = true; + asyncSetTimer(asyncFlush, 0); + } +} + +function invokeResolver(resolver, promise) { + function resolvePromise(value) { + resolve(promise, value); + } + + function rejectPromise(reason) { + reject(promise, reason); + } + + try { + resolver(resolvePromise, rejectPromise); + } catch (e) { + rejectPromise(e); + } +} + +function invokeCallback(subscriber) { + var owner = subscriber.owner; + var settled = owner._state; + var value = owner._data; + var callback = subscriber[settled]; + var promise = subscriber.then; + + if (typeof callback === 'function') { + settled = FULFILLED; + try { + value = callback(value); + } catch (e) { + reject(promise, e); + } + } + + if (!handleThenable(promise, value)) { + if (settled === FULFILLED) { + resolve(promise, value); + } + + if (settled === REJECTED) { + reject(promise, value); + } + } +} + +function handleThenable(promise, value) { + var resolved; + + try { + if (promise === value) { + throw new TypeError('A promises callback cannot return that same promise.'); + } + + if (value && (typeof value === 'function' || typeof value === 'object')) { + // then should be retrieved only once + var then = value.then; + + if (typeof then === 'function') { + then.call(value, function (val) { + if (!resolved) { + resolved = true; + + if (value === val) { + fulfill(promise, val); + } else { + resolve(promise, val); + } + } + }, function (reason) { + if (!resolved) { + resolved = true; + + reject(promise, reason); + } + }); + + return true; + } + } + } catch (e) { + if (!resolved) { + reject(promise, e); + } + + return true; + } + + return false; +} + +function resolve(promise, value) { + if (promise === value || !handleThenable(promise, value)) { + fulfill(promise, value); + } +} + +function fulfill(promise, value) { + if (promise._state === PENDING) { + promise._state = SETTLED; + promise._data = value; + + asyncCall(publishFulfillment, promise); + } +} + +function reject(promise, reason) { + if (promise._state === PENDING) { + promise._state = SETTLED; + promise._data = reason; + + asyncCall(publishRejection, promise); + } +} + +function publish(promise) { + promise._then = 
promise._then.forEach(invokeCallback); +} + +function publishFulfillment(promise) { + promise._state = FULFILLED; + publish(promise); +} + +function publishRejection(promise) { + promise._state = REJECTED; + publish(promise); + if (!promise._handled && isNode) { + global.process.emit('unhandledRejection', promise._data, promise); + } +} + +function notifyRejectionHandled(promise) { + global.process.emit('rejectionHandled', promise); +} + +/** + * @class + */ +function Promise(resolver) { + if (typeof resolver !== 'function') { + throw new TypeError('Promise resolver ' + resolver + ' is not a function'); + } + + if (this instanceof Promise === false) { + throw new TypeError('Failed to construct \'Promise\': Please use the \'new\' operator, this object constructor cannot be called as a function.'); + } + + this._then = []; + + invokeResolver(resolver, this); +} + +Promise.prototype = { + constructor: Promise, + + _state: PENDING, + _then: null, + _data: undefined, + _handled: false, + + then: function (onFulfillment, onRejection) { + var subscriber = { + owner: this, + then: new this.constructor(NOOP), + fulfilled: onFulfillment, + rejected: onRejection + }; + + if ((onRejection || onFulfillment) && !this._handled) { + this._handled = true; + if (this._state === REJECTED && isNode) { + asyncCall(notifyRejectionHandled, this); + } + } + + if (this._state === FULFILLED || this._state === REJECTED) { + // already resolved, call callback async + asyncCall(invokeCallback, subscriber); + } else { + // subscribe + this._then.push(subscriber); + } + + return subscriber.then; + }, + + catch: function (onRejection) { + return this.then(null, onRejection); + } +}; + +Promise.all = function (promises) { + if (!Array.isArray(promises)) { + throw new TypeError('You must pass an array to Promise.all().'); + } + + return new Promise(function (resolve, reject) { + var results = []; + var remaining = 0; + + function resolver(index) { + remaining++; + return function (value) { + results[index] = value; + if (!--remaining) { + resolve(results); + } + }; + } + + for (var i = 0, promise; i < promises.length; i++) { + promise = promises[i]; + + if (promise && typeof promise.then === 'function') { + promise.then(resolver(i), reject); + } else { + results[i] = promise; + } + } + + if (!remaining) { + resolve(results); + } + }); +}; + +Promise.race = function (promises) { + if (!Array.isArray(promises)) { + throw new TypeError('You must pass an array to Promise.race().'); + } + + return new Promise(function (resolve, reject) { + for (var i = 0, promise; i < promises.length; i++) { + promise = promises[i]; + + if (promise && typeof promise.then === 'function') { + promise.then(resolve, reject); + } else { + resolve(promise); + } + } + }); +}; + +Promise.resolve = function (value) { + if (value && typeof value === 'object' && value.constructor === Promise) { + return value; + } + + return new Promise(function (resolve) { + resolve(value); + }); +}; + +Promise.reject = function (reason) { + return new Promise(function (resolve, reject) { + reject(reason); + }); +}; + +module.exports = Promise; diff --git a/node_modules/fsevents/node_modules/pinkie/license b/node_modules/fsevents/node_modules/pinkie/license new file mode 100644 index 0000000..1aeb74f --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and 
associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/pinkie/package.json b/node_modules/fsevents/node_modules/pinkie/package.json new file mode 100644 index 0000000..bd7b23e --- /dev/null +++ b/node_modules/fsevents/node_modules/pinkie/package.json @@ -0,0 +1,92 @@ +{ + "_args": [ + [ + "pinkie@^2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/pinkie-promise" + ] + ], + "_from": "pinkie@>=2.0.0 <3.0.0", + "_id": "pinkie@2.0.4", + "_inCache": true, + "_installable": true, + "_location": "/pinkie", + "_nodeVersion": "4.2.4", + "_npmUser": { + "email": "floatdrop@gmail.com", + "name": "floatdrop" + }, + "_npmVersion": "2.14.12", + "_phantomChildren": {}, + "_requested": { + "name": "pinkie", + "raw": "pinkie@^2.0.0", + "rawSpec": "^2.0.0", + "scope": null, + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/pinkie-promise" + ], + "_resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "_shasum": "72556b80cfa0d48a974e80e77248e80ed4f7f870", + "_shrinkwrap": null, + "_spec": "pinkie@^2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/pinkie-promise", + "author": { + "email": "floatdrop@gmail.com", + "name": "Vsevolod Strukchinsky", + "url": "github.com/floatdrop" + }, + "bugs": { + "url": "https://github.com/floatdrop/pinkie/issues" + }, + "dependencies": {}, + "description": "Itty bitty little widdle twinkie pinkie ES2015 Promise implementation", + "devDependencies": { + "core-assert": "^0.1.1", + "coveralls": "^2.11.4", + "mocha": "*", + "nyc": "^3.2.2", + "promises-aplus-tests": "*", + "xo": "^0.10.1" + }, + "directories": {}, + "dist": { + "shasum": "72556b80cfa0d48a974e80e77248e80ed4f7f870", + "tarball": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "8d4a92447a5c62bff9f89756caeb4c9c8770579b", + "homepage": "https://github.com/floatdrop/pinkie", + "keywords": [ + "es2015", + "es6", + "promise", + "promises" + ], + "license": "MIT", + "maintainers": [ + { + "name": "floatdrop", + "email": "floatdrop@gmail.com" + } + ], + "name": "pinkie", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/floatdrop/pinkie.git" + }, + "scripts": { + "coverage": "nyc report --reporter=text-lcov | coveralls", + "test": "xo && nyc mocha" + }, + "version": "2.0.4" +} diff --git a/node_modules/fsevents/node_modules/pinkie/readme.md b/node_modules/fsevents/node_modules/pinkie/readme.md new file mode 100644 index 0000000..1565f95 --- /dev/null +++ 
b/node_modules/fsevents/node_modules/pinkie/readme.md @@ -0,0 +1,83 @@ +# pinkie +
+ +> Itty bitty little widdle twinkie pinkie [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation + +[![Build Status](https://travis-ci.org/floatdrop/pinkie.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie) [![Coverage Status](https://coveralls.io/repos/floatdrop/pinkie/badge.svg?branch=master&service=github)](https://coveralls.io/github/floatdrop/pinkie?branch=master) + +There are [tons of Promise implementations](https://github.com/promises-aplus/promises-spec/blob/master/implementations.md#standalone) out there, but all of them focus on browser compatibility and are often bloated with functionality. + +This module is an exact Promise specification polyfill (like [native-promise-only](https://github.com/getify/native-promise-only)), but in Node.js land (it should be browserify-able though). + + +## Install + +``` +$ npm install --save pinkie +``` + + +## Usage + +```js +var fs = require('fs'); +var Promise = require('pinkie'); + +new Promise(function (resolve, reject) { + fs.readFile('foo.json', 'utf8', function (err, data) { + if (err) { + reject(err); + return; + } + + resolve(data); + }); +}); +//=> Promise +``` + + +### API + +`pinkie` exports bare [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation and polyfills [Node.js rejection events](https://nodejs.org/api/process.html#process_event_unhandledrejection). In case you forgot: + +#### new Promise(executor) + +Returns new instance of `Promise`. + +##### executor + +*Required* +Type: `function` + +Function with two arguments `resolve` and `reject`. The first argument fulfills the promise, the second argument rejects it. + +#### pinkie.all(promises) + +Returns a promise that resolves when all of the promises in the `promises` Array argument have resolved. + +#### pinkie.race(promises) + +Returns a promise that resolves or rejects as soon as one of the promises in the `promises` Array resolves or rejects, with the value or reason from that promise. + +#### pinkie.reject(reason) + +Returns a Promise object that is rejected with the given `reason`. + +#### pinkie.resolve(value) + +Returns a Promise object that is resolved with the given `value`. If the `value` is a thenable (i.e. has a then method), the returned promise will "follow" that thenable, adopting its eventual state; otherwise the returned promise will be fulfilled with the `value`. 
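Because the pinkie implementation in index.js emits the process-level rejection events itself, a pinkie promise rejected without a handler can be observed the same way as a native unhandled rejection. A small sketch (the error message is only an example):

```js
var Promise = require('pinkie');

process.on('unhandledRejection', function (reason) {
  // Fires asynchronously, after pinkie publishes the rejection.
  console.log('unhandled:', reason.message);
});

// No .then() or .catch() is attached, so pinkie emits 'unhandledRejection'.
Promise.reject(new Error('boom'));
```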
+ + +## Related + +- [pinkie-promise](https://github.com/floatdrop/pinkie-promise) - Returns the native Promise or this module + + +## License + +MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop) diff --git a/node_modules/fsevents/node_modules/process-nextick-args/.travis.yml b/node_modules/fsevents/node_modules/process-nextick-args/.travis.yml new file mode 100644 index 0000000..36201b1 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/.travis.yml @@ -0,0 +1,12 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.11" + - "0.12" + - "1.7.1" + - 1 + - 2 + - 3 + - 4 + - 5 diff --git a/node_modules/fsevents/node_modules/process-nextick-args/index.js b/node_modules/fsevents/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..a4f40f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/index.js @@ -0,0 +1,43 @@ +'use strict'; + +if (!process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = nextTick; +} else { + module.exports = process.nextTick; +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} diff --git a/node_modules/fsevents/node_modules/process-nextick-args/license.md b/node_modules/fsevents/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/fsevents/node_modules/process-nextick-args/package.json b/node_modules/fsevents/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..7b5313a --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/package.json @@ -0,0 +1,76 @@ +{ + "_args": [ + [ + "process-nextick-args@~1.0.6", + "/Users/eshanker/Code/fsevents/node_modules/readable-stream" + ] + ], + "_from": "process-nextick-args@>=1.0.6 <1.1.0", + "_id": "process-nextick-args@1.0.7", + "_inCache": true, + "_installable": true, + "_location": "/process-nextick-args", + "_nodeVersion": "5.11.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" + }, + "_npmUser": { + "email": "calvin.metcalf@gmail.com", + "name": "cwmma" + }, + "_npmVersion": "3.8.6", + "_phantomChildren": {}, + "_requested": { + "name": "process-nextick-args", + "raw": "process-nextick-args@~1.0.6", + "rawSpec": "~1.0.6", + "scope": null, + "spec": ">=1.0.6 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/bl/readable-stream", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", + "_shrinkwrap": null, + "_spec": "process-nextick-args@~1.0.6", + "_where": "/Users/eshanker/Code/fsevents/node_modules/readable-stream", + "author": "", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "dependencies": {}, + "description": "process.nextTick but always with args", + "devDependencies": { + "tap": "~0.2.6" + }, + "directories": {}, + "dist": { + "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", + "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + } + ], + "name": "process-nextick-args", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "1.0.7" +} diff --git a/node_modules/fsevents/node_modules/process-nextick-args/readme.md b/node_modules/fsevents/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..78e7cfa --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var nextTick = require('process-nextick-args'); + +nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/fsevents/node_modules/process-nextick-args/test.js 
b/node_modules/fsevents/node_modules/process-nextick-args/test.js new file mode 100644 index 0000000..ef15721 --- /dev/null +++ b/node_modules/fsevents/node_modules/process-nextick-args/test.js @@ -0,0 +1,24 @@ +var test = require("tap").test; +var nextTick = require('./'); + +test('should work', function (t) { + t.plan(5); + nextTick(function (a) { + t.ok(a); + nextTick(function (thing) { + t.equals(thing, 7); + }, 7); + }, true); + nextTick(function (a, b, c) { + t.equals(a, 'step'); + t.equals(b, 3); + t.equals(c, 'profit'); + }, 'step', 3, 'profit'); +}); + +test('correct number of arguments', function (t) { + t.plan(1); + nextTick(function () { + t.equals(2, arguments.length, 'correct number'); + }, 1, 2); +}); diff --git a/node_modules/fsevents/node_modules/qs/.eslintignore b/node_modules/fsevents/node_modules/qs/.eslintignore new file mode 100644 index 0000000..1521c8b --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/.eslintignore @@ -0,0 +1 @@ +dist diff --git a/node_modules/fsevents/node_modules/qs/.eslintrc b/node_modules/fsevents/node_modules/qs/.eslintrc new file mode 100644 index 0000000..1faac27 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/.eslintrc @@ -0,0 +1,19 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "complexity": [2, 22], + "consistent-return": [1], + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-params": [2, 9], + "max-statements": [2, 36], + "no-extra-parens": [1], + "no-continue": [1], + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + "operator-linebreak": 1 + } +} diff --git a/node_modules/fsevents/node_modules/qs/.jscs.json b/node_modules/fsevents/node_modules/qs/.jscs.json new file mode 100644 index 0000000..3d099c4 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + 
+ "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/fsevents/node_modules/qs/CHANGELOG.md b/node_modules/fsevents/node_modules/qs/CHANGELOG.md new file mode 100644 index 0000000..e318a05 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/CHANGELOG.md @@ -0,0 +1,120 @@ +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- 
Revert ES6 requirement and restore support for node down to v0.8. + +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. +- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". 
+ +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? + +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? + +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? 
+- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/fsevents/node_modules/qs/CONTRIBUTING.md b/node_modules/fsevents/node_modules/qs/CONTRIBUTING.md new file mode 100644 index 0000000..8928361 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/CONTRIBUTING.md @@ -0,0 +1 @@ +Please view our [hapijs contributing guide](https://github.com/hapijs/hapi/blob/master/CONTRIBUTING.md). diff --git a/node_modules/fsevents/node_modules/qs/LICENSE b/node_modules/fsevents/node_modules/qs/LICENSE new file mode 100644 index 0000000..d456948 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2014 Nathan LaFreniere and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + * * * + +The complete list of contributors can be found at: https://github.com/hapijs/qs/graphs/contributors diff --git a/node_modules/fsevents/node_modules/qs/dist/qs.js b/node_modules/fsevents/node_modules/qs/dist/qs.js new file mode 100644 index 0000000..4cc6f30 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/dist/qs.js @@ -0,0 +1,487 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || Utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? 
options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = Utils.merge(obj, newObj, options); + } + + return Utils.compact(obj); +}; + +},{"./utils":4}],3:[function(require,module,exports){ +'use strict'; + +var Utils = require('./utils'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var defaults = { + delimiter: '&', + strictNullHandling: false, + skipNulls: false, + encode: true, + encoder: Utils.encode +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = obj.toISOString(); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || Utils.isBuffer(obj)) { + if (encoder) { + return [encoder(prefix) + '=' + encoder(obj)]; + } + return [prefix + '=' + String(obj)]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } else { + values = values.concat(stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? (typeof options.encoder === 'function' ? 
options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + objKeys = filter = options.filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + + return keys.join(delimiter); +}; + +},{"./utils":4}],4:[function(require,module,exports){ +'use strict'; + +var hexTable = (function () { + var array = new Array(256); + for (var i = 0; i < 256; ++i) { + array[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . 
+ c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + obj[key] = exports.compact(obj[key], refs); + } + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/qs/lib/index.js b/node_modules/fsevents/node_modules/qs/lib/index.js new file mode 100755 index 0000000..1901959 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/lib/index.js @@ -0,0 +1,9 @@ +'use strict'; + +var Stringify = require('./stringify'); +var Parse = require('./parse'); + +module.exports = { + stringify: Stringify, + parse: Parse +}; diff --git a/node_modules/fsevents/node_modules/qs/lib/parse.js b/node_modules/fsevents/node_modules/qs/lib/parse.js new file mode 100755 index 0000000..bf70fd8 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/lib/parse.js @@ -0,0 +1,167 @@ +'use strict'; + +var Utils = require('./utils'); + +var defaults = { + delimiter: '&', + depth: 5, + arrayLimit: 20, + parameterLimit: 1000, + strictNullHandling: false, + plainObjects: false, + allowPrototypes: false, + allowDots: false, + decoder: Utils.decode +}; + +var parseValues = function parseValues(str, options) { + var obj = {}; + var parts = str.split(options.delimiter, options.parameterLimit === Infinity ? undefined : options.parameterLimit); + + for (var i = 0; i < parts.length; ++i) { + var part = parts[i]; + var pos = part.indexOf(']=') === -1 ? 
part.indexOf('=') : part.indexOf(']=') + 1; + + if (pos === -1) { + obj[options.decoder(part)] = ''; + + if (options.strictNullHandling) { + obj[options.decoder(part)] = null; + } + } else { + var key = options.decoder(part.slice(0, pos)); + var val = options.decoder(part.slice(pos + 1)); + + if (Object.prototype.hasOwnProperty.call(obj, key)) { + obj[key] = [].concat(obj[key]).concat(val); + } else { + obj[key] = val; + } + } + } + + return obj; +}; + +var parseObject = function parseObject(chain, val, options) { + if (!chain.length) { + return val; + } + + var root = chain.shift(); + + var obj; + if (root === '[]') { + obj = []; + obj = obj.concat(parseObject(chain, val, options)); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root[0] === '[' && root[root.length - 1] === ']' ? root.slice(1, root.length - 1) : root; + var index = parseInt(cleanRoot, 10); + if ( + !isNaN(index) && + root !== cleanRoot && + String(index) === cleanRoot && + index >= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || Utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? 
options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = Utils.merge(obj, newObj, options); + } + + return Utils.compact(obj); +}; diff --git a/node_modules/fsevents/node_modules/qs/lib/stringify.js b/node_modules/fsevents/node_modules/qs/lib/stringify.js new file mode 100755 index 0000000..6e1c9a2 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/lib/stringify.js @@ -0,0 +1,137 @@ +'use strict'; + +var Utils = require('./utils'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var defaults = { + delimiter: '&', + strictNullHandling: false, + skipNulls: false, + encode: true, + encoder: Utils.encode +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = obj.toISOString(); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || Utils.isBuffer(obj)) { + if (encoder) { + return [encoder(prefix) + '=' + encoder(obj)]; + } + return [prefix + '=' + String(obj)]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } else { + values = values.concat(stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? 
(typeof options.encoder === 'function' ? options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + objKeys = filter = options.filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + + return keys.join(delimiter); +}; diff --git a/node_modules/fsevents/node_modules/qs/lib/utils.js b/node_modules/fsevents/node_modules/qs/lib/utils.js new file mode 100755 index 0000000..2c5c8ee --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/lib/utils.js @@ -0,0 +1,164 @@ +'use strict'; + +var hexTable = (function () { + var array = new Array(256); + for (var i = 0; i < 256; ++i) { + array[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . 
+ c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + obj[key] = exports.compact(obj[key], refs); + } + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; diff --git a/node_modules/fsevents/node_modules/qs/package.json b/node_modules/fsevents/node_modules/qs/package.json new file mode 100644 index 0000000..75c14f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/package.json @@ -0,0 +1,111 @@ +{ + "_args": [ + [ + "qs@~6.2.0", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "qs@>=6.2.0 <6.3.0", + "_id": "qs@6.2.0", + "_inCache": true, + "_installable": true, + "_location": "/qs", + "_nodeVersion": "6.1.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/qs-6.2.0.tgz_1462749349998_0.03372702235355973" + }, + "_npmUser": { + "email": "ljharb@gmail.com", + "name": "ljharb" + }, + "_npmVersion": "3.8.6", + "_phantomChildren": {}, + "_requested": { + "name": "qs", + "raw": "qs@~6.2.0", + "rawSpec": "~6.2.0", + "scope": null, + "spec": ">=6.2.0 <6.3.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz", + "_shasum": "3b7848c03c2dece69a9522b0fae8c4126d745f3b", + "_shrinkwrap": null, + "_spec": "qs@~6.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "bugs": { + "url": "https://github.com/ljharb/qs/issues" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "dependencies": {}, + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "devDependencies": { + "@ljharb/eslint-config": "^4.0.0", + "browserify": "^13.0.1", + "covert": "^1.1.0", + "eslint": "^2.9.0", + "evalmd": "^0.0.17", + "iconv-lite": "^0.4.13", + "mkdirp": "^0.5.1", + "parallelshell": "^2.0.0", + "tape": 
"^4.5.1" + }, + "directories": {}, + "dist": { + "shasum": "3b7848c03c2dece69a9522b0fae8c4126d745f3b", + "tarball": "https://registry.npmjs.org/qs/-/qs-6.2.0.tgz" + }, + "engines": { + "node": ">=0.6" + }, + "gitHead": "d67d315b606c6bb809fedcbeebbbdb7f863852aa", + "homepage": "https://github.com/ljharb/qs", + "keywords": [ + "qs", + "querystring" + ], + "license": "BSD-3-Clause", + "main": "lib/index.js", + "maintainers": [ + { + "name": "hueniverse", + "email": "eran@hammer.io" + }, + { + "name": "ljharb", + "email": "ljharb@gmail.com" + }, + { + "name": "nlf", + "email": "quitlahok@gmail.com" + } + ], + "name": "qs", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/qs.git" + }, + "scripts": { + "coverage": "covert test", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js", + "lint": "eslint lib/*.js text/*.js", + "prepublish": "npm run dist", + "pretest": "parallelshell 'npm run --silent readme' 'npm run --silent lint'", + "readme": "evalmd README.md", + "test": "npm run --silent coverage", + "tests-only": "node test" + }, + "version": "6.2.0" +} diff --git a/node_modules/fsevents/node_modules/qs/test/index.js b/node_modules/fsevents/node_modules/qs/test/index.js new file mode 100644 index 0000000..b6a7d95 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/test/index.js @@ -0,0 +1,5 @@ +require('./parse'); + +require('./stringify'); + +require('./utils'); diff --git a/node_modules/fsevents/node_modules/qs/test/parse.js b/node_modules/fsevents/node_modules/qs/test/parse.js new file mode 100755 index 0000000..1b79daf --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/test/parse.js @@ -0,0 +1,423 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { '0': 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { 
d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[1]=c'), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to 20', function (st) { + st.deepEqual(qs.parse('a[20]=a'), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a'), { a: { '21': 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c'), { a: { '0': 'b', t: 'u', c: true } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y'), { a: { '0': 'b', '1': 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects 
(dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c&a=d'), { a: { b: 'c', d: true } }, 'can add keys to objects'); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { '2': 'b', '99999999': 'c' } }); + st.end(); + }); + + t.test('supports malformed uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { '_r': '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true }), { a: ['b', null, 'c', ''] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true }), { a: ['b', '', 'c', null] }); + st.deepEqual(qs.parse('a[]=&a[]=b&a[]=c'), { a: ['', 'b', 'c'] }); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2'), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1'), { a: [{ b: [{ c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1'), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1'), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = new Buffer('test'); + 
st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { '0': '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { '0': null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { qs.parse(str); }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', { parallel: false }, function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { '0': 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { '0': 'b', '1': 'c' } }); + st.end(); + }); + + t.test('allows disabling array parsing', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { parseArrays: false }), { a: { '0': 'b', '1': 'c' } }); + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object and not child values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { 'test': 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { 'test': 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var 
result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('parses plain objects correctly', function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }, { prototype: false }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }, { prototype: false }); + st.end(); + }); + + t.test('can return plain objects', function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a['0'] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /\%([0-9A-F]{2})/ig; + var result = []; + var parts; + var last = 0; + while (parts = reg.exec(str)) { + result.push(parseInt(parts[1], 16)); + last = parts.index + parts[0].length; + } + return iconv.decode(new Buffer(result), 'shift_jis').toString(); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st.throws(function () { + qs.parse({}, { + decoder: 'string' + }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); +}); diff --git a/node_modules/fsevents/node_modules/qs/test/stringify.js b/node_modules/fsevents/node_modules/qs/test/stringify.js new file mode 100755 index 0000000..699397e --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/test/stringify.js @@ -0,0 +1,305 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 
'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }), 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d'); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encode: false }), 'a.b[0]=c&a.b[1]=d'); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }), 'a%5B0%5D%5Bb%5D=c'); + st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }), 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1'); + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal(qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), 'a[0][b]=1&a[1]=2&a[2]=3'); + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false }), 'a[0].b=c'); + st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false }), 'a[0].b.c[0]=1'); + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', 
function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies an empty object', function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with an empty object as a child', function (st) { + var obj = { + a: Object.create(null) + }; + + obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
}), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: new Buffer('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: new Buffer('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('doesn\'t blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + st.equal(qs.stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2] }), 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3'); + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) { + calls++; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { return a.localeCompare(b); }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { return a.localeCompare(b); }; + st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', zia: 'zia'} }, b: 'b' }, { sort: sort, encode: false }), 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb'); + st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', zia: 'zia'} }, b: 'b' }, { sort: null, encode: false }), 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b'); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + 
st.equal(qs.stringify({ 県: '大阪府', '': ''}, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i=0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st.throws(function () { + qs.stringify({}, { + encoder: 'string' + }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { + skip: typeof Buffer === 'undefined' + }, function (st) { + st.equal(qs.stringify({ a: new Buffer([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + st.end(); + }); +}); diff --git a/node_modules/fsevents/node_modules/qs/test/utils.js b/node_modules/fsevents/node_modules/qs/test/utils.js new file mode 100755 index 0000000..4a8d824 --- /dev/null +++ b/node_modules/fsevents/node_modules/qs/test/utils.js @@ -0,0 +1,9 @@ +'use strict'; + +var test = require('tape'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/.npmignore b/node_modules/fsevents/node_modules/rc/.npmignore new file mode 100644 index 0000000..13abef4 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/.npmignore @@ -0,0 +1,3 @@ +node_modules +node_modules/* +npm_debug.log diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 b/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 new file mode 100644 index 0000000..6366c04 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.BSD b/node_modules/fsevents/node_modules/rc/LICENSE.BSD new file mode 100644 index 0000000..96bb796 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.BSD @@ -0,0 +1,26 @@ +Copyright (c) 2013, Dominic Tarr +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/node_modules/fsevents/node_modules/rc/LICENSE.MIT b/node_modules/fsevents/node_modules/rc/LICENSE.MIT new file mode 100644 index 0000000..6eafbd7 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/rc/README.md b/node_modules/fsevents/node_modules/rc/README.md new file mode 100644 index 0000000..65a5f06 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/README.md @@ -0,0 +1,149 @@ +# rc + +The non-configurable configuration loader for lazy people. + +## Usage + +The only option is to pass rc the name of your app, and your default configuration. + +```javascript +var conf = require('rc')(appname, { + //defaults go here. + port: 2468, + + //defaults which are objects will be merged, not replaced + views: { + engine: 'jade' + } +}); +``` + +`rc` will return your configuration options merged with the defaults you specify. 
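For illustration, a minimal sketch of the merge behaviour just described, under assumed names (an app called `myapp`, a `~/.myapprc` file, and a `--views.engine` flag, none of which come from this package):

```javascript
// Hypothetical sketch: rc layers argv over env over config files over defaults.
// Assumed setup: app name "myapp", a ~/.myapprc containing { "port": 9000 },
// and an invocation like:  node app.js --views.engine=pug
var conf = require('rc')('myapp', {
  port: 2468,                  // overridden by ~/.myapprc
  views: { engine: 'jade' }    // engine overridden by the command-line flag
});

// conf.port         === 9000  (from the config file)
// conf.views.engine === 'pug' (from argv, parsed by minimist)
// conf.configs      lists whichever config files were actually found
```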
+If you pass in a predefined defaults object, it will be mutated: + +```javascript +var conf = {}; +require('rc')(appname, conf); +``` + +If `rc` finds any config files for your app, the returned config object will have +a `configs` array containing their paths: + +```javascript +var appCfg = require('rc')(appname, conf); +appCfg.configs[0] // /etc/appnamerc +appCfg.configs[1] // /home/dominictarr/.config/appname +appCfg.config // same as appCfg.configs[appCfg.configs.length - 1] +``` + +## Standards + +Given your application name (`appname`), rc will look in all the obvious places for configuration. + + * command line arguments (parsed by minimist) + * environment variables prefixed with `${appname}_` + * or use "\_\_" to indicate nested properties
_(e.g. `appname_foo__bar__baz` => `foo.bar.baz`)_ + * if you passed an option `--config file` then from that file + * a local `.${appname}rc` or the first found looking in `./ ../ ../../ ../../../` etc. + * `$HOME/.${appname}rc` + * `$HOME/.${appname}/config` + * `$HOME/.config/${appname}` + * `$HOME/.config/${appname}/config` + * `/etc/${appname}rc` + * `/etc/${appname}/config` + * the defaults object you passed in. + +All configuration sources that were found will be flattened into one object, +so that sources **earlier** in this list override later ones. + + +## Configuration File Formats + +Configuration files (e.g. `.appnamerc`) may be in either [json](http://json.org/example) or [ini](http://en.wikipedia.org/wiki/INI_file) format. The example configurations below are equivalent: + + +#### Formatted as `ini` + +``` +; You can include comments in `ini` format if you want. + +dependsOn=0.10.0 + + +; `rc` has built-in support for ini sections, see? + +[commands] + www = ./commands/www + console = ./commands/repl + + +; You can even do nested sections + +[generators.options] + engine = ejs + +[generators.modules] + new = generate-new + engine = generate-backend + +``` + +#### Formatted as `json` + +```javascript +{ + // You can even comment your JSON, if you want + "dependsOn": "0.10.0", + "commands": { + "www": "./commands/www", + "console": "./commands/repl" + }, + "generators": { + "options": { + "engine": "ejs" + }, + "modules": { + "new": "generate-new", + "backend": "generate-backend" + } + } +} +``` + +Comments are stripped from JSON config via [strip-json-comments](https://github.com/sindresorhus/strip-json-comments). + +> Since ini, and env variables do not have a standard for types, your application needs be prepared for strings. + + + +## Advanced Usage + +#### Pass in your own `argv` + +You may pass in your own `argv` as the third argument to `rc`. This is in case you want to [use your own command-line opts parser](https://github.com/dominictarr/rc/pull/12). + +```javascript +require('rc')(appname, defaults, customArgvParser); +``` + +## Pass in your own parser + +If you have a special need to use a non-standard parser, +you can do so by passing in the parser as the 4th argument. +(leave the 3rd as null to get the default args parser) + +```javascript +require('rc')(appname, defaults, null, parser); +``` + +This may also be used to force a more strict format, +such as strict, valid JSON only. + +## Note on Performance + +`rc` is running `fs.statSync`-- so make sure you don't use it in a hot code path (e.g. a request handler) + + +## License + +Multi-licensed under the two-clause BSD License, MIT License, or Apache License, version 2.0 diff --git a/node_modules/fsevents/node_modules/rc/browser.js b/node_modules/fsevents/node_modules/rc/browser.js new file mode 100644 index 0000000..8c230c5 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/browser.js @@ -0,0 +1,7 @@ + +// when this is loaded into the browser, +// just use the defaults... + +module.exports = function (name, defaults) { + return defaults +} diff --git a/node_modules/fsevents/node_modules/rc/index.js b/node_modules/fsevents/node_modules/rc/index.js new file mode 100755 index 0000000..6f8f113 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/index.js @@ -0,0 +1,60 @@ +#! /usr/bin/env node +var cc = require('./lib/utils') +var join = require('path').join +var deepExtend = require('deep-extend') +var etc = '/etc' +var win = process.platform === "win32" +var home = win + ? 
process.env.USERPROFILE + : process.env.HOME + +module.exports = function (name, defaults, argv, parse) { + if('string' !== typeof name) + throw new Error('rc(name): name *must* be string') + if(!argv) + argv = require('minimist')(process.argv.slice(2)) + defaults = ( + 'string' === typeof defaults + ? cc.json(defaults) : defaults + ) || {} + + parse = parse || cc.parse + + var env = cc.env(name + '_') + + var configs = [defaults] + var configFiles = [] + function addConfigFile (file) { + if (configFiles.indexOf(file) >= 0) return + var fileConfig = cc.file(file) + if (fileConfig) { + configs.push(parse(fileConfig)) + configFiles.push(file) + } + } + + // which files do we look at? + if (!win) + [join(etc, name, 'config'), + join(etc, name + 'rc')].forEach(addConfigFile) + if (home) + [join(home, '.config', name, 'config'), + join(home, '.config', name), + join(home, '.' + name, 'config'), + join(home, '.' + name + 'rc')].forEach(addConfigFile) + addConfigFile(cc.find('.'+name+'rc')) + if (env.config) addConfigFile(env.config) + if (argv.config) addConfigFile(argv.config) + + return deepExtend.apply(null, configs.concat([ + env, + argv, + configFiles.length ? {configs: configFiles, config: configFiles[configFiles.length - 1]} : undefined, + ])) +} + +if(!module.parent) { + console.log( + JSON.stringify(module.exports(process.argv[2]), false, 2) + ) +} diff --git a/node_modules/fsevents/node_modules/rc/lib/utils.js b/node_modules/fsevents/node_modules/rc/lib/utils.js new file mode 100644 index 0000000..ae6dec0 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/lib/utils.js @@ -0,0 +1,103 @@ +'use strict'; +var fs = require('fs') +var ini = require('ini') +var path = require('path') +var stripJsonComments = require('strip-json-comments') + +var parse = exports.parse = function (content) { + + //if it ends in .json or starts with { then it must be json. + //must be done this way, because ini accepts everything. + //can't just try and parse it and let it throw if it's not ini. + //everything is ini. even json with a syntax error. + + if(/^\s*{/.test(content)) + return JSON.parse(stripJsonComments(content)) + return ini.parse(content) + +} + +var file = exports.file = function () { + var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) + + //path.join breaks if it's a not a string, so just skip this. + for(var i in args) + if('string' !== typeof args[i]) + return + + var file = path.join.apply(null, args) + var content + try { + return fs.readFileSync(file,'utf-8') + } catch (err) { + return + } +} + +var json = exports.json = function () { + var content = file.apply(null, arguments) + return content ? 
parse(content) : null +} + +var env = exports.env = function (prefix, env) { + env = env || process.env + var obj = {} + var l = prefix.length + for(var k in env) { + if((k.indexOf(prefix)) === 0) { + + var keypath = k.substring(l).split('__') + + // Trim empty strings from keypath array + var _emptyStringIndex + while ((_emptyStringIndex=keypath.indexOf('')) > -1) { + keypath.splice(_emptyStringIndex, 1) + } + + var cursor = obj + keypath.forEach(function _buildSubObj(_subkey,i){ + + // (check for _subkey first so we ignore empty strings) + // (check for cursor to avoid assignment to primitive objects) + if (!_subkey || typeof cursor !== 'object') + return + + // If this is the last key, just stuff the value in there + // Assigns actual value from env variable to final key + // (unless it's just an empty string- in that case use the last valid key) + if (i === keypath.length-1) + cursor[_subkey] = env[k] + + + // Build sub-object if nothing already exists at the keypath + if (cursor[_subkey] === undefined) + cursor[_subkey] = {} + + // Increment cursor used to track the object at the current depth + cursor = cursor[_subkey] + + }) + + } + + } + + return obj +} + +var find = exports.find = function () { + var rel = path.join.apply(null, [].slice.call(arguments)) + + function find(start, rel) { + var file = path.join(start, rel) + try { + fs.statSync(file) + return file + } catch (err) { + if(path.dirname(start) !== start) // root + return find(path.dirname(start), rel) + } + } + return find(process.cwd(), rel) +} + diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml b/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml new file mode 100644 index 0000000..74c57bf --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE b/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
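A small hedged sketch of the `__` environment-variable nesting implemented by the `env()` helper in rc's `lib/utils.js` above; the `myapp` prefix and variable names are invented for the example:

```javascript
// Hypothetical sketch of rc's env-variable nesting (prefix and names assumed).
var cc = require('rc/lib/utils');

process.env.myapp_port = '8080';
process.env.myapp_views__engine = 'ejs';

var fromEnv = cc.env('myapp_');
// fromEnv => { port: '8080', views: { engine: 'ejs' } }
// Values remain strings: neither ini files nor env variables carry type
// information, so the application has to coerce them itself.
```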
diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js new file mode 100644 index 0000000..abff3e8 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js new file mode 100644 index 0000000..6a0559d --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/index.js @@ -0,0 +1,236 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {}, unknownFn: null }; + + if (typeof opts['unknown'] === 'function') { + flags.unknownFn = opts['unknown']; + } + + if (typeof opts['boolean'] === 'boolean' && opts['boolean']) { + flags.allBools = true; + } else { + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + flags.strings[aliases[key]] = true; + } + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function argDefined(key, arg) { + return (flags.allBools && /^--[^=]+$/.test(arg)) || + flags.strings[key] || flags.bools[key] || aliases[key]; + } + + function setArg (key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) return; + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } + } + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. 
See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + var key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, next, arg); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next, arg) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) { + setArg(letters[j], next.split('=')[1], arg); + broken = true; + break; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2), arg); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, args[i+1], arg); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } + else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? 
arg : Number(arg) + ); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + if (opts['--']) { + argv['--'] = new Array(); + notFlags.forEach(function(key) { + argv['--'].push(key); + }); + } + else { + notFlags.forEach(function(key) { + argv._.push(key); + }); + } + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json b/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json new file mode 100644 index 0000000..b327d28 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/package.json @@ -0,0 +1,97 @@ +{ + "_args": [ + [ + "minimist@^1.2.0", + "/Users/eshanker/Code/fsevents/node_modules/rc" + ] + ], + "_from": "minimist@>=1.2.0 <2.0.0", + "_id": "minimist@1.2.0", + "_inCache": true, + "_installable": true, + "_location": "/rc/minimist", + "_nodeVersion": "2.4.0", + "_npmUser": { + "email": "substack@gmail.com", + "name": "substack" + }, + "_npmVersion": "3.2.2", + "_phantomChildren": {}, + "_requested": { + "name": "minimist", + "raw": "minimist@^1.2.0", + "rawSpec": "^1.2.0", + "scope": null, + "spec": ">=1.2.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/rc" + ], + "_resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "_shasum": "a35008b20f41383eec1fb914f4cd5df79a264284", + "_shrinkwrap": null, + "_spec": "minimist@^1.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/rc", + "author": { + "email": "mail@substack.net", + "name": "James Halliday", + "url": "http://substack.net" + }, + "bugs": { + "url": "https://github.com/substack/minimist/issues" + }, + "dependencies": {}, + "description": "parse argument options", + "devDependencies": { + "covert": "^1.0.0", + "tap": "~0.4.0", + "tape": "^3.5.0" + }, + "directories": {}, + "dist": { + "shasum": "a35008b20f41383eec1fb914f4cd5df79a264284", + "tarball": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz" + }, + "gitHead": "dc624482fcfec5bc669c68cdb861f00573ed4e64", + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "optimist", + "parser" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "substack", + "email": "mail@substack.net" + } + ], + "name": "minimist", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "scripts": { + "coverage": "covert test/*.js", + "test": "tap test/*.js" + }, + "testling": { + "browsers": [ + "chrome/10", + "chrome/latest", + "ff/5", + "firefox/latest", + "ie/6..latest", + "opera/12", + "safari/5.1", + "safari/latest" + ], + "files": "test/*.js" + }, + "version": "1.2.0" +} diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown b/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown new file 
mode 100644 index 0000000..30a74cf --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/readme.markdown @@ -0,0 +1,91 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. + +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. if `true` will treat all double hyphenated arguments without equal signs +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. + +``` +> require('./')('one two three -- four five --six'.split(' '), { '--': true }) +{ _: [ 'one', 'two', 'three' ], + '--': [ 'four', 'five', '--six' ] } +``` + +Note that with `opts['--']` set, parsing for arguments still stops after the +`--`. 
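To make the options list above concrete, a short hedged sketch combining `alias`, `boolean`, `default`, and `opts['--']`; the flag names are invented for illustration:

```javascript
// Hypothetical sketch exercising several minimist options at once.
var parseArgs = require('minimist');

var argv = parseArgs(['-x', '3', '--no-debug', 'input.txt', '--', '--raw'], {
  alias: { x: 'times' },
  boolean: ['debug'],
  default: { times: 1 },
  '--': true
});

// argv.x === 3, argv.times === 3   (alias applied, numeric-looking value coerced)
// argv.debug === false             (--no-debug)
// argv._ === ['input.txt']         (bare argument)
// argv['--'] === ['--raw']         (everything after the --)
```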
+ +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js new file mode 100644 index 0000000..ac83548 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/all_bool.js @@ -0,0 +1,32 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js new file mode 100644 index 0000000..14b0717 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/bool.js @@ -0,0 +1,166 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var alt = [ '--harp', 'derp' ]; + var opts = { + alias: { 'h': ['herp', 'harp'] }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + 
herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, false); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js new file mode 100644 index 0000000..5a4fa5b --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dash.js @@ -0,0 +1,31 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); + +test('move arguments after the -- into their own `--` array', function(t) { + t.plan(1); + t.deepEqual( + parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }), + { name: 'John', _: [ 'before' ], '--': [ 'after' ] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js new file mode 100644 index 0000000..780a311 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/default_bool.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, 
null); + var argv = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, true); + t.end(); + +}) diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js new file mode 100644 index 0000000..d8b3e85 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/dotted.js @@ -0,0 +1,22 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', {default: {'a.b': 11}}); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js new file mode 100644 index 0000000..f813b30 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/kv_short.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-b=123' ]); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-a=whatever', '-b=robots' ]); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js new file mode 100644 index 0000000..5d3a1e0 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js new file mode 100644 index 0000000..2cc77f4 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/num.js @@ -0,0 +1,36 @@ +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + 
t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse([ '-x', 1234, 789 ]); + t.deepEqual(argv, { x : 1234, _ : [ 789 ] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js new file mode 100644 index 0000000..7b4a2a1 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse.js @@ -0,0 +1,197 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('string and alias', function(t) { + var x = parse([ '--str', '000123' ], { + string: 's', + alias: { s: 'str' } + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse([ '-s', '000123' ], { + string: 'str', + alias: { str: 's' } + }); + + t.equal(y.str, '000123'); + 
t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js new file mode 100644 index 0000000..ab620dc --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js new file mode 100644 index 0000000..d513a1c --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js 
b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js new file mode 100644 index 0000000..bdf9fbc --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/stop_early.js @@ -0,0 +1,15 @@ +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'] + }); + + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js new file mode 100644 index 0000000..462a36b --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/unknown.js @@ -0,0 +1,102 @@ +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'true', '--derp', 'true' ]; + var regular = [ '--herp', 'true', '-d', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [] + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello', '--derp', 'goodbye' ]; + var regular = [ '--herp', 'hello', '-d', 'moon' ]; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello' ]; + var regular = [ '--herp', 'hello' ]; + var opts = { + default: { 'h': 'bar' }, + alias: { 'h': 'herp' }, + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '--bad', '--', 'good', 'arg' ]; + var opts = { + '--': true, + unknown: unknownFn + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + '_': [] + }) + t.end(); +}); diff --git a/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js new file mode 100644 index 0000000..8a52a58 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = require('tape'); + 
+test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/fsevents/node_modules/rc/package.json b/node_modules/fsevents/node_modules/rc/package.json new file mode 100644 index 0000000..2399bf3 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/package.json @@ -0,0 +1,88 @@ +{ + "_args": [ + [ + "rc@~1.1.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "rc@>=1.1.0 <1.2.0", + "_id": "rc@1.1.6", + "_inCache": true, + "_installable": true, + "_location": "/rc", + "_nodeVersion": "4.2.3", + "_npmUser": { + "email": "dominic.tarr@gmail.com", + "name": "dominictarr" + }, + "_npmVersion": "3.5.1", + "_phantomChildren": {}, + "_requested": { + "name": "rc", + "raw": "rc@~1.1.0", + "rawSpec": "~1.1.0", + "scope": null, + "spec": ">=1.1.0 <1.2.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/rc/-/rc-1.1.6.tgz", + "_shasum": "43651b76b6ae53b5c802f1151fa3fc3b059969c9", + "_shrinkwrap": null, + "_spec": "rc@~1.1.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "dominic.tarr@gmail.com", + "name": "Dominic Tarr", + "url": "dominictarr.com" + }, + "bin": { + "rc": "./index.js" + }, + "browserify": "browser.js", + "bugs": { + "url": "https://github.com/dominictarr/rc/issues" + }, + "dependencies": { + "deep-extend": "~0.4.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~1.0.4" + }, + "description": "hardwired configuration loader", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "43651b76b6ae53b5c802f1151fa3fc3b059969c9", + "tarball": "https://registry.npmjs.org/rc/-/rc-1.1.6.tgz" + }, + "gitHead": "132062de0e61881a025cc4784d9a2798409c2bf1", + "homepage": "https://github.com/dominictarr/rc#readme", + "keywords": [ + "config", + "defaults", + "rc", + "unix" + ], + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "main": "index.js", + "maintainers": [ + { + "name": "dominictarr", + "email": "dominic.tarr@gmail.com" + } + ], + "name": "rc", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/dominictarr/rc.git" + }, + "scripts": { + "test": "set -e; node test/test.js; node test/ini.js; node test/nested-env-vars.js" + }, + "version": "1.1.6" +} diff --git a/node_modules/fsevents/node_modules/rc/test/ini.js b/node_modules/fsevents/node_modules/rc/test/ini.js new file mode 100644 index 0000000..e6857f8 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/ini.js @@ -0,0 +1,16 @@ +var cc =require('../lib/utils') +var INI = require('ini') +var assert = require('assert') + +function test(obj) { + + var _json, _ini + var json = cc.parse (_json = JSON.stringify(obj)) + var ini = cc.parse (_ini = INI.stringify(obj)) + console.log(_ini, _json) + assert.deepEqual(json, ini) +} + + +test({hello: true}) + diff --git a/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js b/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js new file mode 100644 index 0000000..f576fb1 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/nested-env-vars.js @@ -0,0 +1,40 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + + +// Basic usage +process.env[n+'_someOpt__a'] = 42 +process.env[n+'_someOpt__x__'] = 99 +process.env[n+'_someOpt__a__b'] = 186 +process.env[n+'_someOpt__a__b__c'] = 243 
+process.env[n+'_someOpt__x__y'] = 1862 +process.env[n+'_someOpt__z'] = 186577 + +// Should ignore empty strings from orphaned '__' +process.env[n+'_someOpt__z__x__'] = 18629 +process.env[n+'_someOpt__w__w__'] = 18629 + +// Leading '__' should ignore everything up to 'z' +process.env[n+'___z__i__'] = 9999 + +var config = require('../')(n, { + option: true +}) + +console.log('\n\n------ nested-env-vars ------\n',config) + +assert.equal(config.option, true) +assert.equal(config.someOpt.a, 42) +assert.equal(config.someOpt.x, 99) +// Should not override `a` once it's been set +assert.equal(config.someOpt.a/*.b*/, 42) +// Should not override `x` once it's been set +assert.equal(config.someOpt.x/*.y*/, 99) +assert.equal(config.someOpt.z, 186577) +// Should not override `z` once it's been set +assert.equal(config.someOpt.z/*.x*/, 186577) +assert.equal(config.someOpt.w.w, 18629) +assert.equal(config.z.i, 9999) + + diff --git a/node_modules/fsevents/node_modules/rc/test/test.js b/node_modules/fsevents/node_modules/rc/test/test.js new file mode 100644 index 0000000..4f63351 --- /dev/null +++ b/node_modules/fsevents/node_modules/rc/test/test.js @@ -0,0 +1,59 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + +process.env[n+'_envOption'] = 42 + +var config = require('../')(n, { + option: true +}) + +console.log(config) + +assert.equal(config.option, true) +assert.equal(config.envOption, 42) + +var customArgv = require('../')(n, { + option: true +}, { // nopt-like argv + option: false, + envOption: 24, + argv: { + remain: [], + cooked: ['--no-option', '--envOption', '24'], + original: ['--no-option', '--envOption=24'] + } +}) + +console.log(customArgv) + +assert.equal(customArgv.option, false) +assert.equal(customArgv.envOption, 24) + +var fs = require('fs') +var path = require('path') +var jsonrc = path.resolve('.' 
+ n + 'rc'); + +fs.writeFileSync(jsonrc, [ + '{', + '// json overrides default', + '"option": false,', + '/* env overrides json */', + '"envOption": 24', + '}' +].join('\n')); + +var commentedJSON = require('../')(n, { + option: true +}) + +fs.unlinkSync(jsonrc); + +console.log(commentedJSON) + +assert.equal(commentedJSON.option, false) +assert.equal(commentedJSON.envOption, 42) + +assert.equal(commentedJSON.config, jsonrc) +assert.equal(commentedJSON.configs.length, 1) +assert.equal(commentedJSON.configs[0], jsonrc) diff --git a/node_modules/fsevents/node_modules/readable-stream/.npmignore b/node_modules/fsevents/node_modules/readable-stream/.npmignore new file mode 100644 index 0000000..265ff73 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/.npmignore @@ -0,0 +1,8 @@ +build/ +test/ +examples/ +fs.js +zlib.js +.zuul.yml +.nyc_output +coverage diff --git a/node_modules/fsevents/node_modules/readable-stream/.travis.yml b/node_modules/fsevents/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..ae0156a --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/.travis.yml @@ -0,0 +1,54 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - npm install -g npm +notifications: + email: false +matrix: + fast_finish: true + allow_failures: + - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" + - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" + include: + - node_js: '0.8' + env: TASK=test + - node_js: '0.10' + env: TASK=test + - node_js: '0.11' + env: TASK=test + - node_js: '0.12' + env: TASK=test + - node_js: 1 + env: TASK=test + - node_js: 2 + env: TASK=test + - node_js: 3 + env: TASK=test + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 5 + env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/fsevents/node_modules/readable-stream/LICENSE b/node_modules/fsevents/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/readable-stream/README.md b/node_modules/fsevents/node_modules/readable-stream/README.md new file mode 100644 index 0000000..2233732 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/README.md @@ -0,0 +1,36 @@ +# readable-stream + +***Node-core v6.1.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core, including [documentation](doc/stream.md). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. 
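As a minimal illustrative sketch of the swap the README describes (only the `require` target changes; the stream built here is hypothetical and just for demonstration):

```js
// Instead of require('stream').Readable, depend on the userland mirror
// so the streams implementation is pinned by the package's semver.
const Readable = require('readable-stream').Readable;

const r = new Readable({
  read() {
    this.push('hello from readable-stream\n'); // push one chunk...
    this.push(null);                           // ...then signal end-of-stream
  }
});

r.pipe(process.stdout);
```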
+ +# Streams WG Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/node_modules/fsevents/node_modules/readable-stream/doc/stream.md b/node_modules/fsevents/node_modules/readable-stream/doc/stream.md new file mode 100644 index 0000000..c907ca0 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/doc/stream.md @@ -0,0 +1,1772 @@ +# Stream + + Stability: 2 - Stable + +A stream is an abstract interface implemented by various objects in +Node.js. For example a [request to an HTTP server][http-incoming-message] is a +stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All +streams are instances of [`EventEmitter`][]. + +You can load the Stream base classes by doing `require('stream')`. +There are base classes provided for [Readable][] streams, [Writable][] +streams, [Duplex][] streams, and [Transform][] streams. + +This document is split up into 3 sections: + +1. The first section explains the parts of the API that you need to be + aware of to use streams in your programs. +2. The second section explains the parts of the API that you need to + use if you implement your own custom streams yourself. The API is designed to + make this easy for you to do. +3. The third section goes into more depth about how streams work, + including some of the internal mechanisms and functions that you + should probably not modify unless you definitely know what you are + doing. + + +## API for Stream Consumers + + + +Streams can be either [Readable][], [Writable][], or both ([Duplex][]). + +All streams are EventEmitters, but they also have other custom methods +and properties depending on whether they are Readable, Writable, or +Duplex. + +If a stream is both Readable and Writable, then it implements all of +the methods and events. So, a [Duplex][] or [Transform][] stream is +fully described by this API, though their implementation may be +somewhat different. + +It is not necessary to implement Stream interfaces in order to consume +streams in your programs. If you **are** implementing streaming +interfaces in your own program, please also refer to +[API for Stream Implementors][]. + +Almost all Node.js programs, no matter how simple, use Streams in some +way. 
Here is an example of using Streams in an Node.js program: + +```js +const http = require('http'); + +var server = http.createServer( (req, res) => { + // req is an http.IncomingMessage, which is a Readable Stream + // res is an http.ServerResponse, which is a Writable Stream + + var body = ''; + // we want to get the data as utf8 strings + // If you don't set an encoding, then you'll get Buffer objects + req.setEncoding('utf8'); + + // Readable streams emit 'data' events once a listener is added + req.on('data', (chunk) => { + body += chunk; + }); + + // the end event tells you that you have entire body + req.on('end', () => { + try { + var data = JSON.parse(body); + } catch (er) { + // uh oh! bad json! + res.statusCode = 400; + return res.end(`error: ${er.message}`); + } + + // write back something interesting to the user: + res.write(typeof data); + res.end(); + }); +}); + +server.listen(1337); + +// $ curl localhost:1337 -d '{}' +// object +// $ curl localhost:1337 -d '"foo"' +// string +// $ curl localhost:1337 -d 'not json' +// error: Unexpected token o +``` + +### Class: stream.Duplex + +Duplex streams are streams that implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Duplex streams include: + +* [TCP sockets][] +* [zlib streams][zlib] +* [crypto streams][crypto] + +### Class: stream.Readable + + + +The Readable stream interface is the abstraction for a *source* of +data that you are reading from. In other words, data comes *out* of a +Readable stream. + +A Readable stream will not start emitting data until you indicate that +you are ready to receive it. + +Readable streams have two "modes": a **flowing mode** and a **paused +mode**. When in flowing mode, data is read from the underlying system +and provided to your program as fast as possible. In paused mode, you +must explicitly call [`stream.read()`][stream-read] to get chunks of data out. +Streams start out in paused mode. + +**Note**: If no data event handlers are attached, and there are no +[`stream.pipe()`][] destinations, and the stream is switched into flowing +mode, then data will be lost. + +You can switch to flowing mode by doing any of the following: + +* Adding a [`'data'`][] event handler to listen for data. +* Calling the [`stream.resume()`][stream-resume] method to explicitly open the + flow. +* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. + +You can switch back to paused mode by doing either of the following: + +* If there are no pipe destinations, by calling the + [`stream.pause()`][stream-pause] method. +* If there are pipe destinations, by removing any [`'data'`][] event + handlers, and removing all pipe destinations by calling the + [`stream.unpipe()`][] method. + +Note that, for backwards compatibility reasons, removing [`'data'`][] +event handlers will **not** automatically pause the stream. Also, if +there are piped destinations, then calling [`stream.pause()`][stream-pause] will +not guarantee that the stream will *remain* paused once those +destinations drain and ask for more data. + +Examples of readable streams include: + +* [HTTP responses, on the client][http-incoming-message] +* [HTTP requests, on the server][http-incoming-message] +* [fs read streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdout and stderr][] +* [`process.stdin`][] + +#### Event: 'close' + +Emitted when the stream and any of its underlying resources (a file +descriptor, for example) have been closed. 
The event indicates that +no more events will be emitted, and no further computation will occur. + +Not all streams will emit the `'close'` event as the `'close'` event is +optional. + +#### Event: 'data' + +* `chunk` {Buffer|String} The chunk of data. + +Attaching a `'data'` event listener to a stream that has not been +explicitly paused will switch the stream into flowing mode. Data will +then be passed as soon as it is available. + +If you just want to get all the data out of the stream as fast as +possible, this is the best way to do so. + +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); +}); +``` + +#### Event: 'end' + +This event fires when there will be no more data to read. + +Note that the `'end'` event **will not fire** unless the data is +completely consumed. This can be done by switching into flowing mode, +or by calling [`stream.read()`][stream-read] repeatedly until you get to the +end. + +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); +}); +readable.on('end', () => { + console.log('there will be no more data.'); +}); +``` + +#### Event: 'error' + +* {Error} + +Emitted if there was an error receiving data. + +#### Event: 'readable' + +When a chunk of data can be read from the stream, it will emit a +`'readable'` event. + +In some cases, listening for a `'readable'` event will cause some data +to be read into the internal buffer from the underlying system, if it +hadn't already. + +```javascript +var readable = getReadableStreamSomehow(); +readable.on('readable', () => { + // there is some data to read now +}); +``` + +Once the internal buffer is drained, a `'readable'` event will fire +again when more data is available. + +The `'readable'` event is not emitted in the "flowing" mode with the +sole exception of the last one, on end-of-stream. + +The `'readable'` event indicates that the stream has new information: +either new data is available or the end of the stream has been reached. +In the former case, [`stream.read()`][stream-read] will return that data. In the +latter case, [`stream.read()`][stream-read] will return null. For instance, in +the following example, `foo.txt` is an empty file: + +```js +const fs = require('fs'); +var rr = fs.createReadStream('foo.txt'); +rr.on('readable', () => { + console.log('readable:', rr.read()); +}); +rr.on('end', () => { + console.log('end'); +}); +``` + +The output of running this script is: + +``` +$ node test.js +readable: null +end +``` + +#### readable.isPaused() + +* Return: {Boolean} + +This method returns whether or not the `readable` has been **explicitly** +paused by client code (using [`stream.pause()`][stream-pause] without a +corresponding [`stream.resume()`][stream-resume]). + +```js +var readable = new stream.Readable + +readable.isPaused() // === false +readable.pause() +readable.isPaused() // === true +readable.resume() +readable.isPaused() // === false +``` + +#### readable.pause() + +* Return: `this` + +This method will cause a stream in flowing mode to stop emitting +[`'data'`][] events, switching out of flowing mode. Any data that becomes +available will remain in the internal buffer. 
+ +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); + readable.pause(); + console.log('there will be no more data for 1 second'); + setTimeout(() => { + console.log('now data will start flowing again'); + readable.resume(); + }, 1000); +}); +``` + +#### readable.pipe(destination[, options]) + +* `destination` {stream.Writable} The destination for writing data +* `options` {Object} Pipe options + * `end` {Boolean} End the writer when the reader ends. Default = `true` + +This method pulls all the data out of a readable stream, and writes it +to the supplied destination, automatically managing the flow so that +the destination is not overwhelmed by a fast readable stream. + +Multiple destinations can be piped to safely. + +```js +var readable = getReadableStreamSomehow(); +var writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt' +readable.pipe(writable); +``` + +This function returns the destination stream, so you can set up pipe +chains like so: + +```js +var r = fs.createReadStream('file.txt'); +var z = zlib.createGzip(); +var w = fs.createWriteStream('file.txt.gz'); +r.pipe(z).pipe(w); +``` + +For example, emulating the Unix `cat` command: + +```js +process.stdin.pipe(process.stdout); +``` + +By default [`stream.end()`][stream-end] is called on the destination when the +source stream emits [`'end'`][], so that `destination` is no longer writable. +Pass `{ end: false }` as `options` to keep the destination stream open. + +This keeps `writer` open so that "Goodbye" can be written at the +end. + +```js +reader.pipe(writer, { end: false }); +reader.on('end', () => { + writer.end('Goodbye\n'); +}); +``` + +Note that [`process.stderr`][] and [`process.stdout`][] are never closed until +the process exits, regardless of the specified options. + +#### readable.read([size]) + +* `size` {Number} Optional argument to specify how much data to read. +* Return {String|Buffer|Null} + +The `read()` method pulls some data out of the internal buffer and +returns it. If there is no data available, then it will return +`null`. + +If you pass in a `size` argument, then it will return that many +bytes. If `size` bytes are not available, then it will return `null`, +unless we've ended, in which case it will return the data remaining +in the buffer. + +If you do not specify a `size` argument, then it will return all the +data in the internal buffer. + +This method should only be called in paused mode. In flowing mode, +this method is called automatically until the internal buffer is +drained. + +```js +var readable = getReadableStreamSomehow(); +readable.on('readable', () => { + var chunk; + while (null !== (chunk = readable.read())) { + console.log('got %d bytes of data', chunk.length); + } +}); +``` + +If this method returns a data chunk, then it will also trigger the +emission of a [`'data'`][] event. + +Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] +event has been triggered will return `null`. No runtime error will be raised. + +#### readable.resume() + +* Return: `this` + +This method will cause the readable stream to resume emitting [`'data'`][] +events. + +This method will switch the stream into flowing mode. If you do *not* +want to consume the data from a stream, but you *do* want to get to +its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open +the flow of data. 
+ +```js +var readable = getReadableStreamSomehow(); +readable.resume(); +readable.on('end', () => { + console.log('got to the end, but did not read anything'); +}); +``` + +#### readable.setEncoding(encoding) + +* `encoding` {String} The encoding to use. +* Return: `this` + +Call this function to cause the stream to return strings of the specified +encoding instead of Buffer objects. For example, if you do +`readable.setEncoding('utf8')`, then the output data will be interpreted as +UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, +then the data will be encoded in hexadecimal string format. + +This properly handles multi-byte characters that would otherwise be +potentially mangled if you simply pulled the Buffers directly and +called [`buf.toString(encoding)`][] on them. If you want to read the data +as strings, always use this method. + +Also you can disable any encoding at all with `readable.setEncoding(null)`. +This approach is very useful if you deal with binary data or with large +multi-byte strings spread out over multiple chunks. + +```js +var readable = getReadableStreamSomehow(); +readable.setEncoding('utf8'); +readable.on('data', (chunk) => { + assert.equal(typeof chunk, 'string'); + console.log('got %d characters of string data', chunk.length); +}); +``` + +#### readable.unpipe([destination]) + +* `destination` {stream.Writable} Optional specific stream to unpipe + +This method will remove the hooks set up for a previous [`stream.pipe()`][] +call. + +If the destination is not specified, then all pipes are removed. + +If the destination is specified, but no pipe is set up for it, then +this is a no-op. + +```js +var readable = getReadableStreamSomehow(); +var writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt', +// but only for the first second +readable.pipe(writable); +setTimeout(() => { + console.log('stop writing to file.txt'); + readable.unpipe(writable); + console.log('manually close the file stream'); + writable.end(); +}, 1000); +``` + +#### readable.unshift(chunk) + +* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue + +This is useful in certain cases where a stream is being consumed by a +parser, which needs to "un-consume" some data that it has +optimistically pulled out of the source, so that the stream can be +passed on to some other party. + +Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event +has been triggered; a runtime error will be raised. + +If you find that you must often call `stream.unshift(chunk)` in your +programs, consider implementing a [Transform][] stream instead. (See [API +for Stream Implementors][].) 
+ +```js +// Pull off a header delimited by \n\n +// use unshift() if we get too much +// Call the callback with (error, header, stream) +const StringDecoder = require('string_decoder').StringDecoder; +function parseHeader(stream, callback) { + stream.on('error', callback); + stream.on('readable', onReadable); + var decoder = new StringDecoder('utf8'); + var header = ''; + function onReadable() { + var chunk; + while (null !== (chunk = stream.read())) { + var str = decoder.write(chunk); + if (str.match(/\n\n/)) { + // found the header boundary + var split = str.split(/\n\n/); + header += split.shift(); + var remaining = split.join('\n\n'); + var buf = Buffer.from(remaining, 'utf8'); + if (buf.length) + stream.unshift(buf); + stream.removeListener('error', callback); + stream.removeListener('readable', onReadable); + // now the body of the message can be read from the stream. + callback(null, header, stream); + } else { + // still reading the header. + header += str; + } + } + } +} +``` + +Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` +will not end the reading process by resetting the internal reading state of the +stream. This can cause unexpected results if `unshift()` is called during a +read (i.e. from within a [`stream._read()`][stream-_read] implementation on a +custom stream). Following the call to `unshift()` with an immediate +[`stream.push('')`][stream-push] will reset the reading state appropriately, +however it is best to simply avoid calling `unshift()` while in the process of +performing a read. + +#### readable.wrap(stream) + +* `stream` {Stream} An "old style" readable stream + +Versions of Node.js prior to v0.10 had streams that did not implement the +entire Streams API as it is today. (See [Compatibility][] for +more information.) + +If you are using an older Node.js library that emits [`'data'`][] events and +has a [`stream.pause()`][stream-pause] method that is advisory only, then you +can use the `wrap()` method to create a [Readable][] stream that uses the old +stream as its data source. + +You will very rarely ever need to call this function, but it exists +as a convenience for interacting with old Node.js programs and libraries. + +For example: + +```js +const OldReader = require('./old-api-module.js').OldReader; +const Readable = require('stream').Readable; +const oreader = new OldReader; +const myReader = new Readable().wrap(oreader); + +myReader.on('readable', () => { + myReader.read(); // etc. +}); +``` + +### Class: stream.Transform + +Transform streams are [Duplex][] streams where the output is in some way +computed from the input. They implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Transform streams include: + +* [zlib streams][zlib] +* [crypto streams][crypto] + +### Class: stream.Writable + + + +The Writable stream interface is an abstraction for a *destination* +that you are writing data *to*. + +Examples of writable streams include: + +* [HTTP requests, on the client][] +* [HTTP responses, on the server][] +* [fs write streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdin][] +* [`process.stdout`][], [`process.stderr`][] + +#### Event: 'close' + +Emitted when the stream and any of its underlying resources (a file descriptor, +for example) have been closed. The event indicates that no more events will be +emitted, and no further computation will occur. + +Not all streams will emit the `'close'` event as the `'close'` event is +optional. 
+ +#### Event: 'drain' + +If a [`stream.write(chunk)`][stream-write] call returns `false`, then the +`'drain'` event will indicate when it is appropriate to begin writing more data +to the stream. + +```js +// Write the data to the supplied writable stream one million times. +// Be attentive to back-pressure. +function writeOneMillionTimes(writer, data, encoding, callback) { + var i = 1000000; + write(); + function write() { + var ok = true; + do { + i -= 1; + if (i === 0) { + // last time! + writer.write(data, encoding, callback); + } else { + // see if we should continue, or wait + // don't pass the callback, because we're not done yet. + ok = writer.write(data, encoding); + } + } while (i > 0 && ok); + if (i > 0) { + // had to stop early! + // write some more once it drains + writer.once('drain', write); + } + } +} +``` + +#### Event: 'error' + +* {Error} + +Emitted if there was an error when writing or piping data. + +#### Event: 'finish' + +When the [`stream.end()`][stream-end] method has been called, and all data has +been flushed to the underlying system, this event is emitted. + +```javascript +var writer = getWritableStreamSomehow(); +for (var i = 0; i < 100; i ++) { + writer.write('hello, #${i}!\n'); +} +writer.end('this is the end\n'); +writer.on('finish', () => { + console.error('all writes are now complete.'); +}); +``` + +#### Event: 'pipe' + +* `src` {stream.Readable} source stream that is piping to this writable + +This is emitted whenever the [`stream.pipe()`][] method is called on a readable +stream, adding this writable to its set of destinations. + +```js +var writer = getWritableStreamSomehow(); +var reader = getReadableStreamSomehow(); +writer.on('pipe', (src) => { + console.error('something is piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +``` + +#### Event: 'unpipe' + +* `src` {[Readable][] Stream} The source stream that + [unpiped][`stream.unpipe()`] this writable + +This is emitted whenever the [`stream.unpipe()`][] method is called on a +readable stream, removing this writable from its set of destinations. + +```js +var writer = getWritableStreamSomehow(); +var reader = getReadableStreamSomehow(); +writer.on('unpipe', (src) => { + console.error('something has stopped piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +reader.unpipe(writer); +``` + +#### writable.cork() + +Forces buffering of all writes. + +Buffered data will be flushed either at [`stream.uncork()`][] or at +[`stream.end()`][stream-end] call. + +#### writable.end([chunk][, encoding][, callback]) + +* `chunk` {String|Buffer} Optional data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Optional callback for when the stream is finished + +Call this method when no more data will be written to the stream. If supplied, +the callback is attached as a listener on the [`'finish'`][] event. + +Calling [`stream.write()`][stream-write] after calling +[`stream.end()`][stream-end] will raise an error. + +```js +// write 'hello, ' and then end with 'world!' +var file = fs.createWriteStream('example.txt'); +file.write('hello, '); +file.end('world!'); +// writing more now is not allowed! +``` + +#### writable.setDefaultEncoding(encoding) + +* `encoding` {String} The new default encoding +* Return: `this` + +Sets the default encoding for a writable stream. + +#### writable.uncork() + +Flush all data, buffered since [`stream.cork()`][] call. 
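Since the `cork()`/`uncork()` sections above have no example, here is a small sketch of the pattern they describe: writes issued while corked are held in memory and flushed together when `uncork()` (or `end()`) is called. The output file name is just an assumption for illustration.

```js
const fs = require('fs');
const out = fs.createWriteStream('batched.txt'); // illustrative destination

out.cork();               // hold subsequent writes in memory
out.write('header\n');
out.write('row 1\n');
out.write('row 2\n');

// Flush everything buffered since cork() as one batch.
process.nextTick(() => out.uncork());
```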
+ +#### writable.write(chunk[, encoding][, callback]) + +* `chunk` {String|Buffer} The data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Callback for when this chunk of data is flushed +* Returns: {Boolean} `true` if the data was handled completely. + +This method writes some data to the underlying system, and calls the +supplied callback once the data has been fully handled. If an error +occurs, the callback may or may not be called with the error as its +first argument. To detect write errors, listen for the `'error'` event. + +The return value indicates if you should continue writing right now. +If the data had to be buffered internally, then it will return +`false`. Otherwise, it will return `true`. + +This return value is strictly advisory. You MAY continue to write, +even if it returns `false`. However, writes will be buffered in +memory, so it is best not to do this excessively. Instead, wait for +the [`'drain'`][] event before writing more data. + + +## API for Stream Implementors + + + +To implement any sort of stream, the pattern is the same: + +1. Extend the appropriate parent class in your own subclass. (The + [`util.inherits()`][] method is particularly helpful for this.) +2. Call the appropriate parent class constructor in your constructor, + to be sure that the internal mechanisms are set up properly. +3. Implement one or more specific methods, as detailed below. + +The class to extend and the method(s) to implement depend on the sort +of stream class you are writing: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Use-case | Class | Method(s) to implement |
+| -------- | ----- | ---------------------- |
+| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
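To make the last row of the table concrete, a minimal sketch of the extend/implement pattern using a hypothetical `Upper` Transform that upper-cases whatever is written to it, in the same `util.inherits` style as the examples later in this file:

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(Upper, Transform);

function Upper(options) {
  if (!(this instanceof Upper))
    return new Upper(options);
  Transform.call(this, options); // let the parent set up buffering
}

// Receives each written chunk and pushes the transformed result.
Upper.prototype._transform = function(chunk, encoding, callback) {
  this.push(chunk.toString().toUpperCase());
  callback();
};

// Usage: process.stdin.pipe(new Upper()).pipe(process.stdout);
```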
+ +In your implementation code, it is very important to never call the methods +described in [API for Stream Consumers][]. Otherwise, you can potentially cause +adverse side effects in programs that consume your streaming interfaces. + +### Class: stream.Duplex + + + +A "duplex" stream is one that is both Readable and Writable, such as a TCP +socket connection. + +Note that `stream.Duplex` is an abstract class designed to be extended +with an underlying implementation of the [`stream._read(size)`][stream-_read] +and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you +would with a Readable or Writable stream class. + +Since JavaScript doesn't have multiple prototypal inheritance, this class +prototypally inherits from Readable, and then parasitically from Writable. It is +thus up to the user to implement both the low-level +[`stream._read(n)`][stream-_read] method as well as the low-level +[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension +duplex classes. + +#### new stream.Duplex(options) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then + the stream will automatically end the readable side when the + writable side ends and vice versa. + * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` + for readable side of the stream. Has no effect if `objectMode` + is `true`. + * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` + for writable side of the stream. Has no effect if `objectMode` + is `true`. + +In classes that extend the Duplex class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +### Class: stream.PassThrough + +This is a trivial implementation of a [Transform][] stream that simply +passes the input bytes across to the output. Its purpose is mainly +for examples and testing, but there are occasionally use cases where +it can come in handy as a building block for novel sorts of streams. + +### Class: stream.Readable + + + +`stream.Readable` is an abstract class designed to be extended with an +underlying implementation of the [`stream._read(size)`][stream-_read] method. + +Please see [API for Stream Consumers][] for how to consume +streams in your programs. What follows is an explanation of how to +implement Readable streams in your programs. + +#### new stream.Readable([options]) + +* `options` {Object} + * `highWaterMark` {Number} The maximum number of bytes to store in + the internal buffer before ceasing to read from the underlying + resource. Default = `16384` (16kb), or `16` for `objectMode` streams + * `encoding` {String} If specified, then buffers will be decoded to + strings using the specified encoding. Default = `null` + * `objectMode` {Boolean} Whether this stream should behave + as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns + a single value instead of a Buffer of size n. Default = `false` + * `read` {Function} Implementation for the [`stream._read()`][stream-_read] + method. + +In classes that extend the Readable class, make sure to call the +Readable constructor so that the buffering settings can be properly +initialized. 
+ +#### readable.\_read(size) + +* `size` {Number} Number of bytes to read asynchronously + +Note: **Implement this method, but do NOT call it directly.** + +This method is prefixed with an underscore because it is internal to the +class that defines it and should only be called by the internal Readable +class methods. All Readable stream implementations must provide a \_read +method to fetch data from the underlying resource. + +When `_read()` is called, if data is available from the resource, the `_read()` +implementation should start pushing that data into the read queue by calling +[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from +the resource and pushing data until push returns `false`, at which point it +should stop reading from the resource. Only when `_read()` is called again after +it has stopped should it start reading more data from the resource and pushing +that data onto the queue. + +Note: once the `_read()` method is called, it will not be called again until +the [`stream.push()`][stream-push] method is called. + +The `size` argument is advisory. Implementations where a "read" is a +single call that returns data can use this to know how much data to +fetch. Implementations where that is not relevant, such as TCP or +TLS, may ignore this argument, and simply provide data whenever it +becomes available. There is no need, for example to "wait" until +`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. + +#### readable.push(chunk[, encoding]) + + +* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue +* `encoding` {String} Encoding of String chunks. Must be a valid + Buffer encoding, such as `'utf8'` or `'ascii'` +* return {Boolean} Whether or not more pushes should be performed + +Note: **This method should be called by Readable implementors, NOT +by consumers of Readable streams.** + +If a value other than null is passed, The `push()` method adds a chunk of data +into the queue for subsequent stream processors to consume. If `null` is +passed, it signals the end of the stream (EOF), after which no more data +can be written. + +The data added with `push()` can be pulled out by calling the +[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. + +This API is designed to be as flexible as possible. For example, +you may be wrapping a lower-level source which has some sort of +pause/resume mechanism, and a data callback. In those cases, you +could wrap the low-level source object by doing something like this: + +```js +// source is an object with readStop() and readStart() methods, +// and an `ondata` member that gets called when it has data, and +// an `onend` member that gets called when the data is over. + +util.inherits(SourceWrapper, Readable); + +function SourceWrapper(options) { + Readable.call(this, options); + + this._source = getLowlevelSourceObject(); + + // Every time there's data, we push it into the internal buffer. + this._source.ondata = (chunk) => { + // if push() returns false, then we need to stop reading from source + if (!this.push(chunk)) + this._source.readStop(); + }; + + // When the source ends, we push the EOF-signaling `null` chunk + this._source.onend = () => { + this.push(null); + }; +} + +// _read will be called when the stream wants to pull more data in +// the advisory size argument is ignored in this case. 
+SourceWrapper.prototype._read = function(size) { + this._source.readStart(); +}; +``` + +#### Example: A Counting Stream + + + +This is a basic example of a Readable stream. It emits the numerals +from 1 to 1,000,000 in ascending order, and then ends. + +```js +const Readable = require('stream').Readable; +const util = require('util'); +util.inherits(Counter, Readable); + +function Counter(opt) { + Readable.call(this, opt); + this._max = 1000000; + this._index = 1; +} + +Counter.prototype._read = function() { + var i = this._index++; + if (i > this._max) + this.push(null); + else { + var str = '' + i; + var buf = Buffer.from(str, 'ascii'); + this.push(buf); + } +}; +``` + +#### Example: SimpleProtocol v1 (Sub-optimal) + +This is similar to the `parseHeader` function described +[here](#stream_readable_unshift_chunk), but implemented as a custom stream. +Also, note that this implementation does not convert the incoming data to a +string. + +However, this would be better implemented as a [Transform][] stream. See +[SimpleProtocol v2][] for a better implementation. + +```js +// A parser for a simple data protocol. +// The "header" is a JSON object, followed by 2 \n characters, and +// then a message body. +// +// NOTE: This can be done more simply as a Transform stream! +// Using Readable directly for this is sub-optimal. See the +// alternative example below under the Transform section. + +const Readable = require('stream').Readable; +const util = require('util'); + +util.inherits(SimpleProtocol, Readable); + +function SimpleProtocol(source, options) { + if (!(this instanceof SimpleProtocol)) + return new SimpleProtocol(source, options); + + Readable.call(this, options); + this._inBody = false; + this._sawFirstCr = false; + + // source is a readable stream, such as a socket or file + this._source = source; + + source.on('end', () => { + this.push(null); + }); + + // give it a kick whenever the source is readable + // read(0) will not consume any bytes + source.on('readable', () => { + this.read(0); + }); + + this._rawHeader = []; + this.header = null; +} + +SimpleProtocol.prototype._read = function(n) { + if (!this._inBody) { + var chunk = this._source.read(); + + // if the source doesn't have data, we don't have data yet. + if (chunk === null) + return this.push(''); + + // check if the chunk has a \n\n + var split = -1; + for (var i = 0; i < chunk.length; i++) { + if (chunk[i] === 10) { // '\n' + if (this._sawFirstCr) { + split = i; + break; + } else { + this._sawFirstCr = true; + } + } else { + this._sawFirstCr = false; + } + } + + if (split === -1) { + // still waiting for the \n\n + // stash the chunk, and try again. + this._rawHeader.push(chunk); + this.push(''); + } else { + this._inBody = true; + var h = chunk.slice(0, split); + this._rawHeader.push(h); + var header = Buffer.concat(this._rawHeader).toString(); + try { + this.header = JSON.parse(header); + } catch (er) { + this.emit('error', new Error('invalid simple protocol data')); + return; + } + // now, because we got some extra data, unshift the rest + // back into the read queue so that our consumer will see it. + var b = chunk.slice(split); + this.unshift(b); + // calling unshift by itself does not reset the reading state + // of the stream; since we're inside _read, doing an additional + // push('') will reset the state appropriately. + this.push(''); + + // and let them know that we are done parsing the header. + this.emit('header', this.header); + } + } else { + // from there on, just provide the data to our consumer. 
+ // careful not to push(null), since that would indicate EOF. + var chunk = this._source.read(); + if (chunk) this.push(chunk); + } +}; + +// Usage: +// var parser = new SimpleProtocol(source); +// Now parser is a readable stream that will emit 'header' +// with the parsed header data. +``` + +### Class: stream.Transform + +A "transform" stream is a duplex stream where the output is causally +connected in some way to the input, such as a [zlib][] stream or a +[crypto][] stream. + +There is no requirement that the output be the same size as the input, +the same number of chunks, or arrive at the same time. For example, a +Hash stream will only ever have a single chunk of output which is +provided when the input is ended. A zlib stream will produce output +that is either much smaller or much larger than its input. + +Rather than implement the [`stream._read()`][stream-_read] and +[`stream._write()`][stream-_write] methods, Transform classes must implement the +[`stream._transform()`][stream-_transform] method, and may optionally +also implement the [`stream._flush()`][stream-_flush] method. (See below.) + +#### new stream.Transform([options]) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `transform` {Function} Implementation for the + [`stream._transform()`][stream-_transform] method. + * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] + method. + +In classes that extend the Transform class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +#### Events: 'finish' and 'end' + +The [`'finish'`][] and [`'end'`][] events are from the parent Writable +and Readable classes respectively. The `'finish'` event is fired after +[`stream.end()`][stream-end] is called and all chunks have been processed by +[`stream._transform()`][stream-_transform], `'end'` is fired after all data has +been output which is after the callback in [`stream._flush()`][stream-_flush] +has been called. + +#### transform.\_flush(callback) + +* `callback` {Function} Call this function (optionally with an error + argument) when you are done flushing any remaining data. + +Note: **This function MUST NOT be called directly.** It MAY be implemented +by child classes, and if so, will be called by the internal Transform +class methods only. + +In some cases, your transform operation may need to emit a bit more +data at the end of the stream. For example, a `Zlib` compression +stream will store up some internal state so that it can optimally +compress the output. At the end, however, it needs to do the best it +can with what is left, so that the data will be complete. + +In those cases, you can implement a `_flush()` method, which will be +called at the very end, after all the written data is consumed, but +before emitting [`'end'`][] to signal the end of the readable side. Just +like with [`stream._transform()`][stream-_transform], call +`transform.push(chunk)` zero or more times, as appropriate, and call `callback` +when the flush operation is complete. + +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. + +#### transform.\_transform(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be transformed. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. 
+* `encoding` {String} If the chunk is a string, then this is the + encoding type. If chunk is a buffer, then this is the special + value - 'buffer', ignore it in this case. +* `callback` {Function} Call this function (optionally with an error + argument and data) when you are done processing the supplied chunk. + +Note: **This function MUST NOT be called directly.** It should be +implemented by child classes, and called by the internal Transform +class methods only. + +All Transform stream implementations must provide a `_transform()` +method to accept input and produce output. + +`_transform()` should do whatever has to be done in this specific +Transform class, to handle the bytes being written, and pass them off +to the readable portion of the interface. Do asynchronous I/O, +process things, and so on. + +Call `transform.push(outputChunk)` 0 or more times to generate output +from this input chunk, depending on how much data you want to output +as a result of this chunk. + +Call the callback function only when the current chunk is completely +consumed. Note that there may or may not be output as a result of any +particular input chunk. If you supply a second argument to the callback +it will be passed to the push method. In other words the following are +equivalent: + +```js +transform.prototype._transform = function (data, encoding, callback) { + this.push(data); + callback(); +}; + +transform.prototype._transform = function (data, encoding, callback) { + callback(null, data); +}; +``` + +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. + +#### Example: `SimpleProtocol` parser v2 + +The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple +protocol parser can be implemented simply by using the higher level +[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol +v1` examples. + +In this example, rather than providing the input as an argument, it +would be piped into the parser, which is a more idiomatic Node.js stream +approach. + +```javascript +const util = require('util'); +const Transform = require('stream').Transform; +util.inherits(SimpleProtocol, Transform); + +function SimpleProtocol(options) { + if (!(this instanceof SimpleProtocol)) + return new SimpleProtocol(options); + + Transform.call(this, options); + this._inBody = false; + this._sawFirstCr = false; + this._rawHeader = []; + this.header = null; +} + +SimpleProtocol.prototype._transform = function(chunk, encoding, done) { + if (!this._inBody) { + // check if the chunk has a \n\n + var split = -1; + for (var i = 0; i < chunk.length; i++) { + if (chunk[i] === 10) { // '\n' + if (this._sawFirstCr) { + split = i; + break; + } else { + this._sawFirstCr = true; + } + } else { + this._sawFirstCr = false; + } + } + + if (split === -1) { + // still waiting for the \n\n + // stash the chunk, and try again. + this._rawHeader.push(chunk); + } else { + this._inBody = true; + var h = chunk.slice(0, split); + this._rawHeader.push(h); + var header = Buffer.concat(this._rawHeader).toString(); + try { + this.header = JSON.parse(header); + } catch (er) { + this.emit('error', new Error('invalid simple protocol data')); + return; + } + // and let them know that we are done parsing the header. + this.emit('header', this.header); + + // now, because we got some extra data, emit this first. 
+ this.push(chunk.slice(split)); + } + } else { + // from there on, just provide the data to our consumer as-is. + this.push(chunk); + } + done(); +}; + +// Usage: +// var parser = new SimpleProtocol(); +// source.pipe(parser) +// Now parser is a readable stream that will emit 'header' +// with the parsed header data. +``` + +### Class: stream.Writable + + + +`stream.Writable` is an abstract class designed to be extended with an +underlying implementation of the +[`stream._write(chunk, encoding, callback)`][stream-_write] method. + +Please see [API for Stream Consumers][] for how to consume +writable streams in your programs. What follows is an explanation of +how to implement Writable streams in your programs. + +#### new stream.Writable([options]) + +* `options` {Object} + * `highWaterMark` {Number} Buffer level when + [`stream.write()`][stream-write] starts returning `false`. Default = `16384` + (16kb), or `16` for `objectMode` streams. + * `decodeStrings` {Boolean} Whether or not to decode strings into + Buffers before passing them to [`stream._write()`][stream-_write]. + Default = `true` + * `objectMode` {Boolean} Whether or not the + [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can + write arbitrary data instead of only `Buffer` / `String` data. + Default = `false` + * `write` {Function} Implementation for the + [`stream._write()`][stream-_write] method. + * `writev` {Function} Implementation for the + [`stream._writev()`][stream-_writev] method. + +In classes that extend the Writable class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +#### writable.\_write(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be written. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then this is the + encoding type. If chunk is a buffer, then this is the special + value - 'buffer', ignore it in this case. +* `callback` {Function} Call this function (optionally with an error + argument) when you are done processing the supplied chunk. + +All Writable stream implementations must provide a +[`stream._write()`][stream-_write] method to send data to the underlying +resource. + +Note: **This function MUST NOT be called directly.** It should be +implemented by child classes, and called by the internal Writable +class methods only. + +Call the callback using the standard `callback(error)` pattern to +signal that the write completed successfully or with an error. + +If the `decodeStrings` flag is set in the constructor options, then +`chunk` may be a string rather than a Buffer, and `encoding` will +indicate the sort of string that it is. This is to support +implementations that have an optimized handling for certain string +data encodings. If you do not explicitly set the `decodeStrings` +option to `false`, then you can safely ignore the `encoding` argument, +and assume that `chunk` will always be a Buffer. + +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. + +#### writable.\_writev(chunks, callback) + +* `chunks` {Array} The chunks to be written. Each chunk has following + format: `{ chunk: ..., encoding: ... }`. +* `callback` {Function} Call this function (optionally with an error + argument) when you are done processing the supplied chunks. 
+ +Note: **This function MUST NOT be called directly.** It may be +implemented by child classes, and called by the internal Writable +class methods only. + +This function is completely optional to implement. In most cases it is +unnecessary. If implemented, it will be called with all the chunks +that are buffered in the write queue. + + +## Simplified Constructor API + + + +In simple cases there is now the added benefit of being able to construct a +stream without inheritance. + +This can be done by passing the appropriate methods as constructor options: + +Examples: + +### Duplex + +```js +var duplex = new stream.Duplex({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + }, + write: function(chunk, encoding, next) { + // sets this._write under the hood + + // An optional error can be passed as the first argument + next() + } +}); + +// or + +var duplex = new stream.Duplex({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + }, + writev: function(chunks, next) { + // sets this._writev under the hood + + // An optional error can be passed as the first argument + next() + } +}); +``` + +### Readable + +```js +var readable = new stream.Readable({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + } +}); +``` + +### Transform + +```js +var transform = new stream.Transform({ + transform: function(chunk, encoding, next) { + // sets this._transform under the hood + + // generate output as many times as needed + // this.push(chunk); + + // call when the current chunk is consumed + next(); + }, + flush: function(done) { + // sets this._flush under the hood + + // generate output as many times as needed + // this.push(chunk); + + done(); + } +}); +``` + +### Writable + +```js +var writable = new stream.Writable({ + write: function(chunk, encoding, next) { + // sets this._write under the hood + + // An optional error can be passed as the first argument + next() + } +}); + +// or + +var writable = new stream.Writable({ + writev: function(chunks, next) { + // sets this._writev under the hood + + // An optional error can be passed as the first argument + next() + } +}); +``` + +## Streams: Under the Hood + + + +### Buffering + + + +Both Writable and Readable streams will buffer data on an internal +object which can be retrieved from `_writableState.getBuffer()` or +`_readableState.buffer`, respectively. + +The amount of data that will potentially be buffered depends on the +`highWaterMark` option which is passed into the constructor. + +Buffering in Readable streams happens when the implementation calls +[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not +call [`stream.read()`][stream-read], then the data will sit in the internal +queue until it is consumed. + +Buffering in Writable streams happens when the user calls +[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. + +The purpose of streams, especially with the [`stream.pipe()`][] method, is to +limit the buffering of data to acceptable levels, so that sources and +destinations of varying speed will not overwhelm the available memory. 
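+
+The following is a minimal sketch (it is not part of the stream documentation itself, and
+`'numbers.txt'` is only a placeholder destination) of a producer that honors that limit by
+hand: it stops calling `stream.write()` as soon as it returns `false` and resumes once the
+`'drain'` event fires.
+
+```js
+const fs = require('fs');
+
+// Sketch: write a million lines without letting the writable buffer
+// grow past highWaterMark. write() returning false means "back off";
+// 'drain' means the buffer has emptied enough to continue.
+var dest = fs.createWriteStream('numbers.txt');
+var i = 0;
+var max = 1000000;
+
+function writeSome() {
+  var ok = true;
+  while (i < max && ok) {
+    ok = dest.write(i + '\n');
+    i++;
+  }
+  if (i < max) {
+    // the buffer is full; wait for it to drain before writing more.
+    dest.once('drain', writeSome);
+  } else {
+    dest.end();
+  }
+}
+
+writeSome();
+```
+
+[`stream.pipe()`][] performs this same bookkeeping automatically, which is why piping is
+usually preferable to a hand-written write loop.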
+ +### Compatibility with Older Node.js Versions + + + +In versions of Node.js prior to v0.10, the Readable stream interface was +simpler, but also less powerful and less useful. + +* Rather than waiting for you to call the [`stream.read()`][stream-read] method, + [`'data'`][] events would start emitting immediately. If you needed to do + some I/O to decide how to handle data, then you had to store the chunks + in some kind of buffer so that they would not be lost. +* The [`stream.pause()`][stream-pause] method was advisory, rather than + guaranteed. This meant that you still had to be prepared to receive + [`'data'`][] events even when the stream was in a paused state. + +In Node.js v0.10, the [Readable][] class was added. +For backwards compatibility with older Node.js programs, Readable streams +switch into "flowing mode" when a [`'data'`][] event handler is added, or +when the [`stream.resume()`][stream-resume] method is called. The effect is +that, even if you are not using the new [`stream.read()`][stream-read] method +and [`'readable'`][] event, you no longer have to worry about losing +[`'data'`][] chunks. + +Most programs will continue to function normally. However, this +introduces an edge case in the following conditions: + +* No [`'data'`][] event handler is added. +* The [`stream.resume()`][stream-resume] method is never called. +* The stream is not piped to any writable destination. + +For example, consider the following code: + +```js +// WARNING! BROKEN! +net.createServer((socket) => { + + // we add an 'end' method, but never consume the data + socket.on('end', () => { + // It will never get here. + socket.end('I got your message (but didnt read it)\n'); + }); + +}).listen(1337); +``` + +In versions of Node.js prior to v0.10, the incoming message data would be +simply discarded. However, in Node.js v0.10 and beyond, +the socket will remain paused forever. + +The workaround in this situation is to call the +[`stream.resume()`][stream-resume] method to start the flow of data: + +```js +// Workaround +net.createServer((socket) => { + + socket.on('end', () => { + socket.end('I got your message (but didnt read it)\n'); + }); + + // start the flow of data, discarding it. + socket.resume(); + +}).listen(1337); +``` + +In addition to new Readable streams switching into flowing mode, +pre-v0.10 style streams can be wrapped in a Readable class using the +[`stream.wrap()`][] method. + + +### Object Mode + + + +Normally, Streams operate on Strings and Buffers exclusively. + +Streams that are in **object mode** can emit generic JavaScript values +other than Buffers and Strings. + +A Readable stream in object mode will always return a single item from +a call to [`stream.read(size)`][stream-read], regardless of what the size +argument is. + +A Writable stream in object mode will always ignore the `encoding` +argument to [`stream.write(data, encoding)`][stream-write]. + +The special value `null` still retains its special value for object +mode streams. That is, for object mode readable streams, `null` as a +return value from [`stream.read()`][stream-read] indicates that there is no more +data, and [`stream.push(null)`][stream-push] will signal the end of stream data +(`EOF`). + +No streams in Node.js core are object mode streams. This pattern is only +used by userland streaming libraries. + +You should set `objectMode` in your stream child class constructor on +the options object. Setting `objectMode` mid-stream is not safe. 
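+
+As a minimal sketch (the `items` array and the `objReadable` name are illustrative, not
+taken from the Node.js source), an object-mode Readable built with the simplified
+constructor hands out exactly one item per `read()` call, no matter what `size` is passed:
+
+```js
+const Readable = require('stream').Readable;
+
+// A hypothetical object-mode source over an array of plain objects.
+var items = [{ id: 1 }, { id: 2 }, { id: 3 }];
+
+var objReadable = new Readable({
+  objectMode: true,
+  read: function() {
+    // push one object per _read call; null signals the end of the stream.
+    this.push(items.length ? items.shift() : null);
+  }
+});
+
+console.log(objReadable.read());    // { id: 1 }
+console.log(objReadable.read(100)); // { id: 2 } -- the size argument is ignored
+```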
+ +For Duplex streams `objectMode` can be set exclusively for readable or +writable side with `readableObjectMode` and `writableObjectMode` +respectively. These options can be used to implement parsers and +serializers with Transform streams. + +```js +const util = require('util'); +const StringDecoder = require('string_decoder').StringDecoder; +const Transform = require('stream').Transform; +util.inherits(JSONParseStream, Transform); + +// Gets \n-delimited JSON string data, and emits the parsed objects +function JSONParseStream() { + if (!(this instanceof JSONParseStream)) + return new JSONParseStream(); + + Transform.call(this, { readableObjectMode : true }); + + this._buffer = ''; + this._decoder = new StringDecoder('utf8'); +} + +JSONParseStream.prototype._transform = function(chunk, encoding, cb) { + this._buffer += this._decoder.write(chunk); + // split on newlines + var lines = this._buffer.split(/\r?\n/); + // keep the last partial line buffered + this._buffer = lines.pop(); + for (var l = 0; l < lines.length; l++) { + var line = lines[l]; + try { + var obj = JSON.parse(line); + } catch (er) { + this.emit('error', er); + return; + } + // push the parsed object out to the readable consumer + this.push(obj); + } + cb(); +}; + +JSONParseStream.prototype._flush = function(cb) { + // Just handle any leftover + var rem = this._buffer.trim(); + if (rem) { + try { + var obj = JSON.parse(rem); + } catch (er) { + this.emit('error', er); + return; + } + // push the parsed object out to the readable consumer + this.push(obj); + } + cb(); +}; +``` + +### `stream.read(0)` + +There are some cases where you want to trigger a refresh of the +underlying readable stream mechanisms, without actually consuming any +data. In that case, you can call `stream.read(0)`, which will always +return null. + +If the internal read buffer is below the `highWaterMark`, and the +stream is not currently reading, then calling `stream.read(0)` will trigger +a low-level [`stream._read()`][stream-_read] call. + +There is almost never a need to do this. However, you will see some +cases in Node.js's internals where this is done, particularly in the +Readable stream class internals. + +### `stream.push('')` + +Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an +interesting side effect. Because it *is* a call to +[`stream.push()`][stream-push], it will end the `reading` process. However, it +does *not* add any data to the readable buffer, so there's nothing for +a user to consume. + +Very rarely, there are cases where you have no data to provide now, +but the consumer of your stream (or, perhaps, another bit of your own +code) will know when to check again, by calling [`stream.read(0)`][stream-read]. +In those cases, you *may* call `stream.push('')`. + +So far, the only use case for this functionality is in the +[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. If you +find that you have to use `stream.push('')`, please consider another +approach, because it almost certainly indicates that something is +horribly wrong. 
+ +[`'data'`]: #stream_event_data +[`'drain'`]: #stream_event_drain +[`'end'`]: #stream_event_end +[`'finish'`]: #stream_event_finish +[`'readable'`]: #stream_event_readable +[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.1.0/api/buffer.html#buffer_buf_tostring_encoding_start_end +[`EventEmitter`]: https://nodejs.org/docs/v6.1.0/api/events.html#events_class_eventemitter +[`process.stderr`]: https://nodejs.org/docs/v6.1.0/api/process.html#process_process_stderr +[`process.stdin`]: https://nodejs.org/docs/v6.1.0/api/process.html#process_process_stdin +[`process.stdout`]: https://nodejs.org/docs/v6.1.0/api/process.html#process_process_stdout +[`stream.cork()`]: #stream_writable_cork +[`stream.pipe()`]: #stream_readable_pipe_destination_options +[`stream.uncork()`]: #stream_writable_uncork +[`stream.unpipe()`]: #stream_readable_unpipe_destination +[`stream.wrap()`]: #stream_readable_wrap_stream +[`tls.CryptoStream`]: https://nodejs.org/docs/v6.1.0/api/tls.html#tls_class_cryptostream +[`util.inherits()`]: https://nodejs.org/docs/v6.1.0/api/util.html#util_util_inherits_constructor_superconstructor +[API for Stream Consumers]: #stream_api_for_stream_consumers +[API for Stream Implementors]: #stream_api_for_stream_implementors +[child process stdin]: https://nodejs.org/docs/v6.1.0/api/child_process.html#child_process_child_stdin +[child process stdout and stderr]: https://nodejs.org/docs/v6.1.0/api/child_process.html#child_process_child_stdout +[Compatibility]: #stream_compatibility_with_older_node_js_versions +[crypto]: crypto.html +[Duplex]: #stream_class_stream_duplex +[fs read streams]: https://nodejs.org/docs/v6.1.0/api/fs.html#fs_class_fs_readstream +[fs write streams]: https://nodejs.org/docs/v6.1.0/api/fs.html#fs_class_fs_writestream +[HTTP requests, on the client]: https://nodejs.org/docs/v6.1.0/api/http.html#http_class_http_clientrequest +[HTTP responses, on the server]: https://nodejs.org/docs/v6.1.0/api/http.html#http_class_http_serverresponse +[http-incoming-message]: https://nodejs.org/docs/v6.1.0/api/http.html#http_class_http_incomingmessage +[Object mode]: #stream_object_mode +[Readable]: #stream_class_stream_readable +[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 +[stream-_flush]: #stream_transform_flush_callback +[stream-_read]: #stream_readable_read_size_1 +[stream-_transform]: #stream_transform_transform_chunk_encoding_callback +[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 +[stream-_writev]: #stream_writable_writev_chunks_callback +[stream-end]: #stream_writable_end_chunk_encoding_callback +[stream-pause]: #stream_readable_pause +[stream-push]: #stream_readable_push_chunk_encoding +[stream-read]: #stream_readable_read_size +[stream-resume]: #stream_readable_resume +[stream-write]: #stream_writable_write_chunk_encoding_callback +[TCP sockets]: https://nodejs.org/docs/v6.1.0/api/net.html#net_class_net_socket +[Transform]: #stream_class_stream_transform +[Writable]: #stream_class_stream_writable +[zlib]: zlib.html diff --git a/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/fsevents/node_modules/readable-stream/duplex.js b/node_modules/fsevents/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..ca807af --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..736693b --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,75 @@ +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +var keys = objectKeys(Writable.prototype); +for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + processNextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..d06f71f --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,26 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..79914fa --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,893 @@ +'use strict'; + +module.exports = Readable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var isArray = require('isarray'); +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var StringDecoder; + +util.inherits(Readable, Stream); + +var hasPrependListener = typeof EE.prototype.prependListener === 'function'; + +function prependListener(emitter, event, fn) { + if (hasPrependListener) return emitter.prependListener(event, fn); + + // This is a brutally ugly hack to make sure that our error handler + // is attached before any userland ones. NEVER DO THIS. This is here + // only because this code needs to continue to work with older versions + // of Node.js that do not include the prependListener() method. The goal + // is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +var Duplex; +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. 
+ this.highWaterMark = ~ ~this.highWaterMark; + + this.buffer = []; + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +var Duplex; +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = bufferShim.from(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var _e = new Error('stream.unshift() after end event'); + stream.emit('error', _e); + } else { + var skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +function howMuchToRead(n, state) { + if (state.length === 0 && state.ended) return 0; + + if (state.objectMode) return n === 0 ? 
0 : 1; + + if (n === null || isNaN(n)) { + // only flow one buffer at a time + if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; + } + + if (n <= 0) return 0; + + // If we're asking for more than the target buffer level, + // then raise the water mark. Bump up to the next highest + // power of 2, to prevent increasing it excessively in tiny + // amounts. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + + // don't have that much. return null, unless we've ended. + if (n > state.length) { + if (!state.ended) { + state.needReadable = true; + return 0; + } else { + return state.length; + } + } + + return n; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + var state = this._readableState; + var nOrig = n; + + if (typeof n !== 'number' || n > 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } + + if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. 
+ if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + } + + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (doRead && !state.reading) n = howMuchToRead(nOrig, state); + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } + + state.length -= n; + + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (state.length === 0 && !state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended && state.length === 0) endReadable(this); + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + processNextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+Readable.prototype._read = function (n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + if (false === ret) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + }return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + // If listening to data, and it has not explicitly been paused, + // then call resume to start the flow of data on the next tick. + if (ev === 'data' && false !== this._readableState.flowing) { + this.resume(); + } + + if (ev === 'readable' && !this._readableState.endEmitted) { + var state = this._readableState; + if (!state.readableListening) { + state.readableListening = true; + state.emittedReadable = false; + state.needReadable = true; + if (!state.reading) { + processNextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + processNextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + if (state.flowing) { + do { + var chunk = stream.read(); + } while (null !== chunk && state.flowing); + } +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function (ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +function fromList(n, state) { + var list = state.buffer; + var length = state.length; + var stringMode = !!state.decoder; + var objectMode = !!state.objectMode; + var ret; + + // nothing in the list, definitely empty. + if (list.length === 0) return null; + + if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { + // read it all, truncate the array. + if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); + list.length = 0; + } else { + // read just some of it. + if (n < list[0].length) { + // just take a part of the first list item. 
+ // slice is the same for buffers and strings. + var buf = list[0]; + ret = buf.slice(0, n); + list[0] = buf.slice(n); + } else if (n === list[0].length) { + // first list is a perfect match + ret = list.shift(); + } else { + // complex case. + // we have enough to cover it, but it spans past the first buffer. + if (stringMode) ret = '';else ret = bufferShim.allocUnsafe(n); + + var c = 0; + for (var i = 0, l = list.length; i < l && c < n; i++) { + var _buf = list[0]; + var cpy = Math.min(n - c, _buf.length); + + if (stringMode) ret += _buf.slice(0, cpy);else _buf.copy(ret, c, 0, cpy); + + if (cpy < _buf.length) list[0] = _buf.slice(cpy);else list.shift(); + + c += cpy; + } + } + } + + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + processNextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..dbc996e --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,180 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function (er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function () { + if (typeof this._flush === 'function') this._flush(function (er) { + done(stream, er); + });else done(stream); + }); +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('Not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) throw new Error('Calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..ed5efcb --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,526 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +util.inherits(Writable, Stream); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +var Duplex; +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. 
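// --- Editor's note: an illustrative aside, not part of the upstream patch. ---
// With objectMode enabled, every chunk counts as length 1 (see writeOrBuffer
// below), so highWaterMark is measured in queued objects rather than bytes,
// and validChunk accepts arbitrary values instead of only buffers/strings.
// A hypothetical sketch using this module's Writable export:
//
//   var Writable = require('readable-stream').Writable;
//   var sink = new Writable({
//     objectMode: true,
//     write: function (obj, encoding, cb) { console.log(obj); cb(); }
//   });
//   sink.write({ any: 'value' }); // would raise a TypeError without objectMode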
+ this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. 
Use _writableState.getBuffer ' + 'instead.') + }); + } catch (_) {} +})(); + +var Duplex; +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + processNextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + // Always throw error if a null is written + // if we are not in object mode then throw + // if it is not a buffer, string, or undefined. + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + processNextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = bufferShim.from(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) processNextTick(cb, er);else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
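// --- Editor's note: an illustrative sketch, not part of the upstream patch. ---
// The nextTick deferral described above exists so a producer can rely on the
// `false` return value of write() and attach a 'drain' listener before the
// event fires. A hypothetical producer respecting that back-pressure signal:
function writeMany(ws, chunks, done) {
  (function next() {
    var ok = true;
    while (chunks.length && ok) {
      ok = ws.write(chunks.shift()); // false once buffered length >= highWaterMark
    }
    if (chunks.length) ws.once('drain', next); // resume after the buffer empties
    else done();
  })();
}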
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) processNextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function (err) { + var entry = _this.entry; + _this.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/readable-stream/package.json b/node_modules/fsevents/node_modules/readable-stream/package.json new file mode 100644 index 0000000..3f9f60e --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/package.json @@ -0,0 +1,118 @@ +{ + "_args": [ + [ + "readable-stream@^2.0.0 || ^1.1.13", + "/Users/eshanker/Code/fsevents/node_modules/are-we-there-yet" + ] + ], + "_from": "readable-stream@>=2.0.0 <3.0.0||>=1.1.13 <2.0.0", + "_id": "readable-stream@2.1.4", + "_inCache": true, + "_installable": true, + "_location": "/readable-stream", + "_nodeVersion": "5.11.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/readable-stream-2.1.4.tgz_1463679605032_0.6917394688352942" + }, + "_npmUser": { + "email": "calvin.metcalf@gmail.com", + "name": "cwmma" + }, + "_npmVersion": "3.8.6", + "_phantomChildren": {}, + "_requested": { + "name": "readable-stream", + "raw": "readable-stream@^2.0.0 || ^1.1.13", + "rawSpec": "^2.0.0 || ^1.1.13", + "scope": null, + "spec": ">=2.0.0 <3.0.0||>=1.1.13 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/are-we-there-yet", + "/tar-pack" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.4.tgz", + "_shasum": "70b9791c6fcb8480db44bd155a0f6bb58f172468", + "_shrinkwrap": null, + "_spec": "readable-stream@^2.0.0 || ^1.1.13", + "_where": "/Users/eshanker/Code/fsevents/node_modules/are-we-there-yet", + "browser": { + "util": false + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "dependencies": { + "buffer-shims": "^1.0.0", + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + }, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "assert": "~1.4.0", + "nyc": "^6.4.0", + "tap": "~0.7.1", + "tape": "~4.5.1", + "zuul": "~3.9.0", + "zuul-ngrok": "^4.0.0" + }, + "directories": {}, + "dist": { + "shasum": "70b9791c6fcb8480db44bd155a0f6bb58f172468", + "tarball": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.4.tgz" + }, + "gitHead": "7752832fba237929388dea6c96911a0a6379abfc", + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "pipe", + "readable", + "stream" + ], + "license": "MIT", + "main": "readable.js", + "maintainers": [ + { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + { + "name": "tootallnate", + "email": "nathan@tootallnate.net" + }, + { + "name": "rvagg", + "email": "rod@vagg.org" + }, + { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + } + ], + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "browser": "npm run write-zuul && zuul -- test/browser.js", + "cover": "nyc npm test", + "local": "zuul --local -- test/browser.js", + "report": "nyc report --reporter=lcov", + "test": "tap test/parallel/*.js test/ours/*.js", + "write-zuul": "printf \"ui: tape\ntunnel: ngrok\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" + }, + "version": "2.1.4" +} diff --git a/node_modules/fsevents/node_modules/readable-stream/passthrough.js b/node_modules/fsevents/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..27e8d8a --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/fsevents/node_modules/readable-stream/readable.js b/node_modules/fsevents/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..be2688a --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = (function (){ + try { + return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify + } catch(_){} +}()); +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = Stream || exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); + +if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; +} diff --git a/node_modules/fsevents/node_modules/readable-stream/transform.js b/node_modules/fsevents/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..5d482f0 --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/fsevents/node_modules/readable-stream/writable.js b/node_modules/fsevents/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..e1e9efd --- /dev/null +++ b/node_modules/fsevents/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/fsevents/node_modules/request/.eslintrc b/node_modules/fsevents/node_modules/request/.eslintrc new file mode 100644 index 0000000..5a59481 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/.eslintrc @@ -0,0 +1,45 @@ +{ + "env": { + "node": true + }, + "rules": { + // 2-space indentation + "indent": [2, 2, {"SwitchCase": 1}], + // Disallow 
semi-colons, unless needed to disambiguate statement + "semi": [2, "never"], + // Require strings to use single quotes + "quotes": [2, "single"], + // Require curly braces for all control statements + "curly": 2, + // Disallow using variables and functions before they've been defined + "no-use-before-define": 2, + // Allow any case for variable naming + "camelcase": 0, + // Disallow unused variables, except as function arguments + "no-unused-vars": [2, {"args":"none"}], + // Allow leading underscores for method names + // REASON: we use underscores to denote private methods + "no-underscore-dangle": 0, + // Allow multi spaces around operators since they are + // used for alignment. This is not consistent in the + // code. + "no-multi-spaces": 0, + // Style rule is: most objects use { beforeColon: false, afterColon: true }, unless aligning which uses: + // + // { + // beforeColon : true, + // afterColon : true + // } + // + // eslint can't handle this, so the check is disabled. + "key-spacing": 0, + // Allow shadowing vars in outer scope (needs discussion) + "no-shadow": 0, + // Use if () { } + // ^ space + "keyword-spacing": [2, {"after": true}], + // Use if () { } + // ^ space + "space-before-blocks": [2, "always"] + } +} diff --git a/node_modules/fsevents/node_modules/request/.npmignore b/node_modules/fsevents/node_modules/request/.npmignore new file mode 100644 index 0000000..67fe11c --- /dev/null +++ b/node_modules/fsevents/node_modules/request/.npmignore @@ -0,0 +1,6 @@ +coverage +tests +node_modules +examples +release.sh +disabled.appveyor.yml diff --git a/node_modules/fsevents/node_modules/request/.travis.yml b/node_modules/fsevents/node_modules/request/.travis.yml new file mode 100644 index 0000000..6f2d753 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/.travis.yml @@ -0,0 +1,21 @@ + +language: node_js + +node_js: + - node + - 4 + - 0.12 + - 0.10 + +after_script: + - npm run test-cov + - codecov + - cat ./coverage/lcov.info | coveralls + +webhooks: + urls: https://webhooks.gitter.im/e/237280ed4796c19cc626 + on_success: change # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: false # default: false + +sudo: false diff --git a/node_modules/fsevents/node_modules/request/CHANGELOG.md b/node_modules/fsevents/node_modules/request/CHANGELOG.md new file mode 100644 index 0000000..c7d684f --- /dev/null +++ b/node_modules/fsevents/node_modules/request/CHANGELOG.md @@ -0,0 +1,631 @@ +## Change Log + +### v2.73.0 (2016/07/09) +- [#2240](https://github.com/request/request/pull/2240) Remove connectionErrorHandler to fix #1903 (@zarenner) +- [#2251](https://github.com/request/request/pull/2251) tape@4.6.0 breaks build 🚨 (@greenkeeperio-bot) +- [#2225](https://github.com/request/request/pull/2225) Update docs (@ArtskydJ) +- [#2203](https://github.com/request/request/pull/2203) Update browserify to version 13.0.1 🚀 (@greenkeeperio-bot) +- [#2275](https://github.com/request/request/pull/2275) Update karma to version 1.1.1 🚀 (@greenkeeperio-bot) +- [#2204](https://github.com/request/request/pull/2204) Add codecov.yml and disable PR comments (@simov) +- [#2212](https://github.com/request/request/pull/2212) Fix link to http.IncomingMessage documentation (@nazieb) +- [#2208](https://github.com/request/request/pull/2208) Update to form-data RC4 and pass null values to it (@simov) +- [#2207](https://github.com/request/request/pull/2207) Move aws4 require statement to the top (@simov) +- 
[#2199](https://github.com/request/request/pull/2199) Update karma-coverage to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#2206](https://github.com/request/request/pull/2206) Update qs to version 6.2.0 🚀 (@greenkeeperio-bot) +- [#2205](https://github.com/request/request/pull/2205) Use server-destory to close hanging sockets in tests (@simov) +- [#2200](https://github.com/request/request/pull/2200) Update karma-cli to version 1.0.0 🚀 (@greenkeeperio-bot) + +### v2.72.0 (2016/04/17) +- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov) +- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou) +- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz) +- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov) +- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov) +- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo) + +### v2.71.0 (2016/04/12) +- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov) + +### v2.70.0 (2016/04/05) +- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot) +- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. (@elyobo) +- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot) +- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot) +- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot) +- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov) +- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov) +- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot) +- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov) +- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm) +- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov) +- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot) + +### v2.69.0 (2016/01/27) +- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg) + +### v2.68.0 (2016/01/27) +- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods) +- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross) +- [#2024](https://github.com/request/request/pull/2024) Update har-validator dependency for nsp advisory #76 (@TylerDixon) +- [#2016](https://github.com/request/request/pull/2016) Update qs to version 6.0.2 🚀 (@greenkeeperio-bot) +- [#2007](https://github.com/request/request/pull/2007) Use the `extend` module instead of util._extend (@simov) +- [#2003](https://github.com/request/request/pull/2003) Update browserify to version 13.0.0 🚀 (@greenkeeperio-bot) +- 
[#1989](https://github.com/request/request/pull/1989) Update buffer-equal to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#1956](https://github.com/request/request/pull/1956) Check form-data content-length value before setting up the header (@jongyoonlee) +- [#1958](https://github.com/request/request/pull/1958) Use IncomingMessage.destroy method (@simov) +- [#1952](https://github.com/request/request/pull/1952) Adds example for Tor proxy (@prometheansacrifice) +- [#1943](https://github.com/request/request/pull/1943) Update eslint to version 1.10.3 🚀 (@simov, @greenkeeperio-bot) +- [#1924](https://github.com/request/request/pull/1924) Update eslint to version 1.10.1 🚀 (@greenkeeperio-bot) +- [#1915](https://github.com/request/request/pull/1915) Remove content-length and transfer-encoding headers from defaultProxyHeaderWhiteList (@yaxia) + +### v2.67.0 (2015/11/19) +- [#1913](https://github.com/request/request/pull/1913) Update http-signature to version 1.1.0 🚀 (@greenkeeperio-bot) + +### v2.66.0 (2015/11/18) +- [#1906](https://github.com/request/request/pull/1906) Update README URLs based on HTTP redirects (@ReadmeCritic) +- [#1905](https://github.com/request/request/pull/1905) Convert typed arrays into regular buffers (@simov) +- [#1902](https://github.com/request/request/pull/1902) node-uuid@1.4.7 breaks build 🚨 (@greenkeeperio-bot) +- [#1894](https://github.com/request/request/pull/1894) Fix tunneling after redirection from https (Original: #1881) (@simov, @falms) +- [#1893](https://github.com/request/request/pull/1893) Update eslint to version 1.9.0 🚀 (@greenkeeperio-bot) +- [#1852](https://github.com/request/request/pull/1852) Update eslint to version 1.7.3 🚀 (@simov, @greenkeeperio-bot, @paulomcnally, @michelsalib, @arbaaz, @vladimirich, @LoicMahieu, @JoshWillik, @jzaefferer, @ryanwholey, @djchie, @thisconnect, @mgenereu, @acroca, @Sebmaster, @Bloutiouf) +- [#1876](https://github.com/request/request/pull/1876) Implement loose matching for har mime types (@simov) +- [#1875](https://github.com/request/request/pull/1875) Update bluebird to version 3.0.2 🚀 (@simov, @greenkeeperio-bot) +- [#1871](https://github.com/request/request/pull/1871) Update browserify to version 12.0.1 🚀 (@greenkeeperio-bot) +- [#1866](https://github.com/request/request/pull/1866) Add missing quotes on x-token property in README (@miguelmota) +- [#1874](https://github.com/request/request/pull/1874) Fix typo in README.md (@gswalden) +- [#1860](https://github.com/request/request/pull/1860) Improve referer header tests and docs (@simov) +- [#1861](https://github.com/request/request/pull/1861) Remove redundant call to Stream constructor (@watson) +- [#1857](https://github.com/request/request/pull/1857) Fix Referer header to point to the original host name (@simov) +- [#1850](https://github.com/request/request/pull/1850) Update karma-coverage to version 0.5.3 🚀 (@greenkeeperio-bot) +- [#1847](https://github.com/request/request/pull/1847) Use node's latest version when building (@simov) +- [#1836](https://github.com/request/request/pull/1836) Tunnel: fix wrong property name (@Bloutiouf) +- [#1820](https://github.com/request/request/pull/1820) Set href as request.js uses it (@mgenereu) +- [#1840](https://github.com/request/request/pull/1840) Update http-signature to version 1.0.2 🚀 (@greenkeeperio-bot) +- [#1845](https://github.com/request/request/pull/1845) Update istanbul to version 0.4.0 🚀 (@greenkeeperio-bot) + +### v2.65.0 (2015/10/11) +- [#1833](https://github.com/request/request/pull/1833) Update aws-sign2 to version 
0.6.0 🚀 (@greenkeeperio-bot) +- [#1811](https://github.com/request/request/pull/1811) Enable loose cookie parsing in tough-cookie (@Sebmaster) +- [#1830](https://github.com/request/request/pull/1830) Bring back tilde ranges for all dependencies (@simov) +- [#1821](https://github.com/request/request/pull/1821) Implement support for RFC 2617 MD5-sess algorithm. (@BigDSK) +- [#1828](https://github.com/request/request/pull/1828) Updated qs dependency to 5.2.0 (@acroca) +- [#1818](https://github.com/request/request/pull/1818) Extract `readResponseBody` method out of `onRequestResponse` (@pvoisin) +- [#1819](https://github.com/request/request/pull/1819) Run stringify once (@mgenereu) +- [#1814](https://github.com/request/request/pull/1814) Updated har-validator to version 2.0.2 (@greenkeeperio-bot) +- [#1807](https://github.com/request/request/pull/1807) Updated tough-cookie to version 2.1.0 (@greenkeeperio-bot) +- [#1800](https://github.com/request/request/pull/1800) Add caret ranges for devDependencies, except eslint (@simov) +- [#1799](https://github.com/request/request/pull/1799) Updated karma-browserify to version 4.4.0 (@greenkeeperio-bot) +- [#1797](https://github.com/request/request/pull/1797) Updated tape to version 4.2.0 (@greenkeeperio-bot) +- [#1788](https://github.com/request/request/pull/1788) Pinned all dependencies (@greenkeeperio-bot) + +### v2.64.0 (2015/09/25) +- [#1787](https://github.com/request/request/pull/1787) npm ignore examples, release.sh and disabled.appveyor.yml (@thisconnect) +- [#1775](https://github.com/request/request/pull/1775) Fix typo in README.md (@djchie) +- [#1776](https://github.com/request/request/pull/1776) Changed word 'conjuction' to read 'conjunction' in README.md (@ryanwholey) +- [#1785](https://github.com/request/request/pull/1785) Revert: Set default application/json content-type when using json option #1772 (@simov) + +### v2.63.0 (2015/09/21) +- [#1772](https://github.com/request/request/pull/1772) Set default application/json content-type when using json option (@jzaefferer) + +### v2.62.0 (2015/09/15) +- [#1768](https://github.com/request/request/pull/1768) Add node 4.0 to the list of build targets (@simov) +- [#1767](https://github.com/request/request/pull/1767) Query strings now cooperate with unix sockets (@JoshWillik) +- [#1750](https://github.com/request/request/pull/1750) Revert doc about installation of tough-cookie added in #884 (@LoicMahieu) +- [#1746](https://github.com/request/request/pull/1746) Missed comma in Readme (@vladimirich) +- [#1743](https://github.com/request/request/pull/1743) Fix options not being initialized in defaults method (@simov) + +### v2.61.0 (2015/08/19) +- [#1721](https://github.com/request/request/pull/1721) Minor fix in README.md (@arbaaz) +- [#1733](https://github.com/request/request/pull/1733) Avoid useless Buffer transformation (@michelsalib) +- [#1726](https://github.com/request/request/pull/1726) Update README.md (@paulomcnally) +- [#1715](https://github.com/request/request/pull/1715) Fix forever option in node > 0.10 #1709 (@calibr) +- [#1716](https://github.com/request/request/pull/1716) Do not create Buffer from Object in setContentLength(iojs v3.0 issue) (@calibr) +- [#1711](https://github.com/request/request/pull/1711) Add ability to detect connect timeouts (@kevinburke) +- [#1712](https://github.com/request/request/pull/1712) Set certificate expiration to August 2, 2018 (@kevinburke) +- [#1700](https://github.com/request/request/pull/1700) debug() when JSON.parse() on a response body fails 
(@phillipj) + +### v2.60.0 (2015/07/21) +- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews) + +### v2.59.0 (2015/07/20) +- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options. Forever option defaults to using http(s).Agent in node 0.12+ (@simov) +- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman) +- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme) +- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov) +- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov) +- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x) +- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov) +- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles) + +### v2.58.0 (2015/06/16) +- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov) +- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov) +- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov) +- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov) + +### v2.57.0 (2015/05/31) +- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. 
(@ChALkeR) + +### v2.56.0 (2015/05/28) +- [#1610](https://github.com/request/request/pull/1610) Bump module dependencies (@simov) +- [#1600](https://github.com/request/request/pull/1600) Extract the querystring logic into separate module (@simov) +- [#1607](https://github.com/request/request/pull/1607) Re-generate certificates (@simov) +- [#1599](https://github.com/request/request/pull/1599) Move getProxyFromURI logic below the check for Invaild URI (#1595) (@simov) +- [#1598](https://github.com/request/request/pull/1598) Fix the way http verbs are defined in order to please intellisense IDEs (@simov, @flannelJesus) +- [#1591](https://github.com/request/request/pull/1591) A few minor fixes: (@simov) +- [#1584](https://github.com/request/request/pull/1584) Refactor test-default tests (according to comments in #1430) (@simov) +- [#1585](https://github.com/request/request/pull/1585) Fixing documentation regarding TLS options (#1583) (@mainakae) +- [#1574](https://github.com/request/request/pull/1574) Refresh the oauth_nonce on redirect (#1573) (@simov) +- [#1570](https://github.com/request/request/pull/1570) Discovered tests that weren't properly running (@seanstrom) +- [#1569](https://github.com/request/request/pull/1569) Fix pause before response arrives (@kevinoid) +- [#1558](https://github.com/request/request/pull/1558) Emit error instead of throw (@simov) +- [#1568](https://github.com/request/request/pull/1568) Fix stall when piping gzipped response (@kevinoid) +- [#1560](https://github.com/request/request/pull/1560) Update combined-stream (@apechimp) +- [#1543](https://github.com/request/request/pull/1543) Initial support for oauth_body_hash on json payloads (@simov, @aesopwolf) +- [#1541](https://github.com/request/request/pull/1541) Fix coveralls (@simov) +- [#1540](https://github.com/request/request/pull/1540) Fix recursive defaults for convenience methods (@simov) +- [#1536](https://github.com/request/request/pull/1536) More eslint style rules (@froatsnook) +- [#1533](https://github.com/request/request/pull/1533) Adding dependency status bar to README.md (@YasharF) +- [#1539](https://github.com/request/request/pull/1539) ensure the latest version of har-validator is included (@ahmadnassri) +- [#1516](https://github.com/request/request/pull/1516) forever+pool test (@devTristan) + +### v2.55.0 (2015/04/05) +- [#1520](https://github.com/request/request/pull/1520) Refactor defaults (@simov) +- [#1525](https://github.com/request/request/pull/1525) Delete request headers with undefined value. 
(@froatsnook) +- [#1521](https://github.com/request/request/pull/1521) Add promise tests (@simov) +- [#1518](https://github.com/request/request/pull/1518) Fix defaults (@simov) +- [#1515](https://github.com/request/request/pull/1515) Allow static invoking of convenience methods (@simov) +- [#1505](https://github.com/request/request/pull/1505) Fix multipart boundary extraction regexp (@simov) +- [#1510](https://github.com/request/request/pull/1510) Fix basic auth form data (@simov) + +### v2.54.0 (2015/03/24) +- [#1501](https://github.com/request/request/pull/1501) HTTP Archive 1.2 support (@ahmadnassri) +- [#1486](https://github.com/request/request/pull/1486) Add a test for the forever agent (@akshayp) +- [#1500](https://github.com/request/request/pull/1500) Adding handling for no auth method and null bearer (@philberg) +- [#1498](https://github.com/request/request/pull/1498) Add table of contents in readme (@simov) +- [#1477](https://github.com/request/request/pull/1477) Add support for qs options via qsOptions key (@simov) +- [#1496](https://github.com/request/request/pull/1496) Parameters encoded to base 64 should be decoded as UTF-8, not ASCII. (@albanm) +- [#1494](https://github.com/request/request/pull/1494) Update eslint (@froatsnook) +- [#1474](https://github.com/request/request/pull/1474) Require Colon in Basic Auth (@erykwalder) +- [#1481](https://github.com/request/request/pull/1481) Fix baseUrl and redirections. (@burningtree) +- [#1469](https://github.com/request/request/pull/1469) Feature/base url (@froatsnook) +- [#1459](https://github.com/request/request/pull/1459) Add option to time request/response cycle (including rollup of redirects) (@aaron-em) +- [#1468](https://github.com/request/request/pull/1468) Re-enable io.js/node 0.12 build (@simov, @mikeal, @BBB) +- [#1442](https://github.com/request/request/pull/1442) Fixed the issue with strictSSL tests on 0.12 & io.js by explicitly setting a cipher that matches the cert. 
(@BBB, @nicolasmccurdy, @demohi, @simov, @0x4139) +- [#1460](https://github.com/request/request/pull/1460) localAddress or proxy config is lost when redirecting (@simov, @0x4139) +- [#1453](https://github.com/request/request/pull/1453) Test on Node.js 0.12 and io.js with allowed failures (@nicolasmccurdy, @demohi) +- [#1426](https://github.com/request/request/pull/1426) Fixing tests to pass on io.js and node 0.12 (only test-https.js stiff failing) (@mikeal) +- [#1446](https://github.com/request/request/pull/1446) Missing HTTP referer header with redirects Fixes #1038 (@simov, @guimon) +- [#1428](https://github.com/request/request/pull/1428) Deprecate Node v0.8.x (@nylen) +- [#1436](https://github.com/request/request/pull/1436) Add ability to set a requester without setting default options (@tikotzky) +- [#1435](https://github.com/request/request/pull/1435) dry up verb methods (@sethpollack) +- [#1423](https://github.com/request/request/pull/1423) Allow fully qualified multipart content-type header (@simov) +- [#1430](https://github.com/request/request/pull/1430) Fix recursive requester (@tikotzky) +- [#1429](https://github.com/request/request/pull/1429) Throw error when making HEAD request with a body (@tikotzky) +- [#1419](https://github.com/request/request/pull/1419) Add note that the project is broken in 0.12.x (@nylen) +- [#1413](https://github.com/request/request/pull/1413) Fix basic auth (@simov) +- [#1397](https://github.com/request/request/pull/1397) Improve pipe-from-file tests (@nylen) + +### v2.53.0 (2015/02/02) +- [#1396](https://github.com/request/request/pull/1396) Do not rfc3986 escape JSON bodies (@nylen, @simov) +- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson) + +### v2.52.0 (2015/02/02) +- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen) +- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao) +- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom) +- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen) +- [#1376](https://github.com/request/request/pull/1376) Use karma-browserify for tests. Add browser test coverage reporter. (@eiriksm) +- [#1366](https://github.com/request/request/pull/1366) Refactor OAuth into separate module (@simov) +- [#1373](https://github.com/request/request/pull/1373) Rewrite tunnel test to be pure Node.js (@nylen) +- [#1371](https://github.com/request/request/pull/1371) Upgrade test reporter (@nylen) +- [#1360](https://github.com/request/request/pull/1360) Refactor basic, bearer, digest auth logic into separate class (@simov) +- [#1354](https://github.com/request/request/pull/1354) Remove circular dependency from debugging code (@nylen) +- [#1351](https://github.com/request/request/pull/1351) Move digest auth into private prototype method (@simov) +- [#1352](https://github.com/request/request/pull/1352) Update hawk dependency to ~2.3.0 (@mridgway) +- [#1353](https://github.com/request/request/pull/1353) Correct travis-ci badge (@dogancelik) +- [#1349](https://github.com/request/request/pull/1349) Make sure we return on errored browser requests. 
(@eiriksm) +- [#1346](https://github.com/request/request/pull/1346) getProxyFromURI Extraction Refactor (@seanstrom) +- [#1337](https://github.com/request/request/pull/1337) Standardize test ports on 6767 (@nylen) +- [#1341](https://github.com/request/request/pull/1341) Emit FormData error events as Request error events (@nylen, @rwky) +- [#1343](https://github.com/request/request/pull/1343) Clean up readme badges, and add Travis and Coveralls badges (@nylen) +- [#1345](https://github.com/request/request/pull/1345) Update README.md (@Aaron-Hartwig) +- [#1338](https://github.com/request/request/pull/1338) Always wait for server.close() callback in tests (@nylen) +- [#1342](https://github.com/request/request/pull/1342) Add mock https server and redo start of browser tests for this purpose. (@eiriksm) +- [#1339](https://github.com/request/request/pull/1339) Improve auth docs (@nylen) +- [#1335](https://github.com/request/request/pull/1335) Add support for OAuth plaintext signature method (@simov) +- [#1332](https://github.com/request/request/pull/1332) Add clean script to remove test-browser.js after the tests run (@seanstrom) +- [#1327](https://github.com/request/request/pull/1327) Fix errors generating coverage reports. (@nylen) +- [#1330](https://github.com/request/request/pull/1330) Return empty buffer upon empty response body and encoding is set to null (@seanstrom) +- [#1326](https://github.com/request/request/pull/1326) Use faster container-based infrastructure on Travis (@nylen) +- [#1315](https://github.com/request/request/pull/1315) Implement rfc3986 option (@simov, @nylen, @apoco, @DullReferenceException, @mmalecki, @oliamb, @cliffcrosland, @LewisJEllis, @eiriksm, @poislagarde) +- [#1314](https://github.com/request/request/pull/1314) Detect urlencoded form data header via regex (@simov) +- [#1317](https://github.com/request/request/pull/1317) Improve OAuth1.0 server side flow example (@simov) + +### v2.51.0 (2014/12/10) +- [#1310](https://github.com/request/request/pull/1310) Revert changes introduced in https://github.com/request/request/pull/1282 (@simov) + +### v2.50.0 (2014/12/09) +- [#1308](https://github.com/request/request/pull/1308) Add browser test to keep track of browserify compability. (@eiriksm) +- [#1299](https://github.com/request/request/pull/1299) Add optional support for jsonReviver (@poislagarde) +- [#1277](https://github.com/request/request/pull/1277) Add Coveralls configuration (@simov) +- [#1307](https://github.com/request/request/pull/1307) Upgrade form-data, add back browserify compability. Fixes #455. 
(@eiriksm) +- [#1305](https://github.com/request/request/pull/1305) Fix typo in README.md (@LewisJEllis) +- [#1288](https://github.com/request/request/pull/1288) Update README.md to explain custom file use case (@cliffcrosland) + +### v2.49.0 (2014/11/28) +- [#1295](https://github.com/request/request/pull/1295) fix(proxy): no-proxy false positive (@oliamb) +- [#1292](https://github.com/request/request/pull/1292) Upgrade `caseless` to 0.8.1 (@mmalecki) +- [#1276](https://github.com/request/request/pull/1276) Set transfer encoding for multipart/related to chunked by default (@simov) +- [#1275](https://github.com/request/request/pull/1275) Fix multipart content-type headers detection (@simov) +- [#1269](https://github.com/request/request/pull/1269) adds streams example for review (@tbuchok) +- [#1238](https://github.com/request/request/pull/1238) Add examples README.md (@simov) + +### v2.48.0 (2014/11/12) +- [#1263](https://github.com/request/request/pull/1263) Fixed a syntax error / typo in README.md (@xna2) +- [#1253](https://github.com/request/request/pull/1253) Add multipart chunked flag (@simov, @nylen) +- [#1251](https://github.com/request/request/pull/1251) Clarify that defaults() does not modify global defaults (@nylen) +- [#1250](https://github.com/request/request/pull/1250) Improve documentation for pool and maxSockets options (@nylen) +- [#1237](https://github.com/request/request/pull/1237) Documenting error handling when using streams (@vmattos) +- [#1244](https://github.com/request/request/pull/1244) Finalize changelog command (@nylen) +- [#1241](https://github.com/request/request/pull/1241) Fix typo (@alexanderGugel) +- [#1223](https://github.com/request/request/pull/1223) Show latest version number instead of "upcoming" in changelog (@nylen) +- [#1236](https://github.com/request/request/pull/1236) Document how to use custom CA in README (#1229) (@hypesystem) +- [#1228](https://github.com/request/request/pull/1228) Support for oauth with RSA-SHA1 signing (@nylen) +- [#1216](https://github.com/request/request/pull/1216) Made json and multipart options coexist (@nylen, @simov) +- [#1225](https://github.com/request/request/pull/1225) Allow header white/exclusive lists in any case. (@RReverser) + +### v2.47.0 (2014/10/26) +- [#1222](https://github.com/request/request/pull/1222) Move from mikeal/request to request/request (@nylen) +- [#1220](https://github.com/request/request/pull/1220) update qs dependency to 2.3.1 (@FredKSchott) +- [#1212](https://github.com/request/request/pull/1212) Improve tests/test-timeout.js (@nylen) +- [#1219](https://github.com/request/request/pull/1219) remove old globalAgent workaround for node 0.4 (@request) +- [#1214](https://github.com/request/request/pull/1214) Remove cruft left over from optional dependencies (@nylen) +- [#1215](https://github.com/request/request/pull/1215) Add proxyHeaderExclusiveList option for proxy-only headers. 
(@RReverser) +- [#1211](https://github.com/request/request/pull/1211) Allow 'Host' header instead of 'host' and remember case across redirects (@nylen) +- [#1208](https://github.com/request/request/pull/1208) Improve release script (@nylen) +- [#1213](https://github.com/request/request/pull/1213) Support for custom cookie store (@nylen, @mitsuru) +- [#1197](https://github.com/request/request/pull/1197) Clean up some code around setting the agent (@FredKSchott) +- [#1209](https://github.com/request/request/pull/1209) Improve multipart form append test (@simov) +- [#1207](https://github.com/request/request/pull/1207) Update changelog (@nylen) +- [#1185](https://github.com/request/request/pull/1185) Stream multipart/related bodies (@simov) + +### v2.46.0 (2014/10/23) +- [#1198](https://github.com/request/request/pull/1198) doc for TLS/SSL protocol options (@shawnzhu) +- [#1200](https://github.com/request/request/pull/1200) Add a Gitter chat badge to README.md (@gitter-badger) +- [#1196](https://github.com/request/request/pull/1196) Upgrade taper test reporter to v0.3.0 (@nylen) +- [#1199](https://github.com/request/request/pull/1199) Fix lint error: undeclared var i (@nylen) +- [#1191](https://github.com/request/request/pull/1191) Move self.proxy decision logic out of init and into a helper (@FredKSchott) +- [#1190](https://github.com/request/request/pull/1190) Move _buildRequest() logic back into init (@FredKSchott) +- [#1186](https://github.com/request/request/pull/1186) Support Smarter Unix URL Scheme (@FredKSchott) +- [#1178](https://github.com/request/request/pull/1178) update form documentation for new usage (@FredKSchott) +- [#1180](https://github.com/request/request/pull/1180) Enable no-mixed-requires linting rule (@nylen) +- [#1184](https://github.com/request/request/pull/1184) Don't forward authorization header across redirects to different hosts (@nylen) +- [#1183](https://github.com/request/request/pull/1183) Correct README about pre and postamble CRLF using multipart and not mult... 
(@netpoetica) +- [#1179](https://github.com/request/request/pull/1179) Lint tests directory (@nylen) +- [#1169](https://github.com/request/request/pull/1169) add metadata for form-data file field (@dotcypress) +- [#1173](https://github.com/request/request/pull/1173) remove optional dependencies (@seanstrom) +- [#1165](https://github.com/request/request/pull/1165) Cleanup event listeners and remove function creation from init (@FredKSchott) +- [#1174](https://github.com/request/request/pull/1174) update the request.cookie docs to have a valid cookie example (@seanstrom) +- [#1168](https://github.com/request/request/pull/1168) create a detach helper and use detach helper in replace of nextTick (@seanstrom) +- [#1171](https://github.com/request/request/pull/1171) in post can send form data and use callback (@MiroRadenovic) +- [#1159](https://github.com/request/request/pull/1159) accept charset for x-www-form-urlencoded content-type (@seanstrom) +- [#1157](https://github.com/request/request/pull/1157) Update README.md: body with json=true (@Rob--W) +- [#1164](https://github.com/request/request/pull/1164) Disable tests/test-timeout.js on Travis (@nylen) +- [#1153](https://github.com/request/request/pull/1153) Document how to run a single test (@nylen) +- [#1144](https://github.com/request/request/pull/1144) adds documentation for the "response" event within the streaming section (@tbuchok) +- [#1162](https://github.com/request/request/pull/1162) Update eslintrc file to no longer allow past errors (@FredKSchott) +- [#1155](https://github.com/request/request/pull/1155) Support/use self everywhere (@seanstrom) +- [#1161](https://github.com/request/request/pull/1161) fix no-use-before-define lint warnings (@emkay) +- [#1156](https://github.com/request/request/pull/1156) adding curly brackets to get rid of lint errors (@emkay) +- [#1151](https://github.com/request/request/pull/1151) Fix localAddress test on OS X (@nylen) +- [#1145](https://github.com/request/request/pull/1145) documentation: fix outdated reference to setCookieSync old name in README (@FredKSchott) +- [#1131](https://github.com/request/request/pull/1131) Update pool documentation (@FredKSchott) +- [#1143](https://github.com/request/request/pull/1143) Rewrite all tests to use tape (@nylen) +- [#1137](https://github.com/request/request/pull/1137) Add ability to specifiy querystring lib in options. 
(@jgrund) +- [#1138](https://github.com/request/request/pull/1138) allow hostname and port in place of host on uri (@slimelabs) +- [#1134](https://github.com/request/request/pull/1134) Fix multiple redirects and `self.followRedirect` (@blakeembrey) +- [#1130](https://github.com/request/request/pull/1130) documentation fix: add note about npm test for contributing (@FredKSchott) +- [#1120](https://github.com/request/request/pull/1120) Support/refactor request setup tunnel (@seanstrom) +- [#1129](https://github.com/request/request/pull/1129) linting fix: convert double quote strings to use single quotes (@FredKSchott) +- [#1124](https://github.com/request/request/pull/1124) linting fix: remove unneccesary semi-colons (@FredKSchott) + +### v2.45.0 (2014/10/06) +- [#1128](https://github.com/request/request/pull/1128) Add test for setCookie regression (@nylen) +- [#1127](https://github.com/request/request/pull/1127) added tests around using objects as values in a query string (@bcoe) +- [#1103](https://github.com/request/request/pull/1103) Support/refactor request constructor (@nylen, @seanstrom) +- [#1119](https://github.com/request/request/pull/1119) add basic linting to request library (@FredKSchott) +- [#1121](https://github.com/request/request/pull/1121) Revert "Explicitly use sync versions of cookie functions" (@nylen) +- [#1118](https://github.com/request/request/pull/1118) linting fix: Restructure bad empty if statement (@FredKSchott) +- [#1117](https://github.com/request/request/pull/1117) Fix a bad check for valid URIs (@FredKSchott) +- [#1113](https://github.com/request/request/pull/1113) linting fix: space out operators (@FredKSchott) +- [#1116](https://github.com/request/request/pull/1116) Fix typo in `noProxyHost` definition (@FredKSchott) +- [#1114](https://github.com/request/request/pull/1114) linting fix: Added a `new` operator that was missing when creating and throwing a new error (@FredKSchott) +- [#1096](https://github.com/request/request/pull/1096) No_proxy support (@samcday) +- [#1107](https://github.com/request/request/pull/1107) linting-fix: remove unused variables (@FredKSchott) +- [#1112](https://github.com/request/request/pull/1112) linting fix: Make return values consistent and more straitforward (@FredKSchott) +- [#1111](https://github.com/request/request/pull/1111) linting fix: authPieces was getting redeclared (@FredKSchott) +- [#1105](https://github.com/request/request/pull/1105) Use strict mode in request (@FredKSchott) +- [#1110](https://github.com/request/request/pull/1110) linting fix: replace lazy '==' with more strict '===' (@FredKSchott) +- [#1109](https://github.com/request/request/pull/1109) linting fix: remove function call from if-else conditional statement (@FredKSchott) +- [#1102](https://github.com/request/request/pull/1102) Fix to allow setting a `requester` on recursive calls to `request.defaults` (@tikotzky) +- [#1095](https://github.com/request/request/pull/1095) Tweaking engines in package.json (@pdehaan) +- [#1082](https://github.com/request/request/pull/1082) Forward the socket event from the httpModule request (@seanstrom) +- [#972](https://github.com/request/request/pull/972) Clarify gzip handling in the README (@kevinoid) +- [#1089](https://github.com/request/request/pull/1089) Mention that encoding defaults to utf8, not Buffer (@stuartpb) +- [#1088](https://github.com/request/request/pull/1088) Fix cookie example in README.md and make it more clear (@pipi32167) +- [#1027](https://github.com/request/request/pull/1027) Add support for 
multipart form data in request options. (@crocket) +- [#1076](https://github.com/request/request/pull/1076) use Request.abort() to abort the request when the request has timed-out (@seanstrom) +- [#1068](https://github.com/request/request/pull/1068) add optional postamble required by .NET multipart requests (@netpoetica) + +### v2.43.0 (2014/09/18) +- [#1057](https://github.com/request/request/pull/1057) Defaults should not overwrite defined options (@davidwood) +- [#1046](https://github.com/request/request/pull/1046) Propagate datastream errors, useful in case gzip fails. (@ZJONSSON, @Janpot) +- [#1063](https://github.com/request/request/pull/1063) copy the input headers object #1060 (@finnp) +- [#1031](https://github.com/request/request/pull/1031) Explicitly use sync versions of cookie functions (@ZJONSSON) +- [#1056](https://github.com/request/request/pull/1056) Fix redirects when passing url.parse(x) as URL to convenience method (@nylen) + +### v2.42.0 (2014/09/04) +- [#1053](https://github.com/request/request/pull/1053) Fix #1051 Parse auth properly when using non-tunneling proxy (@isaacs) + +### v2.41.0 (2014/09/04) +- [#1050](https://github.com/request/request/pull/1050) Pass whitelisted headers to tunneling proxy. Organize all tunneling logic. (@isaacs, @Feldhacker) +- [#1035](https://github.com/request/request/pull/1035) souped up nodei.co badge (@rvagg) +- [#1048](https://github.com/request/request/pull/1048) Aws is now possible over a proxy (@steven-aerts) +- [#1039](https://github.com/request/request/pull/1039) extract out helper functions to a helper file (@seanstrom) +- [#1021](https://github.com/request/request/pull/1021) Support/refactor indexjs (@seanstrom) +- [#1033](https://github.com/request/request/pull/1033) Improve and document debug options (@nylen) +- [#1034](https://github.com/request/request/pull/1034) Fix readme headings (@nylen) +- [#1030](https://github.com/request/request/pull/1030) Allow recursive request.defaults (@tikotzky) +- [#1029](https://github.com/request/request/pull/1029) Fix a couple of typos (@nylen) +- [#675](https://github.com/request/request/pull/675) Checking for SSL fault on connection before reading SSL properties (@VRMink) +- [#989](https://github.com/request/request/pull/989) Added allowRedirect function. Should return true if redirect is allowed or false otherwise (@doronin) +- [#1025](https://github.com/request/request/pull/1025) [fixes #1023] Set self._ended to true once response has ended (@mridgway) +- [#1020](https://github.com/request/request/pull/1020) Add back removed debug metadata (@FredKSchott) +- [#1008](https://github.com/request/request/pull/1008) Moving to module instead of cutomer buffer concatenation. (@mikeal) +- [#770](https://github.com/request/request/pull/770) Added dependency badge for README file; (@timgluz, @mafintosh, @lalitkapoor, @stash, @bobyrizov) +- [#1016](https://github.com/request/request/pull/1016) toJSON no longer results in an infinite loop, returns simple objects (@FredKSchott) +- [#1018](https://github.com/request/request/pull/1018) Remove pre-0.4.4 HTTPS fix (@mmalecki) +- [#1006](https://github.com/request/request/pull/1006) Migrate to caseless, fixes #1001 (@mikeal) +- [#995](https://github.com/request/request/pull/995) Fix parsing array of objects (@sjonnet19) +- [#999](https://github.com/request/request/pull/999) Fix fallback for browserify for optional modules. 
(@eiriksm) +- [#996](https://github.com/request/request/pull/996) Wrong oauth signature when multiple same param keys exist [updated] (@bengl, @hyjin) + +### v2.40.0 (2014/08/06) +- [#992](https://github.com/request/request/pull/992) Fix security vulnerability. Update qs (@poeticninja) +- [#988](https://github.com/request/request/pull/988) “--” -> “—” (@upisfree) +- [#987](https://github.com/request/request/pull/987) Show optional modules as being loaded by the module that reqeusted them (@iarna) + +### v2.39.0 (2014/07/24) +- [#976](https://github.com/request/request/pull/976) Update README.md (@pvoznenko) + +### v2.38.0 (2014/07/22) +- [#952](https://github.com/request/request/pull/952) Adding support to client certificate with proxy use case (@ofirshaked) +- [#884](https://github.com/request/request/pull/884) Documented tough-cookie installation. (@wbyoung) +- [#935](https://github.com/request/request/pull/935) Correct repository url (@fritx) +- [#963](https://github.com/request/request/pull/963) Update changelog (@nylen) +- [#960](https://github.com/request/request/pull/960) Support gzip with encoding on node pre-v0.9.4 (@kevinoid) +- [#953](https://github.com/request/request/pull/953) Add async Content-Length computation when using form-data (@LoicMahieu) +- [#844](https://github.com/request/request/pull/844) Add support for HTTP[S]_PROXY environment variables. Fixes #595. (@jvmccarthy) +- [#946](https://github.com/request/request/pull/946) defaults: merge headers (@aj0strow) + +### v2.37.0 (2014/07/07) +- [#957](https://github.com/request/request/pull/957) Silence EventEmitter memory leak warning #311 (@watson) +- [#955](https://github.com/request/request/pull/955) check for content-length header before setting it in nextTick (@camilleanne) +- [#951](https://github.com/request/request/pull/951) Add support for gzip content decoding (@kevinoid) +- [#949](https://github.com/request/request/pull/949) Manually enter querystring in form option (@charlespwd) +- [#944](https://github.com/request/request/pull/944) Make request work with browserify (@eiriksm) +- [#943](https://github.com/request/request/pull/943) New mime module (@eiriksm) +- [#927](https://github.com/request/request/pull/927) Bump version of hawk dep. 
(@samccone) +- [#907](https://github.com/request/request/pull/907) append secureOptions to poolKey (@medovob) + +### v2.35.0 (2014/05/17) +- [#901](https://github.com/request/request/pull/901) Fixes #555 (@pigulla) +- [#897](https://github.com/request/request/pull/897) merge with default options (@vohof) +- [#891](https://github.com/request/request/pull/891) fixes 857 - options object is mutated by calling request (@lalitkapoor) +- [#869](https://github.com/request/request/pull/869) Pipefilter test (@tgohn) +- [#866](https://github.com/request/request/pull/866) Fix typo (@dandv) +- [#861](https://github.com/request/request/pull/861) Add support for RFC 6750 Bearer Tokens (@phedny) +- [#809](https://github.com/request/request/pull/809) upgrade tunnel-proxy to 0.4.0 (@ksato9700) +- [#850](https://github.com/request/request/pull/850) Fix word consistency in readme (@0xNobody) +- [#810](https://github.com/request/request/pull/810) add some exposition to mpu example in README.md (@mikermcneil) +- [#840](https://github.com/request/request/pull/840) improve error reporting for invalid protocols (@FND) +- [#821](https://github.com/request/request/pull/821) added secureOptions back (@nw) +- [#815](https://github.com/request/request/pull/815) Create changelog based on pull requests (@lalitkapoor) + +### v2.34.0 (2014/02/18) +- [#516](https://github.com/request/request/pull/516) UNIX Socket URL Support (@lyuzashi) +- [#801](https://github.com/request/request/pull/801) 794 ignore cookie parsing and domain errors (@lalitkapoor) +- [#802](https://github.com/request/request/pull/802) Added the Apache license to the package.json. (@keskival) +- [#793](https://github.com/request/request/pull/793) Adds content-length calculation when submitting forms using form-data li... (@Juul) +- [#785](https://github.com/request/request/pull/785) Provide ability to override content-type when `json` option used (@vvo) +- [#781](https://github.com/request/request/pull/781) simpler isReadStream function (@joaojeronimo) + +### v2.32.0 (2014/01/16) +- [#767](https://github.com/request/request/pull/767) Use tough-cookie CookieJar sync API (@stash) +- [#764](https://github.com/request/request/pull/764) Case-insensitive authentication scheme (@bobyrizov) +- [#763](https://github.com/request/request/pull/763) Upgrade tough-cookie to 0.10.0 (@stash) +- [#744](https://github.com/request/request/pull/744) Use Cookie.parse (@lalitkapoor) +- [#757](https://github.com/request/request/pull/757) require aws-sign2 (@mafintosh) + +### v2.31.0 (2014/01/08) +- [#645](https://github.com/request/request/pull/645) update twitter api url to v1.1 (@mick) +- [#746](https://github.com/request/request/pull/746) README: Markdown code highlight (@weakish) +- [#745](https://github.com/request/request/pull/745) updating setCookie example to make it clear that the callback is required (@emkay) +- [#742](https://github.com/request/request/pull/742) Add note about JSON output body type (@iansltx) +- [#741](https://github.com/request/request/pull/741) README example is using old cookie jar api (@emkay) +- [#736](https://github.com/request/request/pull/736) Fix callback arguments documentation (@mmalecki) + +### v2.30.0 (2013/12/13) +- [#732](https://github.com/request/request/pull/732) JSHINT: Creating global 'for' variable. Should be 'for (var ...'. 
(@Fritz-Lium) +- [#730](https://github.com/request/request/pull/730) better HTTP DIGEST support (@dai-shi) +- [#728](https://github.com/request/request/pull/728) Fix TypeError when calling request.cookie (@scarletmeow) + +### v2.29.0 (2013/12/06) +- [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris) + +### v2.28.0 (2013/12/04) +- [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort) +- [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit) +- [#715](https://github.com/request/request/pull/715) Request.multipart no longer crashes when header 'Content-type' present (@pastaclub) +- [#710](https://github.com/request/request/pull/710) Fixing listing in callback part of docs. (@lukasz-zak) +- [#696](https://github.com/request/request/pull/696) Edited README.md for formatting and clarity of phrasing (@Zearin) +- [#694](https://github.com/request/request/pull/694) Typo in README (@VRMink) +- [#690](https://github.com/request/request/pull/690) Handle blank password in basic auth. (@diversario) +- [#682](https://github.com/request/request/pull/682) Optional dependencies (@Turbo87) +- [#683](https://github.com/request/request/pull/683) Travis CI support (@Turbo87) +- [#674](https://github.com/request/request/pull/674) change cookie module,to tough-cookie.please check it . (@sxyizhiren) +- [#666](https://github.com/request/request/pull/666) make `ciphers` and `secureProtocol` to work in https request (@richarddong) +- [#656](https://github.com/request/request/pull/656) Test case for #304. (@diversario) +- [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar) +- [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm) +- [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl) + +### v2.27.0 (2013/08/15) +- [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo) + +### v2.26.0 (2013/08/07) +- [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander) +- [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker) + +### v2.24.0 (2013/07/23) +- [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath) +- [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK) +- [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko) + +### v2.23.0 (2013/07/23) +- [#589](https://github.com/request/request/pull/589) Prevent setting headers after they are sent (@geek) +- [#587](https://github.com/request/request/pull/587) Global cookie jar disabled by default (@threepointone) +- [#544](https://github.com/request/request/pull/544) Update http-signature version. (@davidlehn) +- [#581](https://github.com/request/request/pull/581) Fix spelling of "ignoring." 
(@bigeasy) +- [#568](https://github.com/request/request/pull/568) use agentOptions to create agent when specified in request (@SamPlacette) +- [#564](https://github.com/request/request/pull/564) Fix redirections (@criloz) +- [#541](https://github.com/request/request/pull/541) The exported request function doesn't have an auth method (@tschaub) +- [#542](https://github.com/request/request/pull/542) Expose Request class (@regality) +- [#536](https://github.com/request/request/pull/536) Allow explicitly empty user field for basic authentication. (@mikeando) +- [#532](https://github.com/request/request/pull/532) fix typo (@fredericosilva) +- [#497](https://github.com/request/request/pull/497) Added redirect event (@Cauldrath) +- [#503](https://github.com/request/request/pull/503) Fix basic auth for passwords that contain colons (@tonistiigi) +- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway421) +- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka) +- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway421) +- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway421) +- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun) +- [#510](https://github.com/request/request/pull/510) Add HTTP Signature support. (@davidlehn) +- [#502](https://github.com/request/request/pull/502) Fix POST (and probably other) requests that are retried after 401 Unauthorized (@nylen) +- [#508](https://github.com/request/request/pull/508) Honor the .strictSSL option when using proxies (tunnel-agent) (@jhs) +- [#512](https://github.com/request/request/pull/512) Make password optional to support the format: http://username@hostname/ (@pajato1) +- [#513](https://github.com/request/request/pull/513) add 'localAddress' support (@yyfrankyy) +- [#498](https://github.com/request/request/pull/498) Moving response emit above setHeaders on destination streams (@kenperkins) +- [#490](https://github.com/request/request/pull/490) Empty response body (3-rd argument) must be passed to callback as an empty string (@Olegas) +- [#479](https://github.com/request/request/pull/479) Changing so if Accept header is explicitly set, sending json does not ov... (@RoryH) +- [#475](https://github.com/request/request/pull/475) Use `unescape` from `querystring` (@shimaore) +- [#473](https://github.com/request/request/pull/473) V0.10 compat (@isaacs) +- [#471](https://github.com/request/request/pull/471) Using querystring library from visionmedia (@kbackowski) +- [#461](https://github.com/request/request/pull/461) Strip the UTF8 BOM from a UTF encoded response (@kppullin) +- [#460](https://github.com/request/request/pull/460) hawk 0.10.0 (@hueniverse) +- [#462](https://github.com/request/request/pull/462) if query params are empty, then request path shouldn't end with a '?' (merges cleanly now) (@jaipandya) +- [#456](https://github.com/request/request/pull/456) hawk 0.9.0 (@hueniverse) +- [#429](https://github.com/request/request/pull/429) Copy options before adding callback. 
(@nrn, @nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) +- [#454](https://github.com/request/request/pull/454) Destroy the response if present when destroying the request (clean merge) (@mafintosh) +- [#310](https://github.com/request/request/pull/310) Twitter Oauth Stuff Out of Date; Now Updated (@joemccann, @isaacs, @mscdex) +- [#413](https://github.com/request/request/pull/413) rename googledoodle.png to .jpg (@nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) +- [#448](https://github.com/request/request/pull/448) Convenience method for PATCH (@mloar) +- [#444](https://github.com/request/request/pull/444) protect against double callbacks on error path (@spollack) +- [#433](https://github.com/request/request/pull/433) Added support for HTTPS cert & key (@mmalecki) +- [#430](https://github.com/request/request/pull/430) Respect specified {Host,host} headers, not just {host} (@andrewschaaf) +- [#415](https://github.com/request/request/pull/415) Fixed a typo. (@jerem) +- [#338](https://github.com/request/request/pull/338) Add more auth options, including digest support (@nylen) +- [#403](https://github.com/request/request/pull/403) Optimize environment lookup to happen once only (@mmalecki) +- [#398](https://github.com/request/request/pull/398) Add more reporting to tests (@mmalecki) +- [#388](https://github.com/request/request/pull/388) Ensure "safe" toJSON doesn't break EventEmitters (@othiym23) +- [#381](https://github.com/request/request/pull/381) Resolving "Invalid signature. Expected signature base string: " (@landeiro) +- [#380](https://github.com/request/request/pull/380) Fixes missing host header on retried request when using forever agent (@mac-) +- [#376](https://github.com/request/request/pull/376) Headers lost on redirect (@kapetan) +- [#375](https://github.com/request/request/pull/375) Fix for missing oauth_timestamp parameter (@jplock) +- [#374](https://github.com/request/request/pull/374) Correct Host header for proxy tunnel CONNECT (@youurayy) +- [#370](https://github.com/request/request/pull/370) Twitter reverse auth uses x_auth_mode not x_auth_type (@drudge) +- [#369](https://github.com/request/request/pull/369) Don't remove x_auth_mode for Twitter reverse auth (@drudge) +- [#344](https://github.com/request/request/pull/344) Make AWS auth signing find headers correctly (@nlf) +- [#363](https://github.com/request/request/pull/363) rfc3986 on base_uri, now passes tests (@jeffmarshall) +- [#362](https://github.com/request/request/pull/362) Running `rfc3986` on `base_uri` in `oauth.hmacsign` instead of just `encodeURIComponent` (@jeffmarshall) +- [#361](https://github.com/request/request/pull/361) Don't create a Content-Length header if we already have it set (@danjenkins) +- [#360](https://github.com/request/request/pull/360) Delete self._form along with everything else on redirect (@jgautier) +- [#355](https://github.com/request/request/pull/355) stop sending erroneous headers on redirected requests (@azylman) +- [#332](https://github.com/request/request/pull/332) Fix #296 - Only set Content-Type if body exists (@Marsup) +- [#343](https://github.com/request/request/pull/343) Allow AWS to work in more situations, added a note in the README on its usage (@nlf) +- [#320](https://github.com/request/request/pull/320) request.defaults() doesn't need to wrap jar() (@StuartHarris) +- [#322](https://github.com/request/request/pull/322) Fix + test for piped into request bumped into redirect. 
#321 (@alexindigo) +- [#326](https://github.com/request/request/pull/326) Do not try to remove listener from an undefined connection (@strk) +- [#318](https://github.com/request/request/pull/318) Pass servername to tunneling secure socket creation (@isaacs) +- [#317](https://github.com/request/request/pull/317) Workaround for #313 (@isaacs) +- [#293](https://github.com/request/request/pull/293) Allow parser errors to bubble up to request (@mscdex) +- [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs) +- [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1) +- [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs) +- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas) +- [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike) +- [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. (@bcherry) +- [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek) +- [#282](https://github.com/request/request/pull/282) OAuth Authorization header contains non-"oauth_" parameters (@jplock) +- [#279](https://github.com/request/request/pull/279) fix tests with boundary by injecting boundry from header (@benatkin) +- [#273](https://github.com/request/request/pull/273) Pipe back pressure issue (@mafintosh) +- [#268](https://github.com/request/request/pull/268) I'm not OCD seriously (@TehShrike) +- [#263](https://github.com/request/request/pull/263) Bug in OAuth key generation for sha1 (@nanodocumet) +- [#265](https://github.com/request/request/pull/265) uncaughtException when redirected to invalid URI (@naholyr) +- [#262](https://github.com/request/request/pull/262) JSON test should check for equality (@timshadel) +- [#261](https://github.com/request/request/pull/261) Setting 'pool' to 'false' does NOT disable Agent pooling (@timshadel) +- [#249](https://github.com/request/request/pull/249) Fix for the fix of your (closed) issue #89 where self.headers[content-length] is set to 0 for all methods (@sethbridges, @polotek, @zephrax, @jeromegn) +- [#255](https://github.com/request/request/pull/255) multipart allow body === '' ( the empty string ) (@Filirom1) +- [#260](https://github.com/request/request/pull/260) fixed just another leak of 'i' (@sreuter) +- [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn) +- [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax) +- [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek) +- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso) +- [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom) +- [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup) +- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@milewise) +- [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs) +- [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits (@isaacs) +- 
[#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs) +- [#193](https://github.com/request/request/pull/193) Fixes GH-119 (@goatslacker) +- [#188](https://github.com/request/request/pull/188) Add abort support to the returned request (@itay) +- [#176](https://github.com/request/request/pull/176) Querystring option (@csainty) +- [#182](https://github.com/request/request/pull/182) Fix request.defaults to support (uri, options, callback) api (@twilson63) +- [#180](https://github.com/request/request/pull/180) Modified the post, put, head and del shortcuts to support uri optional param (@twilson63) +- [#179](https://github.com/request/request/pull/179) fix to add opts in .pipe(stream, opts) (@substack) +- [#177](https://github.com/request/request/pull/177) Issue #173 Support uri as first and optional config as second argument (@twilson63) +- [#170](https://github.com/request/request/pull/170) can't create a cookie in a wrapped request (defaults) (@fabianonunes) +- [#168](https://github.com/request/request/pull/168) Picking off an EasyFix by adding some missing mimetypes. (@serby) +- [#161](https://github.com/request/request/pull/161) Fix cookie jar/headers.cookie collision (#125) (@papandreou) +- [#162](https://github.com/request/request/pull/162) Fix issue #159 (@dpetukhov) +- [#90](https://github.com/request/request/pull/90) add option followAllRedirects to follow post/put redirects (@jroes) +- [#148](https://github.com/request/request/pull/148) Retry Agent (@thejh) +- [#146](https://github.com/request/request/pull/146) Multipart should respect content-type if previously set (@apeace) +- [#144](https://github.com/request/request/pull/144) added "form" option to readme (@petejkim) +- [#133](https://github.com/request/request/pull/133) Fixed cookies parsing (@afanasy) +- [#135](https://github.com/request/request/pull/135) host vs hostname (@iangreenleaf) +- [#132](https://github.com/request/request/pull/132) return the body as a Buffer when encoding is set to null (@jahewson) +- [#112](https://github.com/request/request/pull/112) Support using a custom http-like module (@jhs) +- [#104](https://github.com/request/request/pull/104) Cookie handling contains bugs (@janjongboom) +- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman) +- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden) +- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs) +- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer) +- [#105](https://github.com/request/request/pull/105) added test for proxy option. 
(@dominictarr) +- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex) +- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs) +- [#96](https://github.com/request/request/pull/96) Authless parsed url host support (@isaacs) +- [#81](https://github.com/request/request/pull/81) Enhance redirect handling (@danmactough) +- [#78](https://github.com/request/request/pull/78) Don't try to do strictSSL for non-ssl connections (@isaacs) +- [#76](https://github.com/request/request/pull/76) Bug when a request fails and a timeout is set (@Marsup) +- [#70](https://github.com/request/request/pull/70) add test script to package.json (@isaacs, @aheckmann) +- [#73](https://github.com/request/request/pull/73) Fix #71 Respect the strictSSL flag (@isaacs) +- [#69](https://github.com/request/request/pull/69) Flatten chunked requests properly (@isaacs) +- [#67](https://github.com/request/request/pull/67) fixed global variable leaks (@aheckmann) +- [#66](https://github.com/request/request/pull/66) Do not overwrite established content-type headers for read stream deliver (@voodootikigod) +- [#53](https://github.com/request/request/pull/53) Parse json: Issue #51 (@benatkin) +- [#45](https://github.com/request/request/pull/45) Added timeout option (@mbrevoort) +- [#35](https://github.com/request/request/pull/35) The "end" event isn't emitted for some responses (@voxpelli) +- [#31](https://github.com/request/request/pull/31) Error on piping a request to a destination (@tobowers) \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/request/CONTRIBUTING.md b/node_modules/fsevents/node_modules/request/CONTRIBUTING.md new file mode 100644 index 0000000..8aa6999 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/CONTRIBUTING.md @@ -0,0 +1,81 @@ + +# Contributing to Request + +:+1::tada: First off, thanks for taking the time to contribute! :tada::+1: + +The following is a set of guidelines for contributing to Request and its packages, which are hosted in the [Request Organization](https://github.com/request) on GitHub. +These are just guidelines, not rules, use your best judgment and feel free to propose changes to this document in a pull request. + + +## Submitting an Issue + +1. Provide a small self **sufficient** code example to **reproduce** the issue. +2. Run your test code using [request-debug](https://github.com/request/request-debug) and copy/paste the results inside the issue. +3. You should **always** use fenced code blocks when submitting code examples or any other formatted output: +
+  ```js
+  put your javascript code here
+  ```
+
+  ```
+  put any other formatted output here,
+  like for example the one returned from using request-debug
+  ```
+  
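+As a supplement to step 2 above, a minimal sketch of wiring up [request-debug](https://github.com/request/request-debug) before reproducing the problem might look like this (the target URL is only a placeholder):
+
+```js
+var request = require('request');
+
+// Hook request-debug into request so every request, redirect and response
+// is logged; copy/paste that output into the issue.
+require('request-debug')(request);
+
+request('http://example.com/endpoint-under-test', function (error, response, body) {
+  console.log(error, response && response.statusCode);
+});
+```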
+ +If the problem cannot be reliably reproduced, the issue will be marked as `Not enough info (see CONTRIBUTING.md)`. + +If the problem is not related to request the issue will be marked as `Help (please use Stackoverflow)`. + + +## Submitting a Pull Request + +1. In almost all of the cases your PR **needs tests**. Make sure you have any. +2. Run `npm test` locally. Fix any errors before pushing to GitHub. +3. After submitting the PR a build will be triggered on TravisCI. Wait for it to ends and make sure all jobs are passing. + + +----------------------------------------- + + +## Becoming a Contributor + +Individuals making significant and valuable contributions are given +commit-access to the project to contribute as they see fit. This project is +more like an open wiki than a standard guarded open source project. + + +## Rules + +There are a few basic ground-rules for contributors: + +1. **No `--force` pushes** or modifying the Git history in any way. +1. **Non-master branches** ought to be used for ongoing work. +1. **Any** change should be added through Pull Request. +1. **External API changes and significant modifications** ought to be subject + to an **internal pull-request** to solicit feedback from other contributors. +1. Internal pull-requests to solicit feedback are *encouraged* for any other + non-trivial contribution but left to the discretion of the contributor. +1. For significant changes wait a full 24 hours before merging so that active + contributors who are distributed throughout the world have a chance to weigh + in. +1. Contributors should attempt to adhere to the prevailing code-style. +1. Run `npm test` locally before submitting your PR, to catch any easy to miss + style & testing issues. To diagnose test failures, there are two ways to + run a single test file: + - `node_modules/.bin/taper tests/test-file.js` - run using the default + [`taper`](https://github.com/nylen/taper) test reporter. + - `node tests/test-file.js` - view the raw + [tap](https://testanything.org/) output. + + +## Releases + +Declaring formal releases remains the prerogative of the project maintainer. + + +## Changes to this arrangement + +This is an experiment and feedback is welcome! This document may also be +subject to pull-requests or changes by contributors where you believe you have +something valuable to add or change. diff --git a/node_modules/fsevents/node_modules/request/LICENSE b/node_modules/fsevents/node_modules/request/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/fsevents/node_modules/request/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
+ +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/request/README.md b/node_modules/fsevents/node_modules/request/README.md new file mode 100644 index 0000000..81ecac5 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/README.md @@ -0,0 +1,1097 @@ + +# Request - Simplified HTTP client + +[![npm package](https://nodei.co/npm/request.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/request/) + +[![Build status](https://img.shields.io/travis/request/request/master.svg?style=flat-square)](https://travis-ci.org/request/request) +[![Coverage](https://img.shields.io/codecov/c/github/request/request.svg?style=flat-square)](https://codecov.io/github/request/request?branch=master) +[![Coverage](https://img.shields.io/coveralls/request/request.svg?style=flat-square)](https://coveralls.io/r/request/request) +[![Dependency Status](https://img.shields.io/david/request/request.svg?style=flat-square)](https://david-dm.org/request/request) +[![Known Vulnerabilities](https://snyk.io/test/npm/request/badge.svg?style=flat-square)](https://snyk.io/test/npm/request) +[![Gitter](https://img.shields.io/badge/gitter-join_chat-blue.svg?style=flat-square)](https://gitter.im/request/request?utm_source=badge) + + +## Super simple to use + +Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default. + +```js +var request = require('request'); +request('http://www.google.com', function (error, response, body) { + if (!error && response.statusCode == 200) { + console.log(body) // Show the HTML for the Google homepage. 
+ } +}) +``` + + +## Table of contents + +- [Streaming](#streaming) +- [Forms](#forms) +- [HTTP Authentication](#http-authentication) +- [Custom HTTP Headers](#custom-http-headers) +- [OAuth Signing](#oauth-signing) +- [Proxies](#proxies) +- [Unix Domain Sockets](#unix-domain-sockets) +- [TLS/SSL Protocol](#tlsssl-protocol) +- [Support for HAR 1.2](#support-for-har-12) +- [**All Available Options**](#requestoptions-callback) + +Request also offers [convenience methods](#convenience-methods) like +`request.defaults` and `request.post`, and there are +lots of [usage examples](#examples) and several +[debugging techniques](#debugging). + + +--- + + +## Streaming + +You can stream any response to a file stream. + +```js +request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png')) +``` + +You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one). + +```js +fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json')) +``` + +Request can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers. + +```js +request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png')) +``` + +Request emits a "response" event when a response is received. The `response` argument will be an instance of [http.IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage). + +```js +request + .get('http://google.com/img.png') + .on('response', function(response) { + console.log(response.statusCode) // 200 + console.log(response.headers['content-type']) // 'image/png' + }) + .pipe(request.put('http://mysite.com/img.png')) +``` + +To easily handle errors when streaming requests, listen to the `error` event before piping: + +```js +request + .get('http://mysite.com/doodle.png') + .on('error', function(err) { + console.log(err) + }) + .pipe(fs.createWriteStream('doodle.png')) +``` + +Now let’s get fancy. + +```js +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + if (req.method === 'PUT') { + req.pipe(request.put('http://mysite.com/doodle.png')) + } else if (req.method === 'GET' || req.method === 'HEAD') { + request.get('http://mysite.com/doodle.png').pipe(resp) + } + } +}) +``` + +You can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent. Which means that, if you don't really care about security, you can do: + +```js +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + var x = request('http://mysite.com/doodle.png') + req.pipe(x) + x.pipe(resp) + } +}) +``` + +And since `pipe()` returns the destination stream in ≥ Node 0.5.x you can do one line proxying. :) + +```js +req.pipe(request('http://mysite.com/doodle.png')).pipe(resp) +``` + +Also, none of this new functionality conflicts with requests previous features, it just expands them. + +```js +var r = request.defaults({'proxy':'http://localproxy.com'}) + +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + r.get('http://google.com/doodle.png').pipe(resp) + } +}) +``` + +You can still use intermediate proxies, the requests will still follow HTTP forwards, etc. 
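+As a small additional sketch (building on the `response` and `error` examples above, using the same placeholder URL), you can abort the transfer when the status code is not the one you expect:
+
+```js
+var fs = require('fs');
+var request = require('request');
+
+request
+  .get('http://mysite.com/doodle.png')
+  .on('response', function (response) {
+    if (response.statusCode !== 200) {
+      // `this` is the Request instance; abort so the body is not streamed to disk
+      this.abort();
+    }
+  })
+  .on('error', function (err) {
+    console.log(err);
+  })
+  .pipe(fs.createWriteStream('doodle.png'));
+```
+
+Note that the destination file is still created (and may be empty) if the request is aborted; the sketch only stops further data from being written.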
+ +[back to top](#table-of-contents) + + +--- + + +## Forms + +`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API. + + +#### application/x-www-form-urlencoded (URL-Encoded Forms) + +URL-encoded forms are simple. + +```js +request.post('http://service.com/upload', {form:{key:'value'}}) +// or +request.post('http://service.com/upload').form({key:'value'}) +// or +request.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ }) +``` + + +#### multipart/form-data (Multipart Form Uploads) + +For `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). In most cases, you can pass your upload form data via the `formData` option. + + +```js +var formData = { + // Pass a simple key-value pair + my_field: 'my_value', + // Pass data via Buffers + my_buffer: new Buffer([1, 2, 3]), + // Pass data via Streams + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), + // Pass multiple values with an Array + attachments: [ + fs.createReadStream(__dirname + '/attachment1.jpg'), + fs.createReadStream(__dirname + '/attachment2.jpg') + ], + // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS} + // Use case: for some types of streams, you'll need to provide "file"-related information manually. + // See the `form-data` README for more information about options: https://github.com/form-data/form-data + custom_file: { + value: fs.createReadStream('/dev/urandom'), + options: { + filename: 'topsecret.jpg', + contentType: 'image/jpg' + } + } +}; +request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that calling `form()` will clear the currently set form data for that request.) + +```js +// NOTE: Advanced use-case, for normal use see 'formData' usage above +var r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...}) +var form = r.form(); +form.append('my_field', 'my_value'); +form.append('my_buffer', new Buffer([1, 2, 3])); +form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'}); +``` +See the [form-data README](https://github.com/form-data/form-data) for more information & examples. + + +#### multipart/related + +Some variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preamble or postamble CRLF by setting the `preambleCRLF` or `postambleCRLF` options to `true`.
+ +```js + request({ + method: 'PUT', + preambleCRLF: true, + postambleCRLF: true, + uri: 'http://service.com/upload', + multipart: [ + { + 'content-type': 'application/json', + body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + }, + { body: 'I am an attachment' }, + { body: fs.createReadStream('image.png') } + ], + // alternatively pass an object containing additional options + multipart: { + chunked: false, + data: [ + { + 'content-type': 'application/json', + body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + }, + { body: 'I am an attachment' } + ] + } + }, + function (error, response, body) { + if (error) { + return console.error('upload failed:', error); + } + console.log('Upload successful! Server responded with:', body); + }) +``` + +[back to top](#table-of-contents) + + +--- + + +## HTTP Authentication + +```js +request.get('http://some.server.com/').auth('username', 'password', false); +// or +request.get('http://some.server.com/', { + 'auth': { + 'user': 'username', + 'pass': 'password', + 'sendImmediately': false + } +}); +// or +request.get('http://some.server.com/').auth(null, null, true, 'bearerToken'); +// or +request.get('http://some.server.com/', { + 'auth': { + 'bearer': 'bearerToken' + } +}); +``` + +If passed as an option, `auth` should be a hash containing values: + +- `user` || `username` +- `pass` || `password` +- `sendImmediately` (optional) +- `bearer` (optional) + +The method form takes parameters +`auth(username, password, sendImmediately, bearer)`. + +`sendImmediately` defaults to `true`, which causes a basic or bearer +authentication header to be sent. If `sendImmediately` is `false`, then +`request` will retry with a proper authentication header after receiving a +`401` response from the server (which must contain a `WWW-Authenticate` header +indicating the required authentication method). + +Note that you can also specify basic authentication using the URL itself, as +detailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). Simply pass the +`user:password` before the host with an `@` sign: + +```js +var username = 'username', + password = 'password', + url = 'http://' + username + ':' + password + '@some.server.com'; + +request({url: url}, function (error, response, body) { + // Do more stuff with 'body' here +}); +``` + +Digest authentication is supported, but it only works with `sendImmediately` +set to `false`; otherwise `request` will send basic authentication on the +initial request, which will probably cause the request to fail. + +Bearer authentication is supported, and is activated when the `bearer` value is +available. The value may be either a `String` or a `Function` returning a +`String`. Using a function to supply the bearer token is particularly useful if +used in conjunction with `defaults` to allow a single function to supply the +last known token at the time of sending a request, or to compute one on the fly. + +[back to top](#table-of-contents) + + +--- + + +## Custom HTTP Headers + +HTTP Headers, such as `User-Agent`, can be set in the `options` object. +In the example below, we call the github API to find out the number +of stars and forks for the request repository. This requires a +custom `User-Agent` header as well as https. 
+ +```js +var request = require('request'); + +var options = { + url: 'https://api.github.com/repos/request/request', + headers: { + 'User-Agent': 'request' + } +}; + +function callback(error, response, body) { + if (!error && response.statusCode == 200) { + var info = JSON.parse(body); + console.log(info.stargazers_count + " Stars"); + console.log(info.forks_count + " Forks"); + } +} + +request(options, callback); +``` + +[back to top](#table-of-contents) + + +--- + + +## OAuth Signing + +[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. The +default signing algorithm is +[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2): + +```js +// OAuth1.0 - 3-legged server side flow (Twitter example) +// step 1 +var qs = require('querystring') + , oauth = + { callback: 'http://mysite.com/callback/' + , consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + } + , url = 'https://api.twitter.com/oauth/request_token' + ; +request.post({url:url, oauth:oauth}, function (e, r, body) { + // Ideally, you would take the body in the response + // and construct a URL that a user clicks on (like a sign in button). + // The verifier is only available in the response after a user has + // verified with twitter that they are authorizing your app. + + // step 2 + var req_data = qs.parse(body) + var uri = 'https://api.twitter.com/oauth/authenticate' + + '?' + qs.stringify({oauth_token: req_data.oauth_token}) + // redirect the user to the authorize uri + + // step 3 + // after the user is redirected back to your server + var auth_data = qs.parse(body) + , oauth = + { consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + , token: auth_data.oauth_token + , token_secret: req_data.oauth_token_secret + , verifier: auth_data.oauth_verifier + } + , url = 'https://api.twitter.com/oauth/access_token' + ; + request.post({url:url, oauth:oauth}, function (e, r, body) { + // ready to make signed requests on behalf of the user + var perm_data = qs.parse(body) + , oauth = + { consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + , token: perm_data.oauth_token + , token_secret: perm_data.oauth_token_secret + } + , url = 'https://api.twitter.com/1.1/users/show.json' + , qs = + { screen_name: perm_data.screen_name + , user_id: perm_data.user_id + } + ; + request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) { + console.log(user) + }) + }) +}) +``` + +For [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make +the following changes to the OAuth options object: +* Pass `signature_method : 'RSA-SHA1'` +* Instead of `consumer_secret`, specify a `private_key` string in + [PEM format](http://how2ssl.com/articles/working_with_pem_files/) + +For [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make +the following changes to the OAuth options object: +* Pass `signature_method : 'PLAINTEXT'` + +To send OAuth parameters via query params or in a post body as described in The +[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param) +section of the oauth1 spec: +* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth + options object. 
+* `transport_method` defaults to `'header'` + +To use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either +* Manually generate the body hash and pass it as a string `body_hash: '...'` +* Automatically generate the body hash by passing `body_hash: true` + +[back to top](#table-of-contents) + + +--- + + +## Proxies + +If you specify a `proxy` option, then the request (and any subsequent +redirects) will be sent via a connection to the proxy server. + +If your endpoint is an `https` url, and you are using a proxy, then +request will send a `CONNECT` request to the proxy server *first*, and +then use the supplied connection to connect to the endpoint. + +That is, first it will make a request like: + +``` +HTTP/1.1 CONNECT endpoint-server.com:80 +Host: proxy-server.com +User-Agent: whatever user agent you specify +``` + +and then the proxy server make a TCP connection to `endpoint-server` +on port `80`, and return a response that looks like: + +``` +HTTP/1.1 200 OK +``` + +At this point, the connection is left open, and the client is +communicating directly with the `endpoint-server.com` machine. + +See [the wikipedia page on HTTP Tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel) +for more information. + +By default, when proxying `http` traffic, request will simply make a +standard proxied `http` request. This is done by making the `url` +section of the initial line of the request a fully qualified url to +the endpoint. + +For example, it will make a single request that looks like: + +``` +HTTP/1.1 GET http://endpoint-server.com/some-url +Host: proxy-server.com +Other-Headers: all go here + +request body or whatever +``` + +Because a pure "http over http" tunnel offers no additional security +or other features, it is generally simpler to go with a +straightforward HTTP proxy in this case. However, if you would like +to force a tunneling proxy, you may set the `tunnel` option to `true`. + +You can also make a standard proxied `http` request by explicitly setting +`tunnel : false`, but **note that this will allow the proxy to see the traffic +to/from the destination server**. + +If you are using a tunneling proxy, you may set the +`proxyHeaderWhiteList` to share certain headers with the proxy. + +You can also set the `proxyHeaderExclusiveList` to share certain +headers only with the proxy and not with destination host. + +By default, this set is: + +``` +accept +accept-charset +accept-encoding +accept-language +accept-ranges +cache-control +content-encoding +content-language +content-length +content-location +content-md5 +content-range +content-type +connection +date +expect +max-forwards +pragma +proxy-authorization +referer +te +transfer-encoding +user-agent +via +``` + +Note that, when using a tunneling proxy, the `proxy-authorization` +header and any headers from custom `proxyHeaderExclusiveList` are +*never* sent to the endpoint server, but only to the proxy server. + + +### Controlling proxy behaviour using environment variables + +The following environment variables are respected by `request`: + + * `HTTP_PROXY` / `http_proxy` + * `HTTPS_PROXY` / `https_proxy` + * `NO_PROXY` / `no_proxy` + +When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. 
+It is valid to define a proxy in one of the environment variables and then override it for a specific request via the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to `false` or `null` to opt out of proxying altogether for that request.
+
+`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. They should contain a comma-separated list of hosts to exclude from proxying. It is also possible to opt out of proxying only for a particular destination port. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables.
+
+Here are some examples of valid `no_proxy` values:
+
+ * `google.com` - don't proxy HTTP/HTTPS requests to Google.
+ * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google.
+ * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo!
+ * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## UNIX Domain Sockets
+
+`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme:
+
+```js
+/* Pattern */ 'http://unix:SOCKET:PATH'
+/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path')
+```
+
+Note: The `SOCKET` path is assumed to be absolute to the root of the host file system.
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## TLS/SSL Protocol
+
+TLS/SSL Protocol options, such as `cert`, `key` and `passphrase`, can be
+set directly in the `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows for a slightly wider range of configurations, the recommended way is to set them in the `options` object directly, as `agentOptions` and `https.globalAgent.options` are not applied in the same way in proxied environments (the data travels through a TLS connection instead of an http/https agent).
+ +```js +var fs = require('fs') + , path = require('path') + , certFile = path.resolve(__dirname, 'ssl/client.crt') + , keyFile = path.resolve(__dirname, 'ssl/client.key') + , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem') + , request = require('request'); + +var options = { + url: 'https://api.some-server.com/', + cert: fs.readFileSync(certFile), + key: fs.readFileSync(keyFile), + passphrase: 'password', + ca: fs.readFileSync(caFile) +}; + +request.get(options); +``` + +### Using `options.agentOptions` + +In the example below, we call an API requires client side SSL certificate +(in PEM format) with passphrase protected private key (in PEM format) and disable the SSLv3 protocol: + +```js +var fs = require('fs') + , path = require('path') + , certFile = path.resolve(__dirname, 'ssl/client.crt') + , keyFile = path.resolve(__dirname, 'ssl/client.key') + , request = require('request'); + +var options = { + url: 'https://api.some-server.com/', + agentOptions: { + cert: fs.readFileSync(certFile), + key: fs.readFileSync(keyFile), + // Or use `pfx` property replacing `cert` and `key` when using private key, certificate and CA certs in PFX or PKCS12 format: + // pfx: fs.readFileSync(pfxFilePath), + passphrase: 'password', + securityOptions: 'SSL_OP_NO_SSLv3' + } +}; + +request.get(options); +``` + +It is able to force using SSLv3 only by specifying `secureProtocol`: + +```js +request.get({ + url: 'https://api.some-server.com/', + agentOptions: { + secureProtocol: 'SSLv3_method' + } +}); +``` + +It is possible to accept other certificates than those signed by generally allowed Certificate Authorities (CAs). +This can be useful, for example, when using self-signed certificates. +To require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`. +The certificate the domain presents must be signed by the root certificate specified: + +```js +request.get({ + url: 'https://api.some-server.com/', + agentOptions: { + ca: fs.readFileSync('ca.cert.pem') + } +}); +``` + +[back to top](#table-of-contents) + + +--- + +## Support for HAR 1.2 + +The `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`. + +a validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if not matching. + +```js + var request = require('request') + request({ + // will be ignored + method: 'GET', + uri: 'http://www.google.com', + + // HTTP Archive Request Object + har: { + url: 'http://www.mockbin.com/har', + method: 'POST', + headers: [ + { + name: 'content-type', + value: 'application/x-www-form-urlencoded' + } + ], + postData: { + mimeType: 'application/x-www-form-urlencoded', + params: [ + { + name: 'foo', + value: 'bar' + }, + { + name: 'hello', + value: 'world' + } + ] + } + } + }) + + // a POST request will be sent to http://www.mockbin.com + // with body an application/x-www-form-urlencoded body: + // foo=bar&hello=world +``` + +[back to top](#table-of-contents) + + +--- + +## request(options, callback) + +The first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional. + +- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()` +- `baseUrl` - fully qualified uri string used as the base url. 
Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string. +- `method` - http method (default: `"GET"`) +- `headers` - http headers (default: `{}`) + +--- + +- `qs` - object containing querystring values to be appended to the `uri` +- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}` +- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat` +- `useQuerystring` - If true, use `querystring` to stringify and parse + querystrings, otherwise use `qs` (default: `false`). Set this option to + `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the + default `foo[0]=bar&foo[1]=baz`. + +--- + +- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object. +- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See "Forms" section above. +- `formData` - Data to pass for a `multipart/form-data` request. See + [Forms](#forms) section above. +- `multipart` - array of objects which contain their own headers and `body` + attributes. Sends a `multipart/related` request. See [Forms](#forms) section + above. + - Alternatively you can pass in an object `{chunked: false, data: []}` where + `chunked` is used to specify whether the request is sent in + [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding) + In non-chunked requests, data items with body streams are not allowed. +- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request. +- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request. +- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON. +- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body. +- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body. + +--- + +- `auth` - A hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). 
See documentation above. +- `oauth` - Options for OAuth HMAC-SHA1 signing. See documentation above. +- `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example). +- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`. Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. **Note:** you need to `npm install aws4` first. +- `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options. + +--- + +- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise. +- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`) +- `maxRedirects` - the maximum number of redirects to follow (default: `10`) +- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if true, referer header set in the initial request is preserved during redirect chain. + +--- + +- `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.) +- `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below. +- `jar` - If `true`, remember cookies for future use (or define your custom cookie jar; see examples section) + +--- + +- `agent` - `http(s).Agent` instance to use +- `agentClass` - alternatively specify your agent's class name +- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions). +- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+ +- `pool` - An object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. 
If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified. + - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`). + - Note that if you are sending multiple requests in a loop and creating + multiple new `pool` objects, `maxSockets` will not work as intended. To + work around this, either use [`request.defaults`](#requestdefaultsoptions) + with your pool options or create the pool object with the `maxSockets` + property outside of the loop. +- `timeout` - Integer containing the number of milliseconds to wait for a +server to send response headers (and start the response body) before aborting +the request. Note that if the underlying TCP connection cannot be established, +the OS-wide TCP connection timeout will overrule the `timeout` option ([the +default in Linux can be anywhere from 20-120 seconds][linux-timeout]). + +[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout + +--- + +- `localAddress` - Local interface to bind for network connections. +- `proxy` - An HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`) +- `strictSSL` - If `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option. +- `tunnel` - controls the behavior of + [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling) + as follows: + - `undefined` (default) - `true` if the destination is `https`, `false` otherwise + - `true` - always tunnel to the destination by making a `CONNECT` request to + the proxy + - `false` - request the destination as a `GET` request. +- `proxyHeaderWhiteList` - A whitelist of headers to send to a + tunneling proxy. +- `proxyHeaderExclusiveList` - A whitelist of headers to send + exclusively to a tunneling proxy and not to destination. + +--- + +- `time` - If `true`, the request-response cycle (including all redirects) is timed at millisecond resolution, and the result provided on the response's `elapsedTime` property. +- `har` - A [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-1.2) for details)* +- `callback` - alternatively pass the request's callback in the options object + +The callback argument gets 3 arguments: + +1. An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object) +2. An [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) object +3. The third is the `response` body (`String` or `Buffer`, or JSON object if the `json` option is supplied) + +[back to top](#table-of-contents) + + +--- + +## Convenience methods + +There are also shorthand methods for different HTTP METHODs and some other conveniences. + + +### request.defaults(options) + +This method **returns a wrapper** around the normal request API that defaults +to whatever options you pass to it. + +**Note:** `request.defaults()` **does not** modify the global request API; +instead, it **returns a wrapper** that has your default settings applied to it. 
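+
+For instance, `request.defaults` pairs well with the `baseUrl` option described above. A minimal sketch (the host and endpoint paths are hypothetical):
+
+```js
+var request = require('request')
+
+// every request made through `api` is resolved against the baseUrl
+var api = request.defaults({
+  baseUrl: 'https://example.com/api/',
+  json: true
+})
+
+// GET https://example.com/api/users
+api.get('/users', function (err, res, body) {})
+
+// POST https://example.com/api/users with a JSON body
+api.post('/users', {body: {name: 'alice'}}, function (err, res, body) {})
+```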
+ +**Note:** You can call `.defaults()` on the wrapper that is returned from +`request.defaults` to add/override defaults that were previously defaulted. + +For example: +```js +//requests using baseRequest() will set the 'x-token' header +var baseRequest = request.defaults({ + headers: {'x-token': 'my-token'} +}) + +//requests using specialRequest() will include the 'x-token' header set in +//baseRequest and will also include the 'special' header +var specialRequest = baseRequest.defaults({ + headers: {special: 'special value'} +}) +``` + +### request.put + +Same as `request()`, but defaults to `method: "PUT"`. + +```js +request.put(url) +``` + +### request.patch + +Same as `request()`, but defaults to `method: "PATCH"`. + +```js +request.patch(url) +``` + +### request.post + +Same as `request()`, but defaults to `method: "POST"`. + +```js +request.post(url) +``` + +### request.head + +Same as `request()`, but defaults to `method: "HEAD"`. + +```js +request.head(url) +``` + +### request.del / request.delete + +Same as `request()`, but defaults to `method: "DELETE"`. + +```js +request.del(url) +request.delete(url) +``` + +### request.get + +Same as `request()` (for uniformity). + +```js +request.get(url) +``` +### request.cookie + +Function that creates a new cookie. + +```js +request.cookie('key1=value1') +``` +### request.jar() + +Function that creates a new cookie jar. + +```js +request.jar() +``` + +[back to top](#table-of-contents) + + +--- + + +## Debugging + +There are at least three ways to debug the operation of `request`: + +1. Launch the node process like `NODE_DEBUG=request node script.js` + (`lib,request,otherlib` works too). + +2. Set `require('request').debug = true` at any time (this does the same thing + as #1). + +3. Use the [request-debug module](https://github.com/request/request-debug) to + view request and response headers and bodies. + +[back to top](#table-of-contents) + + +--- + +## Timeouts + +Most requests to external servers should have a timeout attached, in case the +server is not responding in a timely manner. Without a timeout, your code may +have a socket open/consume resources for minutes or more. + +There are two main types of timeouts: **connection timeouts** and **read +timeouts**. A connect timeout occurs if the timeout is hit while your client is +attempting to establish a connection to a remote machine (corresponding to the +[connect() call][connect] on the socket). A read timeout occurs any time the +server is too slow to send back a part of the response. + +These two situations have widely different implications for what went wrong +with the request, so it's useful to be able to distinguish them. You can detect +timeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you +can detect whether the timeout was a connection timeout by checking if the +`err.connect` property is set to `true`. + +```js +request.get('http://10.255.255.1', {timeout: 1500}, function(err) { + console.log(err.code === 'ETIMEDOUT'); + // Set to `true` if the timeout was a connection timeout, `false` or + // `undefined` otherwise. 
+ console.log(err.connect === true); + process.exit(0); +}); +``` + +[connect]: http://linux.die.net/man/2/connect + +## Examples: + +```js + var request = require('request') + , rand = Math.floor(Math.random()*100000000).toString() + ; + request( + { method: 'PUT' + , uri: 'http://mikeal.iriscouch.com/testjs/' + rand + , multipart: + [ { 'content-type': 'application/json' + , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + } + , { body: 'I am an attachment' } + ] + } + , function (error, response, body) { + if(response.statusCode == 201){ + console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand) + } else { + console.log('error: '+ response.statusCode) + console.log(body) + } + } + ) +``` + +For backwards-compatibility, response compression is not supported by default. +To accept gzip-compressed responses, set the `gzip` option to `true`. Note +that the body data passed through `request` is automatically decompressed +while the response object is unmodified and will contain compressed data if +the server sent a compressed response. + +```js + var request = require('request') + request( + { method: 'GET' + , uri: 'http://www.google.com' + , gzip: true + } + , function (error, response, body) { + // body is the decompressed response body + console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity')) + console.log('the decoded data is: ' + body) + } + ).on('data', function(data) { + // decompressed data as it is received + console.log('decoded chunk: ' + data) + }) + .on('response', function(response) { + // unmodified http.IncomingMessage object + response.on('data', function(data) { + // compressed data as it is received + console.log('received ' + data.length + ' bytes of compressed data') + }) + }) +``` + +Cookies are disabled by default (else, they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`). + +```js +var request = request.defaults({jar: true}) +request('http://www.google.com', function () { + request('http://images.google.com') +}) +``` + +To use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`) + +```js +var j = request.jar() +var request = request.defaults({jar:j}) +request('http://www.google.com', function () { + request('http://images.google.com') +}) +``` + +OR + +```js +var j = request.jar(); +var cookie = request.cookie('key1=value1'); +var url = 'http://www.google.com'; +j.setCookie(cookie, url); +request({url: url, jar: j}, function () { + request('http://images.google.com') +}) +``` + +To use a custom cookie store (such as a +[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore) +which supports saving to and restoring from JSON files), pass it as a parameter +to `request.jar()`: + +```js +var FileCookieStore = require('tough-cookie-filestore'); +// NOTE - currently the 'cookies.json' file must already exist! +var j = request.jar(new FileCookieStore('cookies.json')); +request = request.defaults({ jar : j }) +request('http://www.google.com', function() { + request('http://images.google.com') +}) +``` + +The cookie store must be a +[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie) +store and it must support synchronous operations; see the +[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#cookiestore-api) +for details. 
+ +To inspect your cookie jar after a request: + +```js +var j = request.jar() +request({url: 'http://www.google.com', jar: j}, function () { + var cookie_string = j.getCookieString(url); // "key1=value1; key2=value2; ..." + var cookies = j.getCookies(url); + // [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...] +}) +``` + +[back to top](#table-of-contents) diff --git a/node_modules/fsevents/node_modules/request/codecov.yml b/node_modules/fsevents/node_modules/request/codecov.yml new file mode 100644 index 0000000..acd3f33 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/codecov.yml @@ -0,0 +1,2 @@ + +comment: false diff --git a/node_modules/fsevents/node_modules/request/index.js b/node_modules/fsevents/node_modules/request/index.js new file mode 100755 index 0000000..911a90d --- /dev/null +++ b/node_modules/fsevents/node_modules/request/index.js @@ -0,0 +1,157 @@ +// Copyright 2010-2012 Mikeal Rogers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict' + +var extend = require('extend') + , cookies = require('./lib/cookies') + , helpers = require('./lib/helpers') + +var isFunction = helpers.isFunction + , paramsHaveRequestBody = helpers.paramsHaveRequestBody + + +// organize params for patch, post, put, head, del +function initParams(uri, options, callback) { + if (typeof options === 'function') { + callback = options + } + + var params = {} + if (typeof options === 'object') { + extend(params, options, {uri: uri}) + } else if (typeof uri === 'string') { + extend(params, {uri: uri}) + } else { + extend(params, uri) + } + + params.callback = callback || params.callback + return params +} + +function request (uri, options, callback) { + if (typeof uri === 'undefined') { + throw new Error('undefined is not a valid uri or options object.') + } + + var params = initParams(uri, options, callback) + + if (params.method === 'HEAD' && paramsHaveRequestBody(params)) { + throw new Error('HTTP HEAD requests MUST NOT include a request body.') + } + + return new request.Request(params) +} + +function verbFunc (verb) { + var method = verb.toUpperCase() + return function (uri, options, callback) { + var params = initParams(uri, options, callback) + params.method = method + return request(params, params.callback) + } +} + +// define like this to please codeintel/intellisense IDEs +request.get = verbFunc('get') +request.head = verbFunc('head') +request.post = verbFunc('post') +request.put = verbFunc('put') +request.patch = verbFunc('patch') +request.del = verbFunc('delete') +request['delete'] = verbFunc('delete') + +request.jar = function (store) { + return cookies.jar(store) +} + +request.cookie = function (str) { + return cookies.parse(str) +} + +function wrapRequestMethod (method, options, requester, verb) { + + return function (uri, opts, callback) { + var params = initParams(uri, opts, callback) + + var target = {} + extend(true, target, options, params) + + target.pool = params.pool || options.pool + + if (verb) { + target.method = verb.toUpperCase() 
+ } + + if (isFunction(requester)) { + method = requester + } + + return method(target, target.callback) + } +} + +request.defaults = function (options, requester) { + var self = this + + options = options || {} + + if (typeof options === 'function') { + requester = options + options = {} + } + + var defaults = wrapRequestMethod(self, options, requester) + + var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete'] + verbs.forEach(function(verb) { + defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb) + }) + + defaults.cookie = wrapRequestMethod(self.cookie, options, requester) + defaults.jar = self.jar + defaults.defaults = self.defaults + return defaults +} + +request.forever = function (agentOptions, optionsArg) { + var options = {} + if (optionsArg) { + extend(options, optionsArg) + } + if (agentOptions) { + options.agentOptions = agentOptions + } + + options.forever = true + return request.defaults(options) +} + +// Exports + +module.exports = request +request.Request = require('./request') +request.initParams = initParams + +// Backwards compatibility for request.debug +Object.defineProperty(request, 'debug', { + enumerable : true, + get : function() { + return request.Request.debug + }, + set : function(debug) { + request.Request.debug = debug + } +}) diff --git a/node_modules/fsevents/node_modules/request/lib/auth.js b/node_modules/fsevents/node_modules/request/lib/auth.js new file mode 100644 index 0000000..1cb6952 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/auth.js @@ -0,0 +1,168 @@ +'use strict' + +var caseless = require('caseless') + , uuid = require('node-uuid') + , helpers = require('./helpers') + +var md5 = helpers.md5 + , toBase64 = helpers.toBase64 + + +function Auth (request) { + // define all public properties here + this.request = request + this.hasAuth = false + this.sentAuth = false + this.bearerToken = null + this.user = null + this.pass = null +} + +Auth.prototype.basic = function (user, pass, sendImmediately) { + var self = this + if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) { + self.request.emit('error', new Error('auth() received invalid user or password')) + } + self.user = user + self.pass = pass + self.hasAuth = true + var header = user + ':' + (pass || '') + if (sendImmediately || typeof sendImmediately === 'undefined') { + var authHeader = 'Basic ' + toBase64(header) + self.sentAuth = true + return authHeader + } +} + +Auth.prototype.bearer = function (bearer, sendImmediately) { + var self = this + self.bearerToken = bearer + self.hasAuth = true + if (sendImmediately || typeof sendImmediately === 'undefined') { + if (typeof bearer === 'function') { + bearer = bearer() + } + var authHeader = 'Bearer ' + (bearer || '') + self.sentAuth = true + return authHeader + } +} + +Auth.prototype.digest = function (method, path, authHeader) { + // TODO: More complete implementation of RFC 2617. + // - handle challenge.domain + // - support qop="auth-int" only + // - handle Authentication-Info (not necessarily?) + // - check challenge.stale (not necessarily?) + // - increase nc (not necessarily?) 
+ // For reference: + // http://tools.ietf.org/html/rfc2617#section-3 + // https://github.com/bagder/curl/blob/master/lib/http_digest.c + + var self = this + + var challenge = {} + var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi + for (;;) { + var match = re.exec(authHeader) + if (!match) { + break + } + challenge[match[1]] = match[2] || match[3] + } + + /** + * RFC 2617: handle both MD5 and MD5-sess algorithms. + * + * If the algorithm directive's value is "MD5" or unspecified, then HA1 is + * HA1=MD5(username:realm:password) + * If the algorithm directive's value is "MD5-sess", then HA1 is + * HA1=MD5(MD5(username:realm:password):nonce:cnonce) + */ + var ha1Compute = function (algorithm, user, realm, pass, nonce, cnonce) { + var ha1 = md5(user + ':' + realm + ':' + pass) + if (algorithm && algorithm.toLowerCase() === 'md5-sess') { + return md5(ha1 + ':' + nonce + ':' + cnonce) + } else { + return ha1 + } + } + + var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth' + var nc = qop && '00000001' + var cnonce = qop && uuid().replace(/-/g, '') + var ha1 = ha1Compute(challenge.algorithm, self.user, challenge.realm, self.pass, challenge.nonce, cnonce) + var ha2 = md5(method + ':' + path) + var digestResponse = qop + ? md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2) + : md5(ha1 + ':' + challenge.nonce + ':' + ha2) + var authValues = { + username: self.user, + realm: challenge.realm, + nonce: challenge.nonce, + uri: path, + qop: qop, + response: digestResponse, + nc: nc, + cnonce: cnonce, + algorithm: challenge.algorithm, + opaque: challenge.opaque + } + + authHeader = [] + for (var k in authValues) { + if (authValues[k]) { + if (k === 'qop' || k === 'nc' || k === 'algorithm') { + authHeader.push(k + '=' + authValues[k]) + } else { + authHeader.push(k + '="' + authValues[k] + '"') + } + } + } + authHeader = 'Digest ' + authHeader.join(', ') + self.sentAuth = true + return authHeader +} + +Auth.prototype.onRequest = function (user, pass, sendImmediately, bearer) { + var self = this + , request = self.request + + var authHeader + if (bearer === undefined && user === undefined) { + self.request.emit('error', new Error('no auth mechanism defined')) + } else if (bearer !== undefined) { + authHeader = self.bearer(bearer, sendImmediately) + } else { + authHeader = self.basic(user, pass, sendImmediately) + } + if (authHeader) { + request.setHeader('authorization', authHeader) + } +} + +Auth.prototype.onResponse = function (response) { + var self = this + , request = self.request + + if (!self.hasAuth || self.sentAuth) { return null } + + var c = caseless(response.headers) + + var authHeader = c.get('www-authenticate') + var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase() + request.debug('reauth', authVerb) + + switch (authVerb) { + case 'basic': + return self.basic(self.user, self.pass, true) + + case 'bearer': + return self.bearer(self.bearerToken, true) + + case 'digest': + return self.digest(request.method, request.path, authHeader) + } +} + +exports.Auth = Auth diff --git a/node_modules/fsevents/node_modules/request/lib/cookies.js b/node_modules/fsevents/node_modules/request/lib/cookies.js new file mode 100644 index 0000000..412c07d --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/cookies.js @@ -0,0 +1,39 @@ +'use strict' + +var tough = require('tough-cookie') + +var Cookie = tough.Cookie + , CookieJar = tough.CookieJar + + +exports.parse = function(str) { + if (str && str.uri) { + str = str.uri + } + if (typeof str 
!== 'string') { + throw new Error('The cookie function only accepts STRING as param') + } + return Cookie.parse(str, {loose: true}) +} + +// Adapt the sometimes-Async api of tough.CookieJar to our requirements +function RequestJar(store) { + var self = this + self._jar = new CookieJar(store, {looseMode: true}) +} +RequestJar.prototype.setCookie = function(cookieOrStr, uri, options) { + var self = this + return self._jar.setCookieSync(cookieOrStr, uri, options || {}) +} +RequestJar.prototype.getCookieString = function(uri) { + var self = this + return self._jar.getCookieStringSync(uri) +} +RequestJar.prototype.getCookies = function(uri) { + var self = this + return self._jar.getCookiesSync(uri) +} + +exports.jar = function(store) { + return new RequestJar(store) +} diff --git a/node_modules/fsevents/node_modules/request/lib/getProxyFromURI.js b/node_modules/fsevents/node_modules/request/lib/getProxyFromURI.js new file mode 100644 index 0000000..c2013a6 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/getProxyFromURI.js @@ -0,0 +1,79 @@ +'use strict' + +function formatHostname(hostname) { + // canonicalize the hostname, so that 'oogle.com' won't match 'google.com' + return hostname.replace(/^\.*/, '.').toLowerCase() +} + +function parseNoProxyZone(zone) { + zone = zone.trim().toLowerCase() + + var zoneParts = zone.split(':', 2) + , zoneHost = formatHostname(zoneParts[0]) + , zonePort = zoneParts[1] + , hasPort = zone.indexOf(':') > -1 + + return {hostname: zoneHost, port: zonePort, hasPort: hasPort} +} + +function uriInNoProxy(uri, noProxy) { + var port = uri.port || (uri.protocol === 'https:' ? '443' : '80') + , hostname = formatHostname(uri.hostname) + , noProxyList = noProxy.split(',') + + // iterate through the noProxyList until it finds a match. + return noProxyList.map(parseNoProxyZone).some(function(noProxyZone) { + var isMatchedAt = hostname.indexOf(noProxyZone.hostname) + , hostnameMatched = ( + isMatchedAt > -1 && + (isMatchedAt === hostname.length - noProxyZone.hostname.length) + ) + + if (noProxyZone.hasPort) { + return (port === noProxyZone.port) && hostnameMatched + } + + return hostnameMatched + }) +} + +function getProxyFromURI(uri) { + // Decide the proper request proxy to use based on the request URI object and the + // environmental variables (NO_PROXY, HTTP_PROXY, etc.) + // respect NO_PROXY environment variables (see: http://lynx.isc.org/current/breakout/lynx_help/keystrokes/environments.html) + + var noProxy = process.env.NO_PROXY || process.env.no_proxy || '' + + // if the noProxy is a wildcard then return null + + if (noProxy === '*') { + return null + } + + // if the noProxy is not empty and the uri is found return null + + if (noProxy !== '' && uriInNoProxy(uri, noProxy)) { + return null + } + + // Check for HTTP or HTTPS Proxy in environment Else default to null + + if (uri.protocol === 'http:') { + return process.env.HTTP_PROXY || + process.env.http_proxy || null + } + + if (uri.protocol === 'https:') { + return process.env.HTTPS_PROXY || + process.env.https_proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || null + } + + // if none of that works, return null + // (What uri protocol are you using then?) 
+ + return null +} + +module.exports = getProxyFromURI diff --git a/node_modules/fsevents/node_modules/request/lib/har.js b/node_modules/fsevents/node_modules/request/lib/har.js new file mode 100644 index 0000000..3059574 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/har.js @@ -0,0 +1,215 @@ +'use strict' + +var fs = require('fs') +var qs = require('querystring') +var validate = require('har-validator') +var extend = require('extend') + +function Har (request) { + this.request = request +} + +Har.prototype.reducer = function (obj, pair) { + // new property ? + if (obj[pair.name] === undefined) { + obj[pair.name] = pair.value + return obj + } + + // existing? convert to array + var arr = [ + obj[pair.name], + pair.value + ] + + obj[pair.name] = arr + + return obj +} + +Har.prototype.prep = function (data) { + // construct utility properties + data.queryObj = {} + data.headersObj = {} + data.postData.jsonObj = false + data.postData.paramsObj = false + + // construct query objects + if (data.queryString && data.queryString.length) { + data.queryObj = data.queryString.reduce(this.reducer, {}) + } + + // construct headers objects + if (data.headers && data.headers.length) { + // loweCase header keys + data.headersObj = data.headers.reduceRight(function (headers, header) { + headers[header.name] = header.value + return headers + }, {}) + } + + // construct Cookie header + if (data.cookies && data.cookies.length) { + var cookies = data.cookies.map(function (cookie) { + return cookie.name + '=' + cookie.value + }) + + if (cookies.length) { + data.headersObj.cookie = cookies.join('; ') + } + } + + // prep body + function some (arr) { + return arr.some(function (type) { + return data.postData.mimeType.indexOf(type) === 0 + }) + } + + if (some([ + 'multipart/mixed', + 'multipart/related', + 'multipart/form-data', + 'multipart/alternative'])) { + + // reset values + data.postData.mimeType = 'multipart/form-data' + } + + else if (some([ + 'application/x-www-form-urlencoded'])) { + + if (!data.postData.params) { + data.postData.text = '' + } else { + data.postData.paramsObj = data.postData.params.reduce(this.reducer, {}) + + // always overwrite + data.postData.text = qs.stringify(data.postData.paramsObj) + } + } + + else if (some([ + 'text/json', + 'text/x-json', + 'application/json', + 'application/x-json'])) { + + data.postData.mimeType = 'application/json' + + if (data.postData.text) { + try { + data.postData.jsonObj = JSON.parse(data.postData.text) + } catch (e) { + this.request.debug(e) + + // force back to text/plain + data.postData.mimeType = 'text/plain' + } + } + } + + return data +} + +Har.prototype.options = function (options) { + // skip if no har property defined + if (!options.har) { + return options + } + + var har = {} + extend(har, options.har) + + // only process the first entry + if (har.log && har.log.entries) { + har = har.log.entries[0] + } + + // add optional properties to make validation successful + har.url = har.url || options.url || options.uri || options.baseUrl || '/' + har.httpVersion = har.httpVersion || 'HTTP/1.1' + har.queryString = har.queryString || [] + har.headers = har.headers || [] + har.cookies = har.cookies || [] + har.postData = har.postData || {} + har.postData.mimeType = har.postData.mimeType || 'application/octet-stream' + + har.bodySize = 0 + har.headersSize = 0 + har.postData.size = 0 + + if (!validate.request(har)) { + return options + } + + // clean up and get some utility properties + var req = this.prep(har) + + // construct new 
options + if (req.url) { + options.url = req.url + } + + if (req.method) { + options.method = req.method + } + + if (Object.keys(req.queryObj).length) { + options.qs = req.queryObj + } + + if (Object.keys(req.headersObj).length) { + options.headers = req.headersObj + } + + function test (type) { + return req.postData.mimeType.indexOf(type) === 0 + } + if (test('application/x-www-form-urlencoded')) { + options.form = req.postData.paramsObj + } + else if (test('application/json')) { + if (req.postData.jsonObj) { + options.body = req.postData.jsonObj + options.json = true + } + } + else if (test('multipart/form-data')) { + options.formData = {} + + req.postData.params.forEach(function (param) { + var attachment = {} + + if (!param.fileName && !param.fileName && !param.contentType) { + options.formData[param.name] = param.value + return + } + + // attempt to read from disk! + if (param.fileName && !param.value) { + attachment.value = fs.createReadStream(param.fileName) + } else if (param.value) { + attachment.value = param.value + } + + if (param.fileName) { + attachment.options = { + filename: param.fileName, + contentType: param.contentType ? param.contentType : null + } + } + + options.formData[param.name] = attachment + }) + } + else { + if (req.postData.text) { + options.body = req.postData.text + } + } + + return options +} + +exports.Har = Har diff --git a/node_modules/fsevents/node_modules/request/lib/helpers.js b/node_modules/fsevents/node_modules/request/lib/helpers.js new file mode 100644 index 0000000..356ff74 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/helpers.js @@ -0,0 +1,74 @@ +'use strict' + +var jsonSafeStringify = require('json-stringify-safe') + , crypto = require('crypto') + +function deferMethod() { + if (typeof setImmediate === 'undefined') { + return process.nextTick + } + + return setImmediate +} + +function isFunction(value) { + return typeof value === 'function' +} + +function paramsHaveRequestBody(params) { + return ( + params.body || + params.requestBodyStream || + (params.json && typeof params.json !== 'boolean') || + params.multipart + ) +} + +function safeStringify (obj, replacer) { + var ret + try { + ret = JSON.stringify(obj, replacer) + } catch (e) { + ret = jsonSafeStringify(obj, replacer) + } + return ret +} + +function md5 (str) { + return crypto.createHash('md5').update(str).digest('hex') +} + +function isReadStream (rs) { + return rs.readable && rs.path && rs.mode +} + +function toBase64 (str) { + return (new Buffer(str || '', 'utf8')).toString('base64') +} + +function copy (obj) { + var o = {} + Object.keys(obj).forEach(function (i) { + o[i] = obj[i] + }) + return o +} + +function version () { + var numbers = process.version.replace('v', '').split('.') + return { + major: parseInt(numbers[0], 10), + minor: parseInt(numbers[1], 10), + patch: parseInt(numbers[2], 10) + } +} + +exports.isFunction = isFunction +exports.paramsHaveRequestBody = paramsHaveRequestBody +exports.safeStringify = safeStringify +exports.md5 = md5 +exports.isReadStream = isReadStream +exports.toBase64 = toBase64 +exports.copy = copy +exports.version = version +exports.defer = deferMethod() diff --git a/node_modules/fsevents/node_modules/request/lib/multipart.js b/node_modules/fsevents/node_modules/request/lib/multipart.js new file mode 100644 index 0000000..c128172 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/multipart.js @@ -0,0 +1,112 @@ +'use strict' + +var uuid = require('node-uuid') + , CombinedStream = require('combined-stream') + , 
isstream = require('isstream') + + +function Multipart (request) { + this.request = request + this.boundary = uuid() + this.chunked = false + this.body = null +} + +Multipart.prototype.isChunked = function (options) { + var self = this + , chunked = false + , parts = options.data || options + + if (!parts.forEach) { + self.request.emit('error', new Error('Argument error, options.multipart.')) + } + + if (options.chunked !== undefined) { + chunked = options.chunked + } + + if (self.request.getHeader('transfer-encoding') === 'chunked') { + chunked = true + } + + if (!chunked) { + parts.forEach(function (part) { + if (typeof part.body === 'undefined') { + self.request.emit('error', new Error('Body attribute missing in multipart.')) + } + if (isstream(part.body)) { + chunked = true + } + }) + } + + return chunked +} + +Multipart.prototype.setHeaders = function (chunked) { + var self = this + + if (chunked && !self.request.hasHeader('transfer-encoding')) { + self.request.setHeader('transfer-encoding', 'chunked') + } + + var header = self.request.getHeader('content-type') + + if (!header || header.indexOf('multipart') === -1) { + self.request.setHeader('content-type', 'multipart/related; boundary=' + self.boundary) + } else { + if (header.indexOf('boundary') !== -1) { + self.boundary = header.replace(/.*boundary=([^\s;]+).*/, '$1') + } else { + self.request.setHeader('content-type', header + '; boundary=' + self.boundary) + } + } +} + +Multipart.prototype.build = function (parts, chunked) { + var self = this + var body = chunked ? new CombinedStream() : [] + + function add (part) { + if (typeof part === 'number') { + part = part.toString() + } + return chunked ? body.append(part) : body.push(new Buffer(part)) + } + + if (self.request.preambleCRLF) { + add('\r\n') + } + + parts.forEach(function (part) { + var preamble = '--' + self.boundary + '\r\n' + Object.keys(part).forEach(function (key) { + if (key === 'body') { return } + preamble += key + ': ' + part[key] + '\r\n' + }) + preamble += '\r\n' + add(preamble) + add(part.body) + add('\r\n') + }) + add('--' + self.boundary + '--') + + if (self.request.postambleCRLF) { + add('\r\n') + } + + return body +} + +Multipart.prototype.onRequest = function (options) { + var self = this + + var chunked = self.isChunked(options) + , parts = options.data || options + + self.setHeaders(chunked) + self.chunked = chunked + self.body = self.build(parts, chunked) +} + +exports.Multipart = Multipart diff --git a/node_modules/fsevents/node_modules/request/lib/oauth.js b/node_modules/fsevents/node_modules/request/lib/oauth.js new file mode 100644 index 0000000..c24209b --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/oauth.js @@ -0,0 +1,147 @@ +'use strict' + +var url = require('url') + , qs = require('qs') + , caseless = require('caseless') + , uuid = require('node-uuid') + , oauth = require('oauth-sign') + , crypto = require('crypto') + + +function OAuth (request) { + this.request = request + this.params = null +} + +OAuth.prototype.buildParams = function (_oauth, uri, method, query, form, qsLib) { + var oa = {} + for (var i in _oauth) { + oa['oauth_' + i] = _oauth[i] + } + if (!oa.oauth_version) { + oa.oauth_version = '1.0' + } + if (!oa.oauth_timestamp) { + oa.oauth_timestamp = Math.floor( Date.now() / 1000 ).toString() + } + if (!oa.oauth_nonce) { + oa.oauth_nonce = uuid().replace(/-/g, '') + } + if (!oa.oauth_signature_method) { + oa.oauth_signature_method = 'HMAC-SHA1' + } + + var consumer_secret_or_private_key = oa.oauth_consumer_secret || 
oa.oauth_private_key + delete oa.oauth_consumer_secret + delete oa.oauth_private_key + + var token_secret = oa.oauth_token_secret + delete oa.oauth_token_secret + + var realm = oa.oauth_realm + delete oa.oauth_realm + delete oa.oauth_transport_method + + var baseurl = uri.protocol + '//' + uri.host + uri.pathname + var params = qsLib.parse([].concat(query, form, qsLib.stringify(oa)).join('&')) + + oa.oauth_signature = oauth.sign( + oa.oauth_signature_method, + method, + baseurl, + params, + consumer_secret_or_private_key, + token_secret) + + if (realm) { + oa.realm = realm + } + + return oa +} + +OAuth.prototype.buildBodyHash = function(_oauth, body) { + if (['HMAC-SHA1', 'RSA-SHA1'].indexOf(_oauth.signature_method || 'HMAC-SHA1') < 0) { + this.request.emit('error', new Error('oauth: ' + _oauth.signature_method + + ' signature_method not supported with body_hash signing.')) + } + + var shasum = crypto.createHash('sha1') + shasum.update(body || '') + var sha1 = shasum.digest('hex') + + return new Buffer(sha1).toString('base64') +} + +OAuth.prototype.concatParams = function (oa, sep, wrap) { + wrap = wrap || '' + + var params = Object.keys(oa).filter(function (i) { + return i !== 'realm' && i !== 'oauth_signature' + }).sort() + + if (oa.realm) { + params.splice(0, 0, 'realm') + } + params.push('oauth_signature') + + return params.map(function (i) { + return i + '=' + wrap + oauth.rfc3986(oa[i]) + wrap + }).join(sep) +} + +OAuth.prototype.onRequest = function (_oauth) { + var self = this + self.params = _oauth + + var uri = self.request.uri || {} + , method = self.request.method || '' + , headers = caseless(self.request.headers) + , body = self.request.body || '' + , qsLib = self.request.qsLib || qs + + var form + , query + , contentType = headers.get('content-type') || '' + , formContentType = 'application/x-www-form-urlencoded' + , transport = _oauth.transport_method || 'header' + + if (contentType.slice(0, formContentType.length) === formContentType) { + contentType = formContentType + form = body + } + if (uri.query) { + query = uri.query + } + if (transport === 'body' && (method !== 'POST' || contentType !== formContentType)) { + self.request.emit('error', new Error('oauth: transport_method of body requires POST ' + + 'and content-type ' + formContentType)) + } + + if (!form && typeof _oauth.body_hash === 'boolean') { + _oauth.body_hash = self.buildBodyHash(_oauth, self.request.body.toString()) + } + + var oa = self.buildParams(_oauth, uri, method, query, form, qsLib) + + switch (transport) { + case 'header': + self.request.setHeader('Authorization', 'OAuth ' + self.concatParams(oa, ',', '"')) + break + + case 'query': + var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&') + self.request.uri = url.parse(href) + self.request.path = self.request.uri.path + break + + case 'body': + self.request.body = (form ? 
form + '&' : '') + self.concatParams(oa, '&') + break + + default: + self.request.emit('error', new Error('oauth: transport_method invalid')) + } +} + +exports.OAuth = OAuth diff --git a/node_modules/fsevents/node_modules/request/lib/querystring.js b/node_modules/fsevents/node_modules/request/lib/querystring.js new file mode 100644 index 0000000..baf5e80 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/querystring.js @@ -0,0 +1,51 @@ +'use strict' + +var qs = require('qs') + , querystring = require('querystring') + + +function Querystring (request) { + this.request = request + this.lib = null + this.useQuerystring = null + this.parseOptions = null + this.stringifyOptions = null +} + +Querystring.prototype.init = function (options) { + if (this.lib) {return} + + this.useQuerystring = options.useQuerystring + this.lib = (this.useQuerystring ? querystring : qs) + + this.parseOptions = options.qsParseOptions || {} + this.stringifyOptions = options.qsStringifyOptions || {} +} + +Querystring.prototype.stringify = function (obj) { + return (this.useQuerystring) + ? this.rfc3986(this.lib.stringify(obj, + this.stringifyOptions.sep || null, + this.stringifyOptions.eq || null, + this.stringifyOptions)) + : this.lib.stringify(obj, this.stringifyOptions) +} + +Querystring.prototype.parse = function (str) { + return (this.useQuerystring) + ? this.lib.parse(str, + this.parseOptions.sep || null, + this.parseOptions.eq || null, + this.parseOptions) + : this.lib.parse(str, this.parseOptions) +} + +Querystring.prototype.rfc3986 = function (str) { + return str.replace(/[!'()*]/g, function (c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} + +Querystring.prototype.unescape = querystring.unescape + +exports.Querystring = Querystring diff --git a/node_modules/fsevents/node_modules/request/lib/redirect.js b/node_modules/fsevents/node_modules/request/lib/redirect.js new file mode 100644 index 0000000..040dfe0 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/redirect.js @@ -0,0 +1,153 @@ +'use strict' + +var url = require('url') +var isUrl = /^https?:/ + +function Redirect (request) { + this.request = request + this.followRedirect = true + this.followRedirects = true + this.followAllRedirects = false + this.allowRedirect = function () {return true} + this.maxRedirects = 10 + this.redirects = [] + this.redirectsFollowed = 0 + this.removeRefererHeader = false +} + +Redirect.prototype.onRequest = function (options) { + var self = this + + if (options.maxRedirects !== undefined) { + self.maxRedirects = options.maxRedirects + } + if (typeof options.followRedirect === 'function') { + self.allowRedirect = options.followRedirect + } + if (options.followRedirect !== undefined) { + self.followRedirects = !!options.followRedirect + } + if (options.followAllRedirects !== undefined) { + self.followAllRedirects = options.followAllRedirects + } + if (self.followRedirects || self.followAllRedirects) { + self.redirects = self.redirects || [] + } + if (options.removeRefererHeader !== undefined) { + self.removeRefererHeader = options.removeRefererHeader + } +} + +Redirect.prototype.redirectTo = function (response) { + var self = this + , request = self.request + + var redirectTo = null + if (response.statusCode >= 300 && response.statusCode < 400 && response.caseless.has('location')) { + var location = response.caseless.get('location') + request.debug('redirect', location) + + if (self.followAllRedirects) { + redirectTo = location + } else if (self.followRedirects) { + switch 
(request.method) { + case 'PATCH': + case 'PUT': + case 'POST': + case 'DELETE': + // Do not follow redirects + break + default: + redirectTo = location + break + } + } + } else if (response.statusCode === 401) { + var authHeader = request._auth.onResponse(response) + if (authHeader) { + request.setHeader('authorization', authHeader) + redirectTo = request.uri + } + } + return redirectTo +} + +Redirect.prototype.onResponse = function (response) { + var self = this + , request = self.request + + var redirectTo = self.redirectTo(response) + if (!redirectTo || !self.allowRedirect.call(request, response)) { + return false + } + + request.debug('redirect to', redirectTo) + + // ignore any potential response body. it cannot possibly be useful + // to us at this point. + // response.resume should be defined, but check anyway before calling. Workaround for browserify. + if (response.resume) { + response.resume() + } + + if (self.redirectsFollowed >= self.maxRedirects) { + request.emit('error', new Error('Exceeded maxRedirects. Probably stuck in a redirect loop ' + request.uri.href)) + return false + } + self.redirectsFollowed += 1 + + if (!isUrl.test(redirectTo)) { + redirectTo = url.resolve(request.uri.href, redirectTo) + } + + var uriPrev = request.uri + request.uri = url.parse(redirectTo) + + // handle the case where we change protocol from https to http or vice versa + if (request.uri.protocol !== uriPrev.protocol) { + delete request.agent + } + + self.redirects.push( + { statusCode : response.statusCode + , redirectUri: redirectTo + } + ) + if (self.followAllRedirects && request.method !== 'HEAD' + && response.statusCode !== 401 && response.statusCode !== 307) { + request.method = 'GET' + } + // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215 + delete request.src + delete request.req + delete request._started + if (response.statusCode !== 401 && response.statusCode !== 307) { + // Remove parameters from the previous response, unless this is the second request + // for a server that requires digest authentication. + delete request.body + delete request._form + if (request.headers) { + request.removeHeader('host') + request.removeHeader('content-type') + request.removeHeader('content-length') + if (request.uri.hostname !== request.originalHost.split(':')[0]) { + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). 
This matches the behavior of curl: + // https://github.com/bagder/curl/blob/6beb0eee/lib/http.c#L710 + request.removeHeader('authorization') + } + } + } + + if (!self.removeRefererHeader) { + request.setHeader('referer', uriPrev.href) + } + + request.emit('redirect') + + request.init() + + return true +} + +exports.Redirect = Redirect diff --git a/node_modules/fsevents/node_modules/request/lib/tunnel.js b/node_modules/fsevents/node_modules/request/lib/tunnel.js new file mode 100644 index 0000000..bf96a8f --- /dev/null +++ b/node_modules/fsevents/node_modules/request/lib/tunnel.js @@ -0,0 +1,176 @@ +'use strict' + +var url = require('url') + , tunnel = require('tunnel-agent') + +var defaultProxyHeaderWhiteList = [ + 'accept', + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept-ranges', + 'cache-control', + 'content-encoding', + 'content-language', + 'content-location', + 'content-md5', + 'content-range', + 'content-type', + 'connection', + 'date', + 'expect', + 'max-forwards', + 'pragma', + 'referer', + 'te', + 'user-agent', + 'via' +] + +var defaultProxyHeaderExclusiveList = [ + 'proxy-authorization' +] + +function constructProxyHost(uriObject) { + var port = uriObject.port + , protocol = uriObject.protocol + , proxyHost = uriObject.hostname + ':' + + if (port) { + proxyHost += port + } else if (protocol === 'https:') { + proxyHost += '443' + } else { + proxyHost += '80' + } + + return proxyHost +} + +function constructProxyHeaderWhiteList(headers, proxyHeaderWhiteList) { + var whiteList = proxyHeaderWhiteList + .reduce(function (set, header) { + set[header.toLowerCase()] = true + return set + }, {}) + + return Object.keys(headers) + .filter(function (header) { + return whiteList[header.toLowerCase()] + }) + .reduce(function (set, header) { + set[header] = headers[header] + return set + }, {}) +} + +function constructTunnelOptions (request, proxyHeaders) { + var proxy = request.proxy + + var tunnelOptions = { + proxy : { + host : proxy.hostname, + port : +proxy.port, + proxyAuth : proxy.auth, + headers : proxyHeaders + }, + headers : request.headers, + ca : request.ca, + cert : request.cert, + key : request.key, + passphrase : request.passphrase, + pfx : request.pfx, + ciphers : request.ciphers, + rejectUnauthorized : request.rejectUnauthorized, + secureOptions : request.secureOptions, + secureProtocol : request.secureProtocol + } + + return tunnelOptions +} + +function constructTunnelFnName(uri, proxy) { + var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http') + var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http') + return [uriProtocol, proxyProtocol].join('Over') +} + +function getTunnelFn(request) { + var uri = request.uri + var proxy = request.proxy + var tunnelFnName = constructTunnelFnName(uri, proxy) + return tunnel[tunnelFnName] +} + + +function Tunnel (request) { + this.request = request + this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList + this.proxyHeaderExclusiveList = [] + if (typeof request.tunnel !== 'undefined') { + this.tunnelOverride = request.tunnel + } +} + +Tunnel.prototype.isEnabled = function () { + var self = this + , request = self.request + // Tunnel HTTPS by default. Allow the user to override this setting. + + // If self.tunnelOverride is set (the user specified a value), use it. + if (typeof self.tunnelOverride !== 'undefined') { + return self.tunnelOverride + } + + // If the destination is HTTPS, tunnel. + if (request.uri.protocol === 'https:') { + return true + } + + // Otherwise, do not use tunnel. 
+ return false +} + +Tunnel.prototype.setup = function (options) { + var self = this + , request = self.request + + options = options || {} + + if (typeof request.proxy === 'string') { + request.proxy = url.parse(request.proxy) + } + + if (!request.proxy || !request.tunnel) { + return false + } + + // Setup Proxy Header Exclusive List and White List + if (options.proxyHeaderWhiteList) { + self.proxyHeaderWhiteList = options.proxyHeaderWhiteList + } + if (options.proxyHeaderExclusiveList) { + self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList + } + + var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList) + var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList) + + // Setup Proxy Headers and Proxy Headers Host + // Only send the Proxy White Listed Header names + var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList) + proxyHeaders.host = constructProxyHost(request.uri) + + proxyHeaderExclusiveList.forEach(request.removeHeader, request) + + // Set Agent from Tunnel Data + var tunnelFn = getTunnelFn(request) + var tunnelOptions = constructTunnelOptions(request, proxyHeaders) + request.agent = tunnelFn(tunnelOptions) + + return true +} + +Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList +Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList +exports.Tunnel = Tunnel diff --git a/node_modules/fsevents/node_modules/request/package.json b/node_modules/fsevents/node_modules/request/package.json new file mode 100644 index 0000000..6e72965 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/package.json @@ -0,0 +1,144 @@ +{ + "_args": [ + [ + "request@2.x", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "request@>=2.0.0 <3.0.0", + "_id": "request@2.73.0", + "_inCache": true, + "_installable": true, + "_location": "/request", + "_nodeVersion": "6.2.2", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/request-2.73.0.tgz_1468050202545_0.40326908184215426" + }, + "_npmUser": { + "email": "simeonvelichkov@gmail.com", + "name": "simov" + }, + "_npmVersion": "2.15.6", + "_phantomChildren": {}, + "_requested": { + "name": "request", + "raw": "request@2.x", + "rawSpec": "2.x", + "scope": null, + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/request/-/request-2.73.0.tgz", + "_shasum": "5f78a9fde4370abc8ff6479d7a84a71a14b878a2", + "_shrinkwrap": null, + "_spec": "request@2.x", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "mikeal.rogers@gmail.com", + "name": "Mikeal Rogers" + }, + "bugs": { + "url": "http://github.com/request/request/issues" + }, + "dependencies": { + "aws-sign2": "~0.6.0", + "aws4": "^1.2.1", + "bl": "~1.1.2", + "caseless": "~0.11.0", + "combined-stream": "~1.0.5", + "extend": "~3.0.0", + "forever-agent": "~0.6.1", + "form-data": "~1.0.0-rc4", + "har-validator": "~2.0.6", + "hawk": "~3.1.3", + "http-signature": "~1.1.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.7", + "node-uuid": "~1.4.7", + "oauth-sign": "~0.8.1", + "qs": "~6.2.0", + "stringstream": "~0.0.4", + "tough-cookie": "~2.2.0", + "tunnel-agent": "~0.4.1" + }, + "description": "Simplified HTTP request client.", + "devDependencies": { + "bluebird": "^3.2.1", + "browserify": "^13.0.1", + 
"browserify-istanbul": "^2.0.0", + "buffer-equal": "^1.0.0", + "codecov": "^1.0.1", + "coveralls": "^2.11.4", + "eslint": "^2.5.3", + "function-bind": "^1.0.2", + "istanbul": "^0.4.0", + "karma": "^1.1.1", + "karma-browserify": "^5.0.1", + "karma-cli": "^1.0.0", + "karma-coverage": "^1.0.0", + "karma-phantomjs-launcher": "^1.0.0", + "karma-tap": "^1.0.3", + "phantomjs-prebuilt": "^2.1.3", + "rimraf": "^2.2.8", + "server-destroy": "^1.0.1", + "tape": "^4.6.0", + "taper": "^0.4.0" + }, + "directories": {}, + "dist": { + "shasum": "5f78a9fde4370abc8ff6479d7a84a71a14b878a2", + "tarball": "https://registry.npmjs.org/request/-/request-2.73.0.tgz" + }, + "engines": { + "node": ">=0.8.0" + }, + "gitHead": "fa46be421b165c737c93672a8f920245cfb090a2", + "homepage": "https://github.com/request/request#readme", + "license": "Apache-2.0", + "main": "index.js", + "maintainers": [ + { + "name": "mikeal", + "email": "mikeal.rogers@gmail.com" + }, + { + "name": "nylen", + "email": "jnylen@gmail.com" + }, + { + "name": "fredkschott", + "email": "fkschott@gmail.com" + }, + { + "name": "simov", + "email": "simeonvelichkov@gmail.com" + } + ], + "name": "request", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/request/request.git" + }, + "scripts": { + "lint": "eslint lib/ *.js tests/ && echo Lint passed.", + "test": "npm run lint && npm run test-ci && npm run test-browser", + "test-browser": "node tests/browser/start.js", + "test-ci": "taper tests/test-*.js", + "test-cov": "istanbul cover tape tests/test-*.js" + }, + "tags": [ + "http", + "simple", + "util", + "utility" + ], + "version": "2.73.0" +} diff --git a/node_modules/fsevents/node_modules/request/request.js b/node_modules/fsevents/node_modules/request/request.js new file mode 100644 index 0000000..8267c12 --- /dev/null +++ b/node_modules/fsevents/node_modules/request/request.js @@ -0,0 +1,1434 @@ +'use strict' + +var http = require('http') + , https = require('https') + , url = require('url') + , util = require('util') + , stream = require('stream') + , zlib = require('zlib') + , bl = require('bl') + , hawk = require('hawk') + , aws2 = require('aws-sign2') + , aws4 = require('aws4') + , httpSignature = require('http-signature') + , mime = require('mime-types') + , stringstream = require('stringstream') + , caseless = require('caseless') + , ForeverAgent = require('forever-agent') + , FormData = require('form-data') + , extend = require('extend') + , isstream = require('isstream') + , isTypedArray = require('is-typedarray').strict + , helpers = require('./lib/helpers') + , cookies = require('./lib/cookies') + , getProxyFromURI = require('./lib/getProxyFromURI') + , Querystring = require('./lib/querystring').Querystring + , Har = require('./lib/har').Har + , Auth = require('./lib/auth').Auth + , OAuth = require('./lib/oauth').OAuth + , Multipart = require('./lib/multipart').Multipart + , Redirect = require('./lib/redirect').Redirect + , Tunnel = require('./lib/tunnel').Tunnel + +var safeStringify = helpers.safeStringify + , isReadStream = helpers.isReadStream + , toBase64 = helpers.toBase64 + , defer = helpers.defer + , copy = helpers.copy + , version = helpers.version + , globalCookieJar = cookies.jar() + + +var globalPool = {} + +function filterForNonReserved(reserved, options) { + // Filter out properties that are not reserved. + // Reserved values are passed in at call site. 
+ + var object = {} + for (var i in options) { + var notReserved = (reserved.indexOf(i) === -1) + if (notReserved) { + object[i] = options[i] + } + } + return object +} + +function filterOutReservedFunctions(reserved, options) { + // Filter out properties that are functions and are reserved. + // Reserved values are passed in at call site. + + var object = {} + for (var i in options) { + var isReserved = !(reserved.indexOf(i) === -1) + var isFunction = (typeof options[i] === 'function') + if (!(isReserved && isFunction)) { + object[i] = options[i] + } + } + return object + +} + +// Return a simpler request object to allow serialization +function requestToJSON() { + var self = this + return { + uri: self.uri, + method: self.method, + headers: self.headers + } +} + +// Return a simpler response object to allow serialization +function responseToJSON() { + var self = this + return { + statusCode: self.statusCode, + body: self.body, + headers: self.headers, + request: requestToJSON.call(self.request) + } +} + +function Request (options) { + // if given the method property in options, set property explicitMethod to true + + // extend the Request instance with any non-reserved properties + // remove any reserved functions from the options object + // set Request instance to be readable and writable + // call init + + var self = this + + // start with HAR, then override with additional options + if (options.har) { + self._har = new Har(self) + options = self._har.options(options) + } + + stream.Stream.call(self) + var reserved = Object.keys(Request.prototype) + var nonReserved = filterForNonReserved(reserved, options) + + extend(self, nonReserved) + options = filterOutReservedFunctions(reserved, options) + + self.readable = true + self.writable = true + if (options.method) { + self.explicitMethod = true + } + self._qs = new Querystring(self) + self._auth = new Auth(self) + self._oauth = new OAuth(self) + self._multipart = new Multipart(self) + self._redirect = new Redirect(self) + self._tunnel = new Tunnel(self) + self.init(options) +} + +util.inherits(Request, stream.Stream) + +// Debugging +Request.debug = process.env.NODE_DEBUG && /\brequest\b/.test(process.env.NODE_DEBUG) +function debug() { + if (Request.debug) { + console.error('REQUEST %s', util.format.apply(util, arguments)) + } +} +Request.prototype.debug = debug + +Request.prototype.init = function (options) { + // init() contains all the code to setup the request object. + // the actual outgoing request is not started until start() is called + // this function is called from both the constructor and on redirect. + var self = this + if (!options) { + options = {} + } + self.headers = self.headers ? 
copy(self.headers) : {} + + // Delete headers with value undefined since they break + // ClientRequest.OutgoingMessage.setHeader in node 0.12 + for (var headerName in self.headers) { + if (typeof self.headers[headerName] === 'undefined') { + delete self.headers[headerName] + } + } + + caseless.httpify(self, self.headers) + + if (!self.method) { + self.method = options.method || 'GET' + } + if (!self.localAddress) { + self.localAddress = options.localAddress + } + + self._qs.init(options) + + debug(options) + if (!self.pool && self.pool !== false) { + self.pool = globalPool + } + self.dests = self.dests || [] + self.__isRequestRequest = true + + // Protect against double callback + if (!self._callback && self.callback) { + self._callback = self.callback + self.callback = function () { + if (self._callbackCalled) { + return // Print a warning maybe? + } + self._callbackCalled = true + self._callback.apply(self, arguments) + } + self.on('error', self.callback.bind()) + self.on('complete', self.callback.bind(self, null)) + } + + // People use this property instead all the time, so support it + if (!self.uri && self.url) { + self.uri = self.url + delete self.url + } + + // If there's a baseUrl, then use it as the base URL (i.e. uri must be + // specified as a relative path and is appended to baseUrl). + if (self.baseUrl) { + if (typeof self.baseUrl !== 'string') { + return self.emit('error', new Error('options.baseUrl must be a string')) + } + + if (typeof self.uri !== 'string') { + return self.emit('error', new Error('options.uri must be a string when using options.baseUrl')) + } + + if (self.uri.indexOf('//') === 0 || self.uri.indexOf('://') !== -1) { + return self.emit('error', new Error('options.uri must be a path when using options.baseUrl')) + } + + // Handle all cases to make sure that there's only one slash between + // baseUrl and uri. + var baseUrlEndsWithSlash = self.baseUrl.lastIndexOf('/') === self.baseUrl.length - 1 + var uriStartsWithSlash = self.uri.indexOf('/') === 0 + + if (baseUrlEndsWithSlash && uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri.slice(1) + } else if (baseUrlEndsWithSlash || uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri + } else if (self.uri === '') { + self.uri = self.baseUrl + } else { + self.uri = self.baseUrl + '/' + self.uri + } + delete self.baseUrl + } + + // A URI is needed by this point, emit error if we haven't been able to get one + if (!self.uri) { + return self.emit('error', new Error('options.uri is a required argument')) + } + + // If a string URI/URL was given, parse it into a URL object + if (typeof self.uri === 'string') { + self.uri = url.parse(self.uri) + } + + // Some URL objects are not from a URL parsed string and need href added + if (!self.uri.href) { + self.uri.href = url.format(self.uri) + } + + // DEPRECATED: Warning for users of the old Unix Sockets URL Scheme + if (self.uri.protocol === 'unix:') { + return self.emit('error', new Error('`unix://` URL scheme is no longer supported. 
Please use the format `http://unix:SOCKET:PATH`')) + } + + // Support Unix Sockets + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + if (self.strictSSL === false) { + self.rejectUnauthorized = false + } + + if (!self.uri.pathname) {self.uri.pathname = '/'} + + if (!(self.uri.host || (self.uri.hostname && self.uri.port)) && !self.uri.isUnix) { + // Invalid URI: it may generate lot of bad errors, like 'TypeError: Cannot call method `indexOf` of undefined' in CookieJar + // Detect and reject it as soon as possible + var faultyUri = url.format(self.uri) + var message = 'Invalid URI "' + faultyUri + '"' + if (Object.keys(options).length === 0) { + // No option ? This can be the sign of a redirect + // As this is a case where the user cannot do anything (they didn't call request directly with this URL) + // they should be warned that it can be caused by a redirection (can save some hair) + message += '. This can be caused by a crappy redirection.' + } + // This error was fatal + self.abort() + return self.emit('error', new Error(message)) + } + + if (!self.hasOwnProperty('proxy')) { + self.proxy = getProxyFromURI(self.uri) + } + + self.tunnel = self._tunnel.isEnabled() + if (self.proxy) { + self._tunnel.setup(options) + } + + self._redirect.onRequest(options) + + self.setHost = false + if (!self.hasHeader('host')) { + var hostHeaderName = self.originalHostHeaderName || 'host' + self.setHeader(hostHeaderName, self.uri.hostname) + if (self.uri.port) { + if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') && + !(self.uri.port === 443 && self.uri.protocol === 'https:') ) { + self.setHeader(hostHeaderName, self.getHeader('host') + (':' + self.uri.port) ) + } + } + self.setHost = true + } + + self.jar(self._jar || options.jar) + + if (!self.uri.port) { + if (self.uri.protocol === 'http:') {self.uri.port = 80} + else if (self.uri.protocol === 'https:') {self.uri.port = 443} + } + + if (self.proxy && !self.tunnel) { + self.port = self.proxy.port + self.host = self.proxy.hostname + } else { + self.port = self.uri.port + self.host = self.uri.hostname + } + + if (options.form) { + self.form(options.form) + } + + if (options.formData) { + var formData = options.formData + var requestForm = self.form() + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty('value') && value.hasOwnProperty('options')) { + requestForm.append(key, value.value, value.options) + } else { + requestForm.append(key, value) + } + } + for (var formKey in formData) { + if (formData.hasOwnProperty(formKey)) { + var formValue = formData[formKey] + if (formValue instanceof Array) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]) + } + } else { + appendFormValue(formKey, formValue) + } + } + } + } + + if (options.qs) { + self.qs(options.qs) + } + + if (self.uri.path) { + self.path = self.uri.path + } else { + self.path = self.uri.pathname + (self.uri.search || '') + } + + if (self.path.length === 0) { + self.path = '/' + } + + // Auth must happen last in case signing is dependent on other headers + if (options.aws) { + self.aws(options.aws) + } + + if (options.hawk) { + self.hawk(options.hawk) + } + + if (options.httpSignature) { + self.httpSignature(options.httpSignature) + } + + if (options.auth) { + if (Object.prototype.hasOwnProperty.call(options.auth, 'username')) { + options.auth.user = options.auth.username + } + if (Object.prototype.hasOwnProperty.call(options.auth, 'password')) { + options.auth.pass = options.auth.password + } + + 
self.auth( + options.auth.user, + options.auth.pass, + options.auth.sendImmediately, + options.auth.bearer + ) + } + + if (self.gzip && !self.hasHeader('accept-encoding')) { + self.setHeader('accept-encoding', 'gzip, deflate') + } + + if (self.uri.auth && !self.hasHeader('authorization')) { + var uriAuthPieces = self.uri.auth.split(':').map(function(item) {return self._qs.unescape(item)}) + self.auth(uriAuthPieces[0], uriAuthPieces.slice(1).join(':'), true) + } + + if (!self.tunnel && self.proxy && self.proxy.auth && !self.hasHeader('proxy-authorization')) { + var proxyAuthPieces = self.proxy.auth.split(':').map(function(item) {return self._qs.unescape(item)}) + var authHeader = 'Basic ' + toBase64(proxyAuthPieces.join(':')) + self.setHeader('proxy-authorization', authHeader) + } + + if (self.proxy && !self.tunnel) { + self.path = (self.uri.protocol + '//' + self.uri.host + self.path) + } + + if (options.json) { + self.json(options.json) + } + if (options.multipart) { + self.multipart(options.multipart) + } + + if (options.time) { + self.timing = true + self.elapsedTime = self.elapsedTime || 0 + } + + function setContentLength () { + if (isTypedArray(self.body)) { + self.body = new Buffer(self.body) + } + + if (!self.hasHeader('content-length')) { + var length + if (typeof self.body === 'string') { + length = Buffer.byteLength(self.body) + } + else if (Array.isArray(self.body)) { + length = self.body.reduce(function (a, b) {return a + b.length}, 0) + } + else { + length = self.body.length + } + + if (length) { + self.setHeader('content-length', length) + } else { + self.emit('error', new Error('Argument error, options.body.')) + } + } + } + if (self.body && !isstream(self.body)) { + setContentLength() + } + + if (options.oauth) { + self.oauth(options.oauth) + } else if (self._oauth.params && self.hasHeader('authorization')) { + self.oauth(self._oauth.params) + } + + var protocol = self.proxy && !self.tunnel ? self.proxy.protocol : self.uri.protocol + , defaultModules = {'http:':http, 'https:':https} + , httpModules = self.httpModules || {} + + self.httpModule = httpModules[protocol] || defaultModules[protocol] + + if (!self.httpModule) { + return self.emit('error', new Error('Invalid protocol: ' + protocol)) + } + + if (options.ca) { + self.ca = options.ca + } + + if (!self.agent) { + if (options.agentOptions) { + self.agentOptions = options.agentOptions + } + + if (options.agentClass) { + self.agentClass = options.agentClass + } else if (options.forever) { + var v = version() + // use ForeverAgent in node 0.10- only + if (v.major === 0 && v.minor <= 10) { + self.agentClass = protocol === 'http:' ? 
ForeverAgent : ForeverAgent.SSL + } else { + self.agentClass = self.httpModule.Agent + self.agentOptions = self.agentOptions || {} + self.agentOptions.keepAlive = true + } + } else { + self.agentClass = self.httpModule.Agent + } + } + + if (self.pool === false) { + self.agent = false + } else { + self.agent = self.agent || self.getNewAgent() + } + + self.on('pipe', function (src) { + if (self.ntick && self._started) { + self.emit('error', new Error('You cannot pipe to this stream after the outbound request has started.')) + } + self.src = src + if (isReadStream(src)) { + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', mime.lookup(src.path)) + } + } else { + if (src.headers) { + for (var i in src.headers) { + if (!self.hasHeader(i)) { + self.setHeader(i, src.headers[i]) + } + } + } + if (self._json && !self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + if (src.method && !self.explicitMethod) { + self.method = src.method + } + } + + // self.on('pipe', function () { + // console.error('You have already piped to this stream. Pipeing twice is likely to break the request.') + // }) + }) + + defer(function () { + if (self._aborted) { + return + } + + var end = function () { + if (self._form) { + if (!self._auth.hasAuth) { + self._form.pipe(self) + } + else if (self._auth.hasAuth && self._auth.sentAuth) { + self._form.pipe(self) + } + } + if (self._multipart && self._multipart.chunked) { + self._multipart.body.pipe(self) + } + if (self.body) { + if (isstream(self.body)) { + self.body.pipe(self) + } else { + setContentLength() + if (Array.isArray(self.body)) { + self.body.forEach(function (part) { + self.write(part) + }) + } else { + self.write(self.body) + } + self.end() + } + } else if (self.requestBodyStream) { + console.warn('options.requestBodyStream is deprecated, please pass the request object to stream.pipe.') + self.requestBodyStream.pipe(self) + } else if (!self.src) { + if (self._auth.hasAuth && !self._auth.sentAuth) { + self.end() + return + } + if (self.method !== 'GET' && typeof self.method !== 'undefined') { + self.setHeader('content-length', 0) + } + self.end() + } + } + + if (self._form && !self.hasHeader('content-length')) { + // Before ending the request, we had to compute the length of the whole form, asyncly + self.setHeader(self._form.getHeaders(), true) + self._form.getLength(function (err, length) { + if (!err && !isNaN(length)) { + self.setHeader('content-length', length) + } + end() + }) + } else { + end() + } + + self.ntick = true + }) + +} + +Request.prototype.getNewAgent = function () { + var self = this + var Agent = self.agentClass + var options = {} + if (self.agentOptions) { + for (var i in self.agentOptions) { + options[i] = self.agentOptions[i] + } + } + if (self.ca) { + options.ca = self.ca + } + if (self.ciphers) { + options.ciphers = self.ciphers + } + if (self.secureProtocol) { + options.secureProtocol = self.secureProtocol + } + if (self.secureOptions) { + options.secureOptions = self.secureOptions + } + if (typeof self.rejectUnauthorized !== 'undefined') { + options.rejectUnauthorized = self.rejectUnauthorized + } + + if (self.cert && self.key) { + options.key = self.key + options.cert = self.cert + } + + if (self.pfx) { + options.pfx = self.pfx + } + + if (self.passphrase) { + options.passphrase = self.passphrase + } + + var poolKey = '' + + // different types of agents are in different pools + if (Agent !== self.httpModule.Agent) { + poolKey += Agent.name + } + + // ca option is only 
relevant if proxy or destination are https + var proxy = self.proxy + if (typeof proxy === 'string') { + proxy = url.parse(proxy) + } + var isHttps = (proxy && proxy.protocol === 'https:') || this.uri.protocol === 'https:' + + if (isHttps) { + if (options.ca) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ca + } + + if (typeof options.rejectUnauthorized !== 'undefined') { + if (poolKey) { + poolKey += ':' + } + poolKey += options.rejectUnauthorized + } + + if (options.cert) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.cert.toString('ascii') + options.key.toString('ascii') + } + + if (options.pfx) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.pfx.toString('ascii') + } + + if (options.ciphers) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ciphers + } + + if (options.secureProtocol) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureProtocol + } + + if (options.secureOptions) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureOptions + } + } + + if (self.pool === globalPool && !poolKey && Object.keys(options).length === 0 && self.httpModule.globalAgent) { + // not doing anything special. Use the globalAgent + return self.httpModule.globalAgent + } + + // we're using a stored agent. Make sure it's protocol-specific + poolKey = self.uri.protocol + poolKey + + // generate a new agent for this setting if none yet exists + if (!self.pool[poolKey]) { + self.pool[poolKey] = new Agent(options) + // properly set maxSockets on new agents + if (self.pool.maxSockets) { + self.pool[poolKey].maxSockets = self.pool.maxSockets + } + } + + return self.pool[poolKey] +} + +Request.prototype.start = function () { + // start() is called once we are ready to send the outgoing HTTP request. + // this is usually called on the first write(), end() or on nextTick() + var self = this + + if (self._aborted) { + return + } + + self._started = true + self.method = self.method || 'GET' + self.href = self.uri.href + + if (self.src && self.src.stat && self.src.stat.size && !self.hasHeader('content-length')) { + self.setHeader('content-length', self.src.stat.size) + } + if (self._aws) { + self.aws(self._aws, true) + } + + // We have a method named auth, which is completely different from the http.request + // auth option. If we don't remove it, we're gonna have a bad time. + var reqOptions = copy(self) + delete reqOptions.auth + + debug('make request', self.uri.href) + + try { + self.req = self.httpModule.request(reqOptions) + } catch (err) { + self.emit('error', err) + return + } + + if (self.timing) { + self.startTime = new Date().getTime() + } + + if (self.timeout && !self.timeoutTimer) { + var timeout = self.timeout < 0 ? 0 : self.timeout + // Set a timeout in memory - this block will throw if the server takes more + // than `timeout` to write the HTTP status and headers (corresponding to + // the on('response') event on the client). NB: this measures wall-clock + // time, not the time between bytes sent by the server. + self.timeoutTimer = setTimeout(function () { + var connectTimeout = self.req.socket && self.req.socket.readable === false + self.abort() + var e = new Error('ETIMEDOUT') + e.code = 'ETIMEDOUT' + e.connect = connectTimeout + self.emit('error', e) + }, timeout) + + if (self.req.setTimeout) { // only works on node 0.6+ + // Set an additional timeout on the socket, via the `setsockopt` syscall. 
+ // This timeout sets the amount of time to wait *between* bytes sent + // from the server, and may or may not correspond to the wall-clock time + // elapsed from the start of the request. + // + // In particular, it's useful for erroring if the server fails to send + // data halfway through streaming a response. + self.req.setTimeout(timeout, function () { + if (self.req) { + self.req.abort() + var e = new Error('ESOCKETTIMEDOUT') + e.code = 'ESOCKETTIMEDOUT' + e.connect = false + self.emit('error', e) + } + }) + } + } + + self.req.on('response', self.onRequestResponse.bind(self)) + self.req.on('error', self.onRequestError.bind(self)) + self.req.on('drain', function() { + self.emit('drain') + }) + self.req.on('socket', function(socket) { + self.emit('socket', socket) + }) + + self.emit('request', self.req) +} + +Request.prototype.onRequestError = function (error) { + var self = this + if (self._aborted) { + return + } + if (self.req && self.req._reusedSocket && error.code === 'ECONNRESET' + && self.agent.addRequestNoreuse) { + self.agent = { addRequest: self.agent.addRequestNoreuse.bind(self.agent) } + self.start() + self.req.end() + return + } + if (self.timeout && self.timeoutTimer) { + clearTimeout(self.timeoutTimer) + self.timeoutTimer = null + } + self.emit('error', error) +} + +Request.prototype.onRequestResponse = function (response) { + var self = this + debug('onRequestResponse', self.uri.href, response.statusCode, response.headers) + response.on('end', function() { + if (self.timing) { + self.elapsedTime += (new Date().getTime() - self.startTime) + debug('elapsed time', self.elapsedTime) + response.elapsedTime = self.elapsedTime + } + debug('response end', self.uri.href, response.statusCode, response.headers) + }) + + if (self._aborted) { + debug('aborted', self.uri.href) + response.resume() + return + } + + self.response = response + response.request = self + response.toJSON = responseToJSON + + // XXX This is different on 0.10, because SSL is strict by default + if (self.httpModule === https && + self.strictSSL && (!response.hasOwnProperty('socket') || + !response.socket.authorized)) { + debug('strict ssl error', self.uri.href) + var sslErr = response.hasOwnProperty('socket') ? response.socket.authorizationError : self.uri.href + ' does not support SSL' + self.emit('error', new Error('SSL Error: ' + sslErr)) + return + } + + // Save the original host before any redirect (if it changes, we need to + // remove any authorization headers). Also remember the case of the header + // name because lots of broken servers expect Host instead of host and we + // want the caller to be able to specify this. + self.originalHost = self.getHeader('host') + if (!self.originalHostHeaderName) { + self.originalHostHeaderName = self.hasHeader('host') + } + if (self.setHost) { + self.removeHeader('host') + } + if (self.timeout && self.timeoutTimer) { + clearTimeout(self.timeoutTimer) + self.timeoutTimer = null + } + + var targetCookieJar = (self._jar && self._jar.setCookie) ? self._jar : globalCookieJar + var addCookie = function (cookie) { + //set the cookie if it's domain in the href's domain. 
+ try { + targetCookieJar.setCookie(cookie, self.uri.href, {ignoreError: true}) + } catch (e) { + self.emit('error', e) + } + } + + response.caseless = caseless(response.headers) + + if (response.caseless.has('set-cookie') && (!self._disableCookies)) { + var headerName = response.caseless.has('set-cookie') + if (Array.isArray(response.headers[headerName])) { + response.headers[headerName].forEach(addCookie) + } else { + addCookie(response.headers[headerName]) + } + } + + if (self._redirect.onResponse(response)) { + return // Ignore the rest of the response + } else { + // Be a good stream and emit end when the response is finished. + // Hack to emit end on close because of a core bug that never fires end + response.on('close', function () { + if (!self._ended) { + self.response.emit('end') + } + }) + + response.on('end', function () { + self._ended = true + }) + + var noBody = function (code) { + return ( + self.method === 'HEAD' + // Informational + || (code >= 100 && code < 200) + // No Content + || code === 204 + // Not Modified + || code === 304 + ) + } + + var responseContent + if (self.gzip && !noBody(response.statusCode)) { + var contentEncoding = response.headers['content-encoding'] || 'identity' + contentEncoding = contentEncoding.trim().toLowerCase() + + if (contentEncoding === 'gzip') { + responseContent = zlib.createGunzip() + response.pipe(responseContent) + } else if (contentEncoding === 'deflate') { + responseContent = zlib.createInflate() + response.pipe(responseContent) + } else { + // Since previous versions didn't check for Content-Encoding header, + // ignore any invalid values to preserve backwards-compatibility + if (contentEncoding !== 'identity') { + debug('ignoring unrecognized Content-Encoding ' + contentEncoding) + } + responseContent = response + } + } else { + responseContent = response + } + + if (self.encoding) { + if (self.dests.length !== 0) { + console.error('Ignoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.') + } else if (responseContent.setEncoding) { + responseContent.setEncoding(self.encoding) + } else { + // Should only occur on node pre-v0.9.4 (joyent/node@9b5abe5) with + // zlib streams. + // If/When support for 0.9.4 is dropped, this should be unnecessary. 
+ responseContent = responseContent.pipe(stringstream(self.encoding)) + } + } + + if (self._paused) { + responseContent.pause() + } + + self.responseContent = responseContent + + self.emit('response', response) + + self.dests.forEach(function (dest) { + self.pipeDest(dest) + }) + + responseContent.on('data', function (chunk) { + self._destdata = true + self.emit('data', chunk) + }) + responseContent.on('end', function (chunk) { + self.emit('end', chunk) + }) + responseContent.on('error', function (error) { + self.emit('error', error) + }) + responseContent.on('close', function () {self.emit('close')}) + + if (self.callback) { + self.readResponseBody(response) + } + //if no callback + else { + self.on('end', function () { + if (self._aborted) { + debug('aborted', self.uri.href) + return + } + self.emit('complete', response) + }) + } + } + debug('finish init function', self.uri.href) +} + +Request.prototype.readResponseBody = function (response) { + var self = this + debug('reading response\'s body') + var buffer = bl() + , strings = [] + + self.on('data', function (chunk) { + if (Buffer.isBuffer(chunk)) { + buffer.append(chunk) + } else { + strings.push(chunk) + } + }) + self.on('end', function () { + debug('end event', self.uri.href) + if (self._aborted) { + debug('aborted', self.uri.href) + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the request. + // This can lead to leaky behavior if the user retains a reference to the request object. + buffer.destroy() + return + } + + if (buffer.length) { + debug('has body', self.uri.href, buffer.length) + if (self.encoding === null) { + // response.body = buffer + // can't move to this until https://github.com/rvagg/bl/issues/13 + response.body = buffer.slice() + } else { + response.body = buffer.toString(self.encoding) + } + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the Request. + // This can lead to leaky behavior if the user retains a reference to the request object. + buffer.destroy() + } else if (strings.length) { + // The UTF8 BOM [0xEF,0xBB,0xBF] is converted to [0xFE,0xFF] in the JS UTC16/UCS2 representation. + // Strip this value out when the encoding is set to 'utf8', as upstream consumers won't expect it and it breaks JSON.parse(). + if (self.encoding === 'utf8' && strings[0].length > 0 && strings[0][0] === '\uFEFF') { + strings[0] = strings[0].substring(1) + } + response.body = strings.join('') + } + + if (self._json) { + try { + response.body = JSON.parse(response.body, self._jsonReviver) + } catch (e) { + debug('invalid JSON received', self.uri.href) + } + } + debug('emitting complete', self.uri.href) + if (typeof response.body === 'undefined' && !self._json) { + response.body = self.encoding === null ? 
new Buffer(0) : '' + } + self.emit('complete', response, response.body) + }) +} + +Request.prototype.abort = function () { + var self = this + self._aborted = true + + if (self.req) { + self.req.abort() + } + else if (self.response) { + self.response.destroy() + } + + self.emit('abort') +} + +Request.prototype.pipeDest = function (dest) { + var self = this + var response = self.response + // Called after the response is received + if (dest.headers && !dest.headersSent) { + if (response.caseless.has('content-type')) { + var ctname = response.caseless.has('content-type') + if (dest.setHeader) { + dest.setHeader(ctname, response.headers[ctname]) + } + else { + dest.headers[ctname] = response.headers[ctname] + } + } + + if (response.caseless.has('content-length')) { + var clname = response.caseless.has('content-length') + if (dest.setHeader) { + dest.setHeader(clname, response.headers[clname]) + } else { + dest.headers[clname] = response.headers[clname] + } + } + } + if (dest.setHeader && !dest.headersSent) { + for (var i in response.headers) { + // If the response content is being decoded, the Content-Encoding header + // of the response doesn't represent the piped content, so don't pass it. + if (!self.gzip || i !== 'content-encoding') { + dest.setHeader(i, response.headers[i]) + } + } + dest.statusCode = response.statusCode + } + if (self.pipefilter) { + self.pipefilter(response, dest) + } +} + +Request.prototype.qs = function (q, clobber) { + var self = this + var base + if (!clobber && self.uri.query) { + base = self._qs.parse(self.uri.query) + } else { + base = {} + } + + for (var i in q) { + base[i] = q[i] + } + + var qs = self._qs.stringify(base) + + if (qs === '') { + return self + } + + self.uri = url.parse(self.uri.href.split('?')[0] + '?' + qs) + self.url = self.uri + self.path = self.uri.path + + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + return self +} +Request.prototype.form = function (form) { + var self = this + if (form) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.setHeader('content-type', 'application/x-www-form-urlencoded') + } + self.body = (typeof form === 'string') + ? 
self._qs.rfc3986(form.toString('utf8')) + : self._qs.stringify(form).toString('utf8') + return self + } + // create form-data object + self._form = new FormData() + self._form.on('error', function(err) { + err.message = 'form-data: ' + err.message + self.emit('error', err) + self.abort() + }) + return self._form +} +Request.prototype.multipart = function (multipart) { + var self = this + + self._multipart.onRequest(multipart) + + if (!self._multipart.chunked) { + self.body = self._multipart.body + } + + return self +} +Request.prototype.json = function (val) { + var self = this + + if (!self.hasHeader('accept')) { + self.setHeader('accept', 'application/json') + } + + if (typeof self.jsonReplacer === 'function') { + self._jsonReplacer = self.jsonReplacer + } + + self._json = true + if (typeof val === 'boolean') { + if (self.body !== undefined) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.body = safeStringify(self.body, self._jsonReplacer) + } else { + self.body = self._qs.rfc3986(self.body) + } + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + } else { + self.body = safeStringify(val, self._jsonReplacer) + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + + if (typeof self.jsonReviver === 'function') { + self._jsonReviver = self.jsonReviver + } + + return self +} +Request.prototype.getHeader = function (name, headers) { + var self = this + var result, re, match + if (!headers) { + headers = self.headers + } + Object.keys(headers).forEach(function (key) { + if (key.length !== name.length) { + return + } + re = new RegExp(name, 'i') + match = key.match(re) + if (match) { + result = headers[key] + } + }) + return result +} +Request.prototype.enableUnixSocket = function () { + // Get the socket & request paths from the URL + var unixParts = this.uri.path.split(':') + , host = unixParts[0] + , path = unixParts[1] + // Apply unix properties to request + this.socketPath = host + this.uri.pathname = path + this.uri.path = path + this.uri.host = host + this.uri.hostname = host + this.uri.isUnix = true +} + + +Request.prototype.auth = function (user, pass, sendImmediately, bearer) { + var self = this + + self._auth.onRequest(user, pass, sendImmediately, bearer) + + return self +} +Request.prototype.aws = function (opts, now) { + var self = this + + if (!now) { + self._aws = opts + return self + } + + if (opts.sign_version == 4 || opts.sign_version == '4') { + // use aws4 + var options = { + host: self.uri.host, + path: self.uri.path, + method: self.method, + headers: { + 'content-type': self.getHeader('content-type') || '' + }, + body: self.body + } + var signRes = aws4.sign(options, { + accessKeyId: opts.key, + secretAccessKey: opts.secret + }) + self.setHeader('authorization', signRes.headers.Authorization) + self.setHeader('x-amz-date', signRes.headers['X-Amz-Date']) + } + else { + // default: use aws-sign2 + var date = new Date() + self.setHeader('date', date.toUTCString()) + var auth = + { key: opts.key + , secret: opts.secret + , verb: self.method.toUpperCase() + , date: date + , contentType: self.getHeader('content-type') || '' + , md5: self.getHeader('content-md5') || '' + , amazonHeaders: aws2.canonicalizeHeaders(self.headers) + } + var path = self.uri.path + if (opts.bucket && path) { + auth.resource = '/' + opts.bucket + path + } else if (opts.bucket && !path) { + auth.resource = '/' + opts.bucket + } else if (!opts.bucket && path) { 
+ auth.resource = path + } else if (!opts.bucket && !path) { + auth.resource = '/' + } + auth.resource = aws2.canonicalizeResource(auth.resource) + self.setHeader('authorization', aws2.authorization(auth)) + } + + return self +} +Request.prototype.httpSignature = function (opts) { + var self = this + httpSignature.signRequest({ + getHeader: function(header) { + return self.getHeader(header, self.headers) + }, + setHeader: function(header, value) { + self.setHeader(header, value) + }, + method: self.method, + path: self.path + }, opts) + debug('httpSignature authorization', self.getHeader('authorization')) + + return self +} +Request.prototype.hawk = function (opts) { + var self = this + self.setHeader('Authorization', hawk.client.header(self.uri, self.method, opts).field) +} +Request.prototype.oauth = function (_oauth) { + var self = this + + self._oauth.onRequest(_oauth) + + return self +} + +Request.prototype.jar = function (jar) { + var self = this + var cookies + + if (self._redirect.redirectsFollowed === 0) { + self.originalCookieHeader = self.getHeader('cookie') + } + + if (!jar) { + // disable cookies + cookies = false + self._disableCookies = true + } else { + var targetCookieJar = (jar && jar.getCookieString) ? jar : globalCookieJar + var urihref = self.uri.href + //fetch cookie in the Specified host + if (targetCookieJar) { + cookies = targetCookieJar.getCookieString(urihref) + } + } + + //if need cookie and cookie is not empty + if (cookies && cookies.length) { + if (self.originalCookieHeader) { + // Don't overwrite existing Cookie header + self.setHeader('cookie', self.originalCookieHeader + '; ' + cookies) + } else { + self.setHeader('cookie', cookies) + } + } + self._jar = jar + return self +} + + +// Stream API +Request.prototype.pipe = function (dest, opts) { + var self = this + + if (self.response) { + if (self._destdata) { + self.emit('error', new Error('You cannot pipe after data has been emitted from the response.')) + } else if (self._ended) { + self.emit('error', new Error('You cannot pipe after the response has been ended.')) + } else { + stream.Stream.prototype.pipe.call(self, dest, opts) + self.pipeDest(dest) + return dest + } + } else { + self.dests.push(dest) + stream.Stream.prototype.pipe.call(self, dest, opts) + return dest + } +} +Request.prototype.write = function () { + var self = this + if (self._aborted) {return} + + if (!self._started) { + self.start() + } + if (self.req) { + return self.req.write.apply(self.req, arguments) + } +} +Request.prototype.end = function (chunk) { + var self = this + if (self._aborted) {return} + + if (chunk) { + self.write(chunk) + } + if (!self._started) { + self.start() + } + if (self.req) { + self.req.end() + } +} +Request.prototype.pause = function () { + var self = this + if (!self.responseContent) { + self._paused = true + } else { + self.responseContent.pause.apply(self.responseContent, arguments) + } +} +Request.prototype.resume = function () { + var self = this + if (!self.responseContent) { + self._paused = false + } else { + self.responseContent.resume.apply(self.responseContent, arguments) + } +} +Request.prototype.destroy = function () { + var self = this + if (!self._ended) { + self.end() + } else if (self.response) { + self.response.destroy() + } +} + +Request.defaultProxyHeaderWhiteList = + Tunnel.defaultProxyHeaderWhiteList.slice() + +Request.defaultProxyHeaderExclusiveList = + Tunnel.defaultProxyHeaderExclusiveList.slice() + +// Exports + +Request.prototype.toJSON = requestToJSON +module.exports = Request diff 
--git a/node_modules/fsevents/node_modules/rimraf/LICENSE b/node_modules/fsevents/node_modules/rimraf/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/rimraf/README.md b/node_modules/fsevents/node_modules/rimraf/README.md new file mode 100644 index 0000000..423b8cf --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. 
+ + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. + +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf <path> [<path> ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/node_modules/fsevents/node_modules/rimraf/bin.js b/node_modules/fsevents/node_modules/rimraf/bin.js new file mode 100755 index 0000000..1bd5a0d --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/bin.js @@ -0,0 +1,40 @@ +#!/usr/bin/env node + +var rimraf = require('./') + +var help = false +var dashdash = false +var args = process.argv.slice(2).filter(function(arg) { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else + return !!arg +}); + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + var log = help ? console.log : console.error + log('Usage: rimraf <path> [<path> ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + process.exit(help ?
0 : 1) +} else + go(0) + +function go (n) { + if (n >= args.length) + return + rimraf(args[n], function (er) { + if (er) + throw er + go(n+1) + }) +} diff --git a/node_modules/fsevents/node_modules/rimraf/package.json b/node_modules/fsevents/node_modules/rimraf/package.json new file mode 100644 index 0000000..a6b3b66 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/package.json @@ -0,0 +1,93 @@ +{ + "_args": [ + [ + "rimraf@~2.5.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "rimraf@>=2.5.0 <2.6.0", + "_id": "rimraf@2.5.3", + "_inCache": true, + "_installable": true, + "_location": "/rimraf", + "_nodeVersion": "4.4.4", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/rimraf-2.5.3.tgz_1467582915019_0.6380921453237534" + }, + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "3.10.2", + "_phantomChildren": {}, + "_requested": { + "name": "rimraf", + "raw": "rimraf@~2.5.0", + "rawSpec": "~2.5.0", + "scope": null, + "spec": ">=2.5.0 <2.6.0", + "type": "range" + }, + "_requiredBy": [ + "/fstream", + "/node-pre-gyp", + "/tar-pack" + ], + "_resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.3.tgz", + "_shasum": "6e5efdda4aa2f03417f6b2a574aec29f4b652705", + "_shrinkwrap": null, + "_spec": "rimraf@~2.5.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me/" + }, + "bin": { + "rimraf": "./bin.js" + }, + "bugs": { + "url": "https://github.com/isaacs/rimraf/issues" + }, + "dependencies": { + "glob": "^7.0.5" + }, + "description": "A deep deletion module for node (like `rm -rf`)", + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^6.1.1" + }, + "directories": {}, + "dist": { + "shasum": "6e5efdda4aa2f03417f6b2a574aec29f4b652705", + "tarball": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.3.tgz" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "gitHead": "7263a784e8f08d94dd6caf6ee934fceb525a6f3d", + "homepage": "https://github.com/isaacs/rimraf#readme", + "license": "ISC", + "main": "rimraf.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "rimraf", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/rimraf.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "2.5.3" +} diff --git a/node_modules/fsevents/node_modules/rimraf/rimraf.js b/node_modules/fsevents/node_modules/rimraf/rimraf.js new file mode 100644 index 0000000..db518a9 --- /dev/null +++ b/node_modules/fsevents/node_modules/rimraf/rimraf.js @@ -0,0 +1,343 @@ +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = require("assert") +var path = require("path") +var fs = require("fs") +var glob = require("glob") + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + options.disableGlob = 
options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if (isWindows && (er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, 666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? 
null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, 666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + options.rmdirSync(p, options) +} diff --git a/node_modules/fsevents/node_modules/semver/LICENSE b/node_modules/fsevents/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/semver/README.md b/node_modules/fsevents/node_modules/semver/README.md new file mode 100644 index 0000000..27b044e --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/README.md @@ -0,0 +1,346 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Usage + + $ npm install semver + + semver.valid('1.2.3') // '1.2.3' + semver.valid('a.b.c') // null + semver.clean(' =v1.2.3 ') // '1.2.3' + semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true + semver.gt('1.2.3', '9.8.7') // false + semver.lt('1.2.3', '9.8.7') // true + +As a command-line utility: + + $ semver -h + + SemVer 5.1.0 + + A JavaScript implementation of the http://semver.org/ specification + Copyright Isaac Z. Schlueter + + Usage: semver [options] [ [...]] + Prints valid versions sorted by SemVer precedence + + Options: + -r --range + Print versions that match the specified range. + + -i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + + --preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + + -l --loose + Interpret versions and ranges loosely + + Program exits successfully if any valid version satisfies + all supplied ranges, and prints all satisfying versions. + + If no satisfying versions are found, then exits failure. + + Versions are printed in ascending order, so supplying + multiple versions to the utility will just sort them. + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. 
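+
+As a minimal sketch of that stripping behavior (using the same `semver` module loaded in the Usage section above):
+
+```javascript
+// a leading "v" is accepted and dropped from the parsed result
+semver.valid('v1.2.3')   // '1.2.3'
+
+// clean() also strips a leading "=" and surrounding whitespace
+semver.clean('=1.2.3')   // '1.2.3'
+```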
+ +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +> semver.inc('1.2.3', 'prerelease', 'beta') +'1.2.4-beta.0' +``` + +command-line example: + +```shell +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```shell +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. 
+ +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero digit in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. 
+ +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `loose` boolean argument that, if +true, will be more forgiving about not-quite-valid semver strings. +The resulting output will always be 100% strict, of course. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. 
+* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. diff --git a/node_modules/fsevents/node_modules/semver/bin/semver b/node_modules/fsevents/node_modules/semver/bin/semver new file mode 100755 index 0000000..c5f2e85 --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/bin/semver @@ -0,0 +1,133 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. + +var argv = process.argv.slice(2) + , versions = [] + , range = [] + , gt = [] + , lt = [] + , eq = [] + , inc = null + , version = require("../package.json").version + , loose = false + , identifier = undefined + , semver = require("../semver") + , reverse = false + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var i = a.indexOf('=') + if (i !== -1) { + a = a.slice(0, i) + argv.unshift(a.slice(i + 1)) + } + switch (a) { + case "-rv": case "-rev": case "--rev": case "--reverse": + reverse = true + break + case "-l": case "--loose": + loose = true + break + case "-v": case "--version": + versions.push(argv.shift()) + break + case "-i": case "--inc": case "--increment": + switch (argv[0]) { + case "major": case "minor": case "patch": case "prerelease": + case "premajor": case "preminor": case "prepatch": + inc = argv.shift() + break + default: + inc = "patch" + break + } + break + case "--preid": + identifier = argv.shift() + break + case "-r": case "--range": + range.push(argv.shift()) + break + case "-h": case "--help": case "-?": + return help() + default: + versions.push(a) + break + } + } + + versions = versions.filter(function (v) { + return semver.valid(v, loose) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) + return failInc() + + for (var i = 0, l = range.length; i < l ; i ++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], loose) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error("--inc can only be used on a single version with no range") + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 
"rcompare" : "compare" + versions.sort(function (a, b) { + return semver[compare](a, b, loose) + }).map(function (v) { + return semver.clean(v, loose) + }).map(function (v) { + return inc ? semver.inc(v, inc, loose, identifier) : v + }).forEach(function (v,i,_) { console.log(v) }) +} + +function help () { + console.log(["SemVer " + version + ,"" + ,"A JavaScript implementation of the http://semver.org/ specification" + ,"Copyright Isaac Z. Schlueter" + ,"" + ,"Usage: semver [options] [ [...]]" + ,"Prints valid versions sorted by SemVer precedence" + ,"" + ,"Options:" + ,"-r --range " + ," Print versions that match the specified range." + ,"" + ,"-i --increment []" + ," Increment a version by the specified level. Level can" + ," be one of: major, minor, patch, premajor, preminor," + ," prepatch, or prerelease. Default level is 'patch'." + ," Only one version may be specified." + ,"" + ,"--preid " + ," Identifier to be used to prefix premajor, preminor," + ," prepatch or prerelease version increments." + ,"" + ,"-l --loose" + ," Interpret versions and ranges loosely" + ,"" + ,"Program exits successfully if any valid version satisfies" + ,"all supplied ranges, and prints all satisfying versions." + ,"" + ,"If no satisfying versions are found, then exits failure." + ,"" + ,"Versions are printed in ascending order, so supplying" + ,"multiple versions to the utility will just sort them." + ].join("\n")) +} diff --git a/node_modules/fsevents/node_modules/semver/package.json b/node_modules/fsevents/node_modules/semver/package.json new file mode 100644 index 0000000..074800d --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/package.json @@ -0,0 +1,86 @@ +{ + "_args": [ + [ + "semver@~5.2.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "semver@>=5.2.0 <5.3.0", + "_id": "semver@5.2.0", + "_inCache": true, + "_installable": true, + "_location": "/semver", + "_nodeVersion": "4.4.4", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/semver-5.2.0.tgz_1467136841238_0.2250258030835539" + }, + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "3.10.2", + "_phantomChildren": {}, + "_requested": { + "name": "semver", + "raw": "semver@~5.2.0", + "rawSpec": "~5.2.0", + "scope": null, + "spec": ">=5.2.0 <5.3.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/semver/-/semver-5.2.0.tgz", + "_shasum": "281995b80c1448209415ddbc4cf50c269cef55c5", + "_shrinkwrap": null, + "_spec": "semver@~5.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "bin": { + "semver": "./bin/semver" + }, + "bugs": { + "url": "https://github.com/npm/node-semver/issues" + }, + "dependencies": {}, + "description": "The semantic version parser used by npm.", + "devDependencies": { + "tap": "^2.0.0" + }, + "directories": {}, + "dist": { + "shasum": "281995b80c1448209415ddbc4cf50c269cef55c5", + "tarball": "https://registry.npmjs.org/semver/-/semver-5.2.0.tgz" + }, + "files": [ + "bin", + "range.bnf", + "semver.js" + ], + "gitHead": "f7fef36765c53ebe237bf415c3ea002f24aa5621", + "homepage": "https://github.com/npm/node-semver#readme", + "license": "ISC", + "main": "semver.js", + "maintainers": [ + { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + { + "name": "othiym23", + "email": "ogd@aoaioxxysz.net" + } + ], + "name": "semver", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": 
"git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "5.2.0" +} diff --git a/node_modules/fsevents/node_modules/semver/range.bnf b/node_modules/fsevents/node_modules/semver/range.bnf new file mode 100644 index 0000000..25ebd5c --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/fsevents/node_modules/semver/semver.js b/node_modules/fsevents/node_modules/semver/semver.js new file mode 100644 index 0000000..b2d7298 --- /dev/null +++ b/node_modules/fsevents/node_modules/semver/semver.js @@ -0,0 +1,1194 @@ +exports = module.exports = SemVer; + +// The debug function is excluded entirely from the minified version. +/* nomin */ var debug; +/* nomin */ if (typeof process === 'object' && + /* nomin */ process.env && + /* nomin */ process.env.NODE_DEBUG && + /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) + /* nomin */ debug = function() { + /* nomin */ var args = Array.prototype.slice.call(arguments, 0); + /* nomin */ args.unshift('SEMVER'); + /* nomin */ console.log.apply(console, args); + /* nomin */ }; +/* nomin */ else + /* nomin */ debug = function() {}; + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0'; + +var MAX_LENGTH = 256; +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; + +// The actual regexps go on exports.re +var re = exports.re = []; +var src = exports.src = []; +var R = 0; + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++; +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; +var NUMERICIDENTIFIERLOOSE = R++; +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; + + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++; +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; + + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++; +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')'; + +var MAINVERSIONLOOSE = R++; +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. 
+ +var PRERELEASEIDENTIFIER = R++; +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + +var PRERELEASEIDENTIFIERLOOSE = R++; +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++; +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; + +var PRERELEASELOOSE = R++; +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++; +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++; +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; + + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++; +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?'; + +src[FULL] = '^' + FULLPLAIN + '$'; + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?'; + +var LOOSE = R++; +src[LOOSE] = '^' + LOOSEPLAIN + '$'; + +var GTLT = R++; +src[GTLT] = '((?:<|>)?=?)'; + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++; +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; +var XRANGEIDENTIFIER = R++; +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; + +var XRANGEPLAIN = R++; +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGEPLAINLOOSE = R++; +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGE = R++; +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; +var XRANGELOOSE = R++; +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++; +src[LONETILDE] = '(?:~>?)'; + +var TILDETRIM = R++; +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); +var tildeTrimReplace = '$1~'; + +var TILDE = R++; +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; +var TILDELOOSE = R++; +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +var LONECARET = R++; +src[LONECARET] = '(?:\\^)'; + +var CARETTRIM = R++; +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); +var caretTrimReplace = '$1^'; + +var CARET = R++; +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; +var CARETLOOSE = R++; +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++; +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; +var COMPARATOR = R++; +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; + + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++; +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); +var comparatorTrimReplace = '$1$2$3'; + + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++; +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$'; + +var HYPHENRANGELOOSE = R++; +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$'; + +// Star ranges basically just allow anything at all. +var STAR = R++; +src[STAR] = '(<|>)?=?\\s*\\*'; + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]); + if (!re[i]) + re[i] = new RegExp(src[i]); +} + +exports.parse = parse; +function parse(version, loose) { + if (version instanceof SemVer) + return version; + + if (typeof version !== 'string') + return null; + + if (version.length > MAX_LENGTH) + return null; + + var r = loose ? re[LOOSE] : re[FULL]; + if (!r.test(version)) + return null; + + try { + return new SemVer(version, loose); + } catch (er) { + return null; + } +} + +exports.valid = valid; +function valid(version, loose) { + var v = parse(version, loose); + return v ? v.version : null; +} + + +exports.clean = clean; +function clean(version, loose) { + var s = parse(version.trim().replace(/^[=v]+/, ''), loose); + return s ? s.version : null; +} + +exports.SemVer = SemVer; + +function SemVer(version, loose) { + if (version instanceof SemVer) { + if (version.loose === loose) + return version; + else + version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); + } + + if (version.length > MAX_LENGTH) + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + + if (!(this instanceof SemVer)) + return new SemVer(version, loose); + + debug('SemVer', version, loose); + this.loose = loose; + var m = version.trim().match(loose ? 
re[LOOSE] : re[FULL]); + + if (!m) + throw new TypeError('Invalid Version: ' + version); + + this.raw = version; + + // these are actually numbers + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) + throw new TypeError('Invalid major version') + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) + throw new TypeError('Invalid minor version') + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) + throw new TypeError('Invalid patch version') + + // numberify any prerelease numeric ids + if (!m[4]) + this.prerelease = []; + else + this.prerelease = m[4].split('.').map(function(id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) + return num + } + return id; + }); + + this.build = m[5] ? m[5].split('.') : []; + this.format(); +} + +SemVer.prototype.format = function() { + this.version = this.major + '.' + this.minor + '.' + this.patch; + if (this.prerelease.length) + this.version += '-' + this.prerelease.join('.'); + return this.version; +}; + +SemVer.prototype.toString = function() { + return this.version; +}; + +SemVer.prototype.compare = function(other) { + debug('SemVer.compare', this.version, this.loose, other); + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return this.compareMain(other) || this.comparePre(other); +}; + +SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch); +}; + +SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) + return -1; + else if (!this.prerelease.length && other.prerelease.length) + return 1; + else if (!this.prerelease.length && !other.prerelease.length) + return 0; + + var i = 0; + do { + var a = this.prerelease[i]; + var b = other.prerelease[i]; + debug('prerelease compare', i, a, b); + if (a === undefined && b === undefined) + return 0; + else if (b === undefined) + return 1; + else if (a === undefined) + return -1; + else if (a === b) + continue; + else + return compareIdentifiers(a, b); + } while (++i); +}; + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function(release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc('pre', identifier); + break; + case 'preminor': + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc('pre', identifier); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0; + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) + this.major++; + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) + this.minor++; + this.patch = 0; + this.prerelease = []; + break; + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) + this.prerelease = [0]; + else { + var i = this.prerelease.length; + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++; + i = -2; + } + } + if (i === -1) // didn't increment anything + this.prerelease.push(0); + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) + this.prerelease = [identifier, 0]; + } else + this.prerelease = [identifier, 0]; + } + break; + + default: + throw new Error('invalid increment argument: ' + release); + } + this.format(); + this.raw = this.version; + return this; +}; + +exports.inc = inc; +function inc(version, release, loose, identifier) { + if (typeof(loose) === 'string') { + identifier = loose; + loose = undefined; + } + + try { + return new SemVer(version, loose).inc(release, identifier).version; + } catch (er) { + return null; + } +} + +exports.diff = diff; +function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse(version1); + var v2 = parse(version2); + if (v1.prerelease.length || v2.prerelease.length) { + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return 'pre'+key; + } + } + } + return 'prerelease'; + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return key; + } + } + } + } +} + +exports.compareIdentifiers = compareIdentifiers; + +var numeric = /^[0-9]+$/; +function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + + if (anum && bnum) { + a = +a; + b = +b; + } + + return (anum && !bnum) ? -1 : + (bnum && !anum) ? 1 : + a < b ? -1 : + a > b ? 
1 : + 0; +} + +exports.rcompareIdentifiers = rcompareIdentifiers; +function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); +} + +exports.major = major; +function major(a, loose) { + return new SemVer(a, loose).major; +} + +exports.minor = minor; +function minor(a, loose) { + return new SemVer(a, loose).minor; +} + +exports.patch = patch; +function patch(a, loose) { + return new SemVer(a, loose).patch; +} + +exports.compare = compare; +function compare(a, b, loose) { + return new SemVer(a, loose).compare(b); +} + +exports.compareLoose = compareLoose; +function compareLoose(a, b) { + return compare(a, b, true); +} + +exports.rcompare = rcompare; +function rcompare(a, b, loose) { + return compare(b, a, loose); +} + +exports.sort = sort; +function sort(list, loose) { + return list.sort(function(a, b) { + return exports.compare(a, b, loose); + }); +} + +exports.rsort = rsort; +function rsort(list, loose) { + return list.sort(function(a, b) { + return exports.rcompare(a, b, loose); + }); +} + +exports.gt = gt; +function gt(a, b, loose) { + return compare(a, b, loose) > 0; +} + +exports.lt = lt; +function lt(a, b, loose) { + return compare(a, b, loose) < 0; +} + +exports.eq = eq; +function eq(a, b, loose) { + return compare(a, b, loose) === 0; +} + +exports.neq = neq; +function neq(a, b, loose) { + return compare(a, b, loose) !== 0; +} + +exports.gte = gte; +function gte(a, b, loose) { + return compare(a, b, loose) >= 0; +} + +exports.lte = lte; +function lte(a, b, loose) { + return compare(a, b, loose) <= 0; +} + +exports.cmp = cmp; +function cmp(a, op, b, loose) { + var ret; + switch (op) { + case '===': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a === b; + break; + case '!==': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a !== b; + break; + case '': case '=': case '==': ret = eq(a, b, loose); break; + case '!=': ret = neq(a, b, loose); break; + case '>': ret = gt(a, b, loose); break; + case '>=': ret = gte(a, b, loose); break; + case '<': ret = lt(a, b, loose); break; + case '<=': ret = lte(a, b, loose); break; + default: throw new TypeError('Invalid operator: ' + op); + } + return ret; +} + +exports.Comparator = Comparator; +function Comparator(comp, loose) { + if (comp instanceof Comparator) { + if (comp.loose === loose) + return comp; + else + comp = comp.value; + } + + if (!(this instanceof Comparator)) + return new Comparator(comp, loose); + + debug('comparator', comp, loose); + this.loose = loose; + this.parse(comp); + + if (this.semver === ANY) + this.value = ''; + else + this.value = this.operator + this.semver.version; + + debug('comp', this); +} + +var ANY = {}; +Comparator.prototype.parse = function(comp) { + var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var m = comp.match(r); + + if (!m) + throw new TypeError('Invalid comparator: ' + comp); + + this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) + this.semver = ANY; + else + this.semver = new SemVer(m[2], this.loose); +}; + +Comparator.prototype.toString = function() { + return this.value; +}; + +Comparator.prototype.test = function(version) { + debug('Comparator.test', version, this.loose); + + if (this.semver === ANY) + return true; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + return cmp(version, this.operator, this.semver, this.loose); +}; + + +exports.Range = Range; +function Range(range, loose) { + if ((range instanceof Range) && range.loose === loose) + return range; + + if (!(this instanceof Range)) + return new Range(range, loose); + + this.loose = loose; + + // First, split based on boolean or || + this.raw = range; + this.set = range.split(/\s*\|\|\s*/).map(function(range) { + return this.parseRange(range.trim()); + }, this).filter(function(c) { + // throw out any that are not relevant for whatever reason + return c.length; + }); + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range); + } + + this.format(); +} + +Range.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(' ').trim(); + }).join('||').trim(); + return this.range; +}; + +Range.prototype.toString = function() { + return this.range; +}; + +Range.prototype.parseRange = function(range) { + var loose = this.loose; + range = range.trim(); + debug('range', range, loose); + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug('hyphen replace', range); + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); + debug('comparator trim', range, re[COMPARATORTRIM]); + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace); + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace); + + // normalize spaces + range = range.split(/\s+/).join(' '); + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var set = range.split(' ').map(function(comp) { + return parseComparator(comp, loose); + }).join(' ').split(/\s+/); + if (this.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function(comp) { + return !!comp.match(compRe); + }); + } + set = set.map(function(comp) { + return new Comparator(comp, loose); + }); + + return set; +}; + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators; +function toComparators(range, loose) { + return new Range(range, loose).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(' ').trim().split(' '); + }); +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
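+// e.g. (per the README desugaring tables above): "^1.2.3" -> ">=1.2.3 <2.0.0", "~1.2" -> ">=1.2.0 <1.3.0"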
+function parseComparator(comp, loose) { + debug('comp', comp); + comp = replaceCarets(comp, loose); + debug('caret', comp); + comp = replaceTildes(comp, loose); + debug('tildes', comp); + comp = replaceXRanges(comp, loose); + debug('xrange', comp); + comp = replaceStars(comp, loose); + debug('stars', comp); + return comp; +} + +function isX(id) { + return !id || id.toLowerCase() === 'x' || id === '*'; +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceTilde(comp, loose); + }).join(' '); +} + +function replaceTilde(comp, loose) { + var r = loose ? re[TILDELOOSE] : re[TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else if (pr) { + debug('replaceTilde pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + + debug('tilde return', ret); + return ret; + }); +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceCaret(comp, loose); + }).join(' '); +} + +function replaceCaret(comp, loose) { + debug('caret', comp, loose); + var r = loose ? re[CARETLOOSE] : re[CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) { + if (M === '0') + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'; + } else if (pr) { + debug('replaceCaret pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + (+M + 1) + '.0.0'; + } else { + debug('no pr'); + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0'; + } + + debug('caret return', ret); + return ret; + }); +} + +function replaceXRanges(comp, loose) { + debug('replaceXRanges', comp, loose); + return comp.split(/\s+/).map(function(comp) { + return replaceXRange(comp, loose); + }).join(' '); +} + +function replaceXRange(comp, loose) { + comp = comp.trim(); + var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + + if (gtlt === '=' && anyX) + gtlt = ''; + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0'; + } else { + // nothing is forbidden + ret = '*'; + } + } else if (gtlt && anyX) { + // replace X with 0 + if (xm) + m = 0; + if (xp) + p = 0; + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>='; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else if (xp) { + m = +m + 1; + p = 0; + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) + M = +M + 1 + else + m = +m + 1 + } + + ret = gtlt + M + '.' + m + '.' + p; + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + } + + debug('xRange return', ret); + + return ret; + }); +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars(comp, loose) { + debug('replaceStars', comp, loose); + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], ''); +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + + if (isX(fM)) + from = ''; + else if (isX(fm)) + from = '>=' + fM + '.0.0'; + else if (isX(fp)) + from = '>=' + fM + '.' + fm + '.0'; + else + from = '>=' + from; + + if (isX(tM)) + to = ''; + else if (isX(tm)) + to = '<' + (+tM + 1) + '.0.0'; + else if (isX(tp)) + to = '<' + tM + '.' + (+tm + 1) + '.0'; + else if (tpr) + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; + else + to = '<=' + to; + + return (from + ' ' + to).trim(); +} + + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function(version) { + if (!version) + return false; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version)) + return true; + } + return false; +}; + +function testSet(set, version) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) + return false; + } + + if (version.prerelease.length) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (var i = 0; i < set.length; i++) { + debug(set[i].semver); + if (set[i].semver === ANY) + continue; + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver; + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) + return true; + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false; + } + + return true; +} + +exports.satisfies = satisfies; +function satisfies(version, range, loose) { + try { + range = new Range(range, loose); + } catch (er) { + return false; + } + return range.test(version); +} + +exports.maxSatisfying = maxSatisfying; +function maxSatisfying(versions, range, loose) { + return versions.filter(function(version) { + return satisfies(version, range, loose); + }).sort(function(a, b) { + return rcompare(a, b, loose); + })[0] || null; +} + +exports.validRange = validRange; +function validRange(range, loose) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, loose).range || '*'; + } catch (er) { + return null; + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr; +function ltr(version, range, loose) { + return outside(version, range, '<', loose); +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr; +function gtr(version, range, loose) { + return outside(version, range, '>', loose); +} + +exports.outside = outside; +function outside(version, range, hilo, loose) { + version = new SemVer(version, loose); + range = new Range(range, loose); + + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case '>': + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = '>'; + ecomp = '>='; + break; + case '<': + gtfn = lt; + ltefn = gte; + ltfn = gt; + comp = '<'; + ecomp = '<='; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, loose)) { + return false; + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i]; + + var high = null; + var low = null; + + comparators.forEach(function(comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, loose)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, loose)) { + low = comparator; + } + }); + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false; + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; + } + } + return true; +} + +exports.prerelease = prerelease; +function prerelease(version, loose) { + var parsed = parse(version, loose); + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null; +} diff --git a/node_modules/fsevents/node_modules/set-blocking/CHANGELOG.md b/node_modules/fsevents/node_modules/set-blocking/CHANGELOG.md new file mode 100644 index 0000000..03bf591 --- /dev/null +++ b/node_modules/fsevents/node_modules/set-blocking/CHANGELOG.md @@ -0,0 +1,26 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ + +# [2.0.0](https://github.com/yargs/set-blocking/compare/v1.0.0...v2.0.0) (2016-05-17) + + +### Features + +* add an isTTY check ([#3](https://github.com/yargs/set-blocking/issues/3)) ([66ce277](https://github.com/yargs/set-blocking/commit/66ce277)) + + +### BREAKING CHANGES + +* stdio/stderr will not be set to blocking if isTTY === false + + + + +# 1.0.0 (2016-05-14) + + +### Features + +* implemented shim for stream._handle.setBlocking ([6bde0c0](https://github.com/yargs/set-blocking/commit/6bde0c0)) diff --git a/node_modules/fsevents/node_modules/set-blocking/LICENSE.txt b/node_modules/fsevents/node_modules/set-blocking/LICENSE.txt new file mode 100644 index 0000000..836440b --- /dev/null +++ b/node_modules/fsevents/node_modules/set-blocking/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/set-blocking/README.md b/node_modules/fsevents/node_modules/set-blocking/README.md new file mode 100644 index 0000000..e93b420 --- /dev/null +++ b/node_modules/fsevents/node_modules/set-blocking/README.md @@ -0,0 +1,31 @@ +# set-blocking + +[![Build Status](https://travis-ci.org/yargs/set-blocking.svg)](https://travis-ci.org/yargs/set-blocking) +[![NPM version](https://img.shields.io/npm/v/set-blocking.svg)](https://www.npmjs.com/package/set-blocking) +[![Coverage Status](https://coveralls.io/repos/yargs/set-blocking/badge.svg?branch=)](https://coveralls.io/r/yargs/set-blocking?branch=master) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +set blocking `stdio` and `stderr` ensuring that terminal output does not truncate. + +```js +const setBlocking = require('set-blocking') +setBlocking(true) +console.log(someLargeStringToOutput) +``` + +## Historical Context/Word of Warning + +This was created as a shim to address the bug discussed in [node #6456](https://github.com/nodejs/node/issues/6456). This bug crops up on +newer versions of Node.js (`0.12+`), truncating terminal output. + +You should be mindful of the side-effects caused by using `set-blocking`: + +* if your module sets blocking to `true`, it will effect other modules + consuming your library. In [yargs](https://github.com/yargs/yargs/blob/master/yargs.js#L653) we only call + `setBlocking(true)` once we already know we are about to call `process.exit(code)`. +* this patch will not apply to subprocesses spawned with `isTTY = true`, this is + the [default `spawn()` behavior](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). 
+ +## License + +ISC diff --git a/node_modules/fsevents/node_modules/set-blocking/index.js b/node_modules/fsevents/node_modules/set-blocking/index.js new file mode 100644 index 0000000..6f78774 --- /dev/null +++ b/node_modules/fsevents/node_modules/set-blocking/index.js @@ -0,0 +1,7 @@ +module.exports = function (blocking) { + [process.stdout, process.stderr].forEach(function (stream) { + if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') { + stream._handle.setBlocking(blocking) + } + }) +} diff --git a/node_modules/fsevents/node_modules/set-blocking/package.json b/node_modules/fsevents/node_modules/set-blocking/package.json new file mode 100644 index 0000000..c497587 --- /dev/null +++ b/node_modules/fsevents/node_modules/set-blocking/package.json @@ -0,0 +1,98 @@ +{ + "_args": [ + [ + "set-blocking@~2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/npmlog" + ] + ], + "_from": "set-blocking@>=2.0.0 <2.1.0", + "_id": "set-blocking@2.0.0", + "_inCache": true, + "_installable": true, + "_location": "/set-blocking", + "_nodeVersion": "0.12.7", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/set-blocking-2.0.0.tgz_1463525966987_0.5456729622092098" + }, + "_npmUser": { + "email": "ben@npmjs.com", + "name": "bcoe" + }, + "_npmVersion": "2.11.3", + "_phantomChildren": {}, + "_requested": { + "name": "set-blocking", + "raw": "set-blocking@~2.0.0", + "rawSpec": "~2.0.0", + "scope": null, + "spec": ">=2.0.0 <2.1.0", + "type": "range" + }, + "_requiredBy": [ + "/npmlog" + ], + "_resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "_shasum": "045f9782d011ae9a6803ddd382b24392b3d890f7", + "_shrinkwrap": null, + "_spec": "set-blocking@~2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/npmlog", + "author": { + "email": "ben@npmjs.com", + "name": "Ben Coe" + }, + "bugs": { + "url": "https://github.com/yargs/set-blocking/issues" + }, + "dependencies": {}, + "description": "set blocking stdio and stderr ensuring that terminal output does not truncate", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "mocha": "^2.4.5", + "nyc": "^6.4.4", + "standard": "^7.0.1", + "standard-version": "^2.2.1" + }, + "directories": {}, + "dist": { + "shasum": "045f9782d011ae9a6803ddd382b24392b3d890f7", + "tarball": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" + }, + "files": [ + "LICENSE.txt", + "index.js" + ], + "gitHead": "7eec10577b5fff264de477ba3b9d07f404946eff", + "homepage": "https://github.com/yargs/set-blocking#readme", + "keywords": [ + "blocking", + "flush", + "shim", + "stderr", + "stdio", + "terminal" + ], + "license": "ISC", + "main": "index.js", + "maintainers": [ + { + "name": "bcoe", + "email": "ben@npmjs.com" + } + ], + "name": "set-blocking", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/yargs/set-blocking.git" + }, + "scripts": { + "coverage": "nyc report --reporter=text-lcov | coveralls", + "pretest": "standard", + "test": "nyc mocha ./test/*.js", + "version": "standard-version" + }, + "version": "2.0.0" +} diff --git a/node_modules/fsevents/node_modules/signal-exit/CHANGELOG.md b/node_modules/fsevents/node_modules/signal-exit/CHANGELOG.md new file mode 100644 index 0000000..c12f834 --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/CHANGELOG.md @@ -0,0 +1,17 @@ +# Change Log + +All notable changes to this project will be 
documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +# [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) + + +### Bug Fixes + +* get our test suite running on Windows ([#23](https://github.com/tapjs/signal-exit/issues/23)) ([6f3eda8](https://github.com/tapjs/signal-exit/commit/6f3eda8)) +* hooking SIGPROF was interfering with profilers see [#21](https://github.com/tapjs/signal-exit/issues/21) ([#24](https://github.com/tapjs/signal-exit/issues/24)) ([1248a4c](https://github.com/tapjs/signal-exit/commit/1248a4c)) + + +### BREAKING CHANGES + +* signal-exit no longer wires into SIGPROF diff --git a/node_modules/fsevents/node_modules/signal-exit/LICENSE.txt b/node_modules/fsevents/node_modules/signal-exit/LICENSE.txt new file mode 100644 index 0000000..c7e2747 --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/signal-exit/README.md b/node_modules/fsevents/node_modules/signal-exit/README.md new file mode 100644 index 0000000..8ebccab --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/README.md @@ -0,0 +1,40 @@ +# signal-exit + +[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) +[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) +[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) +[![Windows Tests](https://img.shields.io/appveyor/ci/bcoe/signal-exit/master.svg?label=Windows%20Tests)](https://ci.appveyor.com/project/bcoe/signal-exit) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +When you want to fire an event no matter how a process exits: + +* reaching the end of execution. +* explicitly having `process.exit(code)` called. +* having `process.kill(pid, sig)` called. +* receiving a fatal signal from outside the process + +Use `signal-exit`. + +```js +var onExit = require('signal-exit') + +onExit(function (code, signal) { + console.log('process exited!') +}) +``` + +## API + +`var remove = onExit(function (code, signal) {}, options)` + +The return value of the function is a function that will remove the +handler. + +Note that the function *only* fires for signals if the signal would +cause the proces to exit. That is, there are no other listeners, and +it is a fatal signal. + +## Options + +* `alwaysLast`: Run this handler after any other signal or exit + handlers. This causes `process.emit` to be monkeypatched. 
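+
+A short sketch of the API above, combining the returned remover with the `alwaysLast` option:
+
+```js
+var onExit = require('signal-exit')
+
+// ordinary handler; keep the remover so the handler can be unregistered later
+var remove = onExit(function (code, signal) {
+  console.log('first handler', code, signal)
+})
+
+// runs after any other signal or exit handlers (process.emit is monkeypatched)
+onExit(function (code, signal) {
+  console.log('last handler', code, signal)
+}, { alwaysLast: true })
+
+// no longer interested in the first handler
+remove()
+```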
diff --git a/node_modules/fsevents/node_modules/signal-exit/index.js b/node_modules/fsevents/node_modules/signal-exit/index.js new file mode 100644 index 0000000..7dd8d91 --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/index.js @@ -0,0 +1,148 @@ +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = require('assert') +var signals = require('./signals.js') + +var EE = require('events') +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter +} + +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} +} + +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove +} + +module.exports.unload = unload +function unload () { + if (!loaded) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} + +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) +} + +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + process.kill(process.pid, sig) + } + } +}) + +module.exports.signals = function () { + return signals +} + +module.exports.load = load + +var loaded = false + +function load () { + if (loaded) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit +} + +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) +} + +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } +} diff --git a/node_modules/fsevents/node_modules/signal-exit/package.json b/node_modules/fsevents/node_modules/signal-exit/package.json new file mode 100644 index 0000000..3ea57b8 --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/package.json @@ -0,0 +1,98 @@ +{ + "_args": [ + [ + "signal-exit@^3.0.0", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "signal-exit@>=3.0.0 <4.0.0", + "_id": "signal-exit@3.0.0", + "_inCache": true, + "_installable": true, + "_location": "/signal-exit", + "_nodeVersion": "5.1.0", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/signal-exit-3.0.0.tgz_1465857346813_0.7961636525578797" + }, + "_npmUser": { + "email": "ben@npmjs.com", + "name": "bcoe" + }, + "_npmVersion": "3.3.12", + "_phantomChildren": {}, + "_requested": { + "name": "signal-exit", + "raw": "signal-exit@^3.0.0", + "rawSpec": "^3.0.0", + "scope": null, + "spec": ">=3.0.0 <4.0.0", + "type": "range" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz", + "_shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", + "_shrinkwrap": null, + "_spec": "signal-exit@^3.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "ben@npmjs.com", + "name": "Ben Coe" + }, + "bugs": { + "url": "https://github.com/tapjs/signal-exit/issues" + }, + "dependencies": {}, + "description": "when you want to fire an event no matter how a process exits.", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.2", + "nyc": "^6.4.4", + "standard": "^7.1.2", + "standard-version": "^2.3.0", + "tap": "^5.7.2" + }, + "directories": {}, + "dist": { + "shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", + "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz" + }, + "files": [ + "index.js", + "signals.js" + ], + "gitHead": "2bbec4e5d9f9cf1f7529b1c923d1b058e69ccf7f", + "homepage": "https://github.com/tapjs/signal-exit", + "keywords": [ + "exit", + "signal" + ], + "license": "ISC", + "main": "index.js", + "maintainers": [ + { + "name": "bcoe", + "email": "ben@npmjs.com" + }, + { + "name": "isaacs", + "email": "isaacs@npmjs.com" + } + ], + "name": "signal-exit", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/tapjs/signal-exit.git" + }, + "scripts": { + "coverage": "nyc report --reporter=text-lcov | coveralls", + "pretest": "standard", + "release": "standard-version", + 
"test": "tap --timeout=240 ./test/*.js --cov" + }, + "version": "3.0.0" +} diff --git a/node_modules/fsevents/node_modules/signal-exit/signals.js b/node_modules/fsevents/node_modules/signal-exit/signals.js new file mode 100644 index 0000000..bc6f97e --- /dev/null +++ b/node_modules/fsevents/node_modules/signal-exit/signals.js @@ -0,0 +1,52 @@ +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGBUS', + 'SIGFPE', + 'SIGHUP', + 'SIGILL', + 'SIGINT', + 'SIGSEGV', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/node_modules/fsevents/node_modules/sntp/.npmignore b/node_modules/fsevents/node_modules/sntp/.npmignore new file mode 100644 index 0000000..77ba16c --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/.npmignore @@ -0,0 +1,18 @@ +.idea +*.iml +npm-debug.log +dump.rdb +node_modules +results.tap +results.xml +npm-shrinkwrap.json +config.json +.DS_Store +*/.DS_Store +*/*/.DS_Store +._* +*/._* +*/*/._* +coverage.* +lib-cov + diff --git a/node_modules/fsevents/node_modules/sntp/.travis.yml b/node_modules/fsevents/node_modules/sntp/.travis.yml new file mode 100755 index 0000000..047f7e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/.travis.yml @@ -0,0 +1,5 @@ +language: node_js + +node_js: + - 0.10 + diff --git a/node_modules/fsevents/node_modules/sntp/LICENSE b/node_modules/fsevents/node_modules/sntp/LICENSE new file mode 100755 index 0000000..b0d8774 --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012-2014, Eran Hammer and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + * * * + +The complete list of contributors can be found at: https://github.com/hueniverse/sntp/graphs/contributors diff --git a/node_modules/fsevents/node_modules/sntp/Makefile b/node_modules/fsevents/node_modules/sntp/Makefile new file mode 100755 index 0000000..417fd93 --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/Makefile @@ -0,0 +1,9 @@ +test: + @node node_modules/lab/bin/lab +test-cov: + @node node_modules/lab/bin/lab -t 100 -m 3000 +test-cov-html: + @node node_modules/lab/bin/lab -r html -o coverage.html + +.PHONY: test test-cov test-cov-html + diff --git a/node_modules/fsevents/node_modules/sntp/README.md b/node_modules/fsevents/node_modules/sntp/README.md new file mode 100755 index 0000000..98a6e02 --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/README.md @@ -0,0 +1,68 @@ +# sntp + +An SNTP v4 client (RFC4330) for node. Simpy connects to the NTP or SNTP server requested and returns the server time +along with the roundtrip duration and clock offset. To adjust the local time to the NTP time, add the returned `t` offset +to the local time. + +[![Build Status](https://secure.travis-ci.org/hueniverse/sntp.png)](http://travis-ci.org/hueniverse/sntp) + +# Usage + +```javascript +var Sntp = require('sntp'); + +// All options are optional + +var options = { + host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org + port: 123, // Defaults to 123 (NTP) + resolveReference: true, // Default to false (not resolving) + timeout: 1000 // Defaults to zero (no timeout) +}; + +// Request server time + +Sntp.time(options, function (err, time) { + + if (err) { + console.log('Failed: ' + err.message); + process.exit(1); + } + + console.log('Local clock is off by: ' + time.t + ' milliseconds'); + process.exit(0); +}); +``` + +If an application needs to maintain continuous time synchronization, the module provides a stateful method for +querying the current offset only when the last one is too old (defaults to daily). + +```javascript +// Request offset once + +Sntp.offset(function (err, offset) { + + console.log(offset); // New (served fresh) + + // Request offset again + + Sntp.offset(function (err, offset) { + + console.log(offset); // Identical (served from cache) + }); +}); +``` + +To set a background offset refresh, start the interval and use the provided now() method. If for any reason the +client fails to obtain an up-to-date offset, the current system clock is used. 
+ +```javascript +var before = Sntp.now(); // System time without offset + +Sntp.start(function () { + + var now = Sntp.now(); // With offset + Sntp.stop(); +}); +``` + diff --git a/node_modules/fsevents/node_modules/sntp/examples/offset.js b/node_modules/fsevents/node_modules/sntp/examples/offset.js new file mode 100755 index 0000000..0303f6d --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/examples/offset.js @@ -0,0 +1,16 @@ +var Sntp = require('../lib'); + +// Request offset once + +Sntp.offset(function (err, offset) { + + console.log(offset); // New (served fresh) + + // Request offset again + + Sntp.offset(function (err, offset) { + + console.log(offset); // Identical (served from cache) + }); +}); + diff --git a/node_modules/fsevents/node_modules/sntp/examples/time.js b/node_modules/fsevents/node_modules/sntp/examples/time.js new file mode 100755 index 0000000..bd70d0e --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/examples/time.js @@ -0,0 +1,25 @@ +var Sntp = require('../lib'); + +// All options are optional + +var options = { + host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org + port: 123, // Defaults to 123 (NTP) + resolveReference: true, // Default to false (not resolving) + timeout: 1000 // Defaults to zero (no timeout) +}; + +// Request server time + +Sntp.time(options, function (err, time) { + + if (err) { + console.log('Failed: ' + err.message); + process.exit(1); + } + + console.log(time); + console.log('Local clock is off by: ' + time.t + ' milliseconds'); + process.exit(0); +}); + diff --git a/node_modules/fsevents/node_modules/sntp/index.js b/node_modules/fsevents/node_modules/sntp/index.js new file mode 100755 index 0000000..4cc88b3 --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/index.js @@ -0,0 +1 @@ +module.exports = require('./lib'); \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/sntp/lib/index.js b/node_modules/fsevents/node_modules/sntp/lib/index.js new file mode 100755 index 0000000..e91718b --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/lib/index.js @@ -0,0 +1,412 @@ +// Load modules + +var Dgram = require('dgram'); +var Dns = require('dns'); +var Hoek = require('hoek'); + + +// Declare internals + +var internals = {}; + + +exports.time = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + var settings = Hoek.clone(options); + settings.host = settings.host || 'pool.ntp.org'; + settings.port = settings.port || 123; + settings.resolveReference = settings.resolveReference || false; + + // Declare variables used by callback + + var timeoutId = 0; + var sent = 0; + + // Ensure callback is only called once + + var finish = function (err, result) { + + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = 0; + } + + socket.removeAllListeners(); + socket.once('error', internals.ignore); + socket.close(); + return callback(err, result); + }; + + finish = Hoek.once(finish); + + // Create UDP socket + + var socket = Dgram.createSocket('udp4'); + + socket.once('error', function (err) { + + return finish(err); + }); + + // Listen to incoming messages + + socket.on('message', function (buffer, rinfo) { + + var received = Date.now(); + + var message = new internals.NtpMessage(buffer); + if (!message.isValid) { + return finish(new Error('Invalid server response'), message); + } + + if (message.originateTimestamp !== sent) { + return finish(new Error('Wrong originate timestamp'), message); + } + + // Timestamp Name ID When 
Generated + // ------------------------------------------------------------ + // Originate Timestamp T1 time request sent by client + // Receive Timestamp T2 time request received by server + // Transmit Timestamp T3 time reply sent by server + // Destination Timestamp T4 time reply received by client + // + // The roundtrip delay d and system clock offset t are defined as: + // + // d = (T4 - T1) - (T3 - T2) t = ((T2 - T1) + (T3 - T4)) / 2 + + var T1 = message.originateTimestamp; + var T2 = message.receiveTimestamp; + var T3 = message.transmitTimestamp; + var T4 = received; + + message.d = (T4 - T1) - (T3 - T2); + message.t = ((T2 - T1) + (T3 - T4)) / 2; + message.receivedLocally = received; + + if (!settings.resolveReference || + message.stratum !== 'secondary') { + + return finish(null, message); + } + + // Resolve reference IP address + + Dns.reverse(message.referenceId, function (err, domains) { + + if (/* $lab:coverage:off$ */ !err /* $lab:coverage:on$ */) { + message.referenceHost = domains[0]; + } + + return finish(null, message); + }); + }); + + // Set timeout + + if (settings.timeout) { + timeoutId = setTimeout(function () { + + timeoutId = 0; + return finish(new Error('Timeout')); + }, settings.timeout); + } + + // Construct NTP message + + var message = new Buffer(48); + for (var i = 0; i < 48; i++) { // Zero message + message[i] = 0; + } + + message[0] = (0 << 6) + (4 << 3) + (3 << 0) // Set version number to 4 and Mode to 3 (client) + sent = Date.now(); + internals.fromMsecs(sent, message, 40); // Set transmit timestamp (returns as originate) + + // Send NTP request + + socket.send(message, 0, message.length, settings.port, settings.host, function (err, bytes) { + + if (err || + bytes !== 48) { + + return finish(err || new Error('Could not send entire message')); + } + }); +}; + + +internals.NtpMessage = function (buffer) { + + this.isValid = false; + + // Validate + + if (buffer.length !== 48) { + return; + } + + // Leap indicator + + var li = (buffer[0] >> 6); + switch (li) { + case 0: this.leapIndicator = 'no-warning'; break; + case 1: this.leapIndicator = 'last-minute-61'; break; + case 2: this.leapIndicator = 'last-minute-59'; break; + case 3: this.leapIndicator = 'alarm'; break; + } + + // Version + + var vn = ((buffer[0] & 0x38) >> 3); + this.version = vn; + + // Mode + + var mode = (buffer[0] & 0x7); + switch (mode) { + case 1: this.mode = 'symmetric-active'; break; + case 2: this.mode = 'symmetric-passive'; break; + case 3: this.mode = 'client'; break; + case 4: this.mode = 'server'; break; + case 5: this.mode = 'broadcast'; break; + case 0: + case 6: + case 7: this.mode = 'reserved'; break; + } + + // Stratum + + var stratum = buffer[1]; + if (stratum === 0) { + this.stratum = 'death'; + } + else if (stratum === 1) { + this.stratum = 'primary'; + } + else if (stratum <= 15) { + this.stratum = 'secondary'; + } + else { + this.stratum = 'reserved'; + } + + // Poll interval (msec) + + this.pollInterval = Math.round(Math.pow(2, buffer[2])) * 1000; + + // Precision (msecs) + + this.precision = Math.pow(2, buffer[3]) * 1000; + + // Root delay (msecs) + + var rootDelay = 256 * (256 * (256 * buffer[4] + buffer[5]) + buffer[6]) + buffer[7]; + this.rootDelay = 1000 * (rootDelay / 0x10000); + + // Root dispersion (msecs) + + this.rootDispersion = ((buffer[8] << 8) + buffer[9] + ((buffer[10] << 8) + buffer[11]) / Math.pow(2, 16)) * 1000; + + // Reference identifier + + this.referenceId = ''; + switch (this.stratum) { + case 'death': + case 'primary': + this.referenceId = 
String.fromCharCode(buffer[12]) + String.fromCharCode(buffer[13]) + String.fromCharCode(buffer[14]) + String.fromCharCode(buffer[15]); + break; + case 'secondary': + this.referenceId = '' + buffer[12] + '.' + buffer[13] + '.' + buffer[14] + '.' + buffer[15]; + break; + } + + // Reference timestamp + + this.referenceTimestamp = internals.toMsecs(buffer, 16); + + // Originate timestamp + + this.originateTimestamp = internals.toMsecs(buffer, 24); + + // Receive timestamp + + this.receiveTimestamp = internals.toMsecs(buffer, 32); + + // Transmit timestamp + + this.transmitTimestamp = internals.toMsecs(buffer, 40); + + // Validate + + if (this.version === 4 && + this.stratum !== 'reserved' && + this.mode === 'server' && + this.originateTimestamp && + this.receiveTimestamp && + this.transmitTimestamp) { + + this.isValid = true; + } + + return this; +}; + + +internals.toMsecs = function (buffer, offset) { + + var seconds = 0; + var fraction = 0; + + for (var i = 0; i < 4; ++i) { + seconds = (seconds * 256) + buffer[offset + i]; + } + + for (i = 4; i < 8; ++i) { + fraction = (fraction * 256) + buffer[offset + i]; + } + + return ((seconds - 2208988800 + (fraction / Math.pow(2, 32))) * 1000); +}; + + +internals.fromMsecs = function (ts, buffer, offset) { + + var seconds = Math.floor(ts / 1000) + 2208988800; + var fraction = Math.round((ts % 1000) / 1000 * Math.pow(2, 32)); + + buffer[offset + 0] = (seconds & 0xFF000000) >> 24; + buffer[offset + 1] = (seconds & 0x00FF0000) >> 16; + buffer[offset + 2] = (seconds & 0x0000FF00) >> 8; + buffer[offset + 3] = (seconds & 0x000000FF); + + buffer[offset + 4] = (fraction & 0xFF000000) >> 24; + buffer[offset + 5] = (fraction & 0x00FF0000) >> 16; + buffer[offset + 6] = (fraction & 0x0000FF00) >> 8; + buffer[offset + 7] = (fraction & 0x000000FF); +}; + + +// Offset singleton + +internals.last = { + offset: 0, + expires: 0, + host: '', + port: 0 +}; + + +exports.offset = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + var now = Date.now(); + var clockSyncRefresh = options.clockSyncRefresh || 24 * 60 * 60 * 1000; // Daily + + if (internals.last.offset && + internals.last.host === options.host && + internals.last.port === options.port && + now < internals.last.expires) { + + process.nextTick(function () { + + callback(null, internals.last.offset); + }); + + return; + } + + exports.time(options, function (err, time) { + + if (err) { + return callback(err, 0); + } + + internals.last = { + offset: Math.round(time.t), + expires: now + clockSyncRefresh, + host: options.host, + port: options.port + }; + + return callback(null, internals.last.offset); + }); +}; + + +// Now singleton + +internals.now = { + intervalId: 0 +}; + + +exports.start = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + if (internals.now.intervalId) { + process.nextTick(function () { + + callback(); + }); + + return; + } + + exports.offset(options, function (err, offset) { + + internals.now.intervalId = setInterval(function () { + + exports.offset(options, function () { }); + }, options.clockSyncRefresh || 24 * 60 * 60 * 1000); // Daily + + return callback(); + }); +}; + + +exports.stop = function () { + + if (!internals.now.intervalId) { + return; + } + + clearInterval(internals.now.intervalId); + internals.now.intervalId = 0; +}; + + +exports.isLive = function () { + + return !!internals.now.intervalId; +}; + + +exports.now = function () { + + var now = Date.now(); + 
if (!exports.isLive() || + now >= internals.last.expires) { + + return now; + } + + return now + internals.last.offset; +}; + + +internals.ignore = function () { + +}; diff --git a/node_modules/fsevents/node_modules/sntp/package.json b/node_modules/fsevents/node_modules/sntp/package.json new file mode 100644 index 0000000..11ec7bc --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/package.json @@ -0,0 +1,90 @@ +{ + "_args": [ + [ + "sntp@1.x.x", + "/Users/eshanker/Code/fsevents/node_modules/hawk" + ] + ], + "_from": "sntp@>=1.0.0 <2.0.0", + "_id": "sntp@1.0.9", + "_inCache": true, + "_installable": true, + "_location": "/sntp", + "_npmUser": { + "email": "eran@hueniverse.com", + "name": "hueniverse" + }, + "_npmVersion": "1.4.23", + "_phantomChildren": {}, + "_requested": { + "name": "sntp", + "raw": "sntp@1.x.x", + "rawSpec": "1.x.x", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/hawk" + ], + "_resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz", + "_shasum": "6541184cc90aeea6c6e7b35e2659082443c66198", + "_shrinkwrap": null, + "_spec": "sntp@1.x.x", + "_where": "/Users/eshanker/Code/fsevents/node_modules/hawk", + "author": { + "email": "eran@hammer.io", + "name": "Eran Hammer", + "url": "http://hueniverse.com" + }, + "bugs": { + "url": "https://github.com/hueniverse/sntp/issues" + }, + "contributors": [], + "dependencies": { + "hoek": "2.x.x" + }, + "description": "SNTP Client", + "devDependencies": { + "lab": "4.x.x" + }, + "directories": {}, + "dist": { + "shasum": "6541184cc90aeea6c6e7b35e2659082443c66198", + "tarball": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz" + }, + "engines": { + "node": ">=0.8.0" + }, + "gitHead": "ee2e35284f684609990681734d39010cd356d7da", + "homepage": "https://github.com/hueniverse/sntp", + "keywords": [ + "ntp", + "sntp", + "time" + ], + "licenses": [ + { + "type": "BSD", + "url": "http://github.com/hueniverse/sntp/raw/master/LICENSE" + } + ], + "main": "index", + "maintainers": [ + { + "name": "hueniverse", + "email": "eran@hueniverse.com" + } + ], + "name": "sntp", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/hueniverse/sntp.git" + }, + "scripts": { + "test": "make test-cov" + }, + "version": "1.0.9" +} diff --git a/node_modules/fsevents/node_modules/sntp/test/index.js b/node_modules/fsevents/node_modules/sntp/test/index.js new file mode 100755 index 0000000..f1d1cda --- /dev/null +++ b/node_modules/fsevents/node_modules/sntp/test/index.js @@ -0,0 +1,435 @@ +// Load modules + +var Dns = require('dns'); +var Dgram = require('dgram'); +var Lab = require('lab'); +var Sntp = require('../lib'); + + +// Declare internals + +var internals = {}; + + +// Test shortcuts + +var lab = exports.lab = Lab.script(); +var before = lab.before; +var after = lab.after; +var describe = lab.experiment; +var it = lab.test; +var expect = Lab.expect; + + +describe('SNTP', function () { + + describe('#time', function () { + + it('returns consistent result over multiple tries', function (done) { + + Sntp.time(function (err, time) { + + expect(err).to.not.exist; + expect(time).to.exist; + var t1 = time.t; + + Sntp.time(function (err, time) { + + expect(err).to.not.exist; + expect(time).to.exist; + var t2 = time.t; + expect(Math.abs(t1 - t2)).is.below(200); + done(); + }); + }); + }); + + it('resolves reference IP', function (done) { + + Sntp.time({ host: 'ntp.exnet.com', resolveReference: true }, function (err, time) { + + 
expect(err).to.not.exist; + expect(time).to.exist; + expect(time.referenceHost).to.exist; + done(); + }); + }); + + it('times out on no response', function (done) { + + Sntp.time({ port: 124, timeout: 100 }, function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Timeout'); + done(); + }); + }); + + it('errors on error event', { parallel: false }, function (done) { + + var orig = Dgram.createSocket; + Dgram.createSocket = function (type) { + + Dgram.createSocket = orig; + var socket = Dgram.createSocket(type); + setImmediate(function () { socket.emit('error', new Error('Fake')) }); + return socket; + }; + + Sntp.time(function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Fake'); + done(); + }); + }); + + it('errors on incorrect sent size', { parallel: false }, function (done) { + + var orig = Dgram.Socket.prototype.send; + Dgram.Socket.prototype.send = function (buf, offset, length, port, address, callback) { + + Dgram.Socket.prototype.send = orig; + return callback(null, 40); + }; + + Sntp.time(function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Could not send entire message'); + done(); + }); + }); + + it('times out on invalid host', function (done) { + + Sntp.time({ host: 'error', timeout: 10000 }, function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.contain('getaddrinfo'); + done(); + }); + }); + + it('fails on bad response buffer size', function (done) { + + var server = Dgram.createSocket('udp4'); + server.on('message', function (message, remote) { + var message = new Buffer(10); + server.send(message, 0, message.length, remote.port, remote.address, function (err, bytes) { + + server.close(); + }); + }); + + server.bind(49123); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + var messup = function (bytes) { + + var server = Dgram.createSocket('udp4'); + server.on('message', function (message, remote) { + + var message = new Buffer([ + 0x24, 0x01, 0x00, 0xe3, + 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + 0x41, 0x43, 0x54, 0x53, + 0xd4, 0xa8, 0x2d, 0xc7, + 0x1c, 0x5d, 0x49, 0x1b, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x67, 0xef, 0x9d, 0xb2, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x71, 0xed, 0xb5, 0xfb, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x71, 0xee, 0x6c, 0xc5 + ]); + + for (var i = 0, il = bytes.length; i < il; ++i) { + message[bytes[i][0]] = bytes[i][1]; + } + + server.send(message, 0, message.length, remote.port, remote.address, function (err, bytes) { + + server.close(); + }); + }); + + server.bind(49123); + }; + + it('fails on bad version', function (done) { + + messup([[0, (0 << 6) + (3 << 3) + (4 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.version).to.equal(3); + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad originateTimestamp', function (done) { + + messup([[24, 0x83], [25, 0xaa], [26, 0x7e], [27, 0x80], [28, 0], [29, 0], [30, 0], [31, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad receiveTimestamp', function (done) { + + messup([[32, 0x83], [33, 0xaa], [34, 0x7e], 
[35, 0x80], [36, 0], [37, 0], [38, 0], [39, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad originate timestamp and alarm li', function (done) { + + messup([[0, (3 << 6) + (4 << 3) + (4 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Wrong originate timestamp'); + expect(time.leapIndicator).to.equal('alarm'); + done(); + }); + }); + + it('returns time with death stratum and last61 li', function (done) { + + messup([[0, (1 << 6) + (4 << 3) + (4 << 0)], [1, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(time.stratum).to.equal('death'); + expect(time.leapIndicator).to.equal('last-minute-61'); + done(); + }); + }); + + it('returns time with reserved stratum and last59 li', function (done) { + + messup([[0, (2 << 6) + (4 << 3) + (4 << 0)], [1, 0x1f]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(time.stratum).to.equal('reserved'); + expect(time.leapIndicator).to.equal('last-minute-59'); + done(); + }); + }); + + it('fails on bad mode (symmetric-active)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (1 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('symmetric-active'); + done(); + }); + }); + + it('fails on bad mode (symmetric-passive)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (2 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('symmetric-passive'); + done(); + }); + }); + + it('fails on bad mode (client)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (3 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('client'); + done(); + }); + }); + + it('fails on bad mode (broadcast)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (5 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('broadcast'); + done(); + }); + }); + + it('fails on bad mode (reserved)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (6 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('reserved'); + done(); + }); + }); + }); + + describe('#offset', function () { + + it('gets the current offset', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + done(); + }); + }); + + it('gets the current offset from cache', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({}, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.equal(offset1); + done(); + }); + }); + }); + + it('gets the new offset on different server', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({ host: 'nist1-sj.ustiming.org' }, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(offset1); + done(); + }); + }); 
+ }); + + it('gets the new offset on different server', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({ port: 123 }, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(offset1); + done(); + }); + }); + }); + + it('fails getting the current offset on invalid server', function (done) { + + Sntp.offset({ host: 'error' }, function (err, offset) { + + expect(err).to.exist; + expect(offset).to.equal(0); + done(); + }); + }); + }); + + describe('#now', function () { + + it('starts auto-sync, gets now, then stops', function (done) { + + Sntp.stop(); + + var before = Sntp.now(); + expect(before).to.equal(Date.now()); + + Sntp.start(function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + Sntp.stop(); + + done(); + }); + }); + + it('starts twice', function (done) { + + Sntp.start(function () { + + Sntp.start(function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + Sntp.stop(); + + done(); + }); + }); + }); + + it('starts auto-sync, gets now, waits, gets again after timeout', function (done) { + + Sntp.stop(); + + var before = Sntp.now(); + expect(before).to.equal(Date.now()); + + Sntp.start({ clockSyncRefresh: 100 }, function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + expect(now).to.equal(Sntp.now()); + + setTimeout(function () { + + expect(Sntp.now()).to.not.equal(now); + Sntp.stop(); + done(); + }, 110); + }); + }); + }); +}); + diff --git a/node_modules/fsevents/node_modules/sshpk/.npmignore b/node_modules/fsevents/node_modules/sshpk/.npmignore new file mode 100644 index 0000000..8000b59 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/.npmignore @@ -0,0 +1,9 @@ +.gitmodules +deps +docs +Makefile +node_modules +test +tools +coverage +man/src diff --git a/node_modules/fsevents/node_modules/sshpk/.travis.yml b/node_modules/fsevents/node_modules/sshpk/.travis.yml new file mode 100644 index 0000000..c3394c2 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/.travis.yml @@ -0,0 +1,11 @@ +language: node_js +node_js: + - "5.10" + - "4.4" + - "4.1" + - "0.12" + - "0.10" +before_install: + - "make check" +after_success: + - '[ "${TRAVIS_NODE_VERSION}" = "4.4" ] && make codecovio' diff --git a/node_modules/fsevents/node_modules/sshpk/LICENSE b/node_modules/fsevents/node_modules/sshpk/LICENSE new file mode 100644 index 0000000..f6d947d --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/sshpk/README.md b/node_modules/fsevents/node_modules/sshpk/README.md new file mode 100644 index 0000000..d0fa88d --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/README.md @@ -0,0 +1,455 @@ +sshpk +========= + +Parse, convert, fingerprint and use SSH keys (both public and private) in pure +node -- no `ssh-keygen` or other external dependencies. + +Supports RSA, DSA, ECDSA (nistp-\*) and ED25519 key types, in PEM (PKCS#1, +PKCS#8) and OpenSSH formats. + +This library has been extracted from +[`node-http-signature`](https://github.com/joyent/node-http-signature) +(work by [Mark Cavage](https://github.com/mcavage) and +[Dave Eddy](https://github.com/bahamas10)) and +[`node-ssh-fingerprint`](https://github.com/bahamas10/node-ssh-fingerprint) +(work by Dave Eddy), with additions (including ECDSA support) by +[Alex Wilson](https://github.com/arekinath). + +Install +------- + +``` +npm install sshpk +``` + +Examples +-------- + +```js +var sshpk = require('sshpk'); + +var fs = require('fs'); + +/* Read in an OpenSSH-format public key */ +var keyPub = fs.readFileSync('id_rsa.pub'); +var key = sshpk.parseKey(keyPub, 'ssh'); + +/* Get metadata about the key */ +console.log('type => %s', key.type); +console.log('size => %d bits', key.size); +console.log('comment => %s', key.comment); + +/* Compute key fingerprints, in new OpenSSH (>6.7) format, and old MD5 */ +console.log('fingerprint => %s', key.fingerprint().toString()); +console.log('old-style fingerprint => %s', key.fingerprint('md5').toString()); +``` + +Example output: + +``` +type => rsa +size => 2048 bits +comment => foo@foo.com +fingerprint => SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w +old-style fingerprint => a0:c8:ad:6c:32:9a:32:fa:59:cc:a9:8c:0a:0d:6e:bd +``` + +More examples: converting between formats: + +```js +/* Read in a PEM public key */ +var keyPem = fs.readFileSync('id_rsa.pem'); +var key = sshpk.parseKey(keyPem, 'pem'); + +/* Convert to PEM PKCS#8 public key format */ +var pemBuf = key.toBuffer('pkcs8'); + +/* Convert to SSH public key format (and return as a string) */ +var sshKey = key.toString('ssh'); +``` + +Signing and verifying: + +```js +/* Read in an OpenSSH/PEM *private* key */ +var keyPriv = fs.readFileSync('id_ecdsa'); +var key = sshpk.parsePrivateKey(keyPriv, 'pem'); + +var data = 'some data'; + +/* Sign some data with the key */ +var s = key.createSign('sha1'); +s.update(data); +var signature = s.sign(); + +/* Now load the public key (could also use just key.toPublic()) */ +var keyPub = fs.readFileSync('id_ecdsa.pub'); +key = sshpk.parseKey(keyPub, 'ssh'); + +/* Make a crypto.Verifier with this key */ +var v = key.createVerify('sha1'); +v.update(data); +var valid = v.verify(signature); +/* => true! */ +``` + +Matching fingerprints with keys: + +```js +var fp = sshpk.parseFingerprint('SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w'); + +var keys = [sshpk.parseKey(...), sshpk.parseKey(...), ...]; + +keys.forEach(function (key) { + if (fp.matches(key)) + console.log('found it!'); +}); +``` + +Usage +----- + +## Public keys + +### `parseKey(data[, format = 'auto'[, options]])` + +Parses a key from a given data format and returns a new `Key` object. 
+ +Parameters + +- `data` -- Either a Buffer or String, containing the key +- `format` -- String name of format to use, valid options are: + - `auto`: choose automatically from all below + - `pem`: supports both PKCS#1 and PKCS#8 + - `ssh`: standard OpenSSH format, + - `pkcs1`, `pkcs8`: variants of `pem` + - `rfc4253`: raw OpenSSH wire format + - `openssh`: new post-OpenSSH 6.5 internal format, produced by + `ssh-keygen -o` +- `options` -- Optional Object, extra options, with keys: + - `filename` -- Optional String, name for the key being parsed + (eg. the filename that was opened). Used to generate + Error messages + - `passphrase` -- Optional String, encryption passphrase used to decrypt an + encrypted PEM file + +### `Key.isKey(obj)` + +Returns `true` if the given object is a valid `Key` object created by a version +of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Key#type` + +String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. + +### `Key#size` + +Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; +for ECDSA this is the bit size of the curve in use. + +### `Key#comment` + +Optional string, a key comment used by some formats (eg the `ssh` format). + +### `Key#curve` + +Only present if `this.type === 'ecdsa'`, string containing the name of the +named curve used with this key. Possible values include `nistp256`, `nistp384` +and `nistp521`. + +### `Key#toBuffer([format = 'ssh'])` + +Convert the key into a given data format and return the serialized key as +a Buffer. + +Parameters + +- `format` -- String name of format to use, for valid options see `parseKey()` + +### `Key#toString([format = 'ssh])` + +Same as `this.toBuffer(format).toString()`. + +### `Key#fingerprint([algorithm = 'sha256'])` + +Creates a new `Fingerprint` object representing this Key's fingerprint. + +Parameters + +- `algorithm` -- String name of hash algorithm to use, valid options are `md5`, + `sha1`, `sha256`, `sha384`, `sha512` + +### `Key#createVerify([hashAlgorithm])` + +Creates a `crypto.Verifier` specialized to use this Key (and the correct public +key algorithm to match it). The returned Verifier has the same API as a regular +one, except that the `verify()` function takes only the target signature as an +argument. + +Parameters + +- `hashAlgorithm` -- optional String name of hash algorithm to use, any + supported by OpenSSL are valid, usually including + `sha1`, `sha256`. + +`v.verify(signature[, format])` Parameters + +- `signature` -- either a Signature object, or a Buffer or String +- `format` -- optional String, name of format to interpret given String with. + Not valid if `signature` is a Signature or Buffer. + +### `Key#createDiffieHellman()` +### `Key#createDH()` + +Creates a Diffie-Hellman key exchange object initialized with this key and all +necessary parameters. This has the same API as a `crypto.DiffieHellman` +instance, except that functions take `Key` and `PrivateKey` objects as +arguments, and return them where indicated for. + +This is only valid for keys belonging to a cryptosystem that supports DHE +or a close analogue (i.e. `dsa`, `ecdsa` and `curve25519` keys). An attempt +to call this function on other keys will yield an `Error`. + +## Private keys + +### `parsePrivateKey(data[, format = 'auto'[, options]])` + +Parses a private key from a given data format and returns a new +`PrivateKey` object. 
+ +Parameters + +- `data` -- Either a Buffer or String, containing the key +- `format` -- String name of format to use, valid options are: + - `auto`: choose automatically from all below + - `pem`: supports both PKCS#1 and PKCS#8 + - `ssh`, `openssh`: new post-OpenSSH 6.5 internal format, produced by + `ssh-keygen -o` + - `pkcs1`, `pkcs8`: variants of `pem` + - `rfc4253`: raw OpenSSH wire format +- `options` -- Optional Object, extra options, with keys: + - `filename` -- Optional String, name for the key being parsed + (eg. the filename that was opened). Used to generate + Error messages + - `passphrase` -- Optional String, encryption passphrase used to decrypt an + encrypted PEM file + +### `PrivateKey.isPrivateKey(obj)` + +Returns `true` if the given object is a valid `PrivateKey` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `PrivateKey#type` + +String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. + +### `PrivateKey#size` + +Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; +for ECDSA this is the bit size of the curve in use. + +### `PrivateKey#curve` + +Only present if `this.type === 'ecdsa'`, string containing the name of the +named curve used with this key. Possible values include `nistp256`, `nistp384` +and `nistp521`. + +### `PrivateKey#toBuffer([format = 'pkcs1'])` + +Convert the key into a given data format and return the serialized key as +a Buffer. + +Parameters + +- `format` -- String name of format to use, valid options are listed under + `parsePrivateKey`. Note that ED25519 keys default to `openssh` + format instead (as they have no `pkcs1` representation). + +### `PrivateKey#toString([format = 'pkcs1'])` + +Same as `this.toBuffer(format).toString()`. + +### `PrivateKey#toPublic()` + +Extract just the public part of this private key, and return it as a `Key` +object. + +### `PrivateKey#fingerprint([algorithm = 'sha256'])` + +Same as `this.toPublic().fingerprint()`. + +### `PrivateKey#createVerify([hashAlgorithm])` + +Same as `this.toPublic().createVerify()`. + +### `PrivateKey#createSign([hashAlgorithm])` + +Creates a `crypto.Sign` specialized to use this PrivateKey (and the correct +key algorithm to match it). The returned Signer has the same API as a regular +one, except that the `sign()` function takes no arguments, and returns a +`Signature` object. + +Parameters + +- `hashAlgorithm` -- optional String name of hash algorithm to use, any + supported by OpenSSL are valid, usually including + `sha1`, `sha256`. + +`v.sign()` Parameters + +- none + +### `PrivateKey#derive(newType)` + +Derives a related key of type `newType` from this key. Currently this is +only supported to change between `ed25519` and `curve25519` keys which are +stored with the same private key (but usually distinct public keys in order +to avoid degenerate keys that lead to a weak Diffie-Hellman exchange). + +Parameters + +- `newType` -- String, type of key to derive, either `ed25519` or `curve25519` + +## Fingerprints + +### `parseFingerprint(fingerprint[, algorithms])` + +Pre-parses a fingerprint, creating a `Fingerprint` object that can be used to +quickly locate a key by using the `Fingerprint#matches` function. + +Parameters + +- `fingerprint` -- String, the fingerprint value, in any supported format +- `algorithms` -- Optional list of strings, names of hash algorithms to limit + support to. If `fingerprint` uses a hash algorithm not on + this list, throws `InvalidAlgorithmError`. 
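+
+For example, a small sketch restricting the accepted hash algorithms (reusing the example fingerprint and `id_rsa.pub` from the examples above):
+
+```js
+var sshpk = require('sshpk');
+var fs = require('fs');
+
+/* Accept only SHA-256 fingerprints; an md5-format fingerprint here would throw InvalidAlgorithmError */
+var fp = sshpk.parseFingerprint(
+    'SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w', ['sha256']);
+
+var key = sshpk.parseKey(fs.readFileSync('id_rsa.pub'), 'ssh');
+if (fp.matches(key))
+    console.log('fingerprint matches the key');
+```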
+ +### `Fingerprint.isFingerprint(obj)` + +Returns `true` if the given object is a valid `Fingerprint` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Fingerprint#toString([format])` + +Returns a fingerprint as a string, in the given format. + +Parameters + +- `format` -- Optional String, format to use, valid options are `hex` and + `base64`. If this `Fingerprint` uses the `md5` algorithm, the + default format is `hex`. Otherwise, the default is `base64`. + +### `Fingerprint#matches(key)` + +Verifies whether or not this `Fingerprint` matches a given `Key`. This function +uses double-hashing to avoid leaking timing information. Returns a boolean. + +Parameters + +- `key` -- a `Key` object, the key to match this fingerprint against + +## Signatures + +### `parseSignature(signature, algorithm, format)` + +Parses a signature in a given format, creating a `Signature` object. Useful +for converting between the SSH and ASN.1 (PKCS/OpenSSL) signature formats, and +also returned as output from `PrivateKey#createSign().sign()`. + +A Signature object can also be passed to a verifier produced by +`Key#createVerify()` and it will automatically be converted internally into the +correct format for verification. + +Parameters + +- `signature` -- a Buffer (binary) or String (base64), data of the actual + signature in the given format +- `algorithm` -- a String, name of the algorithm to be used, possible values + are `rsa`, `dsa`, `ecdsa` +- `format` -- a String, either `asn1` or `ssh` + +### `Signature.isSignature(obj)` + +Returns `true` if the given object is a valid `Signature` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Signature#toBuffer([format = 'asn1'])` + +Converts a Signature to the given format and returns it as a Buffer. + +Parameters + +- `format` -- a String, either `asn1` or `ssh` + +### `Signature#toString([format = 'asn1'])` + +Same as `this.toBuffer(format).toString('base64')`. + +Errors +------ + +### `InvalidAlgorithmError` + +The specified algorithm is not valid, either because it is not supported, or +because it was not included on a list of allowed algorithms. + +Thrown by `Fingerprint.parse`, `Key#fingerprint`. + +Properties + +- `algorithm` -- the algorithm that could not be validated + +### `FingerprintFormatError` + +The fingerprint string given could not be parsed as a supported fingerprint +format, or the specified fingerprint format is invalid. + +Thrown by `Fingerprint.parse`, `Fingerprint#toString`. + +Properties + +- `fingerprint` -- if caused by a fingerprint, the string value given +- `format` -- if caused by an invalid format specification, the string value given + +### `KeyParseError` + +The key data given could not be parsed as a valid key. + +Properties + +- `keyName` -- `filename` that was given to `Key#parse` +- `format` -- the `format` that was trying to parse the key +- `innerErr` -- the inner Error thrown by the format parser + +### `KeyEncryptedError` + +The key is encrypted with a symmetric key (ie, it is password protected). The +parsing operation would succeed if it was given the `passphrase` option. 
+ +Properties + +- `keyName` -- `filename` that was given to `Key#parse` +- `format` -- the `format` that was trying to parse the key (currently can only + be `"pem"`) + +Friends of sshpk +---------------- + + * [`sshpk-agent`](https://github.com/arekinath/node-sshpk-agent) is a library + for speaking the `ssh-agent` protocol from node.js, which uses `sshpk` diff --git a/node_modules/fsevents/node_modules/sshpk/bin/sshpk-conv b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-conv new file mode 100755 index 0000000..a1205a4 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-conv @@ -0,0 +1,195 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. + +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); +var tty = require('tty'); +var readline = require('readline'); +var getPassword = require('getpass').getPass; + +var options = [ + { + names: ['outformat', 't'], + type: 'string', + help: 'Output format' + }, + { + names: ['informat', 'T'], + type: 'string', + help: 'Input format' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input file name (default stdin)' + }, + { + names: ['out', 'o'], + type: 'string', + help: 'Output file name (default stdout)' + }, + { + names: ['private', 'p'], + type: 'bool', + help: 'Produce a private key as output' + }, + { + names: ['derive', 'd'], + type: 'string', + help: 'Output a new key derived from this one, with given algo' + }, + { + names: ['identify', 'i'], + type: 'bool', + help: 'Print key metadata instead of converting' + }, + { + names: ['comment', 'c'], + type: 'string', + help: 'Set key comment, if output format supports' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-conv: error: %s', e.message); + process.exit(1); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-conv: converts between SSH key formats\n'); + console.error(help); + console.error('\navailable formats:'); + console.error(' - pem, pkcs1 eg id_rsa'); + console.error(' - ssh eg id_rsa.pub'); + console.error(' - pkcs8 format you want for openssl'); + console.error(' - openssh like output of ssh-keygen -o'); + console.error(' - rfc4253 raw OpenSSH wire format'); + process.exit(1); + } + + /* + * Key derivation can only be done on private keys, so use of the -d + * option necessarily implies -p. 
+ */ + if (opts.derive) + opts.private = true; + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = path.basename(inFilePath); + + try { + if (inFilePath) { + fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-conv: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var outFile = process.stdout; + + try { + if (opts.out && !opts.identify) { + fs.accessSync(path.dirname(opts.out), fs.W_OK); + outFile = fs.createWriteStream(opts.out); + } + } catch (e) { + console.error('sshpk-conv: error opening output file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var bufs = []; + inFile.on('readable', function () { + var data; + while ((data = inFile.read())) + bufs.push(data); + }); + var parseOpts = {}; + parseOpts.filename = inFileName; + inFile.on('end', function processKey() { + var buf = Buffer.concat(bufs); + var fmt = 'auto'; + if (opts.informat) + fmt = opts.informat; + var f = sshpk.parseKey; + if (opts.private) + f = sshpk.parsePrivateKey; + try { + var key = f(buf, fmt, parseOpts); + } catch (e) { + if (e.name === 'KeyEncryptedError') { + getPassword(function (err, pw) { + parseOpts.passphrase = pw; + processKey(); + }); + return; + } + console.error('sshpk-conv: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (opts.derive) + key = key.derive(opts.derive); + + if (opts.comment) + key.comment = opts.comment; + + if (!opts.identify) { + fmt = undefined; + if (opts.outformat) + fmt = opts.outformat; + outFile.write(key.toBuffer(fmt)); + if (fmt === 'ssh' || + (!opts.private && fmt === undefined)) + outFile.write('\n'); + outFile.once('drain', function () { + process.exit(0); + }); + } else { + var kind = 'public'; + if (sshpk.PrivateKey.isPrivateKey(key)) + kind = 'private'; + console.log('%s: a %d bit %s %s key', inFileName, + key.size, key.type.toUpperCase(), kind); + if (key.type === 'ecdsa') + console.log('ECDSA curve: %s', key.curve); + if (key.comment) + console.log('Comment: %s', key.comment); + console.log('Fingerprint:'); + console.log(' ' + key.fingerprint().toString()); + console.log(' ' + key.fingerprint('md5').toString()); + process.exit(0); + } + }); +} diff --git a/node_modules/fsevents/node_modules/sshpk/bin/sshpk-sign b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-sign new file mode 100755 index 0000000..673fc98 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-sign @@ -0,0 +1,191 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. 
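+// sshpk-sign: signs stdin (or the file given with -f) using the private key
+// named by -i, and emits the signature in asn1, ssh or raw format (base64
+// encoded unless -b/--binary is passed).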
+ +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); +var getPassword = require('getpass').getPass; + +var options = [ + { + names: ['hash', 'H'], + type: 'string', + help: 'Hash algorithm (sha1, sha256, sha384, sha512)' + }, + { + names: ['verbose', 'v'], + type: 'bool', + help: 'Display verbose info about key and hash used' + }, + { + names: ['identity', 'i'], + type: 'string', + help: 'Path to key to use' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input filename' + }, + { + names: ['out', 'o'], + type: 'string', + help: 'Output filename' + }, + { + names: ['format', 't'], + type: 'string', + help: 'Signature format (asn1, ssh, raw)' + }, + { + names: ['binary', 'b'], + type: 'bool', + help: 'Output raw binary instead of base64' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +var parseOpts = {}; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-sign: error: %s', e.message); + process.exit(1); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-sign: sign data using an SSH key\n'); + console.error(help); + process.exit(1); + } + + if (!opts.identity) { + var help = parser.help({}).trimRight(); + console.error('sshpk-sign: the -i or --identity option ' + + 'is required\n'); + console.error(help); + process.exit(1); + } + + var keyData = fs.readFileSync(opts.identity); + parseOpts.filename = opts.identity; + + run(); +} + +function run() { + var key; + try { + key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts); + } catch (e) { + if (e.name === 'KeyEncryptedError') { + getPassword(function (err, pw) { + parseOpts.passphrase = pw; + run(); + }); + return; + } + console.error('sshpk-sign: error loading private key "' + + opts.identity + '": ' + e.name + ': ' + e.message); + process.exit(1); + } + + var hash = opts.hash || key.defaultHashAlgorithm(); + + var signer; + try { + signer = key.createSign(hash); + } catch (e) { + console.error('sshpk-sign: error creating signer: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (opts.verbose) { + console.error('sshpk-sign: using %s-%s with a %d bit key', + key.type, hash, key.size); + } + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = path.basename(inFilePath); + + try { + if (inFilePath) { + fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-sign: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var outFile = process.stdout; + + try { + if (opts.out && !opts.identify) { + fs.accessSync(path.dirname(opts.out), fs.W_OK); + outFile = fs.createWriteStream(opts.out); + } + } catch (e) { + console.error('sshpk-sign: error opening output file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + inFile.pipe(signer); + inFile.on('end', function () { + var sig; + try { + sig = signer.sign(); + } catch (e) { + console.error('sshpk-sign: error signing data: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + var fmt = opts.format || 'asn1'; + var output; + try { + output = sig.toBuffer(fmt); + 
if (!opts.binary) + output = output.toString('base64'); + } catch (e) { + console.error('sshpk-sign: error converting signature' + + ' to ' + fmt + ' format: ' + e.name + ': ' + + e.message); + process.exit(1); + } + + outFile.write(output); + if (!opts.binary) + outFile.write('\n'); + outFile.once('drain', function () { + process.exit(0); + }); + }); +} diff --git a/node_modules/fsevents/node_modules/sshpk/bin/sshpk-verify b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-verify new file mode 100755 index 0000000..a1669f4 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/bin/sshpk-verify @@ -0,0 +1,166 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. + +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); + +var options = [ + { + names: ['hash', 'H'], + type: 'string', + help: 'Hash algorithm (sha1, sha256, sha384, sha512)' + }, + { + names: ['verbose', 'v'], + type: 'bool', + help: 'Display verbose info about key and hash used' + }, + { + names: ['identity', 'i'], + type: 'string', + help: 'Path to (public) key to use' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input filename' + }, + { + names: ['format', 't'], + type: 'string', + help: 'Signature format (asn1, ssh, raw)' + }, + { + names: ['signature', 's'], + type: 'string', + help: 'base64-encoded signature data' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-verify: error: %s', e.message); + process.exit(3); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: sign data using an SSH key\n'); + console.error(help); + process.exit(3); + } + + if (!opts.identity) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: the -i or --identity option ' + + 'is required\n'); + console.error(help); + process.exit(3); + } + + if (!opts.signature) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: the -s or --signature option ' + + 'is required\n'); + console.error(help); + process.exit(3); + } + + var keyData = fs.readFileSync(opts.identity); + + var key; + try { + key = sshpk.parseKey(keyData); + } catch (e) { + console.error('sshpk-verify: error loading key "' + + opts.identity + '": ' + e.name + ': ' + e.message); + process.exit(2); + } + + var fmt = opts.format || 'asn1'; + var sigData = new Buffer(opts.signature, 'base64'); + + var sig; + try { + sig = sshpk.parseSignature(sigData, key.type, fmt); + } catch (e) { + console.error('sshpk-verify: error parsing signature: ' + + e.name + ': ' + e.message); + process.exit(2); + } + + var hash = opts.hash || key.defaultHashAlgorithm(); + + var verifier; + try { + verifier = key.createVerify(hash); + } catch (e) { + console.error('sshpk-verify: error creating verifier: ' + + e.name + ': ' + e.message); + process.exit(2); + } + + if (opts.verbose) { + console.error('sshpk-verify: using %s-%s with a %d bit key', + key.type, hash, key.size); + } + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = 
path.basename(inFilePath); + + try { + if (inFilePath) { + fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-verify: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(2); + } + + inFile.pipe(verifier); + inFile.on('end', function () { + var ret; + try { + ret = verifier.verify(sig); + } catch (e) { + console.error('sshpk-verify: error verifying data: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (ret) { + console.error('OK'); + process.exit(0); + } + + console.error('NOT OK'); + process.exit(1); + }); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/algs.js b/node_modules/fsevents/node_modules/sshpk/lib/algs.js new file mode 100644 index 0000000..f30af56 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/algs.js @@ -0,0 +1,168 @@ +// Copyright 2015 Joyent, Inc. + +var algInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y'], + sizePart: 'p' + }, + 'rsa': { + parts: ['e', 'n'], + sizePart: 'n' + }, + 'ecdsa': { + parts: ['curve', 'Q'], + sizePart: 'Q' + }, + 'ed25519': { + parts: ['R'], + normalize: false, + sizePart: 'R' + } +}; +algInfo['curve25519'] = algInfo['ed25519']; + +var algPrivInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y', 'x'] + }, + 'rsa': { + parts: ['n', 'e', 'd', 'iqmp', 'p', 'q'] + }, + 'ecdsa': { + parts: ['curve', 'Q', 'd'] + }, + 'ed25519': { + parts: ['R', 'r'], + normalize: false + } +}; +algPrivInfo['curve25519'] = algPrivInfo['ed25519']; + +var hashAlgs = { + 'md5': true, + 'sha1': true, + 'sha256': true, + 'sha384': true, + 'sha512': true +}; + +/* + * Taken from + * http://csrc.nist.gov/groups/ST/toolkit/documents/dss/NISTReCur.pdf + */ +var curves = { + 'nistp256': { + size: 256, + pkcs8oid: '1.2.840.10045.3.1.7', + p: new Buffer(('00' + + 'ffffffff 00000001 00000000 00000000' + + '00000000 ffffffff ffffffff ffffffff'). + replace(/ /g, ''), 'hex'), + a: new Buffer(('00' + + 'FFFFFFFF 00000001 00000000 00000000' + + '00000000 FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(( + '5ac635d8 aa3a93e7 b3ebbd55 769886bc' + + '651d06b0 cc53b0f6 3bce3c3e 27d2604b'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'c49d3608 86e70493 6a6678e1 139d26b7' + + '819f7e90'). + replace(/ /g, ''), 'hex'), + n: new Buffer(('00' + + 'ffffffff 00000000 ffffffff ffffffff' + + 'bce6faad a7179e84 f3b9cac2 fc632551'). + replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + '6b17d1f2 e12c4247 f8bce6e5 63a440f2' + + '77037d81 2deb33a0 f4a13945 d898c296' + + '4fe342e2 fe1a7f9b 8ee7eb4a 7c0f9e16' + + '2bce3357 6b315ece cbb64068 37bf51f5'). + replace(/ /g, ''), 'hex') + }, + 'nistp384': { + size: 384, + pkcs8oid: '1.3.132.0.34', + p: new Buffer(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffe' + + 'ffffffff 00000000 00000000 ffffffff'). + replace(/ /g, ''), 'hex'), + a: new Buffer(('00' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFE' + + 'FFFFFFFF 00000000 00000000 FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(( + 'b3312fa7 e23ee7e4 988e056b e3f82d19' + + '181d9c6e fe814112 0314088f 5013875a' + + 'c656398d 8a2ed19d 2a85c8ed d3ec2aef'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'a335926a a319a27a 1d00896a 6773a482' + + '7acdac73'). + replace(/ /g, ''), 'hex'), + n: new Buffer(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff c7634d81 f4372ddf' + + '581a0db2 48b0a77a ecec196a ccc52973'). 
+ replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + 'aa87ca22 be8b0537 8eb1c71e f320ad74' + + '6e1d3b62 8ba79b98 59f741e0 82542a38' + + '5502f25d bf55296c 3a545e38 72760ab7' + + '3617de4a 96262c6f 5d9e98bf 9292dc29' + + 'f8f41dbd 289a147c e9da3113 b5f0b8c0' + + '0a60b1ce 1d7e819d 7a431d7c 90ea0e5f'). + replace(/ /g, ''), 'hex') + }, + 'nistp521': { + size: 521, + pkcs8oid: '1.3.132.0.35', + p: new Buffer(( + '01ffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffff').replace(/ /g, ''), 'hex'), + a: new Buffer(('01FF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(('51' + + '953eb961 8e1c9a1f 929a21a0 b68540ee' + + 'a2da725b 99b315f3 b8b48991 8ef109e1' + + '56193951 ec7e937b 1652c0bd 3bb1bf07' + + '3573df88 3d2c34f1 ef451fd4 6b503f00'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'd09e8800 291cb853 96cc6717 393284aa' + + 'a0da64ba').replace(/ /g, ''), 'hex'), + n: new Buffer(('01ff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffa' + + '51868783 bf2f966b 7fcc0148 f709a5d0' + + '3bb5c9b8 899c47ae bb6fb71e 91386409'). + replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + '00c6 858e06b7 0404e9cd 9e3ecb66 2395b442' + + '9c648139 053fb521 f828af60 6b4d3dba' + + 'a14b5e77 efe75928 fe1dc127 a2ffa8de' + + '3348b3c1 856a429b f97e7e31 c2e5bd66' + + '0118 39296a78 9a3bc004 5c8a5fb4 2c7d1bd9' + + '98f54449 579b4468 17afbd17 273e662c' + + '97ee7299 5ef42640 c550b901 3fad0761' + + '353c7086 a272c240 88be9476 9fd16650'). + replace(/ /g, ''), 'hex') + } +}; + +module.exports = { + info: algInfo, + privInfo: algPrivInfo, + hashAlgs: hashAlgs, + curves: curves +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/dhe.js b/node_modules/fsevents/node_modules/sshpk/lib/dhe.js new file mode 100644 index 0000000..8f9548c --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/dhe.js @@ -0,0 +1,311 @@ +// Copyright 2015 Joyent, Inc. 
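+// Diffie-Hellman shared-secret computation over sshpk Key/PrivateKey objects:
+// classic DH for DSA keys, ECDH for ECDSA keys (with an ecc-jsbn fallback for
+// node 0.10), and curve25519 exchanges via jodid25519.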
+ +module.exports = DiffieHellman; + +var assert = require('assert-plus'); +var crypto = require('crypto'); +var algs = require('./algs'); +var utils = require('./utils'); +var ed; + +var Key = require('./key'); +var PrivateKey = require('./private-key'); + +var CRYPTO_HAVE_ECDH = (crypto.createECDH !== undefined); + +var ecdh, ec, jsbn; + +function DiffieHellman(key) { + utils.assertCompatible(key, Key, [1, 4], 'key'); + this._isPriv = PrivateKey.isPrivateKey(key, [1, 3]); + this._algo = key.type; + this._curve = key.curve; + this._key = key; + if (key.type === 'dsa') { + if (!CRYPTO_HAVE_ECDH) { + throw (new Error('Due to bugs in the node 0.10 ' + + 'crypto API, node 0.12.x or later is required ' + + 'to use DH')); + } + this._dh = crypto.createDiffieHellman( + key.part.p.data, undefined, + key.part.g.data, undefined); + this._p = key.part.p; + this._g = key.part.g; + if (this._isPriv) + this._dh.setPrivateKey(key.part.x.data); + this._dh.setPublicKey(key.part.y.data); + + } else if (key.type === 'ecdsa') { + if (!CRYPTO_HAVE_ECDH) { + if (ecdh === undefined) + ecdh = require('ecc-jsbn'); + if (ec === undefined) + ec = require('ecc-jsbn/lib/ec'); + if (jsbn === undefined) + jsbn = require('jsbn').BigInteger; + + this._ecParams = new X9ECParameters(this._curve); + + if (this._isPriv) { + this._priv = new ECPrivate( + this._ecParams, key.part.d.data); + } + return; + } + + var curve = { + 'nistp256': 'prime256v1', + 'nistp384': 'secp384r1', + 'nistp521': 'secp521r1' + }[key.curve]; + this._dh = crypto.createECDH(curve); + if (typeof (this._dh) !== 'object' || + typeof (this._dh.setPrivateKey) !== 'function') { + CRYPTO_HAVE_ECDH = false; + DiffieHellman.call(this, key); + return; + } + if (this._isPriv) + this._dh.setPrivateKey(key.part.d.data); + this._dh.setPublicKey(key.part.Q.data); + + } else if (key.type === 'curve25519') { + if (ed === undefined) + ed = require('jodid25519'); + + if (this._isPriv) { + this._priv = key.part.r.data; + if (this._priv[0] === 0x00) + this._priv = this._priv.slice(1); + this._priv = this._priv.slice(0, 32); + } + + } else { + throw (new Error('DH not supported for ' + key.type + ' keys')); + } +} + +DiffieHellman.prototype.getPublicKey = function () { + if (this._isPriv) + return (this._key.toPublic()); + return (this._key); +}; + +DiffieHellman.prototype.getPrivateKey = function () { + if (this._isPriv) + return (this._key); + else + return (undefined); +}; +DiffieHellman.prototype.getKey = DiffieHellman.prototype.getPrivateKey; + +DiffieHellman.prototype._keyCheck = function (pk, isPub) { + assert.object(pk, 'key'); + if (!isPub) + utils.assertCompatible(pk, PrivateKey, [1, 3], 'key'); + utils.assertCompatible(pk, Key, [1, 4], 'key'); + + if (pk.type !== this._algo) { + throw (new Error('A ' + pk.type + ' key cannot be used in ' + + this._algo + ' Diffie-Hellman')); + } + + if (pk.curve !== this._curve) { + throw (new Error('A key from the ' + pk.curve + ' curve ' + + 'cannot be used with a ' + this._curve + + ' Diffie-Hellman')); + } + + if (pk.type === 'dsa') { + assert.deepEqual(pk.part.p, this._p, + 'DSA key prime does not match'); + assert.deepEqual(pk.part.g, this._g, + 'DSA key generator does not match'); + } +}; + +DiffieHellman.prototype.setKey = function (pk) { + this._keyCheck(pk); + + if (pk.type === 'dsa') { + this._dh.setPrivateKey(pk.part.x.data); + this._dh.setPublicKey(pk.part.y.data); + + } else if (pk.type === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + this._dh.setPrivateKey(pk.part.d.data); + this._dh.setPublicKey(pk.part.Q.data); + } 
else { + this._priv = new ECPrivate( + this._ecParams, pk.part.d.data); + } + + } else if (pk.type === 'curve25519') { + this._priv = pk.part.r.data; + if (this._priv[0] === 0x00) + this._priv = this._priv.slice(1); + this._priv = this._priv.slice(0, 32); + } + this._key = pk; + this._isPriv = true; +}; +DiffieHellman.prototype.setPrivateKey = DiffieHellman.prototype.setKey; + +DiffieHellman.prototype.computeSecret = function (otherpk) { + this._keyCheck(otherpk, true); + if (!this._isPriv) + throw (new Error('DH exchange has not been initialized with ' + + 'a private key yet')); + + var pub; + if (this._algo === 'dsa') { + return (this._dh.computeSecret( + otherpk.part.y.data)); + + } else if (this._algo === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + return (this._dh.computeSecret( + otherpk.part.Q.data)); + } else { + pub = new ECPublic( + this._ecParams, otherpk.part.Q.data); + return (this._priv.deriveSharedSecret(pub)); + } + + } else if (this._algo === 'curve25519') { + pub = otherpk.part.R.data; + if (pub[0] === 0x00) + pub = pub.slice(1); + + var secret = ed.dh.computeKey( + this._priv.toString('binary'), + pub.toString('binary')); + + return (new Buffer(secret, 'binary')); + } + + throw (new Error('Invalid algorithm: ' + this._algo)); +}; + +DiffieHellman.prototype.generateKey = function () { + var parts = []; + var priv, pub; + if (this._algo === 'dsa') { + this._dh.generateKeys(); + + parts.push({name: 'p', data: this._p.data}); + parts.push({name: 'q', data: this._key.part.q.data}); + parts.push({name: 'g', data: this._g.data}); + parts.push({name: 'y', data: this._dh.getPublicKey()}); + parts.push({name: 'x', data: this._dh.getPrivateKey()}); + this._key = new PrivateKey({ + type: 'dsa', + parts: parts + }); + this._isPriv = true; + return (this._key); + + } else if (this._algo === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + this._dh.generateKeys(); + + parts.push({name: 'curve', + data: new Buffer(this._curve)}); + parts.push({name: 'Q', data: this._dh.getPublicKey()}); + parts.push({name: 'd', data: this._dh.getPrivateKey()}); + this._key = new PrivateKey({ + type: 'ecdsa', + curve: this._curve, + parts: parts + }); + this._isPriv = true; + return (this._key); + + } else { + var n = this._ecParams.getN(); + var r = new jsbn(crypto.randomBytes(n.bitLength())); + var n1 = n.subtract(jsbn.ONE); + priv = r.mod(n1).add(jsbn.ONE); + pub = this._ecParams.getG().multiply(priv); + + priv = new Buffer(priv.toByteArray()); + pub = new Buffer(this._ecParams.getCurve(). + encodePointHex(pub), 'hex'); + + this._priv = new ECPrivate(this._ecParams, priv); + + parts.push({name: 'curve', + data: new Buffer(this._curve)}); + parts.push({name: 'Q', data: pub}); + parts.push({name: 'd', data: priv}); + + this._key = new PrivateKey({ + type: 'ecdsa', + curve: this._curve, + parts: parts + }); + this._isPriv = true; + return (this._key); + } + + } else if (this._algo === 'curve25519') { + priv = ed.dh.generateKey(); + pub = ed.dh.publicKey(priv); + this._priv = priv = new Buffer(priv, 'binary'); + pub = new Buffer(pub, 'binary'); + + parts.push({name: 'R', data: pub}); + parts.push({name: 'r', data: Buffer.concat([priv, pub])}); + this._key = new PrivateKey({ + type: 'curve25519', + parts: parts + }); + this._isPriv = true; + return (this._key); + } + + throw (new Error('Invalid algorithm: ' + this._algo)); +}; +DiffieHellman.prototype.generateKeys = DiffieHellman.prototype.generateKey; + +/* These are helpers for using ecc-jsbn (for node 0.10 compatibility). 
*/ + +function X9ECParameters(name) { + var params = algs.curves[name]; + assert.object(params); + + var p = new jsbn(params.p); + var a = new jsbn(params.a); + var b = new jsbn(params.b); + var n = new jsbn(params.n); + var h = jsbn.ONE; + var curve = new ec.ECCurveFp(p, a, b); + var G = curve.decodePointHex(params.G.toString('hex')); + + this.curve = curve; + this.g = G; + this.n = n; + this.h = h; +} +X9ECParameters.prototype.getCurve = function () { return (this.curve); }; +X9ECParameters.prototype.getG = function () { return (this.g); }; +X9ECParameters.prototype.getN = function () { return (this.n); }; +X9ECParameters.prototype.getH = function () { return (this.h); }; + +function ECPublic(params, buffer) { + this._params = params; + if (buffer[0] === 0x00) + buffer = buffer.slice(1); + this._pub = params.getCurve().decodePointHex(buffer.toString('hex')); +} + +function ECPrivate(params, buffer) { + this._params = params; + this._priv = new jsbn(utils.mpNormalize(buffer)); +} +ECPrivate.prototype.deriveSharedSecret = function (pubKey) { + assert.ok(pubKey instanceof ECPublic); + var S = pubKey._pub.multiply(this._priv); + return (new Buffer(S.getX().toBigInteger().toByteArray())); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/ed-compat.js b/node_modules/fsevents/node_modules/sshpk/lib/ed-compat.js new file mode 100644 index 0000000..5365fb1 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/ed-compat.js @@ -0,0 +1,96 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + Verifier: Verifier, + Signer: Signer +}; + +var nacl; +var stream = require('stream'); +var util = require('util'); +var assert = require('assert-plus'); +var Signature = require('./signature'); + +function Verifier(key, hashAlgo) { + if (nacl === undefined) + nacl = require('tweetnacl'); + + if (hashAlgo.toLowerCase() !== 'sha512') + throw (new Error('ED25519 only supports the use of ' + + 'SHA-512 hashes')); + + this.key = key; + this.chunks = []; + + stream.Writable.call(this, {}); +} +util.inherits(Verifier, stream.Writable); + +Verifier.prototype._write = function (chunk, enc, cb) { + this.chunks.push(chunk); + cb(); +}; + +Verifier.prototype.update = function (chunk) { + if (typeof (chunk) === 'string') + chunk = new Buffer(chunk, 'binary'); + this.chunks.push(chunk); +}; + +Verifier.prototype.verify = function (signature, fmt) { + var sig; + if (Signature.isSignature(signature, [2, 0])) { + if (signature.type !== 'ed25519') + return (false); + sig = signature.toBuffer('raw'); + + } else if (typeof (signature) === 'string') { + sig = new Buffer(signature, 'base64'); + + } else if (Signature.isSignature(signature, [1, 0])) { + throw (new Error('signature was created by too old ' + + 'a version of sshpk and cannot be verified')); + } + + assert.buffer(sig); + return (nacl.sign.detached.verify( + new Uint8Array(Buffer.concat(this.chunks)), + new Uint8Array(sig), + new Uint8Array(this.key.part.R.data))); +}; + +function Signer(key, hashAlgo) { + if (nacl === undefined) + nacl = require('tweetnacl'); + + if (hashAlgo.toLowerCase() !== 'sha512') + throw (new Error('ED25519 only supports the use of ' + + 'SHA-512 hashes')); + + this.key = key; + this.chunks = []; + + stream.Writable.call(this, {}); +} +util.inherits(Signer, stream.Writable); + +Signer.prototype._write = function (chunk, enc, cb) { + this.chunks.push(chunk); + cb(); +}; + +Signer.prototype.update = function (chunk) { + if (typeof (chunk) === 'string') + chunk = new Buffer(chunk, 'binary'); + this.chunks.push(chunk); +}; 
+ +Signer.prototype.sign = function () { + var sig = nacl.sign.detached( + new Uint8Array(Buffer.concat(this.chunks)), + new Uint8Array(this.key.part.r.data)); + var sigBuf = new Buffer(sig); + var sigObj = Signature.parse(sigBuf, 'ed25519', 'raw'); + sigObj.hashAlgorithm = 'sha512'; + return (sigObj); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/errors.js b/node_modules/fsevents/node_modules/sshpk/lib/errors.js new file mode 100644 index 0000000..d984f1a --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/errors.js @@ -0,0 +1,71 @@ +// Copyright 2015 Joyent, Inc. + +var assert = require('assert-plus'); +var util = require('util'); + +function FingerprintFormatError(fp, format) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, FingerprintFormatError); + this.name = 'FingerprintFormatError'; + this.fingerprint = fp; + this.format = format; + this.message = 'Fingerprint format is not supported, or is invalid: '; + if (fp !== undefined) + this.message += ' fingerprint = ' + fp; + if (format !== undefined) + this.message += ' format = ' + format; +} +util.inherits(FingerprintFormatError, Error); + +function InvalidAlgorithmError(alg) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, InvalidAlgorithmError); + this.name = 'InvalidAlgorithmError'; + this.algorithm = alg; + this.message = 'Algorithm "' + alg + '" is not supported'; +} +util.inherits(InvalidAlgorithmError, Error); + +function KeyParseError(name, format, innerErr) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, KeyParseError); + this.name = 'KeyParseError'; + this.format = format; + this.keyName = name; + this.innerErr = innerErr; + this.message = 'Failed to parse ' + name + ' as a valid ' + format + + ' format key: ' + innerErr.message; +} +util.inherits(KeyParseError, Error); + +function SignatureParseError(type, format, innerErr) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, SignatureParseError); + this.name = 'SignatureParseError'; + this.type = type; + this.format = format; + this.innerErr = innerErr; + this.message = 'Failed to parse the given data as a ' + type + + ' signature in ' + format + ' format: ' + innerErr.message; +} +util.inherits(SignatureParseError, Error); + +function KeyEncryptedError(name, format) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, KeyEncryptedError); + this.name = 'KeyEncryptedError'; + this.format = format; + this.keyName = name; + this.message = 'The ' + format + ' format key ' + name + ' is ' + + 'encrypted (password-protected), and no passphrase was ' + + 'provided in `options`'; +} +util.inherits(KeyEncryptedError, Error); + +module.exports = { + FingerprintFormatError: FingerprintFormatError, + InvalidAlgorithmError: InvalidAlgorithmError, + KeyParseError: KeyParseError, + SignatureParseError: SignatureParseError, + KeyEncryptedError: KeyEncryptedError +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/fingerprint.js b/node_modules/fsevents/node_modules/sshpk/lib/fingerprint.js new file mode 100644 index 0000000..c607330 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/fingerprint.js @@ -0,0 +1,140 @@ +// Copyright 2015 Joyent, Inc. 
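+// Fingerprint objects: parse md5 (colon-hex) and SHA-* (base64) fingerprint
+// strings, format them back out, and match them against Key objects by
+// comparing double hashes of the fingerprint data.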
+ +module.exports = Fingerprint; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var errs = require('./errors'); +var Key = require('./key'); +var utils = require('./utils'); + +var FingerprintFormatError = errs.FingerprintFormatError; +var InvalidAlgorithmError = errs.InvalidAlgorithmError; + +function Fingerprint(opts) { + assert.object(opts, 'options'); + assert.buffer(opts.hash, 'options.hash'); + assert.string(opts.algorithm, 'options.algorithm'); + + this.algorithm = opts.algorithm.toLowerCase(); + if (algs.hashAlgs[this.algorithm] !== true) + throw (new InvalidAlgorithmError(this.algorithm)); + + this.hash = opts.hash; +} + +Fingerprint.prototype.toString = function (format) { + if (format === undefined) { + if (this.algorithm === 'md5') + format = 'hex'; + else + format = 'base64'; + } + assert.string(format); + + switch (format) { + case 'hex': + return (addColons(this.hash.toString('hex'))); + case 'base64': + return (sshBase64Format(this.algorithm, + this.hash.toString('base64'))); + default: + throw (new FingerprintFormatError(undefined, format)); + } +}; + +Fingerprint.prototype.matches = function (key) { + assert.object(key, 'key'); + utils.assertCompatible(key, Key, [1, 0], 'key'); + + var theirHash = key.hash(this.algorithm); + var theirHash2 = crypto.createHash(this.algorithm). + update(theirHash).digest('base64'); + + if (this.hash2 === undefined) + this.hash2 = crypto.createHash(this.algorithm). + update(this.hash).digest('base64'); + + return (this.hash2 === theirHash2); +}; + +Fingerprint.parse = function (fp, enAlgs) { + assert.string(fp, 'fingerprint'); + + var alg, hash; + assert.optionalArrayOfString(enAlgs, 'algorithms'); + + var parts = fp.split(':'); + if (parts.length == 2) { + alg = parts[0].toLowerCase(); + /*JSSTYLED*/ + var base64RE = /^[A-Za-z0-9+\/=]+$/; + if (!base64RE.test(parts[1])) + throw (new FingerprintFormatError(fp)); + try { + hash = new Buffer(parts[1], 'base64'); + } catch (e) { + throw (new FingerprintFormatError(fp)); + } + } else if (parts.length > 2) { + alg = 'md5'; + if (parts[0].toLowerCase() === 'md5') + parts = parts.slice(1); + parts = parts.join(''); + /*JSSTYLED*/ + var md5RE = /^[a-fA-F0-9]+$/; + if (!md5RE.test(parts)) + throw (new FingerprintFormatError(fp)); + try { + hash = new Buffer(parts, 'hex'); + } catch (e) { + throw (new FingerprintFormatError(fp)); + } + } + + if (alg === undefined) + throw (new FingerprintFormatError(fp)); + + if (algs.hashAlgs[alg] === undefined) + throw (new InvalidAlgorithmError(alg)); + + if (enAlgs !== undefined) { + enAlgs = enAlgs.map(function (a) { return a.toLowerCase(); }); + if (enAlgs.indexOf(alg) === -1) + throw (new InvalidAlgorithmError(alg)); + } + + return (new Fingerprint({algorithm: alg, hash: hash})); +}; + +function addColons(s) { + /*JSSTYLED*/ + return (s.replace(/(.{2})(?=.)/g, '$1:')); +} + +function base64Strip(s) { + /*JSSTYLED*/ + return (s.replace(/=*$/, '')); +} + +function sshBase64Format(alg, h) { + return (alg.toUpperCase() + ':' + base64Strip(h)); +} + +Fingerprint.isFingerprint = function (obj, ver) { + return (utils.isCompatible(obj, Fingerprint, ver)); +}; + +/* + * API versions for Fingerprint: + * [1,0] -- initial ver + * [1,1] -- first tagged ver + */ +Fingerprint.prototype._sshpkApiVersion = [1, 1]; + +Fingerprint._oldVersionDetect = function (obj) { + assert.func(obj.toString); + assert.func(obj.matches); + return ([1, 0]); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/auto.js 
b/node_modules/fsevents/node_modules/sshpk/lib/formats/auto.js new file mode 100644 index 0000000..973c032 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/auto.js @@ -0,0 +1,73 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var pem = require('./pem'); +var ssh = require('./ssh'); +var rfc4253 = require('./rfc4253'); + +function read(buf, options) { + if (typeof (buf) === 'string') { + if (buf.trim().match(/^[-]+[ ]*BEGIN/)) + return (pem.read(buf, options)); + if (buf.match(/^\s*ssh-[a-z]/)) + return (ssh.read(buf, options)); + if (buf.match(/^\s*ecdsa-/)) + return (ssh.read(buf, options)); + buf = new Buffer(buf, 'binary'); + } else { + assert.buffer(buf); + if (findPEMHeader(buf)) + return (pem.read(buf, options)); + if (findSSHHeader(buf)) + return (ssh.read(buf, options)); + } + if (buf.readUInt32BE(0) < buf.length) + return (rfc4253.read(buf, options)); + throw (new Error('Failed to auto-detect format of key')); +} + +function findSSHHeader(buf) { + var offset = 0; + while (offset < buf.length && + (buf[offset] === 32 || buf[offset] === 10 || buf[offset] === 9)) + ++offset; + if (offset + 4 <= buf.length && + buf.slice(offset, offset + 4).toString('ascii') === 'ssh-') + return (true); + if (offset + 6 <= buf.length && + buf.slice(offset, offset + 6).toString('ascii') === 'ecdsa-') + return (true); + return (false); +} + +function findPEMHeader(buf) { + var offset = 0; + while (offset < buf.length && + (buf[offset] === 32 || buf[offset] === 10)) + ++offset; + if (buf[offset] !== 45) + return (false); + while (offset < buf.length && + (buf[offset] === 45)) + ++offset; + while (offset < buf.length && + (buf[offset] === 32)) + ++offset; + if (offset + 5 > buf.length || + buf.slice(offset, offset + 5).toString('ascii') !== 'BEGIN') + return (false); + return (true); +} + +function write(key, options) { + throw (new Error('"auto" format cannot be used for writing')); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/pem.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/pem.js new file mode 100644 index 0000000..5318b35 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/pem.js @@ -0,0 +1,186 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var crypto = require('crypto'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var pkcs1 = require('./pkcs1'); +var pkcs8 = require('./pkcs8'); +var sshpriv = require('./ssh-private'); +var rfc4253 = require('./rfc4253'); + +var errors = require('../errors'); + +/* + * For reading we support both PKCS#1 and PKCS#8. If we find a private key, + * we just take the public component of it and use that. 
+ */ +function read(buf, options, forceType) { + var input = buf; + if (typeof (buf) !== 'string') { + assert.buffer(buf, 'buf'); + buf = buf.toString('ascii'); + } + + var lines = buf.trim().split('\n'); + + var m = lines[0].match(/*JSSTYLED*/ + /[-]+[ ]*BEGIN ([A-Z0-9]+ )?(PUBLIC|PRIVATE) KEY[ ]*[-]+/); + assert.ok(m, 'invalid PEM header'); + + var m2 = lines[lines.length - 1].match(/*JSSTYLED*/ + /[-]+[ ]*END ([A-Z0-9]+ )?(PUBLIC|PRIVATE) KEY[ ]*[-]+/); + assert.ok(m2, 'invalid PEM footer'); + + /* Begin and end banners must match key type */ + assert.equal(m[2], m2[2]); + var type = m[2].toLowerCase(); + + var alg; + if (m[1]) { + /* They also must match algorithms, if given */ + assert.equal(m[1], m2[1], 'PEM header and footer mismatch'); + alg = m[1].trim(); + } + + var headers = {}; + while (true) { + lines = lines.slice(1); + m = lines[0].match(/*JSSTYLED*/ + /^([A-Za-z0-9-]+): (.+)$/); + if (!m) + break; + headers[m[1].toLowerCase()] = m[2]; + } + + var cipher, key, iv; + if (headers['proc-type']) { + var parts = headers['proc-type'].split(','); + if (parts[0] === '4' && parts[1] === 'ENCRYPTED') { + if (typeof (options.passphrase) === 'string') { + options.passphrase = new Buffer( + options.passphrase, 'utf-8'); + } + if (!Buffer.isBuffer(options.passphrase)) { + throw (new errors.KeyEncryptedError( + options.filename, 'PEM')); + } else { + parts = headers['dek-info'].split(','); + assert.ok(parts.length === 2); + cipher = parts[0].toLowerCase(); + iv = new Buffer(parts[1], 'hex'); + key = utils.opensslKeyDeriv(cipher, iv, + options.passphrase, 1).key; + } + } + } + + /* Chop off the first and last lines */ + lines = lines.slice(0, -1).join(''); + buf = new Buffer(lines, 'base64'); + + if (cipher && key && iv) { + var cipherStream = crypto.createDecipheriv(cipher, key, iv); + var chunk, chunks = []; + cipherStream.once('error', function (e) { + if (e.toString().indexOf('bad decrypt') !== -1) { + throw (new Error('Incorrect passphrase ' + + 'supplied, could not decrypt key')); + } + throw (e); + }); + cipherStream.write(buf); + cipherStream.end(); + while ((chunk = cipherStream.read()) !== null) + chunks.push(chunk); + buf = Buffer.concat(chunks); + } + + /* The new OpenSSH internal format abuses PEM headers */ + if (alg && alg.toLowerCase() === 'openssh') + return (sshpriv.readSSHPrivate(type, buf)); + if (alg && alg.toLowerCase() === 'ssh2') + return (rfc4253.readType(type, buf)); + + var der = new asn1.BerReader(buf); + der.originalInput = input; + + /* + * All of the PEM file types start with a sequence tag, so chop it + * off here + */ + der.readSequence(); + + /* PKCS#1 type keys name an algorithm in the banner explicitly */ + if (alg) { + if (forceType) + assert.strictEqual(forceType, 'pkcs1'); + return (pkcs1.readPkcs1(alg, type, der)); + } else { + if (forceType) + assert.strictEqual(forceType, 'pkcs8'); + return (pkcs8.readPkcs8(alg, type, der)); + } +} + +function write(key, options, type) { + assert.object(key); + + var alg = {'ecdsa': 'EC', 'rsa': 'RSA', 'dsa': 'DSA'}[key.type]; + var header; + + var der = new asn1.BerWriter(); + + if (PrivateKey.isPrivateKey(key)) { + if (type && type === 'pkcs8') { + header = 'PRIVATE KEY'; + pkcs8.writePkcs8(der, key); + } else { + if (type) + assert.strictEqual(type, 'pkcs1'); + header = alg + ' PRIVATE KEY'; + pkcs1.writePkcs1(der, key); + } + + } else if (Key.isKey(key)) { + if (type && type === 'pkcs1') { + header = alg + ' PUBLIC KEY'; + pkcs1.writePkcs1(der, key); + } else { + if (type) + assert.strictEqual(type, 'pkcs8'); + 
header = 'PUBLIC KEY'; + pkcs8.writePkcs8(der, key); + } + + } else { + throw (new Error('key is not a Key or PrivateKey')); + } + + var tmp = der.buffer.toString('base64'); + var len = tmp.length + (tmp.length / 64) + + 18 + 16 + header.length*2 + 10; + var buf = new Buffer(len); + var o = 0; + o += buf.write('-----BEGIN ' + header + '-----\n', o); + for (var i = 0; i < tmp.length; ) { + var limit = i + 64; + if (limit > tmp.length) + limit = tmp.length; + o += buf.write(tmp.slice(i, limit), o); + buf[o++] = 10; + i = limit; + } + o += buf.write('-----END ' + header + '-----\n', o); + + return (buf.slice(0, o)); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs1.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs1.js new file mode 100644 index 0000000..a5676af --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs1.js @@ -0,0 +1,320 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + readPkcs1: readPkcs1, + write: write, + writePkcs1: writePkcs1 +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); + +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); + +var pkcs8 = require('./pkcs8'); +var readECDSACurve = pkcs8.readECDSACurve; + +function read(buf, options) { + return (pem.read(buf, options, 'pkcs1')); +} + +function write(key, options) { + return (pem.write(key, options, 'pkcs1')); +} + +/* Helper to read in a single mpint */ +function readMPInt(der, nm) { + assert.strictEqual(der.peek(), asn1.Ber.Integer, + nm + ' is not an Integer'); + return (utils.mpNormalize(der.readString(asn1.Ber.Integer, true))); +} + +function readPkcs1(alg, type, der) { + switch (alg) { + case 'RSA': + if (type === 'public') + return (readPkcs1RSAPublic(der)); + else if (type === 'private') + return (readPkcs1RSAPrivate(der)); + throw (new Error('Unknown key type: ' + type)); + case 'DSA': + if (type === 'public') + return (readPkcs1DSAPublic(der)); + else if (type === 'private') + return (readPkcs1DSAPrivate(der)); + throw (new Error('Unknown key type: ' + type)); + case 'EC': + case 'ECDSA': + if (type === 'private') + return (readPkcs1ECDSAPrivate(der)); + else if (type === 'public') + return (readPkcs1ECDSAPublic(der)); + throw (new Error('Unknown key type: ' + type)); + default: + throw (new Error('Unknown key algo: ' + alg)); + } +} + +function readPkcs1RSAPublic(der) { + // modulus and exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'exponent'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'e', data: e }, + { name: 'n', data: n } + ] + }; + + return (new Key(key)); +} + +function readPkcs1RSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version[0], 0); + + // modulus then public exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'public exponent'); + var d = readMPInt(der, 'private exponent'); + var p = readMPInt(der, 'prime1'); + var q = readMPInt(der, 'prime2'); + var dmodp = readMPInt(der, 'exponent1'); + var dmodq = readMPInt(der, 'exponent2'); + var iqmp = readMPInt(der, 'iqmp'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'n', data: n }, + { name: 'e', data: e }, + { name: 'd', data: d }, + { name: 'iqmp', data: iqmp }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'dmodp', data: dmodp }, + { name: 'dmodq', data: dmodq } + ] + }; + + 
return (new PrivateKey(key)); +} + +function readPkcs1DSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version.readUInt8(0), 0); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + var y = readMPInt(der, 'y'); + var x = readMPInt(der, 'x'); + + // now, make the key + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y }, + { name: 'x', data: x } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs1DSAPublic(der) { + var y = readMPInt(der, 'y'); + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + var key = { + type: 'dsa', + parts: [ + { name: 'y', data: y }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g } + ] + }; + + return (new Key(key)); +} + +function readPkcs1ECDSAPublic(der) { + der.readSequence(); + + var oid = der.readOID(); + assert.strictEqual(oid, '1.2.840.10045.2.1', 'must be ecPublicKey'); + + var curveOid = der.readOID(); + + var curve; + var curves = Object.keys(algs.curves); + for (var j = 0; j < curves.length; ++j) { + var c = curves[j]; + var cd = algs.curves[c]; + if (cd.pkcs8oid === curveOid) { + curve = c; + break; + } + } + assert.string(curve, 'a known ECDSA named curve'); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curve) }, + { name: 'Q', data: Q } + ] + }; + + return (new Key(key)); +} + +function readPkcs1ECDSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version.readUInt8(0), 1); + + // private key + var d = der.readString(asn1.Ber.OctetString, true); + + der.readSequence(0xa0); + var curve = readECDSACurve(der); + assert.string(curve, 'a known elliptic curve'); + + der.readSequence(0xa1); + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curve) }, + { name: 'Q', data: Q }, + { name: 'd', data: d } + ] + }; + + return (new PrivateKey(key)); +} + +function writePkcs1(der, key) { + der.startSequence(); + + switch (key.type) { + case 'rsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1RSAPrivate(der, key); + else + writePkcs1RSAPublic(der, key); + break; + case 'dsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1DSAPrivate(der, key); + else + writePkcs1DSAPublic(der, key); + break; + case 'ecdsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1ECDSAPrivate(der, key); + else + writePkcs1ECDSAPublic(der, key); + break; + default: + throw (new Error('Unknown key algo: ' + key.type)); + } + + der.endSequence(); +} + +function writePkcs1RSAPublic(der, key) { + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); +} + +function writePkcs1RSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 0; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.writeBuffer(key.part.d.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + if (!key.part.dmodp || !key.part.dmodq) + utils.addRSAMissing(key); + der.writeBuffer(key.part.dmodp.data, asn1.Ber.Integer); + der.writeBuffer(key.part.dmodq.data, asn1.Ber.Integer); + 
der.writeBuffer(key.part.iqmp.data, asn1.Ber.Integer); +} + +function writePkcs1DSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 0; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.writeBuffer(key.part.x.data, asn1.Ber.Integer); +} + +function writePkcs1DSAPublic(der, key) { + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); +} + +function writePkcs1ECDSAPublic(der, key) { + der.startSequence(); + + der.writeOID('1.2.840.10045.2.1'); /* ecPublicKey */ + var curve = key.part.curve.data.toString(); + var curveOid = algs.curves[curve].pkcs8oid; + assert.string(curveOid, 'a known ECDSA named curve'); + der.writeOID(curveOid); + + der.endSequence(); + + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); +} + +function writePkcs1ECDSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 1; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.d.data, asn1.Ber.OctetString); + + der.startSequence(0xa0); + var curve = key.part.curve.data.toString(); + var curveOid = algs.curves[curve].pkcs8oid; + assert.string(curveOid, 'a known ECDSA named curve'); + der.writeOID(curveOid); + der.endSequence(); + + der.startSequence(0xa1); + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); + der.endSequence(); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs8.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs8.js new file mode 100644 index 0000000..33fb7cc --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/pkcs8.js @@ -0,0 +1,510 @@ +// Copyright 2015 Joyent, Inc. 
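+// PKCS#8 PEM support: DER readers and writers for RSA, DSA and ECDSA keys,
+// handling both named-curve OIDs and explicit ECParameters curve encodings.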
+ +module.exports = { + read: read, + readPkcs8: readPkcs8, + write: write, + writePkcs8: writePkcs8, + + readECDSACurve: readECDSACurve, + writeECDSACurve: writeECDSACurve +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); + +function read(buf, options) { + return (pem.read(buf, options, 'pkcs8')); +} + +function write(key, options) { + return (pem.write(key, options, 'pkcs8')); +} + +/* Helper to read in a single mpint */ +function readMPInt(der, nm) { + assert.strictEqual(der.peek(), asn1.Ber.Integer, + nm + ' is not an Integer'); + return (utils.mpNormalize(der.readString(asn1.Ber.Integer, true))); +} + +function readPkcs8(alg, type, der) { + /* Private keys in pkcs#8 format have a weird extra int */ + if (der.peek() === asn1.Ber.Integer) { + assert.strictEqual(type, 'private', + 'unexpected Integer at start of public key'); + der.readString(asn1.Ber.Integer, true); + } + + der.readSequence(); + + var oid = der.readOID(); + switch (oid) { + case '1.2.840.113549.1.1.1': + if (type === 'public') + return (readPkcs8RSAPublic(der)); + else + return (readPkcs8RSAPrivate(der)); + case '1.2.840.10040.4.1': + if (type === 'public') + return (readPkcs8DSAPublic(der)); + else + return (readPkcs8DSAPrivate(der)); + case '1.2.840.10045.2.1': + if (type === 'public') + return (readPkcs8ECDSAPublic(der)); + else + return (readPkcs8ECDSAPrivate(der)); + default: + throw (new Error('Unknown key type OID ' + oid)); + } +} + +function readPkcs8RSAPublic(der) { + // Null -- XXX this probably isn't good practice + der.readByte(); + der.readByte(); + + // bit string sequence + der.readSequence(asn1.Ber.BitString); + der.readByte(); + der.readSequence(); + + // modulus + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'exponent'); + + // now, make the key + var key = { + type: 'rsa', + source: der.originalInput, + parts: [ + { name: 'e', data: e }, + { name: 'n', data: n } + ] + }; + + return (new Key(key)); +} + +function readPkcs8RSAPrivate(der) { + der.readByte(); + der.readByte(); + + der.readSequence(asn1.Ber.OctetString); + der.readSequence(); + + var ver = readMPInt(der, 'version'); + assert.equal(ver[0], 0x0, 'unknown RSA private key version'); + + // modulus then public exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'public exponent'); + var d = readMPInt(der, 'private exponent'); + var p = readMPInt(der, 'prime1'); + var q = readMPInt(der, 'prime2'); + var dmodp = readMPInt(der, 'exponent1'); + var dmodq = readMPInt(der, 'exponent2'); + var iqmp = readMPInt(der, 'iqmp'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'n', data: n }, + { name: 'e', data: e }, + { name: 'd', data: d }, + { name: 'iqmp', data: iqmp }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'dmodp', data: dmodp }, + { name: 'dmodq', data: dmodq } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs8DSAPublic(der) { + der.readSequence(); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + // bit string sequence + der.readSequence(asn1.Ber.BitString); + der.readByte(); + + var y = readMPInt(der, 'y'); + + // now, make the key + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y } + ] + }; + + return (new Key(key)); 
+} + +function readPkcs8DSAPrivate(der) { + der.readSequence(); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + der.readSequence(asn1.Ber.OctetString); + var x = readMPInt(der, 'x'); + + /* The pkcs#8 format does not include the public key */ + var y = utils.calculateDSAPublic(g, p, x); + + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y }, + { name: 'x', data: x } + ] + }; + + return (new PrivateKey(key)); +} + +function readECDSACurve(der) { + var curveName, curveNames; + var j, c, cd; + + if (der.peek() === asn1.Ber.OID) { + var oid = der.readOID(); + + curveNames = Object.keys(algs.curves); + for (j = 0; j < curveNames.length; ++j) { + c = curveNames[j]; + cd = algs.curves[c]; + if (cd.pkcs8oid === oid) { + curveName = c; + break; + } + } + + } else { + // ECParameters sequence + der.readSequence(); + var version = der.readString(asn1.Ber.Integer, true); + assert.strictEqual(version[0], 1, 'ECDSA key not version 1'); + + var curve = {}; + + // FieldID sequence + der.readSequence(); + var fieldTypeOid = der.readOID(); + assert.strictEqual(fieldTypeOid, '1.2.840.10045.1.1', + 'ECDSA key is not from a prime-field'); + var p = curve.p = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + /* + * p always starts with a 1 bit, so count the zeros to get its + * real size. + */ + curve.size = p.length * 8 - utils.countZeros(p); + + // Curve sequence + der.readSequence(); + curve.a = utils.mpNormalize( + der.readString(asn1.Ber.OctetString, true)); + curve.b = utils.mpNormalize( + der.readString(asn1.Ber.OctetString, true)); + if (der.peek() === asn1.Ber.BitString) + curve.s = der.readString(asn1.Ber.BitString, true); + + // Combined Gx and Gy + curve.G = der.readString(asn1.Ber.OctetString, true); + assert.strictEqual(curve.G[0], 0x4, + 'uncompressed G is required'); + + curve.n = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + curve.h = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + assert.strictEqual(curve.h[0], 0x1, 'a cofactor=1 curve is ' + + 'required'); + + curveNames = Object.keys(algs.curves); + var ks = Object.keys(curve); + for (j = 0; j < curveNames.length; ++j) { + c = curveNames[j]; + cd = algs.curves[c]; + var equal = true; + for (var i = 0; i < ks.length; ++i) { + var k = ks[i]; + if (cd[k] === undefined) + continue; + if (typeof (cd[k]) === 'object' && + cd[k].equals !== undefined) { + if (!cd[k].equals(curve[k])) { + equal = false; + break; + } + } else if (Buffer.isBuffer(cd[k])) { + if (cd[k].toString('binary') + !== curve[k].toString('binary')) { + equal = false; + break; + } + } else { + if (cd[k] !== curve[k]) { + equal = false; + break; + } + } + } + if (equal) { + curveName = c; + break; + } + } + } + return (curveName); +} + +function readPkcs8ECDSAPrivate(der) { + var curveName = readECDSACurve(der); + assert.string(curveName, 'a known elliptic curve'); + + der.readSequence(asn1.Ber.OctetString); + der.readSequence(); + + var version = readMPInt(der, 'version'); + assert.equal(version[0], 1, 'unknown version of ECDSA key'); + + var d = der.readString(asn1.Ber.OctetString, true); + der.readSequence(0xa1); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curveName) }, + { name: 'Q', data: Q }, + { name: 'd', data: d } + ] + }; + + return (new PrivateKey(key)); +} + +function 
readPkcs8ECDSAPublic(der) { + var curveName = readECDSACurve(der); + assert.string(curveName, 'a known elliptic curve'); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curveName) }, + { name: 'Q', data: Q } + ] + }; + + return (new Key(key)); +} + +function writePkcs8(der, key) { + der.startSequence(); + + if (PrivateKey.isPrivateKey(key)) { + var sillyInt = new Buffer(1); + sillyInt[0] = 0x0; + der.writeBuffer(sillyInt, asn1.Ber.Integer); + } + + der.startSequence(); + switch (key.type) { + case 'rsa': + der.writeOID('1.2.840.113549.1.1.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8RSAPrivate(key, der); + else + writePkcs8RSAPublic(key, der); + break; + case 'dsa': + der.writeOID('1.2.840.10040.4.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8DSAPrivate(key, der); + else + writePkcs8DSAPublic(key, der); + break; + case 'ecdsa': + der.writeOID('1.2.840.10045.2.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8ECDSAPrivate(key, der); + else + writePkcs8ECDSAPublic(key, der); + break; + default: + throw (new Error('Unsupported key type: ' + key.type)); + } + + der.endSequence(); +} + +function writePkcs8RSAPrivate(key, der) { + der.writeNull(); + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.startSequence(); + + var version = new Buffer(1); + version[0] = 0; + der.writeBuffer(version, asn1.Ber.Integer); + + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.writeBuffer(key.part.d.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + if (!key.part.dmodp || !key.part.dmodq) + utils.addRSAMissing(key); + der.writeBuffer(key.part.dmodp.data, asn1.Ber.Integer); + der.writeBuffer(key.part.dmodq.data, asn1.Ber.Integer); + der.writeBuffer(key.part.iqmp.data, asn1.Ber.Integer); + + der.endSequence(); + der.endSequence(); +} + +function writePkcs8RSAPublic(key, der) { + der.writeNull(); + der.endSequence(); + + der.startSequence(asn1.Ber.BitString); + der.writeByte(0x00); + + der.startSequence(); + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.endSequence(); + + der.endSequence(); +} + +function writePkcs8DSAPrivate(key, der) { + der.startSequence(); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.endSequence(); + + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.writeBuffer(key.part.x.data, asn1.Ber.Integer); + der.endSequence(); +} + +function writePkcs8DSAPublic(key, der) { + der.startSequence(); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.endSequence(); + der.endSequence(); + + der.startSequence(asn1.Ber.BitString); + der.writeByte(0x00); + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.endSequence(); +} + +function writeECDSACurve(key, der) { + var curve = algs.curves[key.curve]; + if (curve.pkcs8oid) { + /* This one has a name in pkcs#8, so just write the oid */ + der.writeOID(curve.pkcs8oid); + + } else { + // ECParameters sequence + der.startSequence(); + + var version = new Buffer(1); + version.writeUInt8(1, 0); + der.writeBuffer(version, asn1.Ber.Integer); + + // 
FieldID sequence + der.startSequence(); + der.writeOID('1.2.840.10045.1.1'); // prime-field + der.writeBuffer(curve.p, asn1.Ber.Integer); + der.endSequence(); + + // Curve sequence + der.startSequence(); + var a = curve.p; + if (a[0] === 0x0) + a = a.slice(1); + der.writeBuffer(a, asn1.Ber.OctetString); + der.writeBuffer(curve.b, asn1.Ber.OctetString); + der.writeBuffer(curve.s, asn1.Ber.BitString); + der.endSequence(); + + der.writeBuffer(curve.G, asn1.Ber.OctetString); + der.writeBuffer(curve.n, asn1.Ber.Integer); + var h = curve.h; + if (!h) { + h = new Buffer(1); + h[0] = 1; + } + der.writeBuffer(h, asn1.Ber.Integer); + + // ECParameters + der.endSequence(); + } +} + +function writePkcs8ECDSAPublic(key, der) { + writeECDSACurve(key, der); + der.endSequence(); + + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); +} + +function writePkcs8ECDSAPrivate(key, der) { + writeECDSACurve(key, der); + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.startSequence(); + + var version = new Buffer(1); + version[0] = 1; + der.writeBuffer(version, asn1.Ber.Integer); + + der.writeBuffer(key.part.d.data, asn1.Ber.OctetString); + + der.startSequence(0xa1); + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); + der.endSequence(); + + der.endSequence(); + der.endSequence(); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/rfc4253.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/rfc4253.js new file mode 100644 index 0000000..9d436dd --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/rfc4253.js @@ -0,0 +1,146 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read.bind(undefined, false, undefined), + readType: read.bind(undefined, false), + write: write, + /* semi-private api, used by sshpk-agent */ + readPartial: read.bind(undefined, true), + + /* shared with ssh format */ + readInternal: read, + keyTypeToAlg: keyTypeToAlg, + algToKeyType: algToKeyType +}; + +var assert = require('assert-plus'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var SSHBuffer = require('../ssh-buffer'); + +function algToKeyType(alg) { + assert.string(alg); + if (alg === 'ssh-dss') + return ('dsa'); + else if (alg === 'ssh-rsa') + return ('rsa'); + else if (alg === 'ssh-ed25519') + return ('ed25519'); + else if (alg === 'ssh-curve25519') + return ('curve25519'); + else if (alg.match(/^ecdsa-sha2-/)) + return ('ecdsa'); + else + throw (new Error('Unknown algorithm ' + alg)); +} + +function keyTypeToAlg(key) { + assert.object(key); + if (key.type === 'dsa') + return ('ssh-dss'); + else if (key.type === 'rsa') + return ('ssh-rsa'); + else if (key.type === 'ed25519') + return ('ssh-ed25519'); + else if (key.type === 'curve25519') + return ('ssh-curve25519'); + else if (key.type === 'ecdsa') + return ('ecdsa-sha2-' + key.part.curve.data.toString()); + else + throw (new Error('Unknown key type ' + key.type)); +} + +function read(partial, type, buf, options) { + if (typeof (buf) === 'string') + buf = new Buffer(buf); + assert.buffer(buf, 'buf'); + + var key = {}; + + var parts = key.parts = []; + var sshbuf = new SSHBuffer({buffer: buf}); + + var alg = sshbuf.readString(); + assert.ok(!sshbuf.atEnd(), 'key must have at least one part'); + + key.type = algToKeyType(alg); + + var partCount = algs.info[key.type].parts.length; + if (type && type === 'private') + partCount = 
algs.privInfo[key.type].parts.length; + + while (!sshbuf.atEnd() && parts.length < partCount) + parts.push(sshbuf.readPart()); + while (!partial && !sshbuf.atEnd()) + parts.push(sshbuf.readPart()); + + assert.ok(parts.length >= 1, + 'key must have at least one part'); + assert.ok(partial || sshbuf.atEnd(), + 'leftover bytes at end of key'); + + var Constructor = Key; + var algInfo = algs.info[key.type]; + if (type === 'private' || algInfo.parts.length !== parts.length) { + algInfo = algs.privInfo[key.type]; + Constructor = PrivateKey; + } + assert.strictEqual(algInfo.parts.length, parts.length); + + if (key.type === 'ecdsa') { + var res = /^ecdsa-sha2-(.+)$/.exec(alg); + assert.ok(res !== null); + assert.strictEqual(res[1], parts[0].data.toString()); + } + + var normalized = true; + for (var i = 0; i < algInfo.parts.length; ++i) { + parts[i].name = algInfo.parts[i]; + if (parts[i].name !== 'curve' && + algInfo.normalize !== false) { + var p = parts[i]; + var nd = utils.mpNormalize(p.data); + if (nd !== p.data) { + p.data = nd; + normalized = false; + } + } + } + + if (normalized) + key._rfc4253Cache = sshbuf.toBuffer(); + + if (partial && typeof (partial) === 'object') { + partial.remainder = sshbuf.remainder(); + partial.consumed = sshbuf._offset; + } + + return (new Constructor(key)); +} + +function write(key, options) { + assert.object(key); + + var alg = keyTypeToAlg(key); + var i; + + var algInfo = algs.info[key.type]; + if (PrivateKey.isPrivateKey(key)) + algInfo = algs.privInfo[key.type]; + var parts = algInfo.parts; + + var buf = new SSHBuffer({}); + + buf.writeString(alg); + + for (i = 0; i < parts.length; ++i) { + var data = key.part[parts[i]].data; + if (algInfo.normalize !== false) + data = utils.mpNormalize(data); + buf.writeBuffer(data); + } + + return (buf.toBuffer()); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh-private.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh-private.js new file mode 100644 index 0000000..bfbdab5 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh-private.js @@ -0,0 +1,138 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + readSSHPrivate: readSSHPrivate, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var crypto = require('crypto'); + +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); +var rfc4253 = require('./rfc4253'); +var SSHBuffer = require('../ssh-buffer'); + +function read(buf, options) { + return (pem.read(buf, options)); +} + +var MAGIC = 'openssh-key-v1'; + +function readSSHPrivate(type, buf) { + buf = new SSHBuffer({buffer: buf}); + + var magic = buf.readCString(); + assert.strictEqual(magic, MAGIC, 'bad magic string'); + + var cipher = buf.readString(); + var kdf = buf.readString(); + + /* We only support unencrypted keys. */ + if (cipher !== 'none' || kdf !== 'none') { + throw (new Error('OpenSSH-format key is encrypted ' + + '(password-protected). Please use the SSH agent ' + + 'or decrypt the key.')); + } + + /* Skip over kdfoptions. 
*/ + buf.readString(); + + var nkeys = buf.readInt(); + if (nkeys !== 1) { + throw (new Error('OpenSSH-format key file contains ' + + 'multiple keys: this is unsupported.')); + } + + var pubKey = buf.readBuffer(); + + if (type === 'public') { + assert.ok(buf.atEnd(), 'excess bytes left after key'); + return (rfc4253.read(pubKey)); + } + + var privKeyBlob = buf.readBuffer(); + assert.ok(buf.atEnd(), 'excess bytes left after key'); + + buf = new SSHBuffer({buffer: privKeyBlob}); + + var checkInt1 = buf.readInt(); + var checkInt2 = buf.readInt(); + assert.strictEqual(checkInt1, checkInt2, 'checkints do not match'); + + var ret = {}; + var key = rfc4253.readInternal(ret, 'private', buf.remainder()); + + buf.skip(ret.consumed); + + var comment = buf.readString(); + key.comment = comment; + + return (key); +} + +function write(key, options) { + var pubKey; + if (PrivateKey.isPrivateKey(key)) + pubKey = key.toPublic(); + else + pubKey = key; + + var privBuf; + if (PrivateKey.isPrivateKey(key)) { + privBuf = new SSHBuffer({}); + var checkInt = crypto.randomBytes(4).readUInt32BE(0); + privBuf.writeInt(checkInt); + privBuf.writeInt(checkInt); + privBuf.write(key.toBuffer('rfc4253')); + privBuf.writeString(key.comment || ''); + + var n = 1; + while (privBuf._offset % 8 !== 0) + privBuf.writeChar(n++); + } + + var buf = new SSHBuffer({}); + + buf.writeCString(MAGIC); + buf.writeString('none'); /* cipher */ + buf.writeString('none'); /* kdf */ + buf.writeBuffer(new Buffer(0)); /* kdfoptions */ + + buf.writeInt(1); /* nkeys */ + buf.writeBuffer(pubKey.toBuffer('rfc4253')); + + if (privBuf) + buf.writeBuffer(privBuf.toBuffer()); + + buf = buf.toBuffer(); + + var header; + if (PrivateKey.isPrivateKey(key)) + header = 'OPENSSH PRIVATE KEY'; + else + header = 'OPENSSH PUBLIC KEY'; + + var tmp = buf.toString('base64'); + var len = tmp.length + (tmp.length / 70) + + 18 + 16 + header.length*2 + 10; + buf = new Buffer(len); + var o = 0; + o += buf.write('-----BEGIN ' + header + '-----\n', o); + for (var i = 0; i < tmp.length; ) { + var limit = i + 70; + if (limit > tmp.length) + limit = tmp.length; + o += buf.write(tmp.slice(i, limit), o); + buf[o++] = 10; + i = limit; + } + o += buf.write('-----END ' + header + '-----\n', o); + + return (buf.slice(0, o)); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh.js b/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh.js new file mode 100644 index 0000000..655c9ea --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/formats/ssh.js @@ -0,0 +1,114 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var rfc4253 = require('./rfc4253'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var sshpriv = require('./ssh-private'); + +/*JSSTYLED*/ +var SSHKEY_RE = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/]+[=]*)([\n \t]+([^\n]+))?$/; +/*JSSTYLED*/ +var SSHKEY_RE2 = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/ \t\n]+[=]*)(.*)$/; + +function read(buf, options) { + if (typeof (buf) !== 'string') { + assert.buffer(buf, 'buf'); + buf = buf.toString('ascii'); + } + + var trimmed = buf.trim().replace(/[\\\r]/g, ''); + var m = trimmed.match(SSHKEY_RE); + if (!m) + m = trimmed.match(SSHKEY_RE2); + assert.ok(m, 'key must match regex'); + + var type = rfc4253.algToKeyType(m[1]); + var kbuf = new Buffer(m[2], 'base64'); + + /* + * This is a bit tricky. 
If we managed to parse the key and locate the + * key comment with the regex, then do a non-partial read and assert + * that we have consumed all bytes. If we couldn't locate the key + * comment, though, there may be whitespace shenanigans going on that + * have conjoined the comment to the rest of the key. We do a partial + * read in this case to try to make the best out of a sorry situation. + */ + var key; + var ret = {}; + if (m[4]) { + try { + key = rfc4253.read(kbuf); + + } catch (e) { + m = trimmed.match(SSHKEY_RE2); + assert.ok(m, 'key must match regex'); + kbuf = new Buffer(m[2], 'base64'); + key = rfc4253.readInternal(ret, 'public', kbuf); + } + } else { + key = rfc4253.readInternal(ret, 'public', kbuf); + } + + assert.strictEqual(type, key.type); + + if (m[4] && m[4].length > 0) { + key.comment = m[4]; + + } else if (ret.consumed) { + /* + * Now the magic: trying to recover the key comment when it's + * gotten conjoined to the key or otherwise shenanigan'd. + * + * Work out how much base64 we used, then drop all non-base64 + * chars from the beginning up to this point in the the string. + * Then offset in this and try to make up for missing = chars. + */ + var data = m[2] + m[3]; + var realOffset = Math.ceil(ret.consumed / 3) * 4; + data = data.slice(0, realOffset - 2). /*JSSTYLED*/ + replace(/[^a-zA-Z0-9+\/=]/g, '') + + data.slice(realOffset - 2); + + var padding = ret.consumed % 3; + if (padding > 0 && + data.slice(realOffset - 1, realOffset) !== '=') + realOffset--; + while (data.slice(realOffset, realOffset + 1) === '=') + realOffset++; + + /* Finally, grab what we think is the comment & clean it up. */ + var trailer = data.slice(realOffset); + trailer = trailer.replace(/[\r\n]/g, ' '). + replace(/^\s+/, ''); + if (trailer.match(/^[a-zA-Z0-9]/)) + key.comment = trailer; + } + + return (key); +} + +function write(key, options) { + assert.object(key); + if (!Key.isKey(key)) + throw (new Error('Must be a public key')); + + var parts = []; + var alg = rfc4253.keyTypeToAlg(key); + parts.push(alg); + + var buf = rfc4253.write(key); + parts.push(buf.toString('base64')); + + if (key.comment) + parts.push(key.comment); + + return (new Buffer(parts.join(' '))); +} diff --git a/node_modules/fsevents/node_modules/sshpk/lib/index.js b/node_modules/fsevents/node_modules/sshpk/lib/index.js new file mode 100644 index 0000000..0b40429 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/index.js @@ -0,0 +1,26 @@ +// Copyright 2015 Joyent, Inc. + +var Key = require('./key'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var PrivateKey = require('./private-key'); +var errs = require('./errors'); + +module.exports = { + /* top-level classes */ + Key: Key, + parseKey: Key.parse, + Fingerprint: Fingerprint, + parseFingerprint: Fingerprint.parse, + Signature: Signature, + parseSignature: Signature.parse, + PrivateKey: PrivateKey, + parsePrivateKey: PrivateKey.parse, + + /* errors */ + FingerprintFormatError: errs.FingerprintFormatError, + InvalidAlgorithmError: errs.InvalidAlgorithmError, + KeyParseError: errs.KeyParseError, + SignatureParseError: errs.SignatureParseError, + KeyEncryptedError: errs.KeyEncryptedError +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/key.js b/node_modules/fsevents/node_modules/sshpk/lib/key.js new file mode 100644 index 0000000..edc5143 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/key.js @@ -0,0 +1,269 @@ +// Copyright 2015 Joyent, Inc. 
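
The index.js added just above exposes the library's top-level API (parseKey, parsePrivateKey, parseFingerprint, parseSignature). As a rough usage sketch, not part of this patch: the file name below is a placeholder, and it assumes the vendored copy resolves via `require('sshpk')` in the consuming project.

```javascript
// Minimal sketch: parse an OpenSSH-format public key and inspect it.
// 'id_ecdsa.pub' is a hypothetical path; 'auto' also accepts pem/pkcs8/rfc4253 input.
var fs = require('fs');
var sshpk = require('sshpk');

var key = sshpk.parseKey(fs.readFileSync('id_ecdsa.pub'), 'ssh');
console.log(key.type, key.size, key.comment);       // e.g. 'ecdsa 256 user@host'
console.log(key.fingerprint('sha256').toString());  // 'SHA256:...'
console.log(key.toString('pkcs8'));                 // re-emit as a PEM public key
```
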
+ +module.exports = Key; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var DiffieHellman = require('./dhe'); +var errs = require('./errors'); +var utils = require('./utils'); +var PrivateKey = require('./private-key'); +var edCompat; + +try { + edCompat = require('./ed-compat'); +} catch (e) { + /* Just continue through, and bail out if we try to use it. */ +} + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; + +var formats = {}; +formats['auto'] = require('./formats/auto'); +formats['pem'] = require('./formats/pem'); +formats['pkcs1'] = require('./formats/pkcs1'); +formats['pkcs8'] = require('./formats/pkcs8'); +formats['rfc4253'] = require('./formats/rfc4253'); +formats['ssh'] = require('./formats/ssh'); +formats['ssh-private'] = require('./formats/ssh-private'); +formats['openssh'] = formats['ssh-private']; + +function Key(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.parts, 'options.parts'); + assert.string(opts.type, 'options.type'); + assert.optionalString(opts.comment, 'options.comment'); + + var algInfo = algs.info[opts.type]; + if (typeof (algInfo) !== 'object') + throw (new InvalidAlgorithmError(opts.type)); + + var partLookup = {}; + for (var i = 0; i < opts.parts.length; ++i) { + var part = opts.parts[i]; + partLookup[part.name] = part; + } + + this.type = opts.type; + this.parts = opts.parts; + this.part = partLookup; + this.comment = undefined; + this.source = opts.source; + + /* for speeding up hashing/fingerprint operations */ + this._rfc4253Cache = opts._rfc4253Cache; + this._hashCache = {}; + + var sz; + this.curve = undefined; + if (this.type === 'ecdsa') { + var curve = this.part.curve.data.toString(); + this.curve = curve; + sz = algs.curves[curve].size; + } else if (this.type === 'ed25519') { + sz = 256; + this.curve = 'curve25519'; + } else { + var szPart = this.part[algInfo.sizePart]; + sz = szPart.data.length; + sz = sz * 8 - utils.countZeros(szPart.data); + } + this.size = sz; +} + +Key.formats = formats; + +Key.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'ssh'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + if (format === 'rfc4253') { + if (this._rfc4253Cache === undefined) + this._rfc4253Cache = formats['rfc4253'].write(this); + return (this._rfc4253Cache); + } + + return (formats[format].write(this, options)); +}; + +Key.prototype.toString = function (format, options) { + return (this.toBuffer(format, options).toString()); +}; + +Key.prototype.hash = function (algo) { + assert.string(algo, 'algorithm'); + algo = algo.toLowerCase(); + if (algs.hashAlgs[algo] === undefined) + throw (new InvalidAlgorithmError(algo)); + + if (this._hashCache[algo]) + return (this._hashCache[algo]); + + var hash = crypto.createHash(algo). 
+ update(this.toBuffer('rfc4253')).digest(); + this._hashCache[algo] = hash; + return (hash); +}; + +Key.prototype.fingerprint = function (algo) { + if (algo === undefined) + algo = 'sha256'; + assert.string(algo, 'algorithm'); + var opts = { + hash: this.hash(algo), + algorithm: algo + }; + return (new Fingerprint(opts)); +}; + +Key.prototype.defaultHashAlgorithm = function () { + var hashAlgo = 'sha1'; + if (this.type === 'rsa') + hashAlgo = 'sha256'; + if (this.type === 'dsa' && this.size > 1024) + hashAlgo = 'sha256'; + if (this.type === 'ed25519') + hashAlgo = 'sha512'; + if (this.type === 'ecdsa') { + if (this.size <= 256) + hashAlgo = 'sha256'; + else if (this.size <= 384) + hashAlgo = 'sha384'; + else + hashAlgo = 'sha512'; + } + return (hashAlgo); +}; + +Key.prototype.createVerify = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. */ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Verifier(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldVerify = v.verify.bind(v); + var key = this.toBuffer('pkcs8'); + var self = this; + v.verify = function (signature, fmt) { + if (Signature.isSignature(signature, [2, 0])) { + if (signature.type !== self.type) + return (false); + if (signature.hashAlgorithm && + signature.hashAlgorithm !== hashAlgo) + return (false); + return (oldVerify(key, signature.toBuffer('asn1'))); + + } else if (typeof (signature) === 'string' || + Buffer.isBuffer(signature)) { + return (oldVerify(key, signature, fmt)); + + /* + * Avoid doing this on valid arguments, walking the prototype + * chain can be quite slow. 
+ */ + } else if (Signature.isSignature(signature, [1, 0])) { + throw (new Error('signature was created by too old ' + + 'a version of sshpk and cannot be verified')); + + } else { + throw (new TypeError('signature must be a string, ' + + 'Buffer, or Signature object')); + } + }; + return (v); +}; + +Key.prototype.createDiffieHellman = function () { + if (this.type === 'rsa') + throw (new Error('RSA keys do not support Diffie-Hellman')); + + return (new DiffieHellman(this)); +}; +Key.prototype.createDH = Key.prototype.createDiffieHellman; + +Key.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + if (k instanceof PrivateKey) + k = k.toPublic(); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +Key.isKey = function (obj, ver) { + return (utils.isCompatible(obj, Key, ver)); +}; + +/* + * API versions for Key: + * [1,0] -- initial ver, may take Signature for createVerify or may not + * [1,1] -- added pkcs1, pkcs8 formats + * [1,2] -- added auto, ssh-private, openssh formats + * [1,3] -- added defaultHashAlgorithm + * [1,4] -- added ed support, createDH + * [1,5] -- first explicitly tagged version + */ +Key.prototype._sshpkApiVersion = [1, 5]; + +Key._oldVersionDetect = function (obj) { + assert.func(obj.toBuffer); + assert.func(obj.fingerprint); + if (obj.createDH) + return ([1, 4]); + if (obj.defaultHashAlgorithm) + return ([1, 3]); + if (obj.formats['auto']) + return ([1, 2]); + if (obj.formats['pkcs1']) + return ([1, 1]); + return ([1, 0]); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/private-key.js b/node_modules/fsevents/node_modules/sshpk/lib/private-key.js new file mode 100644 index 0000000..f80d939 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/private-key.js @@ -0,0 +1,231 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = PrivateKey; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var errs = require('./errors'); +var util = require('util'); +var utils = require('./utils'); +var edCompat; +var ed; + +try { + edCompat = require('./ed-compat'); +} catch (e) { + /* Just continue through, and bail out if we try to use it. 
*/ +} + +var Key = require('./key'); + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; +var KeyEncryptedError = errs.KeyEncryptedError; + +var formats = {}; +formats['auto'] = require('./formats/auto'); +formats['pem'] = require('./formats/pem'); +formats['pkcs1'] = require('./formats/pkcs1'); +formats['pkcs8'] = require('./formats/pkcs8'); +formats['rfc4253'] = require('./formats/rfc4253'); +formats['ssh-private'] = require('./formats/ssh-private'); +formats['openssh'] = formats['ssh-private']; +formats['ssh'] = formats['ssh-private']; + +function PrivateKey(opts) { + assert.object(opts, 'options'); + Key.call(this, opts); + + this._pubCache = undefined; +} +util.inherits(PrivateKey, Key); + +PrivateKey.formats = formats; + +PrivateKey.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'pkcs1'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + return (formats[format].write(this, options)); +}; + +PrivateKey.prototype.hash = function (algo) { + return (this.toPublic().hash(algo)); +}; + +PrivateKey.prototype.toPublic = function () { + if (this._pubCache) + return (this._pubCache); + + var algInfo = algs.info[this.type]; + var pubParts = []; + for (var i = 0; i < algInfo.parts.length; ++i) { + var p = algInfo.parts[i]; + pubParts.push(this.part[p]); + } + + this._pubCache = new Key({ + type: this.type, + source: this, + parts: pubParts + }); + if (this.comment) + this._pubCache.comment = this.comment; + return (this._pubCache); +}; + +PrivateKey.prototype.derive = function (newType, newSize) { + assert.string(newType, 'type'); + assert.optionalNumber(newSize, 'size'); + var priv, pub; + + if (this.type === 'ed25519' && newType === 'curve25519') { + if (ed === undefined) + ed = require('jodid25519'); + + priv = this.part.r.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + priv = priv.slice(0, 32); + + pub = ed.dh.publicKey(priv); + priv = utils.mpNormalize(Buffer.concat([priv, pub])); + + return (new PrivateKey({ + type: 'curve25519', + parts: [ + { name: 'R', data: utils.mpNormalize(pub) }, + { name: 'r', data: priv } + ] + })); + } else if (this.type === 'curve25519' && newType === 'ed25519') { + if (ed === undefined) + ed = require('jodid25519'); + + priv = this.part.r.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + priv = priv.slice(0, 32); + + pub = ed.eddsa.publicKey(priv.toString('binary')); + pub = new Buffer(pub, 'binary'); + + priv = utils.mpNormalize(Buffer.concat([priv, pub])); + + return (new PrivateKey({ + type: 'ed25519', + parts: [ + { name: 'R', data: utils.mpNormalize(pub) }, + { name: 'r', data: priv } + ] + })); + } + throw (new Error('Key derivation not supported from ' + this.type + + ' to ' + newType)); +}; + +PrivateKey.prototype.createVerify = function (hashAlgo) { + return (this.toPublic().createVerify(hashAlgo)); +}; + +PrivateKey.prototype.createSign = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. 
*/ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Signer(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldSign = v.sign.bind(v); + var key = this.toBuffer('pkcs1'); + var type = this.type; + v.sign = function () { + var sig = oldSign(key); + if (typeof (sig) === 'string') + sig = new Buffer(sig, 'binary'); + sig = Signature.parse(sig, type, 'asn1'); + sig.hashAlgorithm = hashAlgo; + return (sig); + }; + return (v); +}; + +PrivateKey.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + assert.ok(k instanceof PrivateKey, 'key is not a private key'); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +PrivateKey.isPrivateKey = function (obj, ver) { + return (utils.isCompatible(obj, PrivateKey, ver)); +}; + +/* + * API versions for PrivateKey: + * [1,0] -- initial ver + * [1,1] -- added auto, pkcs[18], openssh/ssh-private formats + * [1,2] -- added defaultHashAlgorithm + * [1,3] -- added derive, ed, createDH + * [1,4] -- first tagged version + */ +PrivateKey.prototype._sshpkApiVersion = [1, 4]; + +PrivateKey._oldVersionDetect = function (obj) { + assert.func(obj.toPublic); + assert.func(obj.createSign); + if (obj.derive) + return ([1, 3]); + if (obj.defaultHashAlgorithm) + return ([1, 2]); + if (obj.formats['auto']) + return ([1, 1]); + return ([1, 0]); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/signature.js b/node_modules/fsevents/node_modules/sshpk/lib/signature.js new file mode 100644 index 0000000..ddf4a8c --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/signature.js @@ -0,0 +1,237 @@ +// Copyright 2015 Joyent, Inc. 
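
Before the Signature class below, here is a quick round-trip sketch showing how createSign and createVerify from the Key/PrivateKey modules above fit together. This is illustrative only: the key path is hypothetical, an unencrypted ECDSA or RSA private key is assumed, and the vendored module is assumed to resolve as `require('sshpk')`.

```javascript
// Sketch: sign some data with a private key, then verify with its public half.
var fs = require('fs');
var sshpk = require('sshpk');

var priv = sshpk.parsePrivateKey(fs.readFileSync('id_ecdsa'), 'auto');

var signer = priv.createSign('sha256');
signer.update('hello world');
var sig = signer.sign();                       // returns a Signature object

var verifier = priv.toPublic().createVerify('sha256');
verifier.update('hello world');
console.log(verifier.verify(sig));             // true
```
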
+ +module.exports = Signature; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var errs = require('./errors'); +var utils = require('./utils'); +var asn1 = require('asn1'); +var SSHBuffer = require('./ssh-buffer'); + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var SignatureParseError = errs.SignatureParseError; + +function Signature(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.parts, 'options.parts'); + assert.string(opts.type, 'options.type'); + + var partLookup = {}; + for (var i = 0; i < opts.parts.length; ++i) { + var part = opts.parts[i]; + partLookup[part.name] = part; + } + + this.type = opts.type; + this.hashAlgorithm = opts.hashAlgo; + this.parts = opts.parts; + this.part = partLookup; +} + +Signature.prototype.toBuffer = function (format) { + if (format === undefined) + format = 'asn1'; + assert.string(format, 'format'); + + var buf; + + switch (this.type) { + case 'rsa': + case 'ed25519': + if (format === 'ssh') { + buf = new SSHBuffer({}); + buf.writeString('ssh-' + this.type); + buf.writePart(this.part.sig); + return (buf.toBuffer()); + } else { + return (this.part.sig.data); + } + + case 'dsa': + case 'ecdsa': + var r, s; + if (format === 'asn1') { + var der = new asn1.BerWriter(); + der.startSequence(); + r = utils.mpNormalize(this.part.r.data); + s = utils.mpNormalize(this.part.s.data); + der.writeBuffer(r, asn1.Ber.Integer); + der.writeBuffer(s, asn1.Ber.Integer); + der.endSequence(); + return (der.buffer); + } else if (format === 'ssh' && this.type === 'dsa') { + buf = new SSHBuffer({}); + buf.writeString('ssh-dss'); + r = this.part.r.data; + if (r[0] === 0x00) + r = r.slice(1); + s = this.part.s.data; + buf.writeBuffer(Buffer.concat([r, s])); + return (buf.toBuffer()); + } else if (format === 'ssh' && this.type === 'ecdsa') { + var inner = new SSHBuffer({}); + r = this.part.r; + if (r[0] === 0x00) + r = r.slice(1); + inner.writePart(r); + inner.writePart(this.part.s); + + buf = new SSHBuffer({}); + /* XXX: find a more proper way to do this? 
*/ + var curve; + var sz = this.part.r.data.length * 8; + if (sz === 256) + curve = 'nistp256'; + else if (sz === 384) + curve = 'nistp384'; + else if (sz === 528) + curve = 'nistp521'; + buf.writeString('ecdsa-sha2-' + curve); + buf.writeBuffer(inner.toBuffer()); + return (buf.toBuffer()); + } + throw (new Error('Invalid signature format')); + default: + throw (new Error('Invalid signature data')); + } +}; + +Signature.prototype.toString = function (format) { + assert.optionalString(format, 'format'); + return (this.toBuffer(format).toString('base64')); +}; + +Signature.parse = function (data, type, format) { + if (typeof (data) === 'string') + data = new Buffer(data, 'base64'); + assert.buffer(data, 'data'); + assert.string(format, 'format'); + assert.string(type, 'type'); + + var opts = {}; + opts.type = type.toLowerCase(); + opts.parts = []; + + try { + assert.ok(data.length > 0, 'signature must not be empty'); + switch (opts.type) { + case 'rsa': + return (parseOneNum(data, type, format, opts, + 'ssh-rsa')); + case 'ed25519': + return (parseOneNum(data, type, format, opts, + 'ssh-ed25519')); + + case 'dsa': + case 'ecdsa': + if (format === 'asn1') + return (parseDSAasn1(data, type, format, opts)); + else if (opts.type === 'dsa') + return (parseDSA(data, type, format, opts)); + else + return (parseECDSA(data, type, format, opts)); + + default: + throw (new InvalidAlgorithmError(type)); + } + + } catch (e) { + if (e instanceof InvalidAlgorithmError) + throw (e); + throw (new SignatureParseError(type, format, e)); + } +}; + +function parseOneNum(data, type, format, opts, headType) { + if (format === 'ssh') { + try { + var buf = new SSHBuffer({buffer: data}); + var head = buf.readString(); + } catch (e) { + /* fall through */ + } + if (head === headType) { + var sig = buf.readPart(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + sig.name = 'sig'; + opts.parts.push(sig); + return (new Signature(opts)); + } + } + opts.parts.push({name: 'sig', data: data}); + return (new Signature(opts)); +} + +function parseDSAasn1(data, type, format, opts) { + var der = new asn1.BerReader(data); + der.readSequence(); + var r = der.readString(asn1.Ber.Integer, true); + var s = der.readString(asn1.Ber.Integer, true); + + opts.parts.push({name: 'r', data: utils.mpNormalize(r)}); + opts.parts.push({name: 's', data: utils.mpNormalize(s)}); + + return (new Signature(opts)); +} + +function parseDSA(data, type, format, opts) { + if (data.length != 40) { + var buf = new SSHBuffer({buffer: data}); + var d = buf.readBuffer(); + if (d.toString('ascii') === 'ssh-dss') + d = buf.readBuffer(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + assert.strictEqual(d.length, 40, 'invalid inner length'); + data = d; + } + opts.parts.push({name: 'r', data: data.slice(0, 20)}); + opts.parts.push({name: 's', data: data.slice(20, 40)}); + return (new Signature(opts)); +} + +function parseECDSA(data, type, format, opts) { + var buf = new SSHBuffer({buffer: data}); + + var r, s; + var inner = buf.readBuffer(); + if (inner.toString('ascii').match(/^ecdsa-/)) { + inner = buf.readBuffer(); + assert.ok(buf.atEnd(), 'extra trailing bytes on outer'); + buf = new SSHBuffer({buffer: inner}); + r = buf.readPart(); + } else { + r = {data: inner}; + } + + s = buf.readPart(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + + r.name = 'r'; + s.name = 's'; + + opts.parts.push(r); + opts.parts.push(s); + return (new Signature(opts)); +} + +Signature.isSignature = function (obj, ver) { + return (utils.isCompatible(obj, Signature, 
ver)); +}; + +/* + * API versions for Signature: + * [1,0] -- initial ver + * [2,0] -- support for rsa in full ssh format, compat with sshpk-agent + * hashAlgorithm property + * [2,1] -- first tagged version + */ +Signature.prototype._sshpkApiVersion = [2, 1]; + +Signature._oldVersionDetect = function (obj) { + assert.func(obj.toBuffer); + if (obj.hasOwnProperty('hashAlgorithm')) + return ([2, 0]); + return ([1, 0]); +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/ssh-buffer.js b/node_modules/fsevents/node_modules/sshpk/lib/ssh-buffer.js new file mode 100644 index 0000000..0b00277 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/ssh-buffer.js @@ -0,0 +1,124 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = SSHBuffer; + +var assert = require('assert-plus'); + +function SSHBuffer(opts) { + assert.object(opts, 'options'); + if (opts.buffer !== undefined) + assert.buffer(opts.buffer, 'options.buffer'); + + this._size = opts.buffer ? opts.buffer.length : 1024; + this._buffer = opts.buffer || (new Buffer(this._size)); + this._offset = 0; +} + +SSHBuffer.prototype.toBuffer = function () { + return (this._buffer.slice(0, this._offset)); +}; + +SSHBuffer.prototype.atEnd = function () { + return (this._offset >= this._buffer.length); +}; + +SSHBuffer.prototype.remainder = function () { + return (this._buffer.slice(this._offset)); +}; + +SSHBuffer.prototype.skip = function (n) { + this._offset += n; +}; + +SSHBuffer.prototype.expand = function () { + this._size *= 2; + var buf = new Buffer(this._size); + this._buffer.copy(buf, 0); + this._buffer = buf; +}; + +SSHBuffer.prototype.readPart = function () { + return ({data: this.readBuffer()}); +}; + +SSHBuffer.prototype.readBuffer = function () { + var len = this._buffer.readUInt32BE(this._offset); + this._offset += 4; + assert.ok(this._offset + len <= this._buffer.length, + 'length out of bounds at +0x' + this._offset.toString(16) + + ' (data truncated?)'); + var buf = this._buffer.slice(this._offset, this._offset + len); + this._offset += len; + return (buf); +}; + +SSHBuffer.prototype.readString = function () { + return (this.readBuffer().toString()); +}; + +SSHBuffer.prototype.readCString = function () { + var offset = this._offset; + while (offset < this._buffer.length && + this._buffer[offset] !== 0x00) + offset++; + assert.ok(offset < this._buffer.length, 'c string does not terminate'); + var str = this._buffer.slice(this._offset, offset).toString(); + this._offset = offset + 1; + return (str); +}; + +SSHBuffer.prototype.readInt = function () { + var v = this._buffer.readUInt32BE(this._offset); + this._offset += 4; + return (v); +}; + +SSHBuffer.prototype.readChar = function () { + var v = this._buffer[this._offset++]; + return (v); +}; + +SSHBuffer.prototype.writeBuffer = function (buf) { + while (this._offset + 4 + buf.length > this._size) + this.expand(); + this._buffer.writeUInt32BE(buf.length, this._offset); + this._offset += 4; + buf.copy(this._buffer, this._offset); + this._offset += buf.length; +}; + +SSHBuffer.prototype.writeString = function (str) { + this.writeBuffer(new Buffer(str, 'utf8')); +}; + +SSHBuffer.prototype.writeCString = function (str) { + while (this._offset + 1 + str.length > this._size) + this.expand(); + this._buffer.write(str, this._offset); + this._offset += str.length; + this._buffer[this._offset++] = 0; +}; + +SSHBuffer.prototype.writeInt = function (v) { + while (this._offset + 4 > this._size) + this.expand(); + this._buffer.writeUInt32BE(v, this._offset); + this._offset += 4; 
+}; + +SSHBuffer.prototype.writeChar = function (v) { + while (this._offset + 1 > this._size) + this.expand(); + this._buffer[this._offset++] = v; +}; + +SSHBuffer.prototype.writePart = function (p) { + this.writeBuffer(p.data); +}; + +SSHBuffer.prototype.write = function (buf) { + while (this._offset + buf.length > this._size) + this.expand(); + buf.copy(this._buffer, this._offset); + this._offset += buf.length; +}; diff --git a/node_modules/fsevents/node_modules/sshpk/lib/utils.js b/node_modules/fsevents/node_modules/sshpk/lib/utils.js new file mode 100644 index 0000000..d57245c --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/lib/utils.js @@ -0,0 +1,246 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + bufferSplit: bufferSplit, + addRSAMissing: addRSAMissing, + calculateDSAPublic: calculateDSAPublic, + mpNormalize: mpNormalize, + ecNormalize: ecNormalize, + countZeros: countZeros, + assertCompatible: assertCompatible, + isCompatible: isCompatible, + opensslKeyDeriv: opensslKeyDeriv +}; + +var assert = require('assert-plus'); +var PrivateKey = require('./private-key'); +var crypto = require('crypto'); + +var MAX_CLASS_DEPTH = 3; + +function isCompatible(obj, klass, needVer) { + if (obj === null || typeof (obj) !== 'object') + return (false); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return (true); + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + if (!proto || ++depth > MAX_CLASS_DEPTH) + return (false); + } + if (proto.constructor.name !== klass.name) + return (false); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + if (ver[0] != needVer[0] || ver[1] < needVer[1]) + return (false); + return (true); +} + +function assertCompatible(obj, klass, needVer, name) { + if (name === undefined) + name = 'object'; + assert.ok(obj, name + ' must not be null'); + assert.object(obj, name + ' must be an object'); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return; + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + assert.ok(proto && ++depth <= MAX_CLASS_DEPTH, + name + ' must be a ' + klass.name + ' instance'); + } + assert.strictEqual(proto.constructor.name, klass.name, + name + ' must be a ' + klass.name + ' instance'); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + assert.ok(ver[0] == needVer[0] && ver[1] >= needVer[1], + name + ' must be compatible with ' + klass.name + ' klass ' + + 'version ' + needVer[0] + '.' 
+ needVer[1]); +} + +var CIPHER_LEN = { + 'des-ede3-cbc': { key: 7, iv: 8 }, + 'aes-128-cbc': { key: 16, iv: 16 } +}; +var PKCS5_SALT_LEN = 8; + +function opensslKeyDeriv(cipher, salt, passphrase, count) { + assert.buffer(salt, 'salt'); + assert.buffer(passphrase, 'passphrase'); + assert.number(count, 'iteration count'); + + var clen = CIPHER_LEN[cipher]; + assert.object(clen, 'supported cipher'); + + salt = salt.slice(0, PKCS5_SALT_LEN); + + var D, D_prev, bufs; + var material = new Buffer(0); + while (material.length < clen.key + clen.iv) { + bufs = []; + if (D_prev) + bufs.push(D_prev); + bufs.push(passphrase); + bufs.push(salt); + D = Buffer.concat(bufs); + for (var j = 0; j < count; ++j) + D = crypto.createHash('md5').update(D).digest(); + material = Buffer.concat([material, D]); + D_prev = D; + } + + return ({ + key: material.slice(0, clen.key), + iv: material.slice(clen.key, clen.key + clen.iv) + }); +} + +/* Count leading zero bits on a buffer */ +function countZeros(buf) { + var o = 0, obit = 8; + while (o < buf.length) { + var mask = (1 << obit); + if ((buf[o] & mask) === mask) + break; + obit--; + if (obit < 0) { + o++; + obit = 8; + } + } + return (o*8 + (8 - obit) - 1); +} + +function bufferSplit(buf, chr) { + assert.buffer(buf); + assert.string(chr); + + var parts = []; + var lastPart = 0; + var matches = 0; + for (var i = 0; i < buf.length; ++i) { + if (buf[i] === chr.charCodeAt(matches)) + ++matches; + else if (buf[i] === chr.charCodeAt(0)) + matches = 1; + else + matches = 0; + + if (matches >= chr.length) { + var newPart = i + 1; + parts.push(buf.slice(lastPart, newPart - matches)); + lastPart = newPart; + matches = 0; + } + } + if (lastPart <= buf.length) + parts.push(buf.slice(lastPart, buf.length)); + + return (parts); +} + +function ecNormalize(buf, addZero) { + assert.buffer(buf); + if (buf[0] === 0x00 && buf[1] === 0x04) { + if (addZero) + return (buf); + return (buf.slice(1)); + } else if (buf[0] === 0x04) { + if (!addZero) + return (buf); + } else { + while (buf[0] === 0x00) + buf = buf.slice(1); + if (buf[0] === 0x02 || buf[0] === 0x03) + throw (new Error('Compressed elliptic curve points ' + + 'are not supported')); + if (buf[0] !== 0x04) + throw (new Error('Not a valid elliptic curve point')); + if (!addZero) + return (buf); + } + var b = new Buffer(buf.length + 1); + b[0] = 0x0; + buf.copy(b, 1); + return (b); +} + +function mpNormalize(buf) { + assert.buffer(buf); + while (buf.length > 1 && buf[0] === 0x00 && (buf[1] & 0x80) === 0x00) + buf = buf.slice(1); + if ((buf[0] & 0x80) === 0x80) { + var b = new Buffer(buf.length + 1); + b[0] = 0x00; + buf.copy(b, 1); + buf = b; + } + return (buf); +} + +function bigintToMpBuf(bigint) { + var buf = new Buffer(bigint.toByteArray()); + buf = mpNormalize(buf); + return (buf); +} + +function calculateDSAPublic(g, p, x) { + assert.buffer(g); + assert.buffer(p); + assert.buffer(x); + try { + var bigInt = require('jsbn').BigInteger; + } catch (e) { + throw (new Error('To load a PKCS#8 format DSA private key, ' + + 'the node jsbn library is required.')); + } + g = new bigInt(g); + p = new bigInt(p); + x = new bigInt(x); + var y = g.modPow(x, p); + var ybuf = bigintToMpBuf(y); + return (ybuf); +} + +function addRSAMissing(key) { + assert.object(key); + assertCompatible(key, PrivateKey, [1, 1]); + try { + var bigInt = require('jsbn').BigInteger; + } catch (e) { + throw (new Error('To write a PEM private key from ' + + 'this source, the node jsbn lib is required.')); + } + + var d = new bigInt(key.part.d.data); + var buf; + + if 
(!key.part.dmodp) { + var p = new bigInt(key.part.p.data); + var dmodp = d.mod(p.subtract(1)); + + buf = bigintToMpBuf(dmodp); + key.part.dmodp = {name: 'dmodp', data: buf}; + key.parts.push(key.part.dmodp); + } + if (!key.part.dmodq) { + var q = new bigInt(key.part.q.data); + var dmodq = d.mod(q.subtract(1)); + + buf = bigintToMpBuf(dmodq); + key.part.dmodq = {name: 'dmodq', data: buf}; + key.parts.push(key.part.dmodq); + } +} diff --git a/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-conv.1 b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-conv.1 new file mode 100644 index 0000000..0887dce --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-conv.1 @@ -0,0 +1,135 @@ +.TH sshpk\-conv 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-conv \- convert between key formats +.SH SYNOPSYS +.PP +\fB\fCsshpk\-conv\fR \-t FORMAT [FILENAME] [OPTIONS...] +.PP +\fB\fCsshpk\-conv\fR \-i [FILENAME] [OPTIONS...] +.SH DESCRIPTION +.PP +Reads in a public or private key and converts it between different formats, +particularly formats used in the SSH protocol and the well\-known PEM PKCS#1/7 +formats. +.PP +In the second form, with the \fB\fC\-i\fR option given, identifies a key and prints to +stderr information about its nature, size and fingerprint. +.SH EXAMPLES +.PP +Assume the following SSH\-format public key in \fB\fCid_ecdsa.pub\fR: +.PP +.RS +.nf +ecdsa\-sha2\-nistp256 AAAAE2VjZHNhLXNoYTI...9M/4c4= user@host +.fi +.RE +.PP +Identify it with \fB\fC\-i\fR: +.PP +.RS +.nf +$ sshpk\-conv \-i id_ecdsa.pub +id_ecdsa: a 256 bit ECDSA public key +ECDSA curve: nistp256 +Comment: user@host +Fingerprint: + SHA256:vCNX7eUkdvqqW0m4PoxQAZRv+CM4P4fS8+CbliAvS4k + 81:ad:d5:57:e5:6f:7d:a2:93:79:56:af:d7:c0:38:51 +.fi +.RE +.PP +Convert it to \fB\fCpkcs8\fR format, for use with e.g. OpenSSL: +.PP +.RS +.nf +$ sshpk\-conv \-t pkcs8 id_ecdsa +\-\-\-\-\-BEGIN PUBLIC KEY\-\-\-\-\- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAsA4R6N6AS3gzaPBeLjG2ObSgUsR +zOt+kWJoijLnw3ZMYUKmAx+lD0I5XUxdrPcs1vH5f3cn9TvRvO9L0z/hzg== +\-\-\-\-\-END PUBLIC KEY\-\-\-\-\- +.fi +.RE +.PP +Retrieve the public half of a private key: +.PP +.RS +.nf +$ openssl genrsa 2048 | sshpk\-conv \-t ssh \-c foo@bar +ssh\-rsa AAAAB3NzaC1yc2EAAA...koK7 foo@bar +.fi +.RE +.PP +Convert a private key to PKCS#1 (OpenSSL) format from a new\-style OpenSSH key +format (the \fB\fCssh\-keygen \-o\fR format): +.PP +.RS +.nf +$ ssh\-keygen \-o \-f foobar +\&... +$ sshpk\-conv \-p \-t pkcs1 foobar +\-\-\-\-\-BEGIN RSA PRIVATE KEY\-\-\-\-\- +MIIDpAIBAAKCAQEA6T/GYJndb1TRH3+NL.... +\-\-\-\-\-END RSA PRIVATE KEY\-\-\-\-\- +.fi +.RE +.SH OPTIONS +.TP +\fB\fC\-i, \-\-identify\fR +Instead of converting the key, output identifying information about it to +stderr, including its type, size and fingerprints. +.TP +\fB\fC\-p, \-\-private\fR +Treat the key as a private key instead of a public key (the default). If you +supply \fB\fCsshpk\-conv\fR with a private key and do not give this option, it will +extract only the public half of the key from it and work with that. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to take the key from instead of stdin. If a filename is supplied +as a positional argument, it is equivalent to using this option. +.TP +\fB\fC\-o PATH, \-\-out=PATH\fR +Output file name to use instead of stdout. +.PP +\fB\fC\-T FORMAT, \-\-informat=FORMAT\fR +.TP +\fB\fC\-t FORMAT, \-\-outformat=FORMAT\fR +Selects the input and output formats to be used (see FORMATS, below). 
+.TP +\fB\fC\-c TEXT, \-\-comment=TEXT\fR +Sets the key comment for the output file, if supported. +.SH FORMATS +.PP +Currently supported formats: +.TP +\fB\fCpem, pkcs1\fR +The standard PEM format used by older OpenSSH and most TLS libraries such as +OpenSSL. The classic \fB\fCid_rsa\fR file is usually in this format. It is an ASN.1 +encoded structure, base64\-encoded and placed between PEM headers. +.TP +\fB\fCssh\fR +The SSH public key text format (the format of an \fB\fCid_rsa.pub\fR file). A single +line, containing 3 space separated parts: the key type, key body and optional +key comment. +.TP +\fB\fCpkcs8\fR +A newer PEM format, usually used only for public keys by TLS libraries such +as OpenSSL. The ASN.1 structure is more generic than that of \fB\fCpkcs1\fR\&. +.TP +\fB\fCopenssh\fR +The new \fB\fCssh\-keygen \-o\fR format from OpenSSH. This can be mistaken for a PEM +encoding but is actually an OpenSSH internal format. +.TP +\fB\fCrfc4253\fR +The internal binary format of keys when sent over the wire in the SSH +protocol. This is also the format that the \fB\fCssh\-agent\fR uses in its protocol. +.SH SEE ALSO +.PP +.BR ssh-keygen (1), +.BR openssl (1) +.SH BUGS +.PP +Encrypted (password\-protected) keys are not supported. +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-sign.1 b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-sign.1 new file mode 100644 index 0000000..749916b --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-sign.1 @@ -0,0 +1,81 @@ +.TH sshpk\-sign 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-sign \- sign data using an SSH key +.SH SYNOPSYS +.PP +\fB\fCsshpk\-sign\fR \-i KEYPATH [OPTION...] +.SH DESCRIPTION +.PP +Takes in arbitrary bytes, and signs them using an SSH private key. The key can +be of any type or format supported by the \fB\fCsshpk\fR library, including the +standard OpenSSH formats, as well as PEM PKCS#1 and PKCS#8. +.PP +The signature is printed out in Base64 encoding, unless the \fB\fC\-\-binary\fR or \fB\fC\-b\fR +option is given. +.SH EXAMPLES +.PP +Signing with default settings: +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa +MEUCIAMdLS/vXrrtWFepwe... +.fi +.RE +.PP +Signing in SSH (RFC 4253) format (rather than the default ASN.1): +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa \-t ssh +AAAAFGVjZHNhLXNoYTIt... +.fi +.RE +.PP +Saving the binary signature to a file: +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa \\ + \-o signature.bin \-b +$ cat signature.bin | base64 +MEUCIAMdLS/vXrrtWFepwe... +.fi +.RE +.SH OPTIONS +.TP +\fB\fC\-v, \-\-verbose\fR +Print extra information about the key and signature to stderr when signing. +.TP +\fB\fC\-b, \-\-binary\fR +Don't base64\-encode the signature before outputting it. +.TP +\fB\fC\-i KEY, \-\-identity=KEY\fR +Select the key to be used for signing. \fB\fCKEY\fR must be a relative or absolute +filesystem path to the key file. Any format supported by the \fB\fCsshpk\fR library +is supported, including OpenSSH formats and standard PEM PKCS. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to sign instead of stdin. +.TP +\fB\fC\-o PATH, \-\-out=PATH\fR +Output file to save signature in instead of stdout. +.TP +\fB\fC\-H HASH, \-\-hash=HASH\fR +Set the hash algorithm to be used for signing. This should be one of \fB\fCsha1\fR, +\fB\fCsha256\fR or \fB\fCsha512\fR\&. 
Some key types may place restrictions on which hash +algorithms may be used (e.g. ED25519 keys can only use SHA\-512). +.TP +\fB\fC\-t FORMAT, \-\-format=FORMAT\fR +Choose the signature format to use, from \fB\fCasn1\fR, \fB\fCssh\fR or \fB\fCraw\fR (only for +ED25519 signatures). The \fB\fCasn1\fR format is the default, as it is the format +used with TLS and typically the standard in most non\-SSH libraries (e.g. +OpenSSL). The \fB\fCssh\fR format is used in the SSH protocol and by the ssh\-agent. +.SH SEE ALSO +.PP +.BR sshpk-verify (1) +.SH BUGS +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-verify.1 b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-verify.1 new file mode 100644 index 0000000..f79169d --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/man/man1/sshpk-verify.1 @@ -0,0 +1,68 @@ +.TH sshpk\-verify 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-verify \- verify a signature on data using an SSH key +.SH SYNOPSYS +.PP +\fB\fCsshpk\-verify\fR \-i KEYPATH \-s SIGNATURE [OPTION...] +.SH DESCRIPTION +.PP +Takes in arbitrary bytes and a Base64\-encoded signature, and verifies that the +signature was produced by the private half of the given SSH public key. +.SH EXAMPLES +.PP +.RS +.nf +$ printf 'foo' | sshpk\-verify \-i ~/.ssh/id_ecdsa \-s MEUCIQCYp... +OK +$ printf 'foo' | sshpk\-verify \-i ~/.ssh/id_ecdsa \-s GARBAGE... +NOT OK +.fi +.RE +.SH EXIT STATUS +.TP +\fB\fC0\fR +Signature validates and matches the key. +.TP +\fB\fC1\fR +Signature is parseable and the correct length but does not match the key or +otherwise is invalid. +.TP +\fB\fC2\fR +The signature or key could not be parsed. +.TP +\fB\fC3\fR +Invalid commandline options were supplied. +.SH OPTIONS +.TP +\fB\fC\-v, \-\-verbose\fR +Print extra information about the key and signature to stderr when verifying. +.TP +\fB\fC\-i KEY, \-\-identity=KEY\fR +Select the key to be used for verification. \fB\fCKEY\fR must be a relative or +absolute filesystem path to the key file. Any format supported by the \fB\fCsshpk\fR +library is supported, including OpenSSH formats and standard PEM PKCS. +.TP +\fB\fC\-s BASE64, \-\-signature=BASE64\fR +Supplies the base64\-encoded signature to be verified. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to verify instead of stdin. +.TP +\fB\fC\-H HASH, \-\-hash=HASH\fR +Set the hash algorithm to be used for signing. This should be one of \fB\fCsha1\fR, +\fB\fCsha256\fR or \fB\fCsha512\fR\&. Some key types may place restrictions on which hash +algorithms may be used (e.g. ED25519 keys can only use SHA\-512). +.TP +\fB\fC\-t FORMAT, \-\-format=FORMAT\fR +Choose the signature format to use, from \fB\fCasn1\fR, \fB\fCssh\fR or \fB\fCraw\fR (only for +ED25519 signatures). The \fB\fCasn1\fR format is the default, as it is the format +used with TLS and typically the standard in most non\-SSH libraries (e.g. +OpenSSL). The \fB\fCssh\fR format is used in the SSH protocol and by the ssh\-agent. 
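
For readers using the library directly rather than the CLI, a rough programmatic analogue of the sshpk-verify invocation documented above might look like the sketch below. File names are placeholders, the signature is taken as a base64 argument in the CLI's default asn1 encoding, and the hash falls back to defaultHashAlgorithm, so treat this as an approximation rather than the tool's actual implementation.

```javascript
// Rough analogue of `sshpk-verify -i KEY -s BASE64 -f FILE` (illustrative only).
var fs = require('fs');
var sshpk = require('sshpk');

var key = sshpk.parseKey(fs.readFileSync('id_ecdsa.pub'), 'auto');
var sig = sshpk.parseSignature(process.argv[2], key.type, 'asn1');

var v = key.createVerify();             // hash chosen by defaultHashAlgorithm()
v.update(fs.readFileSync('data.bin'));
process.exit(v.verify(sig) ? 0 : 1);    // loosely mirrors the CLI exit codes
```
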
+.SH SEE ALSO +.PP +.BR sshpk-sign (1) +.SH BUGS +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/AUTHORS b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/CHANGES.md b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..57d92bf --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,14 @@ +# assert-plus Changelog + +## 1.0.0 + +- *BREAKING* assert.number (and derivatives) now accept Infinity as valid input +- Add assert.finite check. Previous assert.number callers should use this if + they expect Infinity inputs to throw. + +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/README.md b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/README.md new file mode 100644 index 0000000..ec200d1 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/README.md @@ -0,0 +1,162 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. 
This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. + +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.finite +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regexp +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfFinite +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfRegexp +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalFinite +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalRegexp +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfFinite +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfRegexp +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . diff --git a/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/assert.js b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..26f944e --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/assert.js @@ -0,0 +1,211 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. 
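+// assert.js implements the assert-plus API described in the README above:
+// it re-exports node's core assert module, adds typed checks such as
+// assert.string(arg, name) and assert.uuid(arg, name), and generates the
+// optional*, arrayOf* and optionalArrayOf* variants for every type in the
+// `types` table below. When the NODE_NDEBUG environment variable is set,
+// each exported check is replaced with a no-op.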
+ +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? +} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if 
(ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/package.json b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/package.json new file mode 100644 index 0000000..7c9df98 --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/node_modules/assert-plus/package.json @@ -0,0 +1,107 @@ +{ + "_args": [ + [ + "assert-plus@^1.0.0", + "/Users/eshanker/Code/fsevents/node_modules/sshpk" + ] + ], + "_from": "assert-plus@>=1.0.0 <2.0.0", + "_id": "assert-plus@1.0.0", + "_inCache": true, + "_installable": true, + "_location": "/sshpk/assert-plus", + "_nodeVersion": "0.10.40", + "_npmUser": { + "email": "patrick.f.mooney@gmail.com", + "name": "pfmooney" + }, + "_npmVersion": "3.3.9", + "_phantomChildren": {}, + "_requested": { + "name": "assert-plus", + "raw": "assert-plus@^1.0.0", + "rawSpec": "^1.0.0", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "_shasum": "f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525", + "_shrinkwrap": null, + "_spec": "assert-plus@^1.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/sshpk", + "author": { + "email": "mcavage@gmail.com", + "name": "Mark Cavage" + }, + "bugs": { + "url": "https://github.com/mcavage/node-assert-plus/issues" + }, + "contributors": [ + { + "name": "Dave Eddy", + "email": "dave@daveeddy.com" + }, + { + "name": "Fred Kuo", + "email": "fred.kuo@joyent.com" + }, + { + "name": "Lars-Magnus Skog", + "email": "ralphtheninja@riseup.net" + }, + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "Patrick Mooney", + "email": "pmooney@pfmooney.com" + }, + { + "name": "Rob Gulewich", + "email": "robert.gulewich@joyent.com" + } + ], + "dependencies": {}, + "description": "Extra assertions on top of node's assert module", + "devDependencies": { + "faucet": "0.0.1", + "tape": "4.2.2" + }, + "directories": {}, + "dist": { + "shasum": "f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525", + "tarball": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" + }, + "engines": { + "node": ">=0.8" + }, + "homepage": "https://github.com/mcavage/node-assert-plus#readme", + "license": "MIT", + "main": "./assert.js", + "maintainers": [ + { + "name": "mcavage", + "email": "mcavage@gmail.com" + }, + { + "name": "pfmooney", + "email": "patrick.f.mooney@gmail.com" + } + ], + "name": "assert-plus", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/mcavage/node-assert-plus.git" + }, + "scripts": { + "test": "tape tests/*.js | ./node_modules/.bin/faucet" + }, + 
"version": "1.0.0" +} diff --git a/node_modules/fsevents/node_modules/sshpk/package.json b/node_modules/fsevents/node_modules/sshpk/package.json new file mode 100644 index 0000000..3f0f0aa --- /dev/null +++ b/node_modules/fsevents/node_modules/sshpk/package.json @@ -0,0 +1,125 @@ +{ + "_args": [ + [ + "sshpk@^1.7.0", + "/Users/eshanker/Code/fsevents/node_modules/http-signature" + ] + ], + "_from": "sshpk@>=1.7.0 <2.0.0", + "_id": "sshpk@1.8.3", + "_inCache": true, + "_installable": true, + "_location": "/sshpk", + "_nodeVersion": "0.12.13", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/sshpk-1.8.3.tgz_1461968607532_0.32797130732797086" + }, + "_npmUser": { + "email": "alex@cooperi.net", + "name": "arekinath" + }, + "_npmVersion": "2.15.1", + "_phantomChildren": {}, + "_requested": { + "name": "sshpk", + "raw": "sshpk@^1.7.0", + "rawSpec": "^1.7.0", + "scope": null, + "spec": ">=1.7.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/http-signature" + ], + "_resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.8.3.tgz", + "_shasum": "890cc9d614dc5292e5cb1a543b03c9abaa5c374e", + "_shrinkwrap": null, + "_spec": "sshpk@^1.7.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/http-signature", + "author": { + "name": "Joyent, Inc" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "bugs": { + "url": "https://github.com/arekinath/node-sshpk/issues" + }, + "contributors": [ + { + "name": "Dave Eddy", + "email": "dave@daveeddy.com" + }, + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "Alex Wilson", + "email": "alex@cooperi.net" + } + ], + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jodid25519": "^1.0.0", + "jsbn": "~0.1.0", + "tweetnacl": "~0.13.0" + }, + "description": "A library for finding and using SSH public keys", + "devDependencies": { + "benchmark": "^1.0.0", + "sinon": "^1.17.2", + "tape": "^3.5.0", + "temp": "^0.8.2" + }, + "directories": { + "bin": "./bin", + "lib": "./lib", + "man": "./man/man1" + }, + "dist": { + "shasum": "890cc9d614dc5292e5cb1a543b03c9abaa5c374e", + "tarball": "https://registry.npmjs.org/sshpk/-/sshpk-1.8.3.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "gitHead": "82d39066b2df4e8284350ff5ebb08c5b95c74652", + "homepage": "https://github.com/arekinath/node-sshpk#readme", + "license": "MIT", + "main": "lib/index.js", + "maintainers": [ + { + "name": "arekinath", + "email": "alex@cooperi.net" + } + ], + "man": [ + "/Users/alex.wilson/dev/sshpk/man/man1/sshpk-conv.1", + "/Users/alex.wilson/dev/sshpk/man/man1/sshpk-sign.1", + "/Users/alex.wilson/dev/sshpk/man/man1/sshpk-verify.1" + ], + "name": "sshpk", + "optionalDependencies": { + "ecc-jsbn": "~0.1.1", + "jodid25519": "^1.0.0", + "jsbn": "~0.1.0", + "tweetnacl": "~0.13.0" + }, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/arekinath/node-sshpk.git" + }, + "scripts": { + "test": "tape test/*.js" + }, + "version": "1.8.3" +} diff --git a/node_modules/fsevents/node_modules/string-width/index.js b/node_modules/fsevents/node_modules/string-width/index.js new file mode 100644 index 0000000..aa2f839 --- /dev/null +++ b/node_modules/fsevents/node_modules/string-width/index.js @@ -0,0 +1,32 @@ +'use strict'; +var stripAnsi = require('strip-ansi'); +var codePointAt = require('code-point-at'); +var 
isFullwidthCodePoint = require('is-fullwidth-code-point'); + +// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345 +module.exports = function (str) { + if (typeof str !== 'string' || str.length === 0) { + return 0; + } + + var width = 0; + + str = stripAnsi(str); + + for (var i = 0; i < str.length; i++) { + var code = codePointAt(str, i); + + // surrogates + if (code >= 0x10000) { + i++; + } + + if (isFullwidthCodePoint(code)) { + width += 2; + } else { + width++; + } + } + + return width; +}; diff --git a/node_modules/fsevents/node_modules/string-width/license b/node_modules/fsevents/node_modules/string-width/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/string-width/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
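A note on the string-width implementation added above: it strips ANSI escape sequences before measuring, steps over surrogate pairs so an astral-plane character is only inspected once, and counts fullwidth code points as two columns. A minimal sketch of the expected behaviour (not part of the patch, and assuming the is-fullwidth-code-point ranges this module depends on):

```js
// Minimal sketch, not part of the patch: expected output of the vendored string-width.
var stringWidth = require('string-width');

stringWidth('abc');                    // => 3, one column per ASCII character
stringWidth('\u001b[31m古\u001b[39m'); // => 2, ANSI colour codes are stripped, CJK is fullwidth
stringWidth('\uD840\uDC00');           // => 2, the surrogate pair is a single fullwidth character
```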
diff --git a/node_modules/fsevents/node_modules/string-width/package.json b/node_modules/fsevents/node_modules/string-width/package.json new file mode 100644 index 0000000..bddf6ac --- /dev/null +++ b/node_modules/fsevents/node_modules/string-width/package.json @@ -0,0 +1,111 @@ +{ + "_args": [ + [ + "string-width@^1.0.1", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "string-width@>=1.0.1 <2.0.0", + "_id": "string-width@1.0.1", + "_inCache": true, + "_installable": true, + "_location": "/string-width", + "_nodeVersion": "0.12.5", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.11.2", + "_phantomChildren": {}, + "_requested": { + "name": "string-width", + "raw": "string-width@^1.0.1", + "rawSpec": "^1.0.1", + "scope": null, + "spec": ">=1.0.1 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/gauge", + "/wide-align" + ], + "_resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.1.tgz", + "_shasum": "c92129b6f1d7f52acf9af424a26e3864a05ceb0a", + "_shrinkwrap": null, + "_spec": "string-width@^1.0.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/sindresorhus/string-width/issues" + }, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "description": "Get the visual width of a string - the number of columns required to display it", + "devDependencies": { + "ava": "0.0.4" + }, + "directories": {}, + "dist": { + "shasum": "c92129b6f1d7f52acf9af424a26e3864a05ceb0a", + "tarball": "https://registry.npmjs.org/string-width/-/string-width-1.0.1.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "f279cfd14835f0a3c8df69ba18e9a3960156e135", + "homepage": "https://github.com/sindresorhus/string-width", + "keywords": [ + "ansi", + "char", + "character", + "chinese", + "cjk", + "cli", + "codes", + "column", + "columns", + "command-line", + "console", + "escape", + "fixed-width", + "full", + "full-width", + "fullwidth", + "japanese", + "korean", + "str", + "string", + "terminal", + "unicode", + "visual", + "width" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "string-width", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/string-width.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": "1.0.1" +} diff --git a/node_modules/fsevents/node_modules/string-width/readme.md b/node_modules/fsevents/node_modules/string-width/readme.md new file mode 100644 index 0000000..a7737a9 --- /dev/null +++ b/node_modules/fsevents/node_modules/string-width/readme.md @@ -0,0 +1,41 @@ +# string-width [![Build Status](https://travis-ci.org/sindresorhus/string-width.svg?branch=master)](https://travis-ci.org/sindresorhus/string-width) + +> Get the visual width of a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. + +Useful to be able to measure the actual width of command-line output. 
+ + +## Install + +``` +$ npm install --save string-width +``` + + +## Usage + +```js +var stringWidth = require('string-width'); + +stringWidth('古'); +//=> 2 + +stringWidth('\u001b[1m古\u001b[22m'); +//=> 2 + +stringWidth('a'); +//=> 1 +``` + + +## Related + +- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module +- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/string_decoder/.npmignore b/node_modules/fsevents/node_modules/string_decoder/.npmignore new file mode 100644 index 0000000..206320c --- /dev/null +++ b/node_modules/fsevents/node_modules/string_decoder/.npmignore @@ -0,0 +1,2 @@ +build +test diff --git a/node_modules/fsevents/node_modules/string_decoder/LICENSE b/node_modules/fsevents/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..6de584a --- /dev/null +++ b/node_modules/fsevents/node_modules/string_decoder/LICENSE @@ -0,0 +1,20 @@ +Copyright Joyent, Inc. and other Node contributors. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/string_decoder/README.md b/node_modules/fsevents/node_modules/string_decoder/README.md new file mode 100644 index 0000000..4d2aa00 --- /dev/null +++ b/node_modules/fsevents/node_modules/string_decoder/README.md @@ -0,0 +1,7 @@ +**string_decoder.js** (`require('string_decoder')`) from Node.js core + +Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. + +Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** + +The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/string_decoder/index.js b/node_modules/fsevents/node_modules/string_decoder/index.js new file mode 100644 index 0000000..b00e54f --- /dev/null +++ b/node_modules/fsevents/node_modules/string_decoder/index.js @@ -0,0 +1,221 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var Buffer = require('buffer').Buffer; + +var isBufferEncoding = Buffer.isEncoding + || function(encoding) { + switch (encoding && encoding.toLowerCase()) { + case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; + default: return false; + } + } + + +function assertEncoding(encoding) { + if (encoding && !isBufferEncoding(encoding)) { + throw new Error('Unknown encoding: ' + encoding); + } +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. CESU-8 is handled as part of the UTF-8 encoding. +// +// @TODO Handling all encodings inside a single object makes it very difficult +// to reason about this code, so it should be split up in the future. +// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code +// points as used by CESU-8. +var StringDecoder = exports.StringDecoder = function(encoding) { + this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); + assertEncoding(encoding); + switch (this.encoding) { + case 'utf8': + // CESU-8 represents each of Surrogate Pair by 3-bytes + this.surrogateSize = 3; + break; + case 'ucs2': + case 'utf16le': + // UTF-16 represents each of Surrogate Pair by 2-bytes + this.surrogateSize = 2; + this.detectIncompleteChar = utf16DetectIncompleteChar; + break; + case 'base64': + // Base-64 stores 3 bytes in 4 chars, and pads the remainder. + this.surrogateSize = 3; + this.detectIncompleteChar = base64DetectIncompleteChar; + break; + default: + this.write = passThroughWrite; + return; + } + + // Enough space to store all bytes of a single character. UTF-8 needs 4 + // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). + this.charBuffer = new Buffer(6); + // Number of bytes received for the current incomplete multi-byte character. + this.charReceived = 0; + // Number of bytes expected for the current incomplete multi-byte character. + this.charLength = 0; +}; + + +// write decodes the given buffer and returns it as JS string that is +// guaranteed to not contain any partial multi-byte characters. Any partial +// character found at the end of the buffer is buffered up, and will be +// returned when calling write again with the remaining bytes. 
+// +// Note: Converting a Buffer containing an orphan surrogate to a String +// currently works, but converting a String to a Buffer (via `new Buffer`, or +// Buffer#write) will replace incomplete surrogates with the unicode +// replacement character. See https://codereview.chromium.org/121173009/ . +StringDecoder.prototype.write = function(buffer) { + var charStr = ''; + // if our last write ended with an incomplete multibyte character + while (this.charLength) { + // determine how many remaining bytes this buffer has to offer for this char + var available = (buffer.length >= this.charLength - this.charReceived) ? + this.charLength - this.charReceived : + buffer.length; + + // add the new bytes to the char buffer + buffer.copy(this.charBuffer, this.charReceived, 0, available); + this.charReceived += available; + + if (this.charReceived < this.charLength) { + // still not enough chars in this buffer? wait for more ... + return ''; + } + + // remove bytes belonging to the current character from the buffer + buffer = buffer.slice(available, buffer.length); + + // get the character that was split + charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); + + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + var charCode = charStr.charCodeAt(charStr.length - 1); + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + this.charLength += this.surrogateSize; + charStr = ''; + continue; + } + this.charReceived = this.charLength = 0; + + // if there are no more bytes in this buffer, just emit our char + if (buffer.length === 0) { + return charStr; + } + break; + } + + // determine and set charLength / charReceived + this.detectIncompleteChar(buffer); + + var end = buffer.length; + if (this.charLength) { + // buffer the incomplete character bytes we got + buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); + end -= this.charReceived; + } + + charStr += buffer.toString(this.encoding, 0, end); + + var end = charStr.length - 1; + var charCode = charStr.charCodeAt(end); + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + var size = this.surrogateSize; + this.charLength += size; + this.charReceived += size; + this.charBuffer.copy(this.charBuffer, size, 0, size); + buffer.copy(this.charBuffer, 0, 0, size); + return charStr.substring(0, end); + } + + // or just emit the charStr + return charStr; +}; + +// detectIncompleteChar determines if there is an incomplete UTF-8 character at +// the end of the given buffer. If so, it sets this.charLength to the byte +// length that character, and sets this.charReceived to the number of bytes +// that are available for this character. +StringDecoder.prototype.detectIncompleteChar = function(buffer) { + // determine how many bytes we have to check at the end of this buffer + var i = (buffer.length >= 3) ? 3 : buffer.length; + + // Figure out if one of the last i bytes of our buffer announces an + // incomplete char. 
+ for (; i > 0; i--) { + var c = buffer[buffer.length - i]; + + // See http://en.wikipedia.org/wiki/UTF-8#Description + + // 110XXXXX + if (i == 1 && c >> 5 == 0x06) { + this.charLength = 2; + break; + } + + // 1110XXXX + if (i <= 2 && c >> 4 == 0x0E) { + this.charLength = 3; + break; + } + + // 11110XXX + if (i <= 3 && c >> 3 == 0x1E) { + this.charLength = 4; + break; + } + } + this.charReceived = i; +}; + +StringDecoder.prototype.end = function(buffer) { + var res = ''; + if (buffer && buffer.length) + res = this.write(buffer); + + if (this.charReceived) { + var cr = this.charReceived; + var buf = this.charBuffer; + var enc = this.encoding; + res += buf.slice(0, cr).toString(enc); + } + + return res; +}; + +function passThroughWrite(buffer) { + return buffer.toString(this.encoding); +} + +function utf16DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 2; + this.charLength = this.charReceived ? 2 : 0; +} + +function base64DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 3; + this.charLength = this.charReceived ? 3 : 0; +} diff --git a/node_modules/fsevents/node_modules/string_decoder/package.json b/node_modules/fsevents/node_modules/string_decoder/package.json new file mode 100644 index 0000000..2a81a53 --- /dev/null +++ b/node_modules/fsevents/node_modules/string_decoder/package.json @@ -0,0 +1,80 @@ +{ + "_args": [ + [ + "string_decoder@~0.10.x", + "/Users/eshanker/Code/fsevents/node_modules/readable-stream" + ] + ], + "_from": "string_decoder@>=0.10.0 <0.11.0", + "_id": "string_decoder@0.10.31", + "_inCache": true, + "_installable": true, + "_location": "/string_decoder", + "_npmUser": { + "email": "rod@vagg.org", + "name": "rvagg" + }, + "_npmVersion": "1.4.23", + "_phantomChildren": {}, + "_requested": { + "name": "string_decoder", + "raw": "string_decoder@~0.10.x", + "rawSpec": "~0.10.x", + "scope": null, + "spec": ">=0.10.0 <0.11.0", + "type": "range" + }, + "_requiredBy": [ + "/bl/readable-stream", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", + "_shrinkwrap": null, + "_spec": "string_decoder@~0.10.x", + "_where": "/Users/eshanker/Code/fsevents/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/rvagg/string_decoder/issues" + }, + "dependencies": {}, + "description": "The string_decoder module from Node core", + "devDependencies": { + "tap": "~0.4.8" + }, + "directories": {}, + "dist": { + "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", + "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", + "homepage": "https://github.com/rvagg/string_decoder", + "keywords": [ + "browser", + "browserify", + "decoder", + "string" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "substack", + "email": "mail@substack.net" + }, + { + "name": "rvagg", + "email": "rod@vagg.org" + } + ], + "name": "string_decoder", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/rvagg/string_decoder.git" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "version": "0.10.31" +} diff --git a/node_modules/fsevents/node_modules/stringstream/.npmignore b/node_modules/fsevents/node_modules/stringstream/.npmignore new file mode 100644 index 0000000..7dccd97 --- /dev/null +++ 
b/node_modules/fsevents/node_modules/stringstream/.npmignore @@ -0,0 +1,15 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +node_modules +npm-debug.log \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/stringstream/.travis.yml b/node_modules/fsevents/node_modules/stringstream/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/fsevents/node_modules/stringstream/LICENSE.txt b/node_modules/fsevents/node_modules/stringstream/LICENSE.txt new file mode 100644 index 0000000..ab861ac --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/LICENSE.txt @@ -0,0 +1,22 @@ +Copyright (c) 2012 Michael Hart (michael.hart.au@gmail.com) + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/stringstream/README.md b/node_modules/fsevents/node_modules/stringstream/README.md new file mode 100644 index 0000000..32fc982 --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/README.md @@ -0,0 +1,38 @@ +# Decode streams into strings The Right Way(tm) + +```javascript +var fs = require('fs') +var zlib = require('zlib') +var strs = require('stringstream') + +var utf8Stream = fs.createReadStream('massiveLogFile.gz') + .pipe(zlib.createGunzip()) + .pipe(strs('utf8')) +``` + +No need to deal with `setEncoding()` weirdness, just compose streams +like they were supposed to be! + +Handles input and output encoding: + +```javascript +// Stream from utf8 to hex to base64... Why not, ay. 
+var hex64Stream = fs.createReadStream('myFile') + .pipe(strs('utf8', 'hex')) + .pipe(strs('hex', 'base64')) +``` + +Also deals with `base64` output correctly by aligning each emitted data +chunk so that there are no dangling `=` characters: + +```javascript +var stream = fs.createReadStream('myFile').pipe(strs('base64')) + +var base64Str = '' + +stream.on('data', function(data) { base64Str += data }) +stream.on('end', function() { + console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding() + console.log('Original file is: ' + new Buffer(base64Str, 'base64')) +}) +``` diff --git a/node_modules/fsevents/node_modules/stringstream/example.js b/node_modules/fsevents/node_modules/stringstream/example.js new file mode 100644 index 0000000..f82b85e --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/example.js @@ -0,0 +1,27 @@ +var fs = require('fs') +var zlib = require('zlib') +var strs = require('stringstream') + +var utf8Stream = fs.createReadStream('massiveLogFile.gz') + .pipe(zlib.createGunzip()) + .pipe(strs('utf8')) + +utf8Stream.pipe(process.stdout) + +// Stream from utf8 to hex to base64... Why not, ay. +var hex64Stream = fs.createReadStream('myFile') + .pipe(strs('utf8', 'hex')) + .pipe(strs('hex', 'base64')) + +hex64Stream.pipe(process.stdout) + +// Deals with base64 correctly by aligning chunks +var stream = fs.createReadStream('myFile').pipe(strs('base64')) + +var base64Str = '' + +stream.on('data', function(data) { base64Str += data }) +stream.on('end', function() { + console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding() + console.log('Original file is: ' + new Buffer(base64Str, 'base64')) +}) diff --git a/node_modules/fsevents/node_modules/stringstream/package.json b/node_modules/fsevents/node_modules/stringstream/package.json new file mode 100644 index 0000000..b91302f --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/package.json @@ -0,0 +1,77 @@ +{ + "_args": [ + [ + "stringstream@~0.0.4", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "stringstream@>=0.0.4 <0.1.0", + "_id": "stringstream@0.0.5", + "_inCache": true, + "_installable": true, + "_location": "/stringstream", + "_nodeVersion": "4.2.1", + "_npmUser": { + "email": "michael.hart.au@gmail.com", + "name": "hichaelmart" + }, + "_npmVersion": "2.14.8", + "_phantomChildren": {}, + "_requested": { + "name": "stringstream", + "raw": "stringstream@~0.0.4", + "rawSpec": "~0.0.4", + "scope": null, + "spec": ">=0.0.4 <0.1.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", + "_shasum": "4e484cd4de5a0bbbee18e46307710a8a81621878", + "_shrinkwrap": null, + "_spec": "stringstream@~0.0.4", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "michael.hart.au@gmail.com", + "name": "Michael Hart", + "url": "http://github.com/mhart" + }, + "bugs": { + "url": "https://github.com/mhart/StringStream/issues" + }, + "dependencies": {}, + "description": "Encode and decode streams into string streams", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "4e484cd4de5a0bbbee18e46307710a8a81621878", + "tarball": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz" + }, + "gitHead": "1efe3bf507bf3a1161f8473908b60e881d41422b", + "homepage": "https://github.com/mhart/StringStream#readme", + "keywords": [ + "base64", + "gzip", + "stream", + "string" + ], + 
"license": "MIT", + "main": "stringstream.js", + "maintainers": [ + { + "name": "hichaelmart", + "email": "michael.hart.au@gmail.com" + } + ], + "name": "stringstream", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/mhart/StringStream.git" + }, + "scripts": {}, + "version": "0.0.5" +} diff --git a/node_modules/fsevents/node_modules/stringstream/stringstream.js b/node_modules/fsevents/node_modules/stringstream/stringstream.js new file mode 100644 index 0000000..4ece127 --- /dev/null +++ b/node_modules/fsevents/node_modules/stringstream/stringstream.js @@ -0,0 +1,102 @@ +var util = require('util') +var Stream = require('stream') +var StringDecoder = require('string_decoder').StringDecoder + +module.exports = StringStream +module.exports.AlignedStringDecoder = AlignedStringDecoder + +function StringStream(from, to) { + if (!(this instanceof StringStream)) return new StringStream(from, to) + + Stream.call(this) + + if (from == null) from = 'utf8' + + this.readable = this.writable = true + this.paused = false + this.toEncoding = (to == null ? from : to) + this.fromEncoding = (to == null ? '' : from) + this.decoder = new AlignedStringDecoder(this.toEncoding) +} +util.inherits(StringStream, Stream) + +StringStream.prototype.write = function(data) { + if (!this.writable) { + var err = new Error('stream not writable') + err.code = 'EPIPE' + this.emit('error', err) + return false + } + if (this.fromEncoding) { + if (Buffer.isBuffer(data)) data = data.toString() + data = new Buffer(data, this.fromEncoding) + } + var string = this.decoder.write(data) + if (string.length) this.emit('data', string) + return !this.paused +} + +StringStream.prototype.flush = function() { + if (this.decoder.flush) { + var string = this.decoder.flush() + if (string.length) this.emit('data', string) + } +} + +StringStream.prototype.end = function() { + if (!this.writable && !this.readable) return + this.flush() + this.emit('end') + this.writable = this.readable = false + this.destroy() +} + +StringStream.prototype.destroy = function() { + this.decoder = null + this.writable = this.readable = false + this.emit('close') +} + +StringStream.prototype.pause = function() { + this.paused = true +} + +StringStream.prototype.resume = function () { + if (this.paused) this.emit('drain') + this.paused = false +} + +function AlignedStringDecoder(encoding) { + StringDecoder.call(this, encoding) + + switch (this.encoding) { + case 'base64': + this.write = alignedWrite + this.alignedBuffer = new Buffer(3) + this.alignedBytes = 0 + break + } +} +util.inherits(AlignedStringDecoder, StringDecoder) + +AlignedStringDecoder.prototype.flush = function() { + if (!this.alignedBuffer || !this.alignedBytes) return '' + var leftover = this.alignedBuffer.toString(this.encoding, 0, this.alignedBytes) + this.alignedBytes = 0 + return leftover +} + +function alignedWrite(buffer) { + var rem = (this.alignedBytes + buffer.length) % this.alignedBuffer.length + if (!rem && !this.alignedBytes) return buffer.toString(this.encoding) + + var returnBuffer = new Buffer(this.alignedBytes + buffer.length - rem) + + this.alignedBuffer.copy(returnBuffer, 0, 0, this.alignedBytes) + buffer.copy(returnBuffer, this.alignedBytes, 0, buffer.length - rem) + + buffer.copy(this.alignedBuffer, 0, buffer.length - rem, buffer.length) + this.alignedBytes = rem + + return returnBuffer.toString(this.encoding) +} diff --git a/node_modules/fsevents/node_modules/strip-ansi/index.js 
b/node_modules/fsevents/node_modules/strip-ansi/index.js new file mode 100644 index 0000000..099480f --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-ansi/index.js @@ -0,0 +1,6 @@ +'use strict'; +var ansiRegex = require('ansi-regex')(); + +module.exports = function (str) { + return typeof str === 'string' ? str.replace(ansiRegex, '') : str; +}; diff --git a/node_modules/fsevents/node_modules/strip-ansi/license b/node_modules/fsevents/node_modules/strip-ansi/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-ansi/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/strip-ansi/package.json b/node_modules/fsevents/node_modules/strip-ansi/package.json new file mode 100644 index 0000000..2fa50d4 --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-ansi/package.json @@ -0,0 +1,117 @@ +{ + "_args": [ + [ + "strip-ansi@^3.0.1", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "strip-ansi@>=3.0.1 <4.0.0", + "_id": "strip-ansi@3.0.1", + "_inCache": true, + "_installable": true, + "_location": "/strip-ansi", + "_nodeVersion": "0.12.7", + "_npmOperationalInternal": { + "host": "packages-9-west.internal.npmjs.com", + "tmp": "tmp/strip-ansi-3.0.1.tgz_1456057278183_0.28958667791448534" + }, + "_npmUser": { + "email": "jappelman@xebia.com", + "name": "jbnicolai" + }, + "_npmVersion": "2.11.3", + "_phantomChildren": {}, + "_requested": { + "name": "strip-ansi", + "raw": "strip-ansi@^3.0.1", + "rawSpec": "^3.0.1", + "scope": null, + "spec": ">=3.0.1 <4.0.0", + "type": "range" + }, + "_requiredBy": [ + "/chalk", + "/gauge", + "/string-width" + ], + "_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "_shasum": "6a385fb8853d952d5ff05d0e8aaf94278dc63dcf", + "_shrinkwrap": null, + "_spec": "strip-ansi@^3.0.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/strip-ansi/issues" + }, + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "description": "Strip ANSI escape codes", + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "directories": {}, + "dist": { + "shasum": "6a385fb8853d952d5ff05d0e8aaf94278dc63dcf", + "tarball": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz" + }, + 
"engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js" + ], + "gitHead": "8270705c704956da865623e564eba4875c3ea17f", + "homepage": "https://github.com/chalk/strip-ansi", + "keywords": [ + "256", + "ansi", + "color", + "colors", + "colour", + "command-line", + "console", + "escape", + "formatting", + "log", + "logging", + "remove", + "rgb", + "shell", + "string", + "strip", + "styles", + "terminal", + "text", + "trim", + "tty", + "xterm" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + }, + { + "name": "jbnicolai", + "email": "jappelman@xebia.com" + } + ], + "name": "strip-ansi", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/strip-ansi.git" + }, + "scripts": { + "test": "xo && ava" + }, + "version": "3.0.1" +} diff --git a/node_modules/fsevents/node_modules/strip-ansi/readme.md b/node_modules/fsevents/node_modules/strip-ansi/readme.md new file mode 100644 index 0000000..cb7d9ff --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-ansi/readme.md @@ -0,0 +1,33 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save strip-ansi +``` + + +## Usage + +```js +var stripAnsi = require('strip-ansi'); + +stripAnsi('\u001b[4mcake\u001b[0m'); +//=> 'cake' +``` + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/strip-json-comments/cli.js b/node_modules/fsevents/node_modules/strip-json-comments/cli.js new file mode 100755 index 0000000..aec5aa2 --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-json-comments/cli.js @@ -0,0 +1,41 @@ +#!/usr/bin/env node +'use strict'; +var fs = require('fs'); +var strip = require('./strip-json-comments'); +var input = process.argv[2]; + + +function getStdin(cb) { + var ret = ''; + + process.stdin.setEncoding('utf8'); + + process.stdin.on('data', function (data) { + ret += data; + }); + + process.stdin.on('end', function () { + cb(ret); + }); +} + +if (process.argv.indexOf('-h') !== -1 || process.argv.indexOf('--help') !== -1) { + console.log('strip-json-comments input-file > output-file'); + console.log('or'); + console.log('strip-json-comments < input-file > output-file'); + return; +} + +if (process.argv.indexOf('-v') !== -1 || process.argv.indexOf('--version') !== -1) { + console.log(require('./package').version); + return; +} + +if (input) { + process.stdout.write(strip(fs.readFileSync(input, 'utf8'))); + return; +} + +getStdin(function (data) { + process.stdout.write(strip(data)); +}); diff --git a/node_modules/fsevents/node_modules/strip-json-comments/license b/node_modules/fsevents/node_modules/strip-json-comments/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-json-comments/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission 
is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/strip-json-comments/package.json b/node_modules/fsevents/node_modules/strip-json-comments/package.json new file mode 100644 index 0000000..f19a218 --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-json-comments/package.json @@ -0,0 +1,104 @@ +{ + "_args": [ + [ + "strip-json-comments@~1.0.4", + "/Users/eshanker/Code/fsevents/node_modules/rc" + ] + ], + "_from": "strip-json-comments@>=1.0.4 <1.1.0", + "_id": "strip-json-comments@1.0.4", + "_inCache": true, + "_installable": true, + "_location": "/strip-json-comments", + "_nodeVersion": "0.12.5", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.11.2", + "_phantomChildren": {}, + "_requested": { + "name": "strip-json-comments", + "raw": "strip-json-comments@~1.0.4", + "rawSpec": "~1.0.4", + "scope": null, + "spec": ">=1.0.4 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/rc" + ], + "_resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "_shasum": "1e15fbcac97d3ee99bf2d73b4c656b082bbafb91", + "_shrinkwrap": null, + "_spec": "strip-json-comments@~1.0.4", + "_where": "/Users/eshanker/Code/fsevents/node_modules/rc", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bin": { + "strip-json-comments": "cli.js" + }, + "bugs": { + "url": "https://github.com/sindresorhus/strip-json-comments/issues" + }, + "dependencies": {}, + "description": "Strip comments from JSON. 
Lets you use comments in your JSON files!", + "devDependencies": { + "mocha": "*" + }, + "directories": {}, + "dist": { + "shasum": "1e15fbcac97d3ee99bf2d73b4c656b082bbafb91", + "tarball": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz" + }, + "engines": { + "node": ">=0.8.0" + }, + "files": [ + "cli.js", + "strip-json-comments.js" + ], + "gitHead": "f58348696368583cc5bb18525fe31eacc9bd00e1", + "homepage": "https://github.com/sindresorhus/strip-json-comments", + "keywords": [ + "bin", + "cli", + "comments", + "conf", + "config", + "configuration", + "delete", + "env", + "environment", + "json", + "multiline", + "parse", + "remove", + "settings", + "strip", + "trim", + "util" + ], + "license": "MIT", + "main": "strip-json-comments", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + } + ], + "name": "strip-json-comments", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/sindresorhus/strip-json-comments.git" + }, + "scripts": { + "test": "mocha --ui tdd" + }, + "version": "1.0.4" +} diff --git a/node_modules/fsevents/node_modules/strip-json-comments/readme.md b/node_modules/fsevents/node_modules/strip-json-comments/readme.md new file mode 100644 index 0000000..63ce165 --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-json-comments/readme.md @@ -0,0 +1,80 @@ +# strip-json-comments [![Build Status](https://travis-ci.org/sindresorhus/strip-json-comments.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-json-comments) + +> Strip comments from JSON. Lets you use comments in your JSON files! + +This is now possible: + +```js +{ + // rainbows + "unicorn": /* ❤ */ "cake" +} +``` + +It will remove single-line comments `//` and multi-line comments `/**/`. + +Also available as a [gulp](https://github.com/sindresorhus/gulp-strip-json-comments)/[grunt](https://github.com/sindresorhus/grunt-strip-json-comments)/[broccoli](https://github.com/sindresorhus/broccoli-strip-json-comments) plugin. + +- + +*There's also [`json-comments`](https://npmjs.org/package/json-comments), but it's only for Node.js, inefficient, bloated as it also minifies, and comes with a `require` hook, which is :(* + + +## Install + +```sh +$ npm install --save strip-json-comments +``` + +```sh +$ bower install --save strip-json-comments +``` + +```sh +$ component install sindresorhus/strip-json-comments +``` + + +## Usage + +```js +var json = '{/*rainbows*/"unicorn":"cake"}'; +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` + + +## API + +### stripJsonComments(input) + +#### input + +Type: `string` + +Accepts a string with JSON and returns a string without comments. + + +## CLI + +```sh +$ npm install --global strip-json-comments +``` + +```sh +$ strip-json-comments --help + +strip-json-comments input-file > output-file +# or +strip-json-comments < input-file > output-file +``` + + +## Related + +- [`strip-css-comments`](https://github.com/sindresorhus/strip-css-comments) + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/strip-json-comments/strip-json-comments.js b/node_modules/fsevents/node_modules/strip-json-comments/strip-json-comments.js new file mode 100644 index 0000000..eb77ce7 --- /dev/null +++ b/node_modules/fsevents/node_modules/strip-json-comments/strip-json-comments.js @@ -0,0 +1,73 @@ +/*! + strip-json-comments + Strip comments from JSON. 
Lets you use comments in your JSON files! + https://github.com/sindresorhus/strip-json-comments + by Sindre Sorhus + MIT License +*/ +(function () { + 'use strict'; + + var singleComment = 1; + var multiComment = 2; + + function stripJsonComments(str) { + var currentChar; + var nextChar; + var insideString = false; + var insideComment = false; + var ret = ''; + + for (var i = 0; i < str.length; i++) { + currentChar = str[i]; + nextChar = str[i + 1]; + + if (!insideComment && currentChar === '"') { + var escaped = str[i - 1] === '\\' && str[i - 2] !== '\\'; + if (!insideComment && !escaped && currentChar === '"') { + insideString = !insideString; + } + } + + if (insideString) { + ret += currentChar; + continue; + } + + if (!insideComment && currentChar + nextChar === '//') { + insideComment = singleComment; + i++; + } else if (insideComment === singleComment && currentChar + nextChar === '\r\n') { + insideComment = false; + i++; + ret += currentChar; + ret += nextChar; + continue; + } else if (insideComment === singleComment && currentChar === '\n') { + insideComment = false; + } else if (!insideComment && currentChar + nextChar === '/*') { + insideComment = multiComment; + i++; + continue; + } else if (insideComment === multiComment && currentChar + nextChar === '*/') { + insideComment = false; + i++; + continue; + } + + if (insideComment) { + continue; + } + + ret += currentChar; + } + + return ret; + } + + if (typeof module !== 'undefined' && module.exports) { + module.exports = stripJsonComments; + } else { + window.stripJsonComments = stripJsonComments; + } +})(); diff --git a/node_modules/fsevents/node_modules/supports-color/index.js b/node_modules/fsevents/node_modules/supports-color/index.js new file mode 100644 index 0000000..4346e27 --- /dev/null +++ b/node_modules/fsevents/node_modules/supports-color/index.js @@ -0,0 +1,50 @@ +'use strict'; +var argv = process.argv; + +var terminator = argv.indexOf('--'); +var hasFlag = function (flag) { + flag = '--' + flag; + var pos = argv.indexOf(flag); + return pos !== -1 && (terminator !== -1 ? 
pos < terminator : true); +}; + +module.exports = (function () { + if ('FORCE_COLOR' in process.env) { + return true; + } + + if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + return false; + } + + if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + return true; + } + + if (process.stdout && !process.stdout.isTTY) { + return false; + } + + if (process.platform === 'win32') { + return true; + } + + if ('COLORTERM' in process.env) { + return true; + } + + if (process.env.TERM === 'dumb') { + return false; + } + + if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) { + return true; + } + + return false; +})(); diff --git a/node_modules/fsevents/node_modules/supports-color/license b/node_modules/fsevents/node_modules/supports-color/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/fsevents/node_modules/supports-color/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
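A note on the `hasFlag` helper in the `index.js` above: it ignores anything that appears after a bare `--` argument terminator. The sketch below is illustrative only (the command line is hypothetical); it shows how that affects the exported boolean.

```js
// Illustrative only: how the hasFlag() helper shown above treats the
// bare "--" terminator. Flags after "--" are not counted, so this
// invocation still resolves to "no color":
//
//   node app.js --no-color -- --color
//
// --no-color sits before the terminator (so it is honoured), while
// --color sits after it (so it is ignored).
var supportsColor = require('supports-color');
console.log(supportsColor); // false for the invocation sketched above
```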
diff --git a/node_modules/fsevents/node_modules/supports-color/package.json b/node_modules/fsevents/node_modules/supports-color/package.json new file mode 100644 index 0000000..14fa18c --- /dev/null +++ b/node_modules/fsevents/node_modules/supports-color/package.json @@ -0,0 +1,105 @@ +{ + "_args": [ + [ + "supports-color@^2.0.0", + "/Users/eshanker/Code/fsevents/node_modules/chalk" + ] + ], + "_from": "supports-color@>=2.0.0 <3.0.0", + "_id": "supports-color@2.0.0", + "_inCache": true, + "_installable": true, + "_location": "/supports-color", + "_nodeVersion": "0.12.5", + "_npmUser": { + "email": "sindresorhus@gmail.com", + "name": "sindresorhus" + }, + "_npmVersion": "2.11.2", + "_phantomChildren": {}, + "_requested": { + "name": "supports-color", + "raw": "supports-color@^2.0.0", + "rawSpec": "^2.0.0", + "scope": null, + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/chalk" + ], + "_resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + "_shasum": "535d045ce6b6363fa40117084629995e9df324c7", + "_shrinkwrap": null, + "_spec": "supports-color@^2.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/chalk", + "author": { + "email": "sindresorhus@gmail.com", + "name": "Sindre Sorhus", + "url": "sindresorhus.com" + }, + "bugs": { + "url": "https://github.com/chalk/supports-color/issues" + }, + "dependencies": {}, + "description": "Detect whether a terminal supports color", + "devDependencies": { + "mocha": "*", + "require-uncached": "^1.0.2" + }, + "directories": {}, + "dist": { + "shasum": "535d045ce6b6363fa40117084629995e9df324c7", + "tarball": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" + }, + "engines": { + "node": ">=0.8.0" + }, + "files": [ + "index.js" + ], + "gitHead": "8400d98ade32b2adffd50902c06d9e725a5c6588", + "homepage": "https://github.com/chalk/supports-color", + "keywords": [ + "256", + "ansi", + "capability", + "cli", + "color", + "colors", + "colour", + "command-line", + "console", + "detect", + "rgb", + "shell", + "styles", + "support", + "supports", + "terminal", + "tty", + "xterm" + ], + "license": "MIT", + "maintainers": [ + { + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" + }, + { + "name": "jbnicolai", + "email": "jappelman@xebia.com" + } + ], + "name": "supports-color", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/chalk/supports-color.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "2.0.0" +} diff --git a/node_modules/fsevents/node_modules/supports-color/readme.md b/node_modules/fsevents/node_modules/supports-color/readme.md new file mode 100644 index 0000000..b4761f1 --- /dev/null +++ b/node_modules/fsevents/node_modules/supports-color/readme.md @@ -0,0 +1,36 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install --save supports-color +``` + + +## Usage + +```js +var supportsColor = require('supports-color'); + +if (supportsColor) { + console.log('Terminal supports color'); +} +``` + +It obeys the `--color` and `--no-color` CLI flags. + +For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`. 
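To make that precedence concrete, here is a small illustrative sketch (the command lines are hypothetical); it follows the order of checks in the module's `index.js`: `FORCE_COLOR` first, then the disable flags, then the enable flags, then the TTY check.

```js
// Illustrative precedence sketch for the checks in index.js:
//
//   FORCE_COLOR=1 node app.js --no-color   // true  - env var wins over the flag
//   node app.js --no-color                 // false - explicit disable flag
//   node app.js --color=always             // true  - explicit enable flag
//   node app.js | cat                      // false - stdout is not a TTY
var supportsColor = require('supports-color');
console.log(supportsColor ? 'color output enabled' : 'plain output');
```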
+ + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/fsevents/node_modules/tar-pack/.npmignore b/node_modules/fsevents/node_modules/tar-pack/.npmignore new file mode 100644 index 0000000..c6f386f --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/.npmignore @@ -0,0 +1,14 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid + +pids +logs +results + +npm-debug.log +node_modules diff --git a/node_modules/fsevents/node_modules/tar-pack/.travis.yml b/node_modules/fsevents/node_modules/tar-pack/.travis.yml new file mode 100644 index 0000000..cf5d636 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/.travis.yml @@ -0,0 +1,7 @@ +sudo: false +language: node_js +node_js: + - stable + - "0.10" + - "0.8" +before_install: if [[ `npm --version` != 3* ]]; then npm install -g npm; fi; diff --git a/node_modules/fsevents/node_modules/tar-pack/LICENSE b/node_modules/fsevents/node_modules/tar-pack/LICENSE new file mode 100644 index 0000000..67b20da --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2014, Forbes Lindesay +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/fsevents/node_modules/tar-pack/README.md b/node_modules/fsevents/node_modules/tar-pack/README.md new file mode 100644 index 0000000..dab8dc9 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/README.md @@ -0,0 +1,81 @@ +# Tar Pack + +Package and un-package modules of some sort (in tar/gz bundles). This is mostly useful for package managers. Note that it doesn't check for or touch `package.json` so it can be used even if that's not the way you store your package info. 
+ +[![Build Status](https://img.shields.io/travis/ForbesLindesay/tar-pack/master.svg)](https://travis-ci.org/ForbesLindesay/tar-pack) +[![Dependency Status](https://img.shields.io/david/ForbesLindesay/tar-pack.svg)](https://david-dm.org/ForbesLindesay/tar-pack) +[![NPM version](https://img.shields.io/npm/v/tar-pack.svg)](https://www.npmjs.com/package/tar-pack) + +## Installation + + $ npm install tar-pack + +## API + +### pack(folder|packer, [options]) + +Pack the folder at `folder` into a gzipped tarball and return the tgz as a stream. Files ignored by `.gitignore` will not be in the package. + +You can optionally pass a `fstream.DirReader` directly, instead of folder. For example, to create an npm package, do: + +```js +pack(require("fstream-npm")(folder), [options]) +``` + +Options: + + - `noProprietary` (defaults to `false`) Set this to `true` to prevent any proprietary attributes being added to the tarball. These attributes are allowed by the spec, but may trip up some poorly written tarball parsers. + - `ignoreFiles` (defaults to `['.gitignore']`) These files can specify files to be excluded from the package using the syntax of `.gitignore`. This option is ignored if you parse a `fstream.DirReader` instead of a string for folder. + - `filter` (defaults to `entry => true`) A function that takes an entry and returns `true` if it should be included in the package and `false` if it should not. Entryies are of the form `{path, basename, dirname, type}` where (type is "Directory" or "File"). This function is ignored if you parse a `fstream.DirReader` instead of a string for folder. + +Example: + +```js +var write = require('fs').createWriteStream +var pack = require('tar-pack').pack +pack(process.cwd()) + .pipe(write(__dirname + '/package.tar.gz')) + .on('error', function (err) { + console.error(err.stack) + }) + .on('close', function () { + console.log('done') + }) +``` + +### unpack(folder, [options,] cb) + +Return a stream that unpacks a tarball into a folder at `folder`. N.B. the output folder will be removed first if it already exists. + +The callback is called with an optional error and, as its second argument, a string which is one of: + + - `'directory'`, indicating that the extracted package was a directory (either `.tar.gz` or `.tar`) + - `'file'`, incating that the extracted package was just a single file (extracted to `defaultName`, see options) + +Basic Options: + + - `defaultName` (defaults to `index.js`) If the package is a single file, rather than a tarball, it will be "extracted" to this file name, set to `false` to disable. 
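For instance, a minimal sketch of the `defaultName` behaviour just described (the paths and file names here are purely illustrative):

```js
// Illustrative sketch: when the input is a bare file rather than a tarball,
// unpack() "extracts" it to <folder>/<defaultName>; here the default of
// index.js is overridden with bundle.js. Paths are hypothetical.
var fs = require('fs')
var unpack = require('tar-pack').unpack

fs.createReadStream('/path/to/single-file.js')
  .pipe(unpack('/path/to/out', { defaultName: 'bundle.js' }, function (err) {
    if (err) console.error(err.stack)
    else console.log('done') // written to /path/to/out/bundle.js
  }))
```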
+ +Advanced Options (you probably don't need any of these): + + - `gid` - (defaults to `null`) the `gid` to use when writing files + - `uid` - (defaults to `null`) the `uid` to use when writing files + - `dmode` - (defaults to `0777`) The mode to use when creating directories + - `fmode` - (defaults to `0666`) The mode to use when creating files + - `unsafe` - (defaults to `false`) (on non win32 OSes it overrides `gid` and `uid` with the current processes IDs) + +Example: + +```js +var read = require('fs').createReadStream +var unpack = require('tar-pack').unpack +read(process.cwd() + '/package.tar.gz') + .pipe(unpack(__dirname + '/package/', function (err) { + if (err) console.error(err.stack) + else console.log('done') + })) +``` + +## License + + BSD \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tar-pack/index.js b/node_modules/fsevents/node_modules/tar-pack/index.js new file mode 100644 index 0000000..8a1539b --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/index.js @@ -0,0 +1,240 @@ +"use strict" + +var debug = require('debug')('tar-pack') +var uidNumber = require('uid-number') +var rm = require('rimraf') +var tar = require('tar') +var once = require('once') +var fstream = require('fstream') +var packer = require('fstream-ignore') + +var PassThrough = require('stream').PassThrough || require('readable-stream').PassThrough +var zlib = require('zlib') +var path = require('path') + +var win32 = process.platform === 'win32' +var myUid = process.getuid && process.getuid() +var myGid = process.getgid && process.getgid() + +if (process.env.SUDO_UID && myUid === 0) { + if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID + if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID +} + +exports.pack = pack +exports.unpack = unpack + +function pack(folder, options) { + options = options || {} + if (typeof folder === 'string') { + + var filter = options.filter || function (entry) { return true; } + + folder = packer({ + path: folder, + type: 'Directory', + isDirectory: true, + ignoreFiles: options.ignoreFiles || ['.gitignore'], + filter: function (entry) { // {path, basename, dirname, type} (type is "Directory" or "File") + var basename = entry.basename + // some files are *never* allowed under any circumstances + // these files should always be either temporary files or + // version control related files + if (basename === '.git' || basename === '.lock-wscript' || basename.match(/^\.wafpickle-[0-9]+$/) || + basename === 'CVS' || basename === '.svn' || basename === '.hg' || basename.match(/^\..*\.swp$/) || + basename === '.DS_Store' || basename.match(/^\._/)) { + return false + } + //custom excludes + return filter(entry) + } + }) + } + // By default, npm includes some proprietary attributes in the + // package tarball. This is sane, and allowed by the spec. + // However, npm *itself* excludes these from its own package, + // so that it can be more easily bootstrapped using old and + // non-compliant tar implementations. 
+ var tarPack = tar.Pack({ noProprietary: options.noProprietary || false }) + var gzip = zlib.Gzip() + + folder + .on('error', function (er) { + if (er) debug('Error reading folder') + return gzip.emit('error', er) + }) + tarPack + .on('error', function (er) { + if (er) debug('tar creation error') + gzip.emit('error', er) + }) + return folder.pipe(tarPack).pipe(gzip) +} + +function unpack(unpackTarget, options, cb) { + if (typeof options === 'function' && cb === undefined) cb = options, options = undefined + + var tarball = new PassThrough() + if (typeof cb === 'function') { + cb = once(cb) + tarball.on('error', cb) + tarball.on('close', function () { + cb() + }) + } + + var parent = path.dirname(unpackTarget) + var base = path.basename(unpackTarget) + + options = options || {} + var gid = options.gid || null + var uid = options.uid || null + var dMode = options.dmode || 0x0777 //npm.modes.exec + var fMode = options.fmode || 0x0666 //npm.modes.file + var defaultName = options.defaultName || (options.defaultName === false ? false : 'index.js') + + // figure out who we're supposed to be, if we're not pretending + // to be a specific user. + if (options.unsafe && !win32) { + uid = myUid + gid = myGid + } + + var pending = 2 + uidNumber(uid, gid, function (er, uid, gid) { + if (er) { + tarball.emit('error', er) + return tarball.end() + } + if (0 === --pending) next() + }) + rm(unpackTarget, function (er) { + if (er) { + tarball.emit('error', er) + return tarball.end() + } + if (0 === --pending) next() + }) + function next() { + // gzip {tarball} --decompress --stdout \ + // | tar -mvxpf - --strip-components=1 -C {unpackTarget} + gunzTarPerm(tarball, unpackTarget, dMode, fMode, uid, gid, defaultName) + } + return tarball +} + + +function gunzTarPerm(tarball, target, dMode, fMode, uid, gid, defaultName) { + debug('modes %j', [dMode.toString(8), fMode.toString(8)]) + + function fixEntry(entry) { + debug('fixEntry %j', entry.path) + // never create things that are user-unreadable, + // or dirs that are user-un-listable. Only leads to headaches. + var originalMode = entry.mode = entry.mode || entry.props.mode + entry.mode = entry.mode | (entry.type === 'Directory' ? dMode : fMode) + entry.props.mode = entry.mode + if (originalMode !== entry.mode) { + debug('modified mode %j', [entry.path, originalMode, entry.mode]) + } + + // if there's a specific owner uid/gid that we want, then set that + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + entry.props.uid = entry.uid = uid + entry.props.gid = entry.gid = gid + } + } + + var extractOpts = { type: 'Directory', path: target, strip: 1 } + + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + extractOpts.uid = uid + extractOpts.gid = gid + } + + extractOpts.filter = function () { + // symbolic links are not allowed in packages. 
+ if (this.type.match(/^.*Link$/)) { + debug('excluding symbolic link: ' + this.path.substr(target.length + 1) + ' -> ' + this.linkpath) + return false + } + return true + } + + + type(tarball, function (err, type) { + if (err) return tarball.emit('error', err) + var strm = tarball + if (type === 'gzip') { + strm = strm.pipe(zlib.Unzip()) + strm.on('error', function (er) { + if (er) debug('unzip error') + tarball.emit('error', er) + }) + type = 'tar' + } + if (type === 'tar') { + strm + .pipe(tar.Extract(extractOpts)) + .on('entry', fixEntry) + .on('error', function (er) { + if (er) debug('untar error') + tarball.emit('error', er) + }) + .on('close', function () { + tarball.emit('close') + }) + return + } + if (type === 'naked-file' && defaultName) { + var jsOpts = { path: path.resolve(target, defaultName) } + + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + jsOpts.uid = uid + jsOpts.gid = gid + } + + strm + .pipe(fstream.Writer(jsOpts)) + .on('error', function (er) { + if (er) debug('copy error') + tarball.emit('error', er) + }) + .on('close', function () { + tarball.emit('close') + }) + return + } + + return cb(new Error('Unrecognised package type')); + }) +} + +function type(stream, callback) { + stream.on('error', handle) + stream.on('data', parse) + function handle(err) { + stream.removeListener('data', parse) + stream.removeListener('error', handle) + } + function parse(chunk) { + // detect what it is. + // Then, depending on that, we'll figure out whether it's + // a single-file module, gzipped tarball, or naked tarball. + + // gzipped files all start with 1f8b08 + if (chunk[0] === 0x1F && chunk[1] === 0x8B && chunk[2] === 0x08) { + callback(null, 'gzip') + } else if (chunk.toString().match(/^package\/\u0000/)) { + // note, this will only pick up on tarballs with a root directory called package + callback(null, 'tar') + } else { + callback(null, 'naked-file') + } + + // now un-hook, and re-emit the chunk + stream.removeListener('data', parse) + stream.removeListener('error', handle) + stream.unshift(chunk) + } +} diff --git a/node_modules/fsevents/node_modules/tar-pack/package.json b/node_modules/fsevents/node_modules/tar-pack/package.json new file mode 100644 index 0000000..a749704 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/package.json @@ -0,0 +1,92 @@ +{ + "_args": [ + [ + "tar-pack@~3.1.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "tar-pack@>=3.1.0 <3.2.0", + "_id": "tar-pack@3.1.4", + "_inCache": true, + "_installable": true, + "_location": "/tar-pack", + "_nodeVersion": "6.2.2", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/tar-pack-3.1.4.tgz_1467088357244_0.18669610540382564" + }, + "_npmUser": { + "email": "snnskwtnb@gmail.com", + "name": "shinnn" + }, + "_npmVersion": "3.10.2", + "_phantomChildren": {}, + "_requested": { + "name": "tar-pack", + "raw": "tar-pack@~3.1.0", + "rawSpec": "~3.1.0", + "scope": null, + "spec": ">=3.1.0 <3.2.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp" + ], + "_resolved": "https://registry.npmjs.org/tar-pack/-/tar-pack-3.1.4.tgz", + "_shasum": "bc8cf9a22f5832739f12f3910dac1eb97b49708c", + "_shrinkwrap": null, + "_spec": "tar-pack@~3.1.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "bugs": { + "url": "https://github.com/ForbesLindesay/tar-pack/issues" + }, + "dependencies": { + "debug": "~2.2.0", + "fstream": "~1.0.10", + "fstream-ignore": "~1.0.5", + "once": "~1.3.3", + 
"readable-stream": "~2.1.4", + "rimraf": "~2.5.1", + "tar": "~2.2.1", + "uid-number": "~0.0.6" + }, + "description": "Package and un-package modules of some sort (in tar/gz bundles).", + "devDependencies": { + "mkdirp": "*", + "mocha": "*", + "rfile": "*" + }, + "directories": {}, + "dist": { + "shasum": "bc8cf9a22f5832739f12f3910dac1eb97b49708c", + "tarball": "https://registry.npmjs.org/tar-pack/-/tar-pack-3.1.4.tgz" + }, + "gitHead": "ee23170716772ae458574ea090500035202c7c64", + "homepage": "https://github.com/ForbesLindesay/tar-pack#readme", + "license": "BSD-2-Clause", + "maintainers": [ + { + "name": "es128", + "email": "elan.shanker+npm@gmail.com" + }, + { + "name": "forbeslindesay", + "email": "forbes@lindesay.co.uk" + }, + { + "name": "shinnn", + "email": "snnskwtnb@gmail.com" + } + ], + "name": "tar-pack", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/ForbesLindesay/tar-pack.git" + }, + "scripts": { + "test": "mocha -R list" + }, + "version": "3.1.4" +} diff --git a/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed-file.txt b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed-file.txt new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed-file.txt @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar new file mode 100644 index 0000000..35fd174 Binary files /dev/null and b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar differ diff --git a/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar.gz b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar.gz new file mode 100644 index 0000000..89516b1 Binary files /dev/null and b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/packed.tar.gz differ diff --git a/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/bar.txt b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/bar.txt new file mode 100644 index 0000000..3f95386 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/bar.txt @@ -0,0 +1 @@ +baz \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/foo.txt b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/foo.txt new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/test/fixtures/to-pack/foo.txt @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tar-pack/test/index.js b/node_modules/fsevents/node_modules/tar-pack/test/index.js new file mode 100644 index 0000000..314c020 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar-pack/test/index.js @@ -0,0 +1,67 @@ +var tar = require('../') +var path = require('path') +var rfile = require('rfile') +var rimraf = require('rimraf').sync +var mkdir = require('mkdirp').sync + +var read = require('fs').createReadStream +var write = require('fs').createWriteStream +var assert = require('assert') + +beforeEach(function () { + rimraf(__dirname + '/output') +}) +afterEach(function () { + rimraf(__dirname + '/output') +}) +describe('tarball.pipe(unpack(directory, callback))', function () { + it('unpacks the tarball into the directory', function (done) { + read(__dirname 
+ '/fixtures/packed.tar').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) +}) +describe('gziptarball.pipe(unpack(directory, callback))', function () { + it('unpacks the tarball into the directory', function (done) { + read(__dirname + '/fixtures/packed.tar.gz').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) +}) +describe('file.pipe(unpack(directory, callback))', function () { + it('copies the file into the directory', function (done) { + read(__dirname + '/fixtures/packed-file.txt').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/index.js'), rfile('./fixtures/packed-file.txt')) + done() + })) + }) +}) +describe('pack(directory).pipe(tarball)', function () { + it('packs the directory into the output', function (done) { + var called = false + mkdir(__dirname + '/output/') + tar.pack(__dirname + '/fixtures/to-pack').pipe(write(__dirname + '/output/packed.tar.gz')) + .on('error', function (err) { + if (called) return + called = true + done(err) + }) + .on('close', function () { + if (called) return + called = true + read(__dirname + '/output/packed.tar.gz').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) + }) +}) \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tar/.npmignore b/node_modules/fsevents/node_modules/tar/.npmignore new file mode 100644 index 0000000..c167ad5 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/.npmignore @@ -0,0 +1,5 @@ +.*.swp +node_modules +examples/extract/ +test/tmp/ +test/fixtures/ diff --git a/node_modules/fsevents/node_modules/tar/.travis.yml b/node_modules/fsevents/node_modules/tar/.travis.yml new file mode 100644 index 0000000..fca8ef0 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.10 + - 0.11 diff --git a/node_modules/fsevents/node_modules/tar/LICENSE b/node_modules/fsevents/node_modules/tar/LICENSE new file mode 100644 index 0000000..019b7e4 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/LICENSE @@ -0,0 +1,12 @@ +The ISC License +Copyright (c) Isaac Z. Schlueter and Contributors +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/tar/README.md b/node_modules/fsevents/node_modules/tar/README.md new file mode 100644 index 0000000..cfda2ac --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/README.md @@ -0,0 +1,50 @@ +# node-tar + +Tar for Node.js. + +[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/) + +## API + +See `examples/` for usage examples. + +### var tar = require('tar') + +Returns an object with `.Pack`, `.Extract` and `.Parse` methods. + +### tar.Pack([properties]) + +Returns a through stream. Use +[fstream](https://npmjs.org/package/fstream) to write files into the +pack stream and you will receive tar archive data from the pack +stream. + +This only works with directories, it does not work with individual files. + +The optional `properties` object are used to set properties in the tar +'Global Extended Header'. If the `fromBase` property is set to true, +the tar will contain files relative to the path passed, and not with +the path included. + +### tar.Extract([options]) + +Returns a through stream. Write tar data to the stream and the files +in the tarball will be extracted onto the filesystem. + +`options` can be: + +```js +{ + path: '/path/to/extract/tar/into', + strip: 0, // how many path segments to strip from the root when extracting +} +``` + +`options` also get passed to the `fstream.Writer` instance that `tar` +uses internally. + +### tar.Parse() + +Returns a writable stream. Write tar data to it and it will emit +`entry` events for each entry parsed from the tarball. This is used by +`tar.Extract`. 
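As a rough end-to-end sketch of the `tar.Pack`/`tar.Extract` streams described above, combined with `fstream` and `zlib` (all paths are illustrative):

```js
// Illustrative sketch: pack a directory into a gzipped tarball, then extract
// it elsewhere while stripping the leading path segment. Paths are made up.
var tar = require('tar')
var fstream = require('fstream')
var zlib = require('zlib')
var fs = require('fs')

// pack: directory reader -> tar.Pack -> gzip -> file
fstream.Reader({ path: './some-dir', type: 'Directory' })
  .pipe(tar.Pack({ noProprietary: true }))
  .pipe(zlib.createGzip())
  .pipe(fs.createWriteStream('./some-dir.tar.gz'))
  .on('close', function () {
    // extract: file -> gunzip -> tar.Extract, dropping the top-level folder
    fs.createReadStream('./some-dir.tar.gz')
      .pipe(zlib.createGunzip())
      .pipe(tar.Extract({ path: './out', strip: 1 }))
      .on('error', console.error)
      .on('end', function () { console.log('extracted') })
  })
```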
diff --git a/node_modules/fsevents/node_modules/tar/examples/extracter.js b/node_modules/fsevents/node_modules/tar/examples/extracter.js new file mode 100644 index 0000000..f6253a7 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/examples/extracter.js @@ -0,0 +1,19 @@ +var tar = require("../tar.js") + , fs = require("fs") + + +function onError(err) { + console.error('An error occurred:', err) +} + +function onEnd() { + console.log('Extracted!') +} + +var extractor = tar.Extract({path: __dirname + "/extract"}) + .on('error', onError) + .on('end', onEnd); + +fs.createReadStream(__dirname + "/../test/fixtures/c.tar") + .on('error', onError) + .pipe(extractor); diff --git a/node_modules/fsevents/node_modules/tar/examples/packer.js b/node_modules/fsevents/node_modules/tar/examples/packer.js new file mode 100644 index 0000000..039969c --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/examples/packer.js @@ -0,0 +1,24 @@ +var tar = require("../tar.js") + , fstream = require("fstream") + , fs = require("fs") + +var dirDest = fs.createWriteStream('dir.tar') + + +function onError(err) { + console.error('An error occurred:', err) +} + +function onEnd() { + console.log('Packed!') +} + +var packer = tar.Pack({ noProprietary: true }) + .on('error', onError) + .on('end', onEnd); + +// This must be a "directory" +fstream.Reader({ path: __dirname, type: "Directory" }) + .on('error', onError) + .pipe(packer) + .pipe(dirDest) diff --git a/node_modules/fsevents/node_modules/tar/examples/reader.js b/node_modules/fsevents/node_modules/tar/examples/reader.js new file mode 100644 index 0000000..39f3f08 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/examples/reader.js @@ -0,0 +1,36 @@ +var tar = require("../tar.js") + , fs = require("fs") + +fs.createReadStream(__dirname + "/../test/fixtures/c.tar") + .pipe(tar.Parse()) + .on("extendedHeader", function (e) { + console.error("extended pax header", e.props) + e.on("end", function () { + console.error("extended pax fields:", e.fields) + }) + }) + .on("ignoredEntry", function (e) { + console.error("ignoredEntry?!?", e.props) + }) + .on("longLinkpath", function (e) { + console.error("longLinkpath entry", e.props) + e.on("end", function () { + console.error("value=%j", e.body.toString()) + }) + }) + .on("longPath", function (e) { + console.error("longPath entry", e.props) + e.on("end", function () { + console.error("value=%j", e.body.toString()) + }) + }) + .on("entry", function (e) { + console.error("entry", e.props) + e.on("data", function (c) { + console.error(" >>>" + c.toString().replace(/\n/g, "\\n")) + }) + e.on("end", function () { + console.error(" << 0 + return !this._needDrain +} + +EntryWriter.prototype.end = function (c) { + // console.error(".. ew end") + if (c) this._buffer.push(c) + this._buffer.push(EOF) + this._ended = true + this._process() + this._needDrain = this._buffer.length > 0 +} + +EntryWriter.prototype.pause = function () { + // console.error(".. ew pause") + this._paused = true + this.emit("pause") +} + +EntryWriter.prototype.resume = function () { + // console.error(".. ew resume") + this._paused = false + this.emit("resume") + this._process() +} + +EntryWriter.prototype.add = function (entry) { + // console.error(".. ew add") + if (!this.parent) return this.emit("error", new Error("no parent")) + + // make sure that the _header and such is emitted, and clear out + // the _currentEntry link on the parent. 
+ if (!this._ended) this.end() + + return this.parent.add(entry) +} + +EntryWriter.prototype._header = function () { + // console.error(".. ew header") + if (this._didHeader) return + this._didHeader = true + + var headerBlock = TarHeader.encode(this.props) + + if (this.props.needExtended && !this._meta) { + var me = this + + ExtendedHeaderWriter = ExtendedHeaderWriter || + require("./extended-header-writer.js") + + ExtendedHeaderWriter(this.props) + .on("data", function (c) { + me.emit("data", c) + }) + .on("error", function (er) { + me.emit("error", er) + }) + .end() + } + + // console.error(".. .. ew headerBlock emitting") + this.emit("data", headerBlock) + this.emit("header") +} + +EntryWriter.prototype._process = function () { + // console.error(".. .. ew process") + if (!this._didHeader && !this._meta) { + this._header() + } + + if (this._paused || this._processing) { + // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing) + return + } + + this._processing = true + + var buf = this._buffer + for (var i = 0; i < buf.length; i ++) { + // console.error(".. .. .. i=%d", i) + + var c = buf[i] + + if (c === EOF) this._stream.end() + else this._stream.write(c) + + if (this._paused) { + // console.error(".. .. .. paused mid-emission") + this._processing = false + if (i < buf.length) { + this._needDrain = true + this._buffer = buf.slice(i + 1) + } + return + } + } + + // console.error(".. .. .. emitted") + this._buffer.length = 0 + this._processing = false + + // console.error(".. .. .. emitting drain") + this.emit("drain") +} + +EntryWriter.prototype.destroy = function () {} diff --git a/node_modules/fsevents/node_modules/tar/lib/entry.js b/node_modules/fsevents/node_modules/tar/lib/entry.js new file mode 100644 index 0000000..591202b --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/entry.js @@ -0,0 +1,220 @@ +// A passthrough read/write stream that sets its properties +// based on a header, extendedHeader, and globalHeader +// +// Can be either a file system object of some sort, or +// a pax/ustar metadata entry. + +module.exports = Entry + +var TarHeader = require("./header.js") + , tar = require("../tar") + , assert = require("assert").ok + , Stream = require("stream").Stream + , inherits = require("inherits") + , fstream = require("fstream").Abstract + +function Entry (header, extended, global) { + Stream.call(this) + this.readable = true + this.writable = true + + this._needDrain = false + this._paused = false + this._reading = false + this._ending = false + this._ended = false + this._remaining = 0 + this._abort = false + this._queue = [] + this._index = 0 + this._queueLen = 0 + + this._read = this._read.bind(this) + + this.props = {} + this._header = header + this._extended = extended || {} + + // globals can change throughout the course of + // a file parse operation. Freeze it at its current state. + this._global = {} + var me = this + Object.keys(global || {}).forEach(function (g) { + me._global[g] = global[g] + }) + + this._setProps() +} + +inherits(Entry, Stream) + +Entry.prototype.write = function (c) { + if (this._ending) this.error("write() after end()", null, true) + if (this._remaining === 0) { + this.error("invalid bytes past eof") + } + + // often we'll get a bunch of \0 at the end of the last write, + // since chunks will always be 512 bytes when reading a tarball. + if (c.length > this._remaining) { + c = c.slice(0, this._remaining) + } + this._remaining -= c.length + + // put it on the stack. 
+ var ql = this._queueLen + this._queue.push(c) + this._queueLen ++ + + this._read() + + // either paused, or buffered + if (this._paused || ql > 0) { + this._needDrain = true + return false + } + + return true +} + +Entry.prototype.end = function (c) { + if (c) this.write(c) + this._ending = true + this._read() +} + +Entry.prototype.pause = function () { + this._paused = true + this.emit("pause") +} + +Entry.prototype.resume = function () { + // console.error(" Tar Entry resume", this.path) + this.emit("resume") + this._paused = false + this._read() + return this._queueLen - this._index > 1 +} + + // This is bound to the instance +Entry.prototype._read = function () { + // console.error(" Tar Entry _read", this.path) + + if (this._paused || this._reading || this._ended) return + + // set this flag so that event handlers don't inadvertently + // get multiple _read() calls running. + this._reading = true + + // have any data to emit? + while (this._index < this._queueLen && !this._paused) { + var chunk = this._queue[this._index ++] + this.emit("data", chunk) + } + + // check if we're drained + if (this._index >= this._queueLen) { + this._queue.length = this._queueLen = this._index = 0 + if (this._needDrain) { + this._needDrain = false + this.emit("drain") + } + if (this._ending) { + this._ended = true + this.emit("end") + } + } + + // if the queue gets too big, then pluck off whatever we can. + // this should be fairly rare. + var mql = this._maxQueueLen + if (this._queueLen > mql && this._index > 0) { + mql = Math.min(this._index, mql) + this._index -= mql + this._queueLen -= mql + this._queue = this._queue.slice(mql) + } + + this._reading = false +} + +Entry.prototype._setProps = function () { + // props = extended->global->header->{} + var header = this._header + , extended = this._extended + , global = this._global + , props = this.props + + // first get the values from the normal header. + var fields = tar.fields + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , val = header[field] + if (typeof val !== "undefined") props[field] = val + } + + // next, the global header for this file. + // numeric values, etc, will have already been parsed. + ;[global, extended].forEach(function (p) { + Object.keys(p).forEach(function (f) { + if (typeof p[f] !== "undefined") props[f] = p[f] + }) + }) + + // no nulls allowed in path or linkpath + ;["path", "linkpath"].forEach(function (p) { + if (props.hasOwnProperty(p)) { + props[p] = props[p].split("\0")[0] + } + }) + + + // set date fields to be a proper date + ;["mtime", "ctime", "atime"].forEach(function (p) { + if (props.hasOwnProperty(p)) { + props[p] = new Date(props[p] * 1000) + } + }) + + // set the type so that we know what kind of file to create + var type + switch (tar.types[props.type]) { + case "OldFile": + case "ContiguousFile": + type = "File" + break + + case "GNUDumpDir": + type = "Directory" + break + + case undefined: + type = "Unknown" + break + + case "Link": + case "SymbolicLink": + case "CharacterDevice": + case "BlockDevice": + case "Directory": + case "FIFO": + default: + type = tar.types[props.type] + } + + this.type = type + this.path = props.path + this.size = props.size + + // size is special, since it signals when the file needs to end. + this._remaining = props.size +} + +// the parser may not call write if _abort is true. +// useful for skipping data from some files quickly. 
+Entry.prototype.abort = function(){ + this._abort = true +} + +Entry.prototype.warn = fstream.warn +Entry.prototype.error = fstream.error diff --git a/node_modules/fsevents/node_modules/tar/lib/extended-header-writer.js b/node_modules/fsevents/node_modules/tar/lib/extended-header-writer.js new file mode 100644 index 0000000..1728c45 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/extended-header-writer.js @@ -0,0 +1,191 @@ + +module.exports = ExtendedHeaderWriter + +var inherits = require("inherits") + , EntryWriter = require("./entry-writer.js") + +inherits(ExtendedHeaderWriter, EntryWriter) + +var tar = require("../tar.js") + , path = require("path") + , TarHeader = require("./header.js") + +// props is the props of the thing we need to write an +// extended header for. +// Don't be shy with it. Just encode everything. +function ExtendedHeaderWriter (props) { + // console.error(">> ehw ctor") + var me = this + + if (!(me instanceof ExtendedHeaderWriter)) { + return new ExtendedHeaderWriter(props) + } + + me.fields = props + + var p = + { path : ("PaxHeader" + path.join("/", props.path || "")) + .replace(/\\/g, "/").substr(0, 100) + , mode : props.mode || 0666 + , uid : props.uid || 0 + , gid : props.gid || 0 + , size : 0 // will be set later + , mtime : props.mtime || Date.now() / 1000 + , type : "x" + , linkpath : "" + , ustar : "ustar\0" + , ustarver : "00" + , uname : props.uname || "" + , gname : props.gname || "" + , devmaj : props.devmaj || 0 + , devmin : props.devmin || 0 + } + + + EntryWriter.call(me, p) + // console.error(">> ehw props", me.props) + me.props = p + + me._meta = true +} + +ExtendedHeaderWriter.prototype.end = function () { + // console.error(">> ehw end") + var me = this + + if (me._ended) return + me._ended = true + + me._encodeFields() + + if (me.props.size === 0) { + // nothing to write! + me._ready = true + me._stream.end() + return + } + + me._stream.write(TarHeader.encode(me.props)) + me.body.forEach(function (l) { + me._stream.write(l) + }) + me._ready = true + + // console.error(">> ehw _process calling end()", me.props) + this._stream.end() +} + +ExtendedHeaderWriter.prototype._encodeFields = function () { + // console.error(">> ehw _encodeFields") + this.body = [] + if (this.fields.prefix) { + this.fields.path = this.fields.prefix + "/" + this.fields.path + this.fields.prefix = "" + } + encodeFields(this.fields, "", this.body, this.fields.noProprietary) + var me = this + this.body.forEach(function (l) { + me.props.size += l.length + }) +} + +function encodeFields (fields, prefix, body, nop) { + // console.error(">> >> ehw encodeFields") + // "%d %s=%s\n", , , + // The length is a decimal number, and includes itself and the \n + // Numeric values are decimal strings. + + Object.keys(fields).forEach(function (k) { + var val = fields[k] + , numeric = tar.numeric[k] + + if (prefix) k = prefix + "." + k + + // already including NODETAR.type, don't need File=true also + if (k === fields.type && val === true) return + + switch (k) { + // don't include anything that's always handled just fine + // in the normal header, or only meaningful in the context + // of nodetar + case "mode": + case "cksum": + case "ustar": + case "ustarver": + case "prefix": + case "basename": + case "dirname": + case "needExtended": + case "block": + case "filter": + return + + case "rdev": + if (val === 0) return + break + + case "nlink": + case "dev": // Truly a hero among men, Creator of Star! + case "ino": // Speak his name with reverent awe! It is: + k = "SCHILY." 
+ k + break + + default: break + } + + if (val && typeof val === "object" && + !Buffer.isBuffer(val)) encodeFields(val, k, body, nop) + else if (val === null || val === undefined) return + else body.push.apply(body, encodeField(k, val, nop)) + }) + + return body +} + +function encodeField (k, v, nop) { + // lowercase keys must be valid, otherwise prefix with + // "NODETAR." + if (k.charAt(0) === k.charAt(0).toLowerCase()) { + var m = k.split(".")[0] + if (!tar.knownExtended[m]) k = "NODETAR." + k + } + + // no proprietary + if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) { + return [] + } + + if (typeof val === "number") val = val.toString(10) + + var s = new Buffer(" " + k + "=" + v + "\n") + , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1 + + // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) + + // if adding that many digits will make it go over that length, + // then add one to it. For example, if the string is: + // " foo=bar\n" + // then that's 9 characters. With the "9", that bumps the length + // up to 10. However, this is invalid: + // "10 foo=bar\n" + // but, since that's actually 11 characters, since 10 adds another + // character to the length, and the length includes the number + // itself. In that case, just bump it up again. + if (s.length + digits >= Math.pow(10, digits)) digits += 1 + // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) + + var len = digits + s.length + // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len) + var lenBuf = new Buffer("" + len) + if (lenBuf.length + s.length !== len) { + throw new Error("Bad length calculation\n"+ + "len="+len+"\n"+ + "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+ + "lenBuf.length="+lenBuf.length+"\n"+ + "digits="+digits+"\n"+ + "s="+JSON.stringify(s.toString())+"\n"+ + "s.length="+s.length) + } + + return [lenBuf, s] +} diff --git a/node_modules/fsevents/node_modules/tar/lib/extended-header.js b/node_modules/fsevents/node_modules/tar/lib/extended-header.js new file mode 100644 index 0000000..74f432c --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/extended-header.js @@ -0,0 +1,140 @@ +// An Entry consisting of: +// +// "%d %s=%s\n", , , +// +// The length is a decimal number, and includes itself and the \n +// \0 does not terminate anything. Only the length terminates the string. +// Numeric values are decimal strings. 
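// For example, a complete record might read:
//   "30 mtime=1350244992.023960461\n"
// where the leading "30" counts every byte of the record, including the two
// length digits themselves, the space, the key, the "=", the value and the
// trailing \n (2 + 1 + 5 + 1 + 20 + 1 = 30).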
+ +module.exports = ExtendedHeader + +var Entry = require("./entry.js") + , inherits = require("inherits") + , tar = require("../tar.js") + , numeric = tar.numeric + , keyTrans = { "SCHILY.dev": "dev" + , "SCHILY.ino": "ino" + , "SCHILY.nlink": "nlink" } + +function ExtendedHeader () { + Entry.apply(this, arguments) + this.on("data", this._parse) + this.fields = {} + this._position = 0 + this._fieldPos = 0 + this._state = SIZE + this._sizeBuf = [] + this._keyBuf = [] + this._valBuf = [] + this._size = -1 + this._key = "" +} + +inherits(ExtendedHeader, Entry) +ExtendedHeader.prototype._parse = parse + +var s = 0 + , states = ExtendedHeader.states = {} + , SIZE = states.SIZE = s++ + , KEY = states.KEY = s++ + , VAL = states.VAL = s++ + , ERR = states.ERR = s++ + +Object.keys(states).forEach(function (s) { + states[states[s]] = states[s] +}) + +states[s] = null + +// char code values for comparison +var _0 = "0".charCodeAt(0) + , _9 = "9".charCodeAt(0) + , point = ".".charCodeAt(0) + , a = "a".charCodeAt(0) + , Z = "Z".charCodeAt(0) + , a = "a".charCodeAt(0) + , z = "z".charCodeAt(0) + , space = " ".charCodeAt(0) + , eq = "=".charCodeAt(0) + , cr = "\n".charCodeAt(0) + +function parse (c) { + if (this._state === ERR) return + + for ( var i = 0, l = c.length + ; i < l + ; this._position++, this._fieldPos++, i++) { + // console.error("top of loop, size="+this._size) + + var b = c[i] + + if (this._size >= 0 && this._fieldPos > this._size) { + error(this, "field exceeds length="+this._size) + return + } + + switch (this._state) { + case ERR: return + + case SIZE: + // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString()) + if (b === space) { + this._state = KEY + // this._fieldPos = this._sizeBuf.length + this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10) + this._sizeBuf.length = 0 + continue + } + if (b < _0 || b > _9) { + error(this, "expected [" + _0 + ".." + _9 + "], got " + b) + return + } + this._sizeBuf.push(b) + continue + + case KEY: + // can be any char except =, not > size. 
+ if (b === eq) { + this._state = VAL + this._key = new Buffer(this._keyBuf).toString() + if (keyTrans[this._key]) this._key = keyTrans[this._key] + this._keyBuf.length = 0 + continue + } + this._keyBuf.push(b) + continue + + case VAL: + // field must end with cr + if (this._fieldPos === this._size - 1) { + // console.error("finished with "+this._key) + if (b !== cr) { + error(this, "expected \\n at end of field") + return + } + var val = new Buffer(this._valBuf).toString() + if (numeric[this._key]) { + val = parseFloat(val) + } + this.fields[this._key] = val + + this._valBuf.length = 0 + this._state = SIZE + this._size = -1 + this._fieldPos = -1 + continue + } + this._valBuf.push(b) + continue + } + } +} + +function error (me, msg) { + msg = "invalid header: " + msg + + "\nposition=" + me._position + + "\nfield position=" + me._fieldPos + + me.error(msg) + me.state = ERR +} diff --git a/node_modules/fsevents/node_modules/tar/lib/extract.js b/node_modules/fsevents/node_modules/tar/lib/extract.js new file mode 100644 index 0000000..fe1bb97 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/extract.js @@ -0,0 +1,94 @@ +// give it a tarball and a path, and it'll dump the contents + +module.exports = Extract + +var tar = require("../tar.js") + , fstream = require("fstream") + , inherits = require("inherits") + , path = require("path") + +function Extract (opts) { + if (!(this instanceof Extract)) return new Extract(opts) + tar.Parse.apply(this) + + if (typeof opts !== "object") { + opts = { path: opts } + } + + // better to drop in cwd? seems more standard. + opts.path = opts.path || path.resolve("node-tar-extract") + opts.type = "Directory" + opts.Directory = true + + // similar to --strip or --strip-components + opts.strip = +opts.strip + if (!opts.strip || opts.strip <= 0) opts.strip = 0 + + this._fst = fstream.Writer(opts) + + this.pause() + var me = this + + // Hardlinks in tarballs are relative to the root + // of the tarball. So, they need to be resolved against + // the target directory in order to be created properly. + me.on("entry", function (entry) { + // if there's a "strip" argument, then strip off that many + // path components. 
+ if (opts.strip) { + var p = entry.path.split("/").slice(opts.strip).join("/") + entry.path = entry.props.path = p + if (entry.linkpath) { + var lp = entry.linkpath.split("/").slice(opts.strip).join("/") + entry.linkpath = entry.props.linkpath = lp + } + } + if (entry.type === "Link") { + entry.linkpath = entry.props.linkpath = + path.join(opts.path, path.join("/", entry.props.linkpath)) + } + + if (entry.type === "SymbolicLink") { + var dn = path.dirname(entry.path) || "" + var linkpath = entry.props.linkpath + var target = path.resolve(opts.path, dn, linkpath) + if (target.indexOf(opts.path) !== 0) { + linkpath = path.join(opts.path, path.join("/", linkpath)) + } + entry.linkpath = entry.props.linkpath = linkpath + } + }) + + this._fst.on("ready", function () { + me.pipe(me._fst, { end: false }) + me.resume() + }) + + this._fst.on('error', function(err) { + me.emit('error', err) + }) + + this._fst.on('drain', function() { + me.emit('drain') + }) + + // this._fst.on("end", function () { + // console.error("\nEEEE Extract End", me._fst.path) + // }) + + this._fst.on("close", function () { + // console.error("\nEEEE Extract End", me._fst.path) + me.emit("finish") + me.emit("end") + me.emit("close") + }) +} + +inherits(Extract, tar.Parse) + +Extract.prototype._streamEnd = function () { + var me = this + if (!me._ended || me._entry) me.error("unexpected eof") + me._fst.end() + // my .end() is coming later. +} diff --git a/node_modules/fsevents/node_modules/tar/lib/global-header-writer.js b/node_modules/fsevents/node_modules/tar/lib/global-header-writer.js new file mode 100644 index 0000000..0bfc7b8 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/global-header-writer.js @@ -0,0 +1,14 @@ +module.exports = GlobalHeaderWriter + +var ExtendedHeaderWriter = require("./extended-header-writer.js") + , inherits = require("inherits") + +inherits(GlobalHeaderWriter, ExtendedHeaderWriter) + +function GlobalHeaderWriter (props) { + if (!(this instanceof GlobalHeaderWriter)) { + return new GlobalHeaderWriter(props) + } + ExtendedHeaderWriter.call(this, props) + this.props.type = "g" +} diff --git a/node_modules/fsevents/node_modules/tar/lib/header.js b/node_modules/fsevents/node_modules/tar/lib/header.js new file mode 100644 index 0000000..05b237c --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/header.js @@ -0,0 +1,385 @@ +// parse a 512-byte header block to a data object, or vice-versa +// If the data won't fit nicely in a simple header, then generate +// the appropriate extended header file, and return that. + +module.exports = TarHeader + +var tar = require("../tar.js") + , fields = tar.fields + , fieldOffs = tar.fieldOffs + , fieldEnds = tar.fieldEnds + , fieldSize = tar.fieldSize + , numeric = tar.numeric + , assert = require("assert").ok + , space = " ".charCodeAt(0) + , slash = "/".charCodeAt(0) + , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null + +function TarHeader (block) { + if (!(this instanceof TarHeader)) return new TarHeader(block) + if (block) this.decode(block) +} + +TarHeader.prototype = + { decode : decode + , encode: encode + , calcSum: calcSum + , checkSum: checkSum + } + +TarHeader.parseNumeric = parseNumeric +TarHeader.encode = encode +TarHeader.decode = decode + +// note that this will only do the normal ustar header, not any kind +// of extended posix header file. If something doesn't fit comfortably, +// then it will set obj.needExtended = true, and set the block to +// the closest approximation. 
+function encode (obj) { + if (!obj && !(this instanceof TarHeader)) throw new Error( + "encode must be called on a TarHeader, or supplied an object") + + obj = obj || this + var block = obj.block = new Buffer(512) + + // if the object has a "prefix", then that's actually an extension of + // the path field. + if (obj.prefix) { + // console.error("%% header encoding, got a prefix", obj.prefix) + obj.path = obj.prefix + "/" + obj.path + // console.error("%% header encoding, prefixed path", obj.path) + obj.prefix = "" + } + + obj.needExtended = false + + if (obj.mode) { + if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8) + obj.mode = obj.mode & 0777 + } + + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , off = fieldOffs[f] + , end = fieldEnds[f] + , ret + + switch (field) { + case "cksum": + // special, done below, after all the others + break + + case "prefix": + // special, this is an extension of the "path" field. + // console.error("%% header encoding, skip prefix later") + break + + case "type": + // convert from long name to a single char. + var type = obj.type || "0" + if (type.length > 1) { + type = tar.types[obj.type] + if (!type) type = "0" + } + writeText(block, off, end, type) + break + + case "path": + // uses the "prefix" field if > 100 bytes, but <= 255 + var pathLen = Buffer.byteLength(obj.path) + , pathFSize = fieldSize[fields.path] + , prefFSize = fieldSize[fields.prefix] + + // paths between 100 and 255 should use the prefix field. + // longer than 255 + if (pathLen > pathFSize && + pathLen <= pathFSize + prefFSize) { + // need to find a slash somewhere in the middle so that + // path and prefix both fit in their respective fields + var searchStart = pathLen - 1 - pathFSize + , searchEnd = prefFSize + , found = false + , pathBuf = new Buffer(obj.path) + + for ( var s = searchStart + ; (s <= searchEnd) + ; s ++ ) { + if (pathBuf[s] === slash || pathBuf[s] === bslash) { + found = s + break + } + } + + if (found !== false) { + prefix = pathBuf.slice(0, found).toString("utf8") + path = pathBuf.slice(found + 1).toString("utf8") + + ret = writeText(block, off, end, path) + off = fieldOffs[fields.prefix] + end = fieldEnds[fields.prefix] + // console.error("%% header writing prefix", off, end, prefix) + ret = writeText(block, off, end, prefix) || ret + break + } + } + + // paths less than 100 chars don't need a prefix + // and paths longer than 255 need an extended header and will fail + // on old implementations no matter what we do here. + // Null out the prefix, and fallthrough to default. + // console.error("%% header writing no prefix") + var poff = fieldOffs[fields.prefix] + , pend = fieldEnds[fields.prefix] + writeText(block, poff, pend, "") + // fallthrough + + // all other fields are numeric or text + default: + ret = numeric[field] + ? writeNumeric(block, off, end, obj[field]) + : writeText(block, off, end, obj[field] || "") + break + } + obj.needExtended = obj.needExtended || ret + } + + var off = fieldOffs[fields.cksum] + , end = fieldEnds[fields.cksum] + + writeNumeric(block, off, end, calcSum.call(this, block)) + + return block +} + +// if it's a negative number, or greater than will fit, +// then use write256. 
+var MAXNUM = { 12: 077777777777 + , 11: 07777777777 + , 8 : 07777777 + , 7 : 0777777 } +function writeNumeric (block, off, end, num) { + var writeLen = end - off + , maxNum = MAXNUM[writeLen] || 0 + + num = num || 0 + // console.error(" numeric", num) + + if (num instanceof Date || + Object.prototype.toString.call(num) === "[object Date]") { + num = num.getTime() / 1000 + } + + if (num > maxNum || num < 0) { + write256(block, off, end, num) + // need an extended header if negative or too big. + return true + } + + // god, tar is so annoying + // if the string is small enough, you should put a space + // between the octal string and the \0, but if it doesn't + // fit, then don't. + var numStr = Math.floor(num).toString(8) + if (num < MAXNUM[writeLen - 1]) numStr += " " + + // pad with "0" chars + if (numStr.length < writeLen) { + numStr = (new Array(writeLen - numStr.length).join("0")) + numStr + } + + if (numStr.length !== writeLen - 1) { + throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" + + "expected: "+writeLen) + } + block.write(numStr, off, writeLen, "utf8") + block[end - 1] = 0 +} + +function write256 (block, off, end, num) { + var buf = block.slice(off, end) + var positive = num >= 0 + buf[0] = positive ? 0x80 : 0xFF + + // get the number as a base-256 tuple + if (!positive) num *= -1 + var tuple = [] + do { + var n = num % 256 + tuple.push(n) + num = (num - n) / 256 + } while (num) + + var bytes = tuple.length + + var fill = buf.length - bytes + for (var i = 1; i < fill; i ++) { + buf[i] = positive ? 0 : 0xFF + } + + // tuple is a base256 number, with [0] as the *least* significant byte + // if it's negative, then we need to flip all the bits once we hit the + // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's- + // complement is (0xFF - n). 
+ var zero = true + for (i = bytes; i > 0; i --) { + var byte = tuple[bytes - i] + if (positive) buf[fill + i] = byte + else if (zero && byte === 0) buf[fill + i] = 0 + else if (zero) { + zero = false + buf[fill + i] = 0x100 - byte + } else buf[fill + i] = 0xFF - byte + } +} + +function writeText (block, off, end, str) { + // strings are written as utf8, then padded with \0 + var strLen = Buffer.byteLength(str) + , writeLen = Math.min(strLen, end - off) + // non-ascii fields need extended headers + // long fields get truncated + , needExtended = strLen !== str.length || strLen > writeLen + + // write the string, and null-pad + if (writeLen > 0) block.write(str, off, writeLen, "utf8") + for (var i = off + writeLen; i < end; i ++) block[i] = 0 + + return needExtended +} + +function calcSum (block) { + block = block || this.block + assert(Buffer.isBuffer(block) && block.length === 512) + + if (!block) throw new Error("Need block to checksum") + + // now figure out what it would be if the cksum was " " + var sum = 0 + , start = fieldOffs[fields.cksum] + , end = fieldEnds[fields.cksum] + + for (var i = 0; i < fieldOffs[fields.cksum]; i ++) { + sum += block[i] + } + + for (var i = start; i < end; i ++) { + sum += space + } + + for (var i = end; i < 512; i ++) { + sum += block[i] + } + + return sum +} + + +function checkSum (block) { + var sum = calcSum.call(this, block) + block = block || this.block + + var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]) + cksum = parseNumeric(cksum) + + return cksum === sum +} + +function decode (block) { + block = block || this.block + assert(Buffer.isBuffer(block) && block.length === 512) + + this.block = block + this.cksumValid = this.checkSum() + + var prefix = null + + // slice off each field. + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , val = block.slice(fieldOffs[f], fieldEnds[f]) + + switch (field) { + case "ustar": + // if not ustar, then everything after that is just padding. + if (val.toString() !== "ustar\0") { + this.ustar = false + return + } else { + // console.error("ustar:", val, val.toString()) + this.ustar = val.toString() + } + break + + // prefix is special, since it might signal the xstar header + case "prefix": + var atime = parseNumeric(val.slice(131, 131 + 12)) + , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12)) + if ((val[130] === 0 || val[130] === space) && + typeof atime === "number" && + typeof ctime === "number" && + val[131 + 12] === space && + val[131 + 12 + 12] === space) { + this.atime = atime + this.ctime = ctime + val = val.slice(0, 130) + } + prefix = val.toString("utf8").replace(/\0+$/, "") + // console.error("%% header reading prefix", prefix) + break + + // all other fields are null-padding text + // or a number. + default: + if (numeric[field]) { + this[field] = parseNumeric(val) + } else { + this[field] = val.toString("utf8").replace(/\0+$/, "") + } + break + } + } + + // if we got a prefix, then prepend it to the path. 
+ if (prefix) { + this.path = prefix + "/" + this.path + // console.error("%% header got a prefix", this.path) + } +} + +function parse256 (buf) { + // first byte MUST be either 80 or FF + // 80 for positive, FF for 2's comp + var positive + if (buf[0] === 0x80) positive = true + else if (buf[0] === 0xFF) positive = false + else return null + + // build up a base-256 tuple from the least sig to the highest + var zero = false + , tuple = [] + for (var i = buf.length - 1; i > 0; i --) { + var byte = buf[i] + if (positive) tuple.push(byte) + else if (zero && byte === 0) tuple.push(0) + else if (zero) { + zero = false + tuple.push(0x100 - byte) + } else tuple.push(0xFF - byte) + } + + for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) { + sum += tuple[i] * Math.pow(256, i) + } + + return positive ? sum : -1 * sum +} + +function parseNumeric (f) { + if (f[0] & 0x80) return parse256(f) + + var str = f.toString("utf8").split("\0")[0].trim() + , res = parseInt(str, 8) + + return isNaN(res) ? null : res +} + diff --git a/node_modules/fsevents/node_modules/tar/lib/pack.js b/node_modules/fsevents/node_modules/tar/lib/pack.js new file mode 100644 index 0000000..5a3bb95 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/pack.js @@ -0,0 +1,236 @@ +// pipe in an fstream, and it'll make a tarball. +// key-value pair argument is global extended header props. + +module.exports = Pack + +var EntryWriter = require("./entry-writer.js") + , Stream = require("stream").Stream + , path = require("path") + , inherits = require("inherits") + , GlobalHeaderWriter = require("./global-header-writer.js") + , collect = require("fstream").collect + , eof = new Buffer(512) + +for (var i = 0; i < 512; i ++) eof[i] = 0 + +inherits(Pack, Stream) + +function Pack (props) { + // console.error("-- p ctor") + var me = this + if (!(me instanceof Pack)) return new Pack(props) + + if (props) me._noProprietary = props.noProprietary + else me._noProprietary = false + + me._global = props + + me.readable = true + me.writable = true + me._buffer = [] + // console.error("-- -- set current to null in ctor") + me._currentEntry = null + me._processing = false + + me._pipeRoot = null + me.on("pipe", function (src) { + if (src.root === me._pipeRoot) return + me._pipeRoot = src + src.on("end", function () { + me._pipeRoot = null + }) + me.add(src) + }) +} + +Pack.prototype.addGlobal = function (props) { + // console.error("-- p addGlobal") + if (this._didGlobal) return + this._didGlobal = true + + var me = this + GlobalHeaderWriter(props) + .on("data", function (c) { + me.emit("data", c) + }) + .end() +} + +Pack.prototype.add = function (stream) { + if (this._global && !this._didGlobal) this.addGlobal(this._global) + + if (this._ended) return this.emit("error", new Error("add after end")) + + collect(stream) + this._buffer.push(stream) + this._process() + this._needDrain = this._buffer.length > 0 + return !this._needDrain +} + +Pack.prototype.pause = function () { + this._paused = true + if (this._currentEntry) this._currentEntry.pause() + this.emit("pause") +} + +Pack.prototype.resume = function () { + this._paused = false + if (this._currentEntry) this._currentEntry.resume() + this.emit("resume") + this._process() +} + +Pack.prototype.end = function () { + this._ended = true + this._buffer.push(eof) + this._process() +} + +Pack.prototype._process = function () { + var me = this + if (me._paused || me._processing) { + return + } + + var entry = me._buffer.shift() + + if (!entry) { + if (me._needDrain) { + me.emit("drain") + } 
+ return + } + + if (entry.ready === false) { + // console.error("-- entry is not ready", entry) + me._buffer.unshift(entry) + entry.on("ready", function () { + // console.error("-- -- ready!", entry) + me._process() + }) + return + } + + me._processing = true + + if (entry === eof) { + // need 2 ending null blocks. + me.emit("data", eof) + me.emit("data", eof) + me.emit("end") + me.emit("close") + return + } + + // Change the path to be relative to the root dir that was + // added to the tarball. + // + // XXX This should be more like how -C works, so you can + // explicitly set a root dir, and also explicitly set a pathname + // in the tarball to use. That way we can skip a lot of extra + // work when resolving symlinks for bundled dependencies in npm. + + var root = path.dirname((entry.root || entry).path); + if (me._global && me._global.fromBase && entry.root && entry.root.path) { + // user set 'fromBase: true' indicating tar root should be directory itself + root = entry.root.path; + } + + var wprops = {} + + Object.keys(entry.props || {}).forEach(function (k) { + wprops[k] = entry.props[k] + }) + + if (me._noProprietary) wprops.noProprietary = true + + wprops.path = path.relative(root, entry.path || '') + + // actually not a matter of opinion or taste. + if (process.platform === "win32") { + wprops.path = wprops.path.replace(/\\/g, "/") + } + + if (!wprops.type) + wprops.type = 'Directory' + + switch (wprops.type) { + // sockets not supported + case "Socket": + return + + case "Directory": + wprops.path += "/" + wprops.size = 0 + break + + case "Link": + var lp = path.resolve(path.dirname(entry.path), entry.linkpath) + wprops.linkpath = path.relative(root, lp) || "." + wprops.size = 0 + break + + case "SymbolicLink": + var lp = path.resolve(path.dirname(entry.path), entry.linkpath) + wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "." + wprops.size = 0 + break + } + + // console.error("-- new writer", wprops) + // if (!wprops.type) { + // // console.error("-- no type?", entry.constructor.name, entry) + // } + + // console.error("-- -- set current to new writer", wprops.path) + var writer = me._currentEntry = EntryWriter(wprops) + + writer.parent = me + + // writer.on("end", function () { + // // console.error("-- -- writer end", writer.path) + // }) + + writer.on("data", function (c) { + me.emit("data", c) + }) + + writer.on("header", function () { + Buffer.prototype.toJSON = function () { + return this.toString().split(/\0/).join(".") + } + // console.error("-- -- writer header %j", writer.props) + if (writer.props.size === 0) nextEntry() + }) + writer.on("close", nextEntry) + + var ended = false + function nextEntry () { + if (ended) return + ended = true + + // console.error("-- -- writer close", writer.path) + // console.error("-- -- set current to null", wprops.path) + me._currentEntry = null + me._processing = false + me._process() + } + + writer.on("error", function (er) { + // console.error("-- -- writer error", writer.path) + me.emit("error", er) + }) + + // if it's the root, then there's no need to add its entries, + // or data, since they'll be added directly. 
+ if (entry === me._pipeRoot) { + // console.error("-- is the root, don't auto-add") + writer.add = null + } + + entry.pipe(writer) +} + +Pack.prototype.destroy = function () {} +Pack.prototype.write = function () {} diff --git a/node_modules/fsevents/node_modules/tar/lib/parse.js b/node_modules/fsevents/node_modules/tar/lib/parse.js new file mode 100644 index 0000000..600ad78 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/lib/parse.js @@ -0,0 +1,275 @@ + +// A writable stream. +// It emits "entry" events, which provide a readable stream that has +// header info attached. + +module.exports = Parse.create = Parse + +var stream = require("stream") + , Stream = stream.Stream + , BlockStream = require("block-stream") + , tar = require("../tar.js") + , TarHeader = require("./header.js") + , Entry = require("./entry.js") + , BufferEntry = require("./buffer-entry.js") + , ExtendedHeader = require("./extended-header.js") + , assert = require("assert").ok + , inherits = require("inherits") + , fstream = require("fstream") + +// reading a tar is a lot like reading a directory +// However, we're actually not going to run the ctor, +// since it does a stat and various other stuff. +// This inheritance gives us the pause/resume/pipe +// behavior that is desired. +inherits(Parse, fstream.Reader) + +function Parse () { + var me = this + if (!(me instanceof Parse)) return new Parse() + + // doesn't apply fstream.Reader ctor? + // no, becasue we don't want to stat/etc, we just + // want to get the entry/add logic from .pipe() + Stream.apply(me) + + me.writable = true + me.readable = true + me._stream = new BlockStream(512) + me.position = 0 + me._ended = false + + me._stream.on("error", function (e) { + me.emit("error", e) + }) + + me._stream.on("data", function (c) { + me._process(c) + }) + + me._stream.on("end", function () { + me._streamEnd() + }) + + me._stream.on("drain", function () { + me.emit("drain") + }) +} + +// overridden in Extract class, since it needs to +// wait for its DirWriter part to finish before +// emitting "end" +Parse.prototype._streamEnd = function () { + var me = this + if (!me._ended || me._entry) me.error("unexpected eof") + me.emit("end") +} + +// a tar reader is actually a filter, not just a readable stream. +// So, you should pipe a tarball stream into it, and it needs these +// write/end methods to do that. +Parse.prototype.write = function (c) { + if (this._ended) { + // gnutar puts a LOT of nulls at the end. + // you can keep writing these things forever. + // Just ignore them. + for (var i = 0, l = c.length; i > l; i ++) { + if (c[i] !== 0) return this.error("write() after end()") + } + return + } + return this._stream.write(c) +} + +Parse.prototype.end = function (c) { + this._ended = true + return this._stream.end(c) +} + +// don't need to do anything, since we're just +// proxying the data up from the _stream. +// Just need to override the parent's "Not Implemented" +// error-thrower. +Parse.prototype._read = function () {} + +Parse.prototype._process = function (c) { + assert(c && c.length === 512, "block size should be 512") + + // one of three cases. + // 1. A new header + // 2. A part of a file/extended header + // 3. 
One of two or more EOF null blocks + + if (this._entry) { + var entry = this._entry + if(!entry._abort) entry.write(c) + else { + entry._remaining -= c.length + if(entry._remaining < 0) entry._remaining = 0 + } + if (entry._remaining === 0) { + entry.end() + this._entry = null + } + } else { + // either zeroes or a header + var zero = true + for (var i = 0; i < 512 && zero; i ++) { + zero = c[i] === 0 + } + + // eof is *at least* 2 blocks of nulls, and then the end of the + // file. you can put blocks of nulls between entries anywhere, + // so appending one tarball to another is technically valid. + // ending without the eof null blocks is not allowed, however. + if (zero) { + if (this._eofStarted) + this._ended = true + this._eofStarted = true + } else { + this._eofStarted = false + this._startEntry(c) + } + } + + this.position += 512 +} + +// take a header chunk, start the right kind of entry. +Parse.prototype._startEntry = function (c) { + var header = new TarHeader(c) + , self = this + , entry + , ev + , EntryType + , onend + , meta = false + + if (null === header.size || !header.cksumValid) { + var e = new Error("invalid tar file") + e.header = header + e.tar_file_offset = this.position + e.tar_block = this.position / 512 + return this.emit("error", e) + } + + switch (tar.types[header.type]) { + case "File": + case "OldFile": + case "Link": + case "SymbolicLink": + case "CharacterDevice": + case "BlockDevice": + case "Directory": + case "FIFO": + case "ContiguousFile": + case "GNUDumpDir": + // start a file. + // pass in any extended headers + // These ones consumers are typically most interested in. + EntryType = Entry + ev = "entry" + break + + case "GlobalExtendedHeader": + // extended headers that apply to the rest of the tarball + EntryType = ExtendedHeader + onend = function () { + self._global = self._global || {} + Object.keys(entry.fields).forEach(function (k) { + self._global[k] = entry.fields[k] + }) + } + ev = "globalExtendedHeader" + meta = true + break + + case "ExtendedHeader": + case "OldExtendedHeader": + // extended headers that apply to the next entry + EntryType = ExtendedHeader + onend = function () { + self._extended = entry.fields + } + ev = "extendedHeader" + meta = true + break + + case "NextFileHasLongLinkpath": + // set linkpath= in extended header + EntryType = BufferEntry + onend = function () { + self._extended = self._extended || {} + self._extended.linkpath = entry.body + } + ev = "longLinkpath" + meta = true + break + + case "NextFileHasLongPath": + case "OldGnuLongPath": + // set path= in file-extended header + EntryType = BufferEntry + onend = function () { + self._extended = self._extended || {} + self._extended.path = entry.body + } + ev = "longPath" + meta = true + break + + default: + // all the rest we skip, but still set the _entry + // member, so that we can skip over their data appropriately. + // emit an event to say that this is an ignored entry type? + EntryType = Entry + ev = "ignoredEntry" + break + } + + var global, extended + if (meta) { + global = extended = null + } else { + var global = this._global + var extended = this._extended + + // extendedHeader only applies to one entry, so once we start + // an entry, it's over. + this._extended = null + } + entry = new EntryType(header, extended, global) + entry.meta = meta + + // only proxy data events of normal files. 
+ if (!meta) { + entry.on("data", function (c) { + me.emit("data", c) + }) + } + + if (onend) entry.on("end", onend) + + this._entry = entry + var me = this + + entry.on("pause", function () { + me.pause() + }) + + entry.on("resume", function () { + me.resume() + }) + + if (this.listeners("*").length) { + this.emit("*", ev, entry) + } + + this.emit(ev, entry) + + // Zero-byte entry. End immediately. + if (entry.props.size === 0) { + entry.end() + this._entry = null + } +} diff --git a/node_modules/fsevents/node_modules/tar/package.json b/node_modules/fsevents/node_modules/tar/package.json new file mode 100644 index 0000000..f105798 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/package.json @@ -0,0 +1,95 @@ +{ + "_args": [ + [ + "tar@~2.2.0", + "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp" + ] + ], + "_from": "tar@>=2.2.0 <2.3.0", + "_id": "tar@2.2.1", + "_inCache": true, + "_installable": true, + "_location": "/tar", + "_nodeVersion": "2.2.2", + "_npmUser": { + "email": "kat@sykosomatic.org", + "name": "zkat" + }, + "_npmVersion": "2.14.3", + "_phantomChildren": {}, + "_requested": { + "name": "tar", + "raw": "tar@~2.2.0", + "rawSpec": "~2.2.0", + "scope": null, + "spec": ">=2.2.0 <2.3.0", + "type": "range" + }, + "_requiredBy": [ + "/node-pre-gyp", + "/tar-pack" + ], + "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz", + "_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1", + "_shrinkwrap": null, + "_spec": "tar@~2.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/node-pre-gyp", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/node-tar/issues" + }, + "dependencies": { + "block-stream": "*", + "fstream": "^1.0.2", + "inherits": "2" + }, + "description": "tar for node", + "devDependencies": { + "graceful-fs": "^4.1.2", + "mkdirp": "^0.5.0", + "rimraf": "1.x", + "tap": "0.x" + }, + "directories": {}, + "dist": { + "shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1", + "tarball": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz" + }, + "gitHead": "52237e39d2eb68d22a32d9a98f1d762189fe6a3d", + "homepage": "https://github.com/isaacs/node-tar#readme", + "license": "ISC", + "main": "tar.js", + "maintainers": [ + { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + { + "name": "othiym23", + "email": "ogd@aoaioxxysz.net" + }, + { + "name": "soldair", + "email": "soldair@gmail.com" + }, + { + "name": "zkat", + "email": "kat@sykosomatic.org" + } + ], + "name": "tar", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-tar.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "version": "2.2.1" +} diff --git a/node_modules/fsevents/node_modules/tar/tar.js b/node_modules/fsevents/node_modules/tar/tar.js new file mode 100644 index 0000000..a81298b --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/tar.js @@ -0,0 +1,173 @@ +// field paths that every tar file must have. +// header is padded to 512 bytes. 
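To make that layout concrete, a small sketch (illustrative, not part of the file; it assumes this tar.js is what require resolves to) that walks the tables defined below and prints each field's byte offset and size inside the 512-byte header, plus one lookup in the types map:

    var tar = require("./tar.js")   // this file

    Object.keys(tar.fields).forEach(function (name) {
      var i = tar.fields[name]
      if (typeof i !== "number") return   // skip the reverse (index -> name) entries
      console.log(name, "offset:", tar.fieldOffs[i], "size:", tar.fieldSize[i])
    })
    // path offset: 0 size: 100, mode offset: 100 size: 8, ...,
    // prefix offset: 345 size: 155, fill offset: 500 size: 12

    console.log(tar.types["5"], tar.types["Directory"])   // "Directory", "5"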
+var f = 0 + , fields = {} + , path = fields.path = f++ + , mode = fields.mode = f++ + , uid = fields.uid = f++ + , gid = fields.gid = f++ + , size = fields.size = f++ + , mtime = fields.mtime = f++ + , cksum = fields.cksum = f++ + , type = fields.type = f++ + , linkpath = fields.linkpath = f++ + , headerSize = 512 + , blockSize = 512 + , fieldSize = [] + +fieldSize[path] = 100 +fieldSize[mode] = 8 +fieldSize[uid] = 8 +fieldSize[gid] = 8 +fieldSize[size] = 12 +fieldSize[mtime] = 12 +fieldSize[cksum] = 8 +fieldSize[type] = 1 +fieldSize[linkpath] = 100 + +// "ustar\0" may introduce another bunch of headers. +// these are optional, and will be nulled out if not present. + +var ustar = fields.ustar = f++ + , ustarver = fields.ustarver = f++ + , uname = fields.uname = f++ + , gname = fields.gname = f++ + , devmaj = fields.devmaj = f++ + , devmin = fields.devmin = f++ + , prefix = fields.prefix = f++ + , fill = fields.fill = f++ + +// terminate fields. +fields[f] = null + +fieldSize[ustar] = 6 +fieldSize[ustarver] = 2 +fieldSize[uname] = 32 +fieldSize[gname] = 32 +fieldSize[devmaj] = 8 +fieldSize[devmin] = 8 +fieldSize[prefix] = 155 +fieldSize[fill] = 12 + +// nb: prefix field may in fact be 130 bytes of prefix, +// a null char, 12 bytes for atime, 12 bytes for ctime. +// +// To recognize this format: +// 1. prefix[130] === ' ' or '\0' +// 2. atime and ctime are octal numeric values +// 3. atime and ctime have ' ' in their last byte + +var fieldEnds = {} + , fieldOffs = {} + , fe = 0 +for (var i = 0; i < f; i ++) { + fieldOffs[i] = fe + fieldEnds[i] = (fe += fieldSize[i]) +} + +// build a translation table of field paths. +Object.keys(fields).forEach(function (f) { + if (fields[f] !== null) fields[fields[f]] = f +}) + +// different values of the 'type' field +// paths match the values of Stats.isX() functions, where appropriate +var types = + { 0: "File" + , "\0": "OldFile" // like 0 + , "": "OldFile" + , 1: "Link" + , 2: "SymbolicLink" + , 3: "CharacterDevice" + , 4: "BlockDevice" + , 5: "Directory" + , 6: "FIFO" + , 7: "ContiguousFile" // like 0 + // posix headers + , g: "GlobalExtendedHeader" // k=v for the rest of the archive + , x: "ExtendedHeader" // k=v for the next file + // vendor-specific stuff + , A: "SolarisACL" // skip + , D: "GNUDumpDir" // like 5, but with data, which should be skipped + , I: "Inode" // metadata only, skip + , K: "NextFileHasLongLinkpath" // data = link path of next file + , L: "NextFileHasLongPath" // data = path of next file + , M: "ContinuationFile" // skip + , N: "OldGnuLongPath" // like L + , S: "SparseFile" // skip + , V: "TapeVolumeHeader" // skip + , X: "OldExtendedHeader" // like x + } + +Object.keys(types).forEach(function (t) { + types[types[t]] = types[types[t]] || t +}) + +// values for the mode field +var modes = + { suid: 04000 // set uid on extraction + , sgid: 02000 // set gid on extraction + , svtx: 01000 // set restricted deletion flag on dirs on extraction + , uread: 0400 + , uwrite: 0200 + , uexec: 0100 + , gread: 040 + , gwrite: 020 + , gexec: 010 + , oread: 4 + , owrite: 2 + , oexec: 1 + , all: 07777 + } + +var numeric = + { mode: true + , uid: true + , gid: true + , size: true + , mtime: true + , devmaj: true + , devmin: true + , cksum: true + , atime: true + , ctime: true + , dev: true + , ino: true + , nlink: true + } + +Object.keys(modes).forEach(function (t) { + modes[modes[t]] = modes[modes[t]] || t +}) + +var knownExtended = + { atime: true + , charset: true + , comment: true + , ctime: true + , gid: true + , gname: true + , linkpath: 
true + , mtime: true + , path: true + , realtime: true + , security: true + , size: true + , uid: true + , uname: true } + + +exports.fields = fields +exports.fieldSize = fieldSize +exports.fieldOffs = fieldOffs +exports.fieldEnds = fieldEnds +exports.types = types +exports.modes = modes +exports.numeric = numeric +exports.headerSize = headerSize +exports.blockSize = blockSize +exports.knownExtended = knownExtended + +exports.Pack = require("./lib/pack.js") +exports.Parse = require("./lib/parse.js") +exports.Extract = require("./lib/extract.js") diff --git a/node_modules/fsevents/node_modules/tar/test/00-setup-fixtures.js b/node_modules/fsevents/node_modules/tar/test/00-setup-fixtures.js new file mode 100644 index 0000000..1524ff7 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/00-setup-fixtures.js @@ -0,0 +1,53 @@ +// the fixtures have some weird stuff that is painful +// to include directly in the repo for various reasons. +// +// So, unpack the fixtures with the system tar first. +// +// This means, of course, that it'll only work if you +// already have a tar implementation, and some of them +// will not properly unpack the fixtures anyway. +// +// But, since usually those tests will fail on Windows +// and other systems with less capable filesystems anyway, +// at least this way we don't cause inconveniences by +// merely cloning the repo or installing the package. + +var tap = require("tap") +, child_process = require("child_process") +, rimraf = require("rimraf") +, test = tap.test +, path = require("path") + +test("clean fixtures", function (t) { + rimraf(path.resolve(__dirname, "fixtures"), function (er) { + t.ifError(er, "rimraf ./fixtures/") + t.end() + }) +}) + +test("clean tmp", function (t) { + rimraf(path.resolve(__dirname, "tmp"), function (er) { + t.ifError(er, "rimraf ./tmp/") + t.end() + }) +}) + +test("extract fixtures", function (t) { + var c = child_process.spawn("tar" + ,["xzvf", "fixtures.tgz"] + ,{ cwd: __dirname }) + + c.stdout.on("data", errwrite) + c.stderr.on("data", errwrite) + function errwrite (chunk) { + process.stderr.write(chunk) + } + + c.on("exit", function (code) { + t.equal(code, 0, "extract fixtures should exit with 0") + if (code) { + t.comment("Note, all tests from here on out will fail because of this.") + } + t.end() + }) +}) diff --git a/node_modules/fsevents/node_modules/tar/test/cb-never-called-1.0.1.tgz b/node_modules/fsevents/node_modules/tar/test/cb-never-called-1.0.1.tgz new file mode 100644 index 0000000..9e7014d Binary files /dev/null and b/node_modules/fsevents/node_modules/tar/test/cb-never-called-1.0.1.tgz differ diff --git a/node_modules/fsevents/node_modules/tar/test/dir-normalization.js b/node_modules/fsevents/node_modules/tar/test/dir-normalization.js new file mode 100644 index 0000000..9719c42 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/dir-normalization.js @@ -0,0 +1,177 @@ +// Set the umask, so that it works the same everywhere. 
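The umask call that follows is what keeps the expected octal modes in these fixtures reproducible across machines. As a quick illustration (hypothetical paths, not part of the test):

    process.umask(parseInt("22", 8))
    var fs = require("fs")
    fs.mkdirSync("demo-dir")                  // created with 0777 & ~022 = 0755
    fs.writeFileSync("demo-dir/f.txt", "x")   // created with 0666 & ~022 = 0644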
+process.umask(parseInt('22', 8)) + +var fs = require('fs') +var path = require('path') + +var fstream = require('fstream') +var test = require('tap').test + +var tar = require('../tar.js') +var file = path.resolve(__dirname, 'dir-normalization.tar') +var target = path.resolve(__dirname, 'tmp/dir-normalization-test') +var ee = 0 + +var expectEntries = [ + { path: 'fixtures/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/the-chumbler', + mode: '755', + type: '2', + linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'), + }, + { path: 'fixtures/a/b/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/x', + mode: '644', + type: '0', + linkpath: '' + }, + { path: 'fixtures/a/b/c/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/b/c/y', + mode: '755', + type: '2', + linkpath: '../../x', + } +] + +var ef = 0 +var expectFiles = [ + { path: '', + mode: '40755', + type: 'Directory', + depth: 0, + linkpath: undefined + }, + { path: '/fixtures', + mode: '40755', + type: 'Directory', + depth: 1, + linkpath: undefined + }, + { path: '/fixtures/a', + mode: '40755', + type: 'Directory', + depth: 2, + linkpath: undefined + }, + { path: '/fixtures/a/b', + mode: '40755', + type: 'Directory', + depth: 3, + linkpath: undefined + }, + { path: '/fixtures/a/b/c', + mode: '40755', + type: 'Directory', + depth: 4, + linkpath: undefined + }, + { path: '/fixtures/a/b/c/y', + mode: '120755', + type: 'SymbolicLink', + depth: 5, + linkpath: '../../x' + }, + { path: '/fixtures/a/x', + mode: '100644', + type: 'File', + depth: 3, + linkpath: undefined + }, + { path: '/fixtures/the-chumbler', + mode: '120755', + type: 'SymbolicLink', + depth: 2, + linkpath: path.resolve(target, 'a/b/c/d/the-chumbler') + } +] + +test('preclean', function (t) { + require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test')) + t.pass('cleaned!') + t.end() +}) + +test('extract test', function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + inp.pipe(extract) + + extract.on('end', function () { + t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries') + + // should get no more entries after end + extract.removeAllListeners('entry') + extract.on('entry', function (e) { + t.fail('Should not get entries after end!') + }) + + next() + }) + + extract.on('entry', function (entry) { + var mode = entry.props.mode & (~parseInt('22', 8)) + var found = { + path: entry.path, + mode: mode.toString(8), + type: entry.props.type, + linkpath: entry.props.linkpath, + } + + var wanted = expectEntries[ee++] + t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path)) + }) + + function next () { + var r = fstream.Reader({ + path: target, + type: 'Directory', + sort: 'alpha' + }) + + r.on('ready', function () { + foundEntry(r) + }) + + r.on('end', finish) + + function foundEntry (entry) { + var p = entry.path.substr(target.length) + var mode = entry.props.mode & (~parseInt('22', 8)) + var found = { + path: p, + mode: mode.toString(8), + type: entry.props.type, + depth: entry.props.depth, + linkpath: entry.props.linkpath + } + + var wanted = expectFiles[ef++] + t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path)) + + entry.on('entry', foundEntry) + } + + function finish () { + t.equal(ef, expectFiles.length, 'should have ' + ef + ' items') + t.end() + } + } +}) diff --git 
a/node_modules/fsevents/node_modules/tar/test/dir-normalization.tar b/node_modules/fsevents/node_modules/tar/test/dir-normalization.tar new file mode 100644 index 0000000..3c48453 Binary files /dev/null and b/node_modules/fsevents/node_modules/tar/test/dir-normalization.tar differ diff --git a/node_modules/fsevents/node_modules/tar/test/error-on-broken.js b/node_modules/fsevents/node_modules/tar/test/error-on-broken.js new file mode 100644 index 0000000..e484920 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/error-on-broken.js @@ -0,0 +1,33 @@ +var fs = require('fs') +var path = require('path') +var zlib = require('zlib') + +var tap = require('tap') + +var tar = require('../tar.js') + +var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz') +var target = path.join(__dirname, 'tmp/extract-test') + +tap.test('preclean', function (t) { + require('rimraf').sync(__dirname + '/tmp/extract-test') + t.pass('cleaned!') + t.end() +}) + +tap.test('extract test', function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + inp.pipe(zlib.createGunzip()).pipe(extract) + + extract.on('error', function (er) { + t.equal(er.message, 'unexpected eof', 'error noticed') + t.end() + }) + + extract.on('end', function () { + t.fail('shouldn\'t reach this point due to errors') + t.end() + }) +}) diff --git a/node_modules/fsevents/node_modules/tar/test/extract-move.js b/node_modules/fsevents/node_modules/tar/test/extract-move.js new file mode 100644 index 0000000..45400cd --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/extract-move.js @@ -0,0 +1,132 @@ +// Set the umask, so that it works the same everywhere. +process.umask(parseInt('22', 8)) + +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , gfs = require("graceful-fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/dir.tar") + , target = path.resolve(__dirname, "tmp/extract-test") + , index = 0 + , fstream = require("fstream") + , rimraf = require("rimraf") + , mkdirp = require("mkdirp") + + , ee = 0 + , expectEntries = [ + { + "path" : "dir/", + "mode" : "750", + "type" : "5", + "depth" : undefined, + "size" : 0, + "linkpath" : "", + "nlink" : undefined, + "dev" : undefined, + "ino" : undefined + }, + { + "path" : "dir/sub/", + "mode" : "750", + "type" : "5", + "depth" : undefined, + "size" : 0, + "linkpath" : "", + "nlink" : undefined, + "dev" : undefined, + "ino" : undefined + } ] + +function slow (fs, method, t1, t2) { + var orig = fs[method] + if (!orig) return null + fs[method] = function () { + var args = [].slice.call(arguments) + console.error("slow", method, args[0]) + var cb = args.pop() + + setTimeout(function () { + orig.apply(fs, args.concat(function(er, data) { + setTimeout(function() { + cb(er, data) + }, t2) + })) + }, t1) + } +} + +// Make sure we get the graceful-fs that fstream is using. +var gfs2 +try { + gfs2 = require("fstream/node_modules/graceful-fs") +} catch (er) {} + +var slowMethods = ["chown", "chmod", "utimes", "lutimes"] +slowMethods.forEach(function (method) { + var t1 = 500 + var t2 = 0 + slow(fs, method, t1, t2) + slow(gfs, method, t1, t2) + if (gfs2) { + slow(gfs2, method, t1, t2) + } +}) + + + +// The extract class basically just pipes the input +// to a Reader, and then to a fstream.DirWriter + +// So, this is as much a test of fstream.Reader and fstream.Writer +// as it is of tar.Extract, but it sort of makes sense. 
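The test below drives Extract the same way a consumer would; a stripped-down sketch of that usage (the archive and output paths are placeholders, not fixture paths):

    var fs = require("fs")
    var tar = require("tar")                  // this vendored tar 2.x

    var extract = tar.Extract("out-dir")      // directory to unpack into
    fs.createReadStream("archive.tar")
      .pipe(extract)
      .on("error", function (er) { console.error("extract failed:", er) })
      .on("end", function () { console.log("done unpacking") })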
+ +tap.test("preclean", function (t) { + rimraf.sync(target) + /mkdirp.sync(target) + t.pass("cleaned!") + t.end() +}) + +tap.test("extract test", function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + // give it a weird buffer size to try to break in odd places + inp.bufferSize = 1234 + + inp.pipe(extract) + + extract.on("end", function () { + rimraf.sync(target) + + t.equal(ee, expectEntries.length, "should see "+ee+" entries") + + // should get no more entries after end + extract.removeAllListeners("entry") + extract.on("entry", function (e) { + t.fail("Should not get entries after end!") + }) + + t.end() + }) + + + extract.on("entry", function (entry) { + var found = + { path: entry.path + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + , dev: entry.props.dev + , ino: entry.props.ino + } + + var wanted = expectEntries[ee ++] + + t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) + }) +}) diff --git a/node_modules/fsevents/node_modules/tar/test/extract.js b/node_modules/fsevents/node_modules/tar/test/extract.js new file mode 100644 index 0000000..eca4e7c --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/extract.js @@ -0,0 +1,367 @@ +// Set the umask, so that it works the same everywhere. +process.umask(parseInt('22', 8)) + +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + , target = path.resolve(__dirname, "tmp/extract-test") + , index = 0 + , fstream = require("fstream") + + , ee = 0 + , expectEntries = +[ { path: 'c.txt', + mode: '644', + type: '0', + depth: undefined, + size: 513, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'cc.txt', + mode: '644', + type: '0', + depth: undefined, + size: 513, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 100, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'Ω.txt', + mode: '644', + type: '0', + depth: undefined, + size: 2, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'Ω.txt', + mode: '644', + type: '0', + depth: undefined, + size: 2, + linkpath: '', + nlink: 1, + dev: 234881026, + ino: 51693379 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 200, + linkpath: '', + nlink: 1, + dev: 234881026, + ino: 51681874 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 201, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + 
mode: '777', + type: '2', + depth: undefined, + size: 0, + linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: '200-hard', + mode: '644', + type: '0', + depth: undefined, + size: 200, + linkpath: '', + nlink: 2, + dev: 234881026, + ino: 51681874 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '1', + depth: undefined, + size: 0, + linkpath: path.resolve(target, '200-hard'), + nlink: 2, + dev: 234881026, + ino: 51681874 } ] + + , ef = 0 + , expectFiles = +[ { path: '', + mode: '40755', + type: 'Directory', + depth: 0, + linkpath: undefined }, + { path: '/200-hard', + mode: '100644', + type: 'File', + depth: 1, + size: 200, + linkpath: undefined, + nlink: 2 }, + { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + mode: '120777', + type: 'SymbolicLink', + depth: 1, + size: 200, + linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + nlink: 1 }, + { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '100644', + type: 'Link', + depth: 1, + size: 200, + linkpath: path.join(target, '200-hard'), + nlink: 2 }, + { path: '/c.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 513, + linkpath: undefined, + nlink: 1 }, + { path: '/cc.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 513, + linkpath: undefined, + nlink: 1 }, + { path: '/r', + mode: '40755', + type: 'Directory', + depth: 1, + linkpath: undefined }, + { path: '/r/e', + mode: '40755', + type: 'Directory', + depth: 2, + linkpath: undefined }, + { path: '/r/e/a', + mode: '40755', + type: 'Directory', + depth: 3, + linkpath: undefined }, + { path: '/r/e/a/l', + mode: '40755', + type: 'Directory', + depth: 4, + linkpath: undefined }, + { path: '/r/e/a/l/l', + mode: '40755', + type: 'Directory', + depth: 5, + linkpath: undefined }, + { path: '/r/e/a/l/l/y', + mode: '40755', + type: 'Directory', + depth: 6, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-', + mode: '40755', + type: 'Directory', + depth: 7, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d', + mode: '40755', + type: 'Directory', + depth: 8, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e', + mode: '40755', + type: 'Directory', + depth: 9, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e', + mode: '40755', + type: 'Directory', + depth: 10, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p', + mode: '40755', + type: 'Directory', + depth: 11, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-', + mode: '40755', + type: 'Directory', + depth: 12, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f', + mode: '40755', + type: 'Directory', + depth: 13, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o', + 
mode: '40755', + type: 'Directory', + depth: 14, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l', + mode: '40755', + type: 'Directory', + depth: 15, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d', + mode: '40755', + type: 'Directory', + depth: 16, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e', + mode: '40755', + type: 'Directory', + depth: 17, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r', + mode: '40755', + type: 'Directory', + depth: 18, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-', + mode: '40755', + type: 'Directory', + depth: 19, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p', + mode: '40755', + type: 'Directory', + depth: 20, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a', + mode: '40755', + type: 'Directory', + depth: 21, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', + mode: '40755', + type: 'Directory', + depth: 22, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h', + mode: '40755', + type: 'Directory', + depth: 23, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '100644', + type: 'File', + depth: 24, + size: 100, + linkpath: undefined, + nlink: 1 }, + { path: '/Ω.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 2, + linkpath: undefined, + nlink: 1 } ] + + + +// The extract class basically just pipes the input +// to a Reader, and then to a fstream.DirWriter + +// So, this is as much a test of fstream.Reader and fstream.Writer +// as it is of tar.Extract, but it sort of makes sense. 
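The extract test below also walks the unpacked tree with an fstream.Reader and checks it against expectFiles; the walking pattern it uses boils down to this sketch (the directory path is a placeholder):

    var fstream = require("fstream")

    var r = fstream.Reader({ path: "out-dir", type: "Directory", sort: "alpha" })
    r.on("ready", function () { walk(r) })

    function walk (entry) {
      console.log(entry.props.type, entry.path)
      entry.on("entry", walk)   // directory readers emit "entry" for each child
    }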
+ +tap.test("preclean", function (t) { + require("rimraf").sync(__dirname + "/tmp/extract-test") + t.pass("cleaned!") + t.end() +}) + +tap.test("extract test", function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + // give it a weird buffer size to try to break in odd places + inp.bufferSize = 1234 + + inp.pipe(extract) + + extract.on("end", function () { + t.equal(ee, expectEntries.length, "should see "+ee+" entries") + + // should get no more entries after end + extract.removeAllListeners("entry") + extract.on("entry", function (e) { + t.fail("Should not get entries after end!") + }) + + next() + }) + + extract.on("entry", function (entry) { + var found = + { path: entry.path + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + , dev: entry.props.dev + , ino: entry.props.ino + } + + var wanted = expectEntries[ee ++] + + t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) + }) + + function next () { + var r = fstream.Reader({ path: target + , type: "Directory" + // this is just to encourage consistency + , sort: "alpha" }) + + r.on("ready", function () { + foundEntry(r) + }) + + r.on("end", finish) + + function foundEntry (entry) { + var p = entry.path.substr(target.length) + var found = + { path: p + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + } + + var wanted = expectFiles[ef ++] + + t.has(found, wanted, "unpacked file " + ef + " " + wanted.path) + + entry.on("entry", foundEntry) + } + + function finish () { + t.equal(ef, expectFiles.length, "should have "+ef+" items") + t.end() + } + } +}) diff --git a/node_modules/fsevents/node_modules/tar/test/fixtures.tgz b/node_modules/fsevents/node_modules/tar/test/fixtures.tgz new file mode 100644 index 0000000..f167602 Binary files /dev/null and b/node_modules/fsevents/node_modules/tar/test/fixtures.tgz differ diff --git a/node_modules/fsevents/node_modules/tar/test/header.js b/node_modules/fsevents/node_modules/tar/test/header.js new file mode 100644 index 0000000..8ea6f79 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/header.js @@ -0,0 +1,183 @@ +var tap = require("tap") +var TarHeader = require("../lib/header.js") +var tar = require("../tar.js") +var fs = require("fs") + + +var headers = + { "a.txt file header": + [ 
"612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'a.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 257 + , mtime: 1319493851 + , cksum: 5417 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "omega pax": // the extended header from omega tar. + [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'PaxHeader/Ω.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 120 + , mtime: 1301254537 + , cksum: 6697 + , type: 'x' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } ] + + , "omega file header": + [ 
"cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'Ω.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 2 + , mtime: 1301254537 + , cksum: 5690 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } ] + + , "foo.js file header": + [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'foo.js' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 4 + , mtime: 1301246433 + , cksum: 5519 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "b.txt file header": + [ 
"622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'b.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 512 + , mtime: 1319494079 + , cksum: 5425 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "deep nested file": + [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true, + path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 100 + , mtime: 1319687003 + , cksum: 18124 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + } + +tap.test("parsing", function (t) { + Object.keys(headers).forEach(function (name) { + var h = headers[name] + , header = new Buffer(h[0], "hex") + , expect = h[1] + , parsed = new TarHeader(header) + + // console.error(parsed) + t.has(parsed, expect, "parse " + name) + }) + t.end() +}) + +tap.test("encoding", function (t) { + Object.keys(headers).forEach(function (name) { + var h = headers[name] + , expect = new Buffer(h[0], "hex") + , encoded = TarHeader.encode(h[1]) + + // might have slightly different bytes, since the standard + // isn't very strict, but should have the same semantics + // checkSum will be different, but cksumValid will be true + + var th = new TarHeader(encoded) + delete h[1].block + delete h[1].needExtended + 
delete h[1].cksum + t.has(th, h[1], "fields "+name) + }) + t.end() +}) + +// test these manually. they're a bit rare to find in the wild +tap.test("parseNumeric tests", function (t) { + var parseNumeric = TarHeader.parseNumeric + , numbers = + { "303737373737373700": 2097151 + , "30373737373737373737373700": 8589934591 + , "303030303036343400": 420 + , "800000ffffffffffff": 281474976710655 + , "ffffff000000000001": -281474976710654 + , "ffffff000000000000": -281474976710655 + , "800000000000200000": 2097152 + , "8000000000001544c5": 1393861 + , "ffffffffffff1544c5": -15383354 } + Object.keys(numbers).forEach(function (n) { + var b = new Buffer(n, "hex") + t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n]) + }) + t.end() +}) diff --git a/node_modules/fsevents/node_modules/tar/test/pack-no-proprietary.js b/node_modules/fsevents/node_modules/tar/test/pack-no-proprietary.js new file mode 100644 index 0000000..d4b03a1 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/pack-no-proprietary.js @@ -0,0 +1,886 @@ +// This is exactly like test/pack.js, except that it's excluding +// any proprietary headers. +// +// This loses some information about the filesystem, but creates +// tarballs that are supported by more versions of tar, especially +// old non-spec-compliant copies of gnutar. + +// the symlink file is excluded from git, because it makes +// windows freak the hell out. +var fs = require("fs") + , path = require("path") + , symlink = path.resolve(__dirname, "fixtures/symlink") +try { fs.unlinkSync(symlink) } catch (e) {} +fs.symlinkSync("./hardlink-1", symlink) +process.on("exit", function () { + fs.unlinkSync(symlink) +}) + +var tap = require("tap") + , tar = require("../tar.js") + , pkg = require("../package.json") + , Pack = tar.Pack + , fstream = require("fstream") + , Reader = fstream.Reader + , Writer = fstream.Writer + , input = path.resolve(__dirname, "fixtures/") + , target = path.resolve(__dirname, "tmp/pack.tar") + , uid = process.getuid ? process.getuid() : 0 + , gid = process.getgid ? process.getgid() : 0 + + , entries = + + // the global header and root fixtures/ dir are going to get + // a different date each time, so omit that bit. + // Also, dev/ino values differ across machines, so that's not + // included. 
+ [ [ 'entry', + { path: 'fixtures/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + uid: uid, + gid: gid, + size: 200 } ] + + , [ 'entry', + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/a.txt', + mode: 420, + uid: uid, + gid: gid, + size: 257, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/b.txt', + mode: 420, + uid: uid, + gid: gid, + size: 512, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/c.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/cc.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/sub/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/foo.js', + mode: 420, + uid: uid, + gid: gid, + size: 4, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-1', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-2', + mode: 420, + uid: uid, + gid: gid, + size: 0, + type: '1', + linkpath: 'fixtures/hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 
0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/star.4.html', + mode: 420, + uid: uid, + gid: gid, + size: 54081, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/packtest/Ω.txt', + uid: uid, + gid: gid, + size: 2 } ] + + , [ 'entry', + { path: 'fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, 
+ type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 
'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 100, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/symlink', + uid: uid, + gid: gid, + size: 0, + type: '2', + linkpath: 'hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: "fixtures/Ω.txt" + , uid: uid + , gid: gid + , size: 2 } ] + + , [ 'entry', + { path: 'fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + ] + + +// first, make sure that the hardlinks are actually hardlinks, or this +// won't work. Git has a way of replacing them with a copy. +var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") + , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") + , fs = require("fs") + +try { fs.unlinkSync(hard2) } catch (e) {} +fs.linkSync(hard1, hard2) + +tap.test("with global header", { timeout: 10000 }, function (t) { + runTest(t, true) +}) + +tap.test("without global header", { timeout: 10000 }, function (t) { + runTest(t, false) +}) + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} + + +function runTest (t, doGH) { + var reader = Reader({ path: input + , filter: function () { + return !this.path.match(/\.(tar|hex)$/) + } + , sort: alphasort + }) + + var props = doGH ? pkg : {} + props.noProprietary = true + var pack = Pack(props) + var writer = Writer(target) + + // global header should be skipped regardless, since it has no content. 
+ var entry = 0 + + t.ok(reader, "reader ok") + t.ok(pack, "pack ok") + t.ok(writer, "writer ok") + + pack.pipe(writer) + + var parse = tar.Parse() + t.ok(parse, "parser should be ok") + + pack.on("data", function (c) { + // console.error("PACK DATA") + if (c.length !== 512) { + // this one is too noisy, only assert if it'll be relevant + t.equal(c.length, 512, "parser should emit data in 512byte blocks") + } + parse.write(c) + }) + + pack.on("end", function () { + // console.error("PACK END") + t.pass("parser ends") + parse.end() + }) + + pack.on("error", function (er) { + t.fail("pack error", er) + }) + + parse.on("error", function (er) { + t.fail("parse error", er) + }) + + writer.on("error", function (er) { + t.fail("writer error", er) + }) + + reader.on("error", function (er) { + t.fail("reader error", er) + }) + + parse.on("*", function (ev, e) { + var wanted = entries[entry++] + if (!wanted) { + t.fail("unexpected event: "+ev) + return + } + t.equal(ev, wanted[0], "event type should be "+wanted[0]) + + if (ev !== wanted[0] || e.path !== wanted[1].path) { + console.error("wanted", wanted) + console.error([ev, e.props]) + e.on("end", function () { + console.error(e.fields) + throw "break" + }) + } + + t.has(e.props, wanted[1], "properties "+wanted[1].path) + if (wanted[2]) { + e.on("end", function () { + if (!e.fields) { + t.ok(e.fields, "should get fields") + } else { + t.has(e.fields, wanted[2], "should get expected fields") + } + }) + } + }) + + reader.pipe(pack) + + writer.on("close", function () { + t.equal(entry, entries.length, "should get all expected entries") + t.pass("it finished") + t.end() + }) + +} diff --git a/node_modules/fsevents/node_modules/tar/test/pack.js b/node_modules/fsevents/node_modules/tar/test/pack.js new file mode 100644 index 0000000..0f16c07 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/pack.js @@ -0,0 +1,952 @@ + +// the symlink file is excluded from git, because it makes +// windows freak the hell out. +var fs = require("fs") + , path = require("path") + , symlink = path.resolve(__dirname, "fixtures/symlink") +try { fs.unlinkSync(symlink) } catch (e) {} +fs.symlinkSync("./hardlink-1", symlink) +process.on("exit", function () { + fs.unlinkSync(symlink) +}) + + +var tap = require("tap") + , tar = require("../tar.js") + , pkg = require("../package.json") + , Pack = tar.Pack + , fstream = require("fstream") + , Reader = fstream.Reader + , Writer = fstream.Writer + , input = path.resolve(__dirname, "fixtures/") + , target = path.resolve(__dirname, "tmp/pack.tar") + , uid = process.getuid ? process.getuid() : 0 + , gid = process.getgid ? process.getgid() : 0 + + , entries = + + // the global header and root fixtures/ dir are going to get + // a different date each time, so omit that bit. + // Also, dev/ino values differ across machines, so that's not + // included. 
+ [ [ 'globalExtendedHeader', + { path: 'PaxHeader/', + mode: 438, + uid: 0, + gid: 0, + type: 'g', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { "NODETAR.author": pkg.author, + "NODETAR.name": pkg.name, + "NODETAR.description": pkg.description, + "NODETAR.version": pkg.version, + "NODETAR.repository.type": pkg.repository.type, + "NODETAR.repository.url": pkg.repository.url, + "NODETAR.main": pkg.main, + "NODETAR.scripts.test": pkg.scripts.test } ] + + , [ 'entry', + { path: 'fixtures/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + uid: uid, + gid: gid, + size: 200, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/a.txt', + mode: 420, + uid: uid, + gid: gid, + size: 257, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/b.txt', + mode: 420, + uid: uid, + gid: gid, + size: 512, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/c.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/cc.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/sub/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + + , [ 'entry', + { path: 'fixtures/foo.js', + mode: 420, + uid: uid, + gid: gid, + size: 4, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + 
ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-1', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-2', + mode: 420, + uid: uid, + gid: gid, + size: 0, + type: '1', + linkpath: 'fixtures/hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/star.4.html', + mode: 420, + uid: uid, + gid: gid, + size: 54081, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/packtest/Ω.txt', + 'NODETAR.depth': '2', + 'NODETAR.type': 'File', + nlink: 1, + uid: uid, + gid: gid, + size: 2, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '2', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/', + mode: 
493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + mode: 493, + uid: 
uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 100, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/symlink', + uid: uid, + gid: gid, + size: 0, + type: '2', + linkpath: 'hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: "fixtures/Ω.txt" + , "NODETAR.depth": "1" + , "NODETAR.type": "File" + , nlink: 1 + , uid: uid + , gid: gid + , size: 2 + , "NODETAR.blksize": "4096" + , "NODETAR.blocks": "8" } ] + + , [ 'entry', + { path: 'fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + ] + + +// first, make sure that the hardlinks are actually hardlinks, or this +// won't work. Git has a way of replacing them with a copy. +var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") + , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") + , fs = require("fs") + +try { fs.unlinkSync(hard2) } catch (e) {} +fs.linkSync(hard1, hard2) + +tap.test("with global header", { timeout: 10000 }, function (t) { + runTest(t, true) +}) + +tap.test("without global header", { timeout: 10000 }, function (t) { + runTest(t, false) +}) + +tap.test("with from base", { timeout: 10000 }, function (t) { + runTest(t, true, true) +}) + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} + + +function runTest (t, doGH, doFromBase) { + var reader = Reader({ path: input + , filter: function () { + return !this.path.match(/\.(tar|hex)$/) + } + , sort: alphasort + }) + + var props = doGH ? pkg : {} + if(doFromBase) props.fromBase = true; + + var pack = Pack(props) + var writer = Writer(target) + + // skip the global header if we're not doing that. + var entry = doGH ? 
0 : 1 + + t.ok(reader, "reader ok") + t.ok(pack, "pack ok") + t.ok(writer, "writer ok") + + pack.pipe(writer) + + var parse = tar.Parse() + t.ok(parse, "parser should be ok") + + pack.on("data", function (c) { + // console.error("PACK DATA") + if (c.length !== 512) { + // this one is too noisy, only assert if it'll be relevant + t.equal(c.length, 512, "parser should emit data in 512byte blocks") + } + parse.write(c) + }) + + pack.on("end", function () { + // console.error("PACK END") + t.pass("parser ends") + parse.end() + }) + + pack.on("error", function (er) { + t.fail("pack error", er) + }) + + parse.on("error", function (er) { + t.fail("parse error", er) + }) + + writer.on("error", function (er) { + t.fail("writer error", er) + }) + + reader.on("error", function (er) { + t.fail("reader error", er) + }) + + parse.on("*", function (ev, e) { + var wanted = entries[entry++] + if (!wanted) { + t.fail("unexpected event: "+ev) + return + } + t.equal(ev, wanted[0], "event type should be "+wanted[0]) + + if(doFromBase) { + if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100) + wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc' + + if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/') + if(wanted[1].path == '') wanted[1].path = '/' + if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '') + + wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '') + } + + if (ev !== wanted[0] || e.path !== wanted[1].path) { + console.error("wanted", wanted) + console.error([ev, e.props]) + e.on("end", function () { + console.error(e.fields) + throw "break" + }) + } + + + t.has(e.props, wanted[1], "properties "+wanted[1].path) + if (wanted[2]) { + e.on("end", function () { + if (!e.fields) { + t.ok(e.fields, "should get fields") + } else { + t.has(e.fields, wanted[2], "should get expected fields") + } + }) + } + }) + + reader.pipe(pack) + + writer.on("close", function () { + t.equal(entry, entries.length, "should get all expected entries") + t.pass("it finished") + t.end() + }) + +} diff --git a/node_modules/fsevents/node_modules/tar/test/parse-discard.js b/node_modules/fsevents/node_modules/tar/test/parse-discard.js new file mode 100644 index 0000000..da01a65 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/parse-discard.js @@ -0,0 +1,29 @@ +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + +tap.test("parser test", function (t) { + var parser = tar.Parse() + var total = 0 + var dataTotal = 0 + + parser.on("end", function () { + + t.equals(total-513,dataTotal,'should have discarded only c.txt') + + t.end() + }) + + fs.createReadStream(file) + .pipe(parser) + .on('entry',function(entry){ + if(entry.path === 'c.txt') entry.abort() + + total += entry.size; + entry.on('data',function(data){ + dataTotal += data.length + }) + }) +}) diff --git a/node_modules/fsevents/node_modules/tar/test/parse.js b/node_modules/fsevents/node_modules/tar/test/parse.js new file mode 100644 index 0000000..f765a50 --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/parse.js @@ -0,0 +1,359 @@ +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + , index = 0 + + , expect = +[ [ 'entry', + { path: 'c.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 513, + mtime: 
new Date('Wed, 26 Oct 2011 01:10:58 GMT'), + cksum: 5422, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'cc.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 513, + mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'), + cksum: 5525, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 100, + mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'), + cksum: 18124, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 2, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 5695, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 120, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 6702, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'Ω.txt', + ctime: 1319737909, + atime: 1319739061, + dev: 234881026, + ino: 51693379, + nlink: 1 } ], + [ 'entry', + { path: 'Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 2, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 5695, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'), + dev: 234881026, + ino: 51693379, + nlink: 1 }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 353, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14488, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ctime: 1319686868, + atime: 1319741254, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 1 } ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 200, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14570, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + 
dev: 234881026, + ino: 51681874, + nlink: 1 }, + undefined ], + [ 'longPath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, + size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4976, + type: 'L', + linkpath: '', + ustar: false }, + '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 1000, + gid: 1000, + size: 201, + mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'), + cksum: 14086, + type: '0', + linkpath: '', + ustar: false }, + undefined ], + [ 'longLinkpath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, + size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4975, + type: 'K', + linkpath: '', + ustar: false }, + '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], + [ 'longPath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, + size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4976, + type: 'L', + linkpath: '', + ustar: false }, + '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ], + [ 'entry', + { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + mode: 511, + uid: 1000, + gid: 1000, + size: 0, + mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'), + cksum: 21603, + type: '2', + linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ustar: false }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200-hard', + mode: 420, + uid: 24561, + gid: 20, + size: 143, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 6533, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { ctime: 1320617144, + atime: 1320617232, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 } ], + [ 'entry', + { path: '200-hard', + mode: 420, + uid: 24561, + gid: 20, + size: 200, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 5526, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), + atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 353, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14488, + type: 'x', + linkpath: '', + 
ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ctime: 1320617144, + atime: 1320617406, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 } ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 15173, + type: '1', + linkpath: '200-hard', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), + atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 }, + undefined ] ] + + +tap.test("parser test", function (t) { + var parser = tar.Parse() + + parser.on("end", function () { + t.equal(index, expect.length, "saw all expected events") + t.end() + }) + + fs.createReadStream(file) + .pipe(parser) + .on("*", function (ev, entry) { + var wanted = expect[index] + if (!wanted) { + return t.fail("Unexpected event: " + ev) + } + var result = [ev, entry.props] + entry.on("end", function () { + result.push(entry.fields || entry.body) + + t.equal(ev, wanted[0], index + " event type") + t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties") + if (wanted[2]) { + t.equivalent(result[2], wanted[2], "metadata values") + } + index ++ + }) + }) +}) diff --git a/node_modules/fsevents/node_modules/tar/test/zz-cleanup.js b/node_modules/fsevents/node_modules/tar/test/zz-cleanup.js new file mode 100644 index 0000000..a00ff7f --- /dev/null +++ b/node_modules/fsevents/node_modules/tar/test/zz-cleanup.js @@ -0,0 +1,20 @@ +// clean up the fixtures + +var tap = require("tap") +, rimraf = require("rimraf") +, test = tap.test +, path = require("path") + +test("clean fixtures", function (t) { + rimraf(path.resolve(__dirname, "fixtures"), function (er) { + t.ifError(er, "rimraf ./fixtures/") + t.end() + }) +}) + +test("clean tmp", function (t) { + rimraf(path.resolve(__dirname, "tmp"), function (er) { + t.ifError(er, "rimraf ./tmp/") + t.end() + }) +}) diff --git a/node_modules/fsevents/node_modules/tough-cookie/LICENSE b/node_modules/fsevents/node_modules/tough-cookie/LICENSE new file mode 100644 index 0000000..1bc286f --- /dev/null +++ b/node_modules/fsevents/node_modules/tough-cookie/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015, Salesforce.com, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. 
Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +=== + +The following exceptions apply: + +=== + +`public_suffix_list.dat` was obtained from + via +. The license for this file is MPL/2.0. The header of +that file reads as follows: + + // This Source Code Form is subject to the terms of the Mozilla Public + // License, v. 2.0. If a copy of the MPL was not distributed with this + // file, You can obtain one at http://mozilla.org/MPL/2.0/. diff --git a/node_modules/fsevents/node_modules/tough-cookie/README.md b/node_modules/fsevents/node_modules/tough-cookie/README.md new file mode 100644 index 0000000..9899dbf --- /dev/null +++ b/node_modules/fsevents/node_modules/tough-cookie/README.md @@ -0,0 +1,492 @@ +[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js + +[![Build Status](https://travis-ci.org/SalesforceEng/tough-cookie.png?branch=master)](https://travis-ci.org/SalesforceEng/tough-cookie) + +[![NPM Stats](https://nodei.co/npm/tough-cookie.png?downloads=true&stars=true)](https://npmjs.org/package/tough-cookie) +![NPM Downloads](https://nodei.co/npm-dl/tough-cookie.png?months=9) + +# Synopsis + +``` javascript +var tough = require('tough-cookie'); +var Cookie = tough.Cookie; +var cookie = Cookie.parse(header); +cookie.value = 'somethingdifferent'; +header = cookie.toString(); + +var cookiejar = new tough.CookieJar(); +cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb); +// ... +cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) { + res.headers['cookie'] = cookies.join('; '); +}); +``` + +# Installation + +It's _so_ easy! + +`npm install tough-cookie` + +Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken. + +# API + +## tough + +Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound". + +**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary. + +### `parseDate(string)` + +Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. + +### `formatDate(date)` + +Format a Date into a RFC1123 string (the RFC6265-recommended format). + +### `canonicalDomain(str)` + +Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects). 
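Editor's note: as a quick illustration of the three helper functions documented above, here is a small sketch (not part of the vendored README). It assumes only the documented `parseDate`, `formatDate`, and `canonicalDomain` exports of `tough-cookie`:

``` javascript
var tough = require('tough-cookie');

// RFC6265 Section 5.1.1 date parsing (not Date.parse()):
var d = tough.parseDate('Wed, 09 Jun 2021 10:18:14 GMT'); // -> Date object

// RFC1123 formatting, the form recommended for the Expires= attribute:
var s = tough.formatDate(d); // -> 'Wed, 09 Jun 2021 10:18:14 GMT'

// Canonicalization: trimmed, lowercased, leading dot stripped:
tough.canonicalDomain('.EXAMPLE.com'); // -> 'example.com'
```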
+ +### `domainMatch(str,domStr[,canonicalize=true])` + +Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". + +The `canonicalize` parameter will run the other two paramters through `canonicalDomain` or not. + +### `defaultPath(path)` + +Given a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. + +The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output. + +### `pathMatch(reqPath,cookiePath)` + +Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean. + +This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`. + +### `parse(cookieString[, options])` + +alias for `Cookie.parse(cookieString[, options])` + +### `fromJSON(string)` + +alias for `Cookie.fromJSON(string)` + +### `getPublicSuffix(hostname)` + +Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it. + +For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`. + +For further information, see http://publicsuffix.org/. This module derives its list from that site. + +### `cookieCompare(a,b)` + +For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence: + +* Longest `.path` +* oldest `.creation` (which has a 1ms precision, same as `Date`) +* lowest `.creationIndex` (to get beyond the 1ms precision) + +``` javascript +var cookies = [ /* unsorted array of Cookie objects */ ]; +cookies = cookies.sort(cookieCompare); +``` + +**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`. + +### `permuteDomain(domain)` + +Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores. + +### `permutePath(path)` + +Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores. + + +## Cookie + +Exported via `tough.Cookie`. + +### `Cookie.parse(cookieString[, options])` + +Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. 
+ +The options parameter is not required and currently has only one property: + + * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant. + +If options is not an object, it is ignored, which means you can use `Array#map` with it. + +Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response: + +``` javascript +if (res.headers['set-cookie'] instanceof Array) + cookies = res.headers['set-cookie'].map(Cookie.parse); +else + cookies = [Cookie.parse(res.headers['set-cookie'])]; +``` + +### Properties + +Cookie object properties: + + * _key_ - string - the name or key of the cookie (default "") + * _value_ - string - the value of the cookie (default "") + * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()` + * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()` + * _domain_ - string - the `Domain=` attribute of the cookie + * _path_ - string - the `Path=` of the cookie + * _secure_ - boolean - the `Secure` cookie flag + * _httpOnly_ - boolean - the `HttpOnly` cookie flag + * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside) + * _creation_ - `Date` - when this cookie was constructed + * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation) + +After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes: + + * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied) + * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one. + * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar + * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute. + +### `Cookie([{properties}])` + +Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties. + +### `.toString()` + +encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`. + +### `.cookieString()` + +encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '='). + +### `.setExpires(String)` + +sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string) is set. + +### `.setMaxAge(number)` + +sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly. + +### `.expiryTime([now=Date.now()])` + +### `.expiryDate([now=Date.now()])` + +expiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds. + +Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` paramter -- is used to offset the `.maxAge` attribute. 
+ +If Expires (`.expires`) is set, that's returned. + +Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents). + +### `.TTL([now=Date.now()])` + +compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply. + +The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned. + +### `.canonicalizedDoman()` + +### `.cdomain()` + +return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters. + +### `.toJSON()` + +For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`). + +**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +### `Cookie.fromJSON(strOrObj)` + +Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer. + +Returns `null` upon JSON parsing error. + +### `.clone()` + +Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`. + +### `.validate()` + +Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive. + +validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct: + +``` javascript +if (cookie.validate() === true) { + // it's tasty +} else { + // yuck! +} +``` + + +## CookieJar + +Exported via `tough.CookieJar`. + +### `CookieJar([store],[options])` + +Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used. + +The `options` object can be omitted and can have the following properties: + + * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk" + * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name. + This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers. + +Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods. + +### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))` + +Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties. 
+ +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option. + +As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual). + +### `.setCookieSync(cookieOrString, currentUrl, [{options}])` + +Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookies(currentUrl, [{options},] cb(err,cookies))` + +Retrieve the list of cookies that can be sent in a Cookie header for the current url. + +If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially). + * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it). + +The `.lastAccessed` property of the returned cookies will have been updated. + +### `.getCookiesSync(currentUrl, [{options}])` + +Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookieString(...)` + +Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`. + +### `.getCookieStringSync(...)` + +Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getSetCookieStrings(...)` + +Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`. + +### `.getSetCookieStringsSync(...)` + +Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). 
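+
+As a quick, minimal sketch of the `CookieJar` calls above (storing a cookie and reading it back; the URL and cookie values are invented for the example):
+
+``` javascript
+var tough = require('tough-cookie');
+
+var jar = new tough.CookieJar(); // backed by the default MemoryCookieStore
+
+// store a Set-Cookie value against the URL it arrived from, then build a
+// Cookie header for a later request to the same site:
+jar.setCookie('sid=abc123; Domain=example.com; Path=/', 'http://example.com/login', function(err, cookie) {
+  if (err) throw err;
+  jar.getCookieString('http://example.com/profile', function(err, cookieHeader) {
+    if (err) throw err;
+    console.log(cookieHeader); // "sid=abc123"
+  });
+});
+
+// the synchronous variants work because MemoryCookieStore is a synchronous store:
+var syncJar = new tough.CookieJar();
+syncJar.setCookieSync('lang=en; Path=/', 'http://example.com/');
+console.log(syncJar.getCookieStringSync('http://example.com/')); // "lang=en"
+```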
+
+### `.serialize(cb(err,serializedObject))`
+
+Serialize the Jar if the underlying store supports `.getAllCookies`.
+
+**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.
+
+See [Serialization Format].
+
+### `.serializeSync()`
+
+Sync version of `.serialize`.
+
+### `.toJSON()`
+
+Alias of `.serializeSync()` for the convenience of `JSON.stringify(cookiejar)`.
+
+### `CookieJar.deserialize(serialized, [store], cb(err,object))`
+
+A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.
+
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created.
+
+As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, it is passed to the callback.
+
+### `CookieJar.deserializeSync(serialized, [store])`
+
+Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.
+
+### `CookieJar.fromJSON(string)`
+
+Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.
+
+### `.clone([store,]cb(err,newJar))`
+
+Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.
+
+The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.
+
+### `.cloneSync([store])`
+
+Synchronous version of `.clone`, returning a new `CookieJar` instance.
+
+The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.
+
+The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.
+
+## Store
+
+Base class for CookieJar stores. Available as `tough.Store`.
+
+## Store API
+
+The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore`, which can be found in the `lib/memstore.js` file. The API uses continuation-passing style to allow for asynchronous stores.
+
+Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.
+
+Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing style API is still used under the hood).
+
+All `domain` parameters will have been normalized before calling.
+
+The Cookie store must have all of the following methods.
+
+### `store.findCookie(domain, path, key, cb(err,cookie))`
+
+Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.
+
+Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).
+
+### `store.findCookies(domain, path, cb(err,cookies))`
+
+Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.
+
+If no cookies are found, the callback MUST be passed an empty array.
+
+The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.
+
+As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only).
+
+### `store.putCookie(cookie, cb(err))`
+
+Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `findCookie` and `putCookie` a duplicate `putCookie` can occur.
+
+The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.
+
+Pass an error if the cookie cannot be stored.
+
+### `store.updateCookie(oldCookie, newCookie, cb(err))`
+
+Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `.domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` -- how the conflict is resolved is up to the store.
+
+The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).
+
+Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method, a stub that calls `putCookie(newCookie,cb)` will be added to the store object.
+
+The `newCookie` and `oldCookie` objects MUST NOT be modified.
+
+Pass an error if the newCookie cannot be stored.
+
+### `store.removeCookie(domain, path, key, cb(err))`
+
+Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).
+
+The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.
+
+### `store.removeCookies(domain, path, cb(err))`
+
+Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed.
+
+Pass an error ONLY if removing any existing cookies failed.
+
+### `store.getAllCookies(cb(err, cookies))`
+
+Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure.
+
+Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms.
See `compareCookies` for more detail. + +Pass an error if retrieval fails. + +## MemoryCookieStore + +Inherits from `Store`. + +A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. + +# Serialization Format + +**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`. + +```js + { + // The version of tough-cookie that serialized this jar. + version: 'tough-cookie@1.x.y', + + // add the store type, to make humans happy: + storeType: 'MemoryCookieStore', + + // CookieJar configuration: + rejectPublicSuffixes: true, + // ... future items go here + + // Gets filled from jar.store.getAllCookies(): + cookies: [ + { + key: 'string', + value: 'string', + // ... + /* other Cookie.serializableProperties go here */ + } + ] + } +``` + +# Copyright and License + +(tl;dr: BSD-3-Clause with some MPL/2.0) + +```text + Copyright (c) 2015, Salesforce.com, Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of Salesforce.com nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +``` + +Portions may be licensed under different licenses (in particular `public_suffix_list.dat` is MPL/2.0); please read that file and the LICENSE file for full details. diff --git a/node_modules/fsevents/node_modules/tough-cookie/lib/cookie.js b/node_modules/fsevents/node_modules/tough-cookie/lib/cookie.js new file mode 100644 index 0000000..12da297 --- /dev/null +++ b/node_modules/fsevents/node_modules/tough-cookie/lib/cookie.js @@ -0,0 +1,1342 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +'use strict'; +var net = require('net'); +var urlParse = require('url').parse; +var pubsuffix = require('./pubsuffix'); +var Store = require('./store').Store; +var MemoryCookieStore = require('./memstore').MemoryCookieStore; +var pathMatch = require('./pathMatch').pathMatch; +var VERSION = require('../package.json').version; + +var punycode; +try { + punycode = require('punycode'); +} catch(e) { + console.warn("cookie: can't load punycode; won't use punycode for domain normalization"); +} + +var DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +var COOKIE_OCTET = /[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]/; +var COOKIE_OCTETS = new RegExp('^'+COOKIE_OCTET.source+'+$'); + +var CONTROL_CHARS = /[\x00-\x1F]/; + +// Double quotes are part of the value (see: S4.1.1). 
+// '\r', '\n' and '\0' should be treated as a terminator in the "relaxed" mode +// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60) +// '=' and ';' are attribute/values separators +// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L64) +var COOKIE_PAIR = /^(([^=;]+))\s*=\s*([^\n\r\0]*)/; + +// Used to parse non-RFC-compliant cookies like '=abc' when given the `loose` +// option in Cookie.parse: +var LOOSE_COOKIE_PAIR = /^((?:=)?([^=;]*)\s*=\s*)?([^\n\r\0]*)/; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +var PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +// Used for checking whether or not there is a trailing semi-colon +var TRAILING_SEMICOLON = /;+$/; + +var DAY_OF_MONTH = /^(\d{1,2})[^\d]*$/; +var TIME = /^(\d{1,2})[^\d]*:(\d{1,2})[^\d]*:(\d{1,2})[^\d]*$/; +var MONTH = /^(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)/i; + +var MONTH_TO_NUM = { + jan:0, feb:1, mar:2, apr:3, may:4, jun:5, + jul:6, aug:7, sep:8, oct:9, nov:10, dec:11 +}; +var NUM_TO_MONTH = [ + 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec' +]; +var NUM_TO_DAY = [ + 'Sun','Mon','Tue','Wed','Thu','Fri','Sat' +]; + +var YEAR = /^(\d{2}|\d{4})$/; // 2 to 4 digits + +var MAX_TIME = 2147483647000; // 31-bit max +var MIN_TIME = 0; // 31-bit min + + +// RFC6265 S5.1.1 date parser: +function parseDate(str) { + if (!str) { + return; + } + + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + var tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + var hour = null; + var minutes = null; + var seconds = null; + var day = null; + var month = null; + var year = null; + + for (var i=0; i 23 || minutes > 59 || seconds > 59) { + return; + } + + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (day === null) { + result = DAY_OF_MONTH.exec(token); + if (result) { + day = parseInt(result, 10); + /* RFC6265 S5.1.1.5: + * [fail if] the day-of-month-value is less than 1 or greater than 31 + */ + if(day < 1 || day > 31) { + return; + } + continue; + } + } + + /* 2.3. If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = MONTH.exec(token); + if (result) { + month = MONTH_TO_NUM[result[1].toLowerCase()]; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the year + * production, set the found-year flag and set the year-value to the number + * denoted by the date-token. Skip the remaining sub-steps and continue to + * the next date-token. + */ + if (year === null) { + result = YEAR.exec(token); + if (result) { + year = parseInt(result[0], 10); + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. 
If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000. + */ + if (70 <= year && year <= 99) { + year += 1900; + } else if (0 <= year && year <= 69) { + year += 2000; + } + + if (year < 1601) { + return; // 5. ... the year-value is less than 1601 + } + } + } + } + + if (seconds === null || day === null || month === null || year === null) { + return; // 5. ... at least one of the found-day-of-month, found-month, found- + // year, or found-time flags is not set, + } + + return new Date(Date.UTC(year, month, day, hour, minutes, seconds)); +} + +function formatDate(date) { + var d = date.getUTCDate(); d = d >= 10 ? d : '0'+d; + var h = date.getUTCHours(); h = h >= 10 ? h : '0'+h; + var m = date.getUTCMinutes(); m = m >= 10 ? m : '0'+m; + var s = date.getUTCSeconds(); s = s >= 10 ? s : '0'+s; + return NUM_TO_DAY[date.getUTCDay()] + ', ' + + d+' '+ NUM_TO_MONTH[date.getUTCMonth()] +' '+ date.getUTCFullYear() +' '+ + h+':'+m+':'+s+' GMT'; +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./,''); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * "The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* "All of the following [three] conditions hold:" (order adjusted from the RFC) */ + + /* "* The string is a host name (i.e., not an IP address)." */ + if (net.isIP(str)) { + return false; + } + + /* "* The domain string is a suffix of the string" */ + var idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // e.g "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { // it's not a suffix + return false; + } + + /* "* The last character of the string that is not included in the domain + * string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; + } + + return true; +} + + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0,1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + var rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." 
+ return path.slice(0, rightSlash); +} + + +function parse(str, options) { + if (!options || typeof options !== 'object') { + options = {}; + } + str = str.trim(); + + // S4.1.1 Trailing semi-colons are not part of the specification. + var semiColonCheck = TRAILING_SEMICOLON.exec(str); + if (semiColonCheck) { + str = str.slice(0, semiColonCheck.index); + } + + // We use a regex to parse the "name-value-pair" part of S5.2 + var firstSemi = str.indexOf(';'); // S5.2 step 1 + var pairRe = options.loose ? LOOSE_COOKIE_PAIR : COOKIE_PAIR; + var result = pairRe.exec(firstSemi === -1 ? str : str.substr(0,firstSemi)); + + // Rx satisfies the "the name string is empty" and "lacks a %x3D ("=")" + // constraints as well as trimming any whitespace. + if (!result) { + return; + } + + var c = new Cookie(); + if (result[1]) { + c.key = result[2].trim(); + } else { + c.key = ''; + } + c.value = result[3].trim(); + if (CONTROL_CHARS.test(c.key) || CONTROL_CHARS.test(c.value)) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + var unparsed = str.slice(firstSemi).replace(/^\s*;\s*/,'').trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. + */ + var cookie_avs = unparsed.split(/\s*;\s*/); + while (cookie_avs.length) { + var av = cookie_avs.shift(); + var av_sep = av.indexOf('='); + var av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0,av_sep); + av_value = av.substr(av_sep+1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch(av_key) { + case 'expires': // S5.2.1 + if (av_value) { + var exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case 'max-age': // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + var delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case 'domain': // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + var domain = av_value.trim().replace(/^\./, ''); + if (domain) { + // "Convert the cookie-domain to lower case." 
+ c.domain = domain.toLowerCase(); + } + } + break; + + case 'path': // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case 'secure': // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case 'httponly': // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + var obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + var obj; + if (typeof str === 'string') { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + var c = new Cookie(); + for (var i=0; i 1) { + var lindex = path.lastIndexOf('/'); + if (lindex === 0) { + break; + } + path = path.substr(0,lindex); + permutations.push(path); + } + permutations.push('/'); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } + catch(err) { + // Silently swallow error + } + + return urlParse(url); +} + +function Cookie(options) { + options = options || {}; + + Object.keys(options).forEach(function(prop) { + if (Cookie.prototype.hasOwnProperty(prop) && + Cookie.prototype[prop] !== options[prop] && + prop.substr(0,1) !== '_') + { + this[prop] = options[prop]; + } + }, this); + + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, 'creationIndex', { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); +} + +Cookie.cookiesCreated = 0; // incremented each time a cookie is created + +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; + +Cookie.prototype.key = ""; +Cookie.prototype.value = ""; + +// the order in which the RFC has them: +Cookie.prototype.expires = "Infinity"; // coerces to literal Infinity +Cookie.prototype.maxAge = null; // takes precedence over expires for TTL +Cookie.prototype.domain = null; +Cookie.prototype.path = null; +Cookie.prototype.secure = false; +Cookie.prototype.httpOnly = false; +Cookie.prototype.extensions = null; + +// set by the CookieJar: +Cookie.prototype.hostOnly = null; // boolean when set +Cookie.prototype.pathIsDefault = null; // boolean when set +Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse +Cookie.prototype.lastAccessed = null; // Date when set +Object.defineProperty(Cookie.prototype, 'creationIndex', { + configurable: true, + enumerable: false, + writable: true, + value: 0 +}); + +Cookie.serializableProperties = Object.keys(Cookie.prototype) + .filter(function(prop) { + return !( + Cookie.prototype[prop] instanceof Function || + prop === 'creationIndex' || + prop.substr(0,1) === '_' + ); + }); + +Cookie.prototype.inspect = function inspect() { + var now = Date.now(); + return 'Cookie="'+this.toString() + + '; hostOnly='+(this.hostOnly != null ? this.hostOnly : '?') + + '; aAge='+(this.lastAccessed ? (now-this.lastAccessed.getTime())+'ms' : '?') + + '; cAge='+(this.creation ? (now-this.creation.getTime())+'ms' : '?') + + '"'; +}; + +Cookie.prototype.toJSON = function() { + var obj = {}; + + var props = Cookie.serializableProperties; + for (var i=0; i suffixLen) { + var publicSuffix = parts.slice(0,suffixLen+1).reverse().join('.'); + return converted ? 
punycode.toUnicode(publicSuffix) : publicSuffix; + } + + return null; +}; + +// The following generated structure is used under the MPL version 2.0 +// See public-suffix.txt for more information + +var index = module.exports.index = Object.freeze( +{"ac":true,"com.ac":true,"edu.ac":true,"gov.ac":true,"net.ac":true,"mil.ac":true,"org.ac":true,"ad":true,"nom.ad":true,"ae":true,"co.ae":true,"net.ae":true,"org.ae":true,"sch.ae":true,"ac.ae":true,"gov.ae":true,"mil.ae":true,"aero":true,"accident-investigation.aero":true,"accident-prevention.aero":true,"aerobatic.aero":true,"aeroclub.aero":true,"aerodrome.aero":true,"agents.aero":true,"aircraft.aero":true,"airline.aero":true,"airport.aero":true,"air-surveillance.aero":true,"airtraffic.aero":true,"air-traffic-control.aero":true,"ambulance.aero":true,"amusement.aero":true,"association.aero":true,"author.aero":true,"ballooning.aero":true,"broker.aero":true,"caa.aero":true,"cargo.aero":true,"catering.aero":true,"certification.aero":true,"championship.aero":true,"charter.aero":true,"civilaviation.aero":true,"club.aero":true,"conference.aero":true,"consultant.aero":true,"consulting.aero":true,"control.aero":true,"council.aero":true,"crew.aero":true,"design.aero":true,"dgca.aero":true,"educator.aero":true,"emergency.aero":true,"engine.aero":true,"engineer.aero":true,"entertainment.aero":true,"equipment.aero":true,"exchange.aero":true,"express.aero":true,"federation.aero":true,"flight.aero":true,"freight.aero":true,"fuel.aero":true,"gliding.aero":true,"government.aero":true,"groundhandling.aero":true,"group.aero":true,"hanggliding.aero":true,"homebuilt.aero":true,"insurance.aero":true,"journal.aero":true,"journalist.aero":true,"leasing.aero":true,"logistics.aero":true,"magazine.aero":true,"maintenance.aero":true,"marketplace.aero":true,"media.aero":true,"microlight.aero":true,"modelling.aero":true,"navigation.aero":true,"parachuting.aero":true,"paragliding.aero":true,"passenger-association.aero":true,"pilot.aero":true,"press.aero":true,"production.aero":true,"recreation.aero":true,"repbody.aero":true,"res.aero":true,"research.aero":true,"rotorcraft.aero":true,"safety.aero":true,"scientist.aero":true,"services.aero":true,"show.aero":true,"skydiving.aero":true,"software.aero":true,"student.aero":true,"taxi.aero":true,"trader.aero":true,"trading.aero":true,"trainer.aero":true,"union.aero":true,"workinggroup.aero":true,"works.aero":true,"af":true,"gov.af":true,"com.af":true,"org.af":true,"net.af":true,"edu.af":true,"ag":true,"com.ag":true,"org.ag":true,"net.ag":true,"co.ag":true,"nom.ag":true,"ai":true,"off.ai":true,"com.ai":true,"net.ai":true,"org.ai":true,"al":true,"com.al":true,"edu.al":true,"gov.al":true,"mil.al":true,"net.al":true,"org.al":true,"am":true,"an":true,"com.an":true,"net.an":true,"org.an":true,"edu.an":true,"ao":true,"ed.ao":true,"gv.ao":true,"og.ao":true,"co.ao":true,"pb.ao":true,"it.ao":true,"aq":true,"ar":true,"com.ar":true,"edu.ar":true,"gob.ar":true,"gov.ar":true,"int.ar":true,"mil.ar":true,"net.ar":true,"org.ar":true,"tur.ar":true,"arpa":true,"e164.arpa":true,"in-addr.arpa":true,"ip6.arpa":true,"iris.arpa":true,"uri.arpa":true,"urn.arpa":true,"as":true,"gov.as":true,"asia":true,"at":true,"ac.at":true,"co.at":true,"gv.at":true,"or.at":true,"au":true,"com.au":true,"net.au":true,"org.au":true,"edu.au":true,"gov.au":true,"asn.au":true,"id.au":true,"info.au":true,"conf.au":true,"oz.au":true,"act.au":true,"nsw.au":true,"nt.au":true,"qld.au":true,"sa.au":true,"tas.au":true,"vic.au":true,"wa.au":true,"act.edu.au":true,"nsw.edu.au":true,"nt.edu
.au":true,"qld.edu.au":true,"sa.edu.au":true,"tas.edu.au":true,"vic.edu.au":true,"wa.edu.au":true,"qld.gov.au":true,"sa.gov.au":true,"tas.gov.au":true,"vic.gov.au":true,"wa.gov.au":true,"aw":true,"com.aw":true,"ax":true,"az":true,"com.az":true,"net.az":true,"int.az":true,"gov.az":true,"org.az":true,"edu.az":true,"info.az":true,"pp.az":true,"mil.az":true,"name.az":true,"pro.az":true,"biz.az":true,"ba":true,"org.ba":true,"net.ba":true,"edu.ba":true,"gov.ba":true,"mil.ba":true,"unsa.ba":true,"unbi.ba":true,"co.ba":true,"com.ba":true,"rs.ba":true,"bb":true,"biz.bb":true,"co.bb":true,"com.bb":true,"edu.bb":true,"gov.bb":true,"info.bb":true,"net.bb":true,"org.bb":true,"store.bb":true,"tv.bb":true,"*.bd":true,"be":true,"ac.be":true,"bf":true,"gov.bf":true,"bg":true,"a.bg":true,"b.bg":true,"c.bg":true,"d.bg":true,"e.bg":true,"f.bg":true,"g.bg":true,"h.bg":true,"i.bg":true,"j.bg":true,"k.bg":true,"l.bg":true,"m.bg":true,"n.bg":true,"o.bg":true,"p.bg":true,"q.bg":true,"r.bg":true,"s.bg":true,"t.bg":true,"u.bg":true,"v.bg":true,"w.bg":true,"x.bg":true,"y.bg":true,"z.bg":true,"0.bg":true,"1.bg":true,"2.bg":true,"3.bg":true,"4.bg":true,"5.bg":true,"6.bg":true,"7.bg":true,"8.bg":true,"9.bg":true,"bh":true,"com.bh":true,"edu.bh":true,"net.bh":true,"org.bh":true,"gov.bh":true,"bi":true,"co.bi":true,"com.bi":true,"edu.bi":true,"or.bi":true,"org.bi":true,"biz":true,"bj":true,"asso.bj":true,"barreau.bj":true,"gouv.bj":true,"bm":true,"com.bm":true,"edu.bm":true,"gov.bm":true,"net.bm":true,"org.bm":true,"*.bn":true,"bo":true,"com.bo":true,"edu.bo":true,"gov.bo":true,"gob.bo":true,"int.bo":true,"org.bo":true,"net.bo":true,"mil.bo":true,"tv.bo":true,"br":true,"adm.br":true,"adv.br":true,"agr.br":true,"am.br":true,"arq.br":true,"art.br":true,"ato.br":true,"b.br":true,"bio.br":true,"blog.br":true,"bmd.br":true,"cim.br":true,"cng.br":true,"cnt.br":true,"com.br":true,"coop.br":true,"ecn.br":true,"eco.br":true,"edu.br":true,"emp.br":true,"eng.br":true,"esp.br":true,"etc.br":true,"eti.br":true,"far.br":true,"flog.br":true,"fm.br":true,"fnd.br":true,"fot.br":true,"fst.br":true,"g12.br":true,"ggf.br":true,"gov.br":true,"imb.br":true,"ind.br":true,"inf.br":true,"jor.br":true,"jus.br":true,"leg.br":true,"lel.br":true,"mat.br":true,"med.br":true,"mil.br":true,"mp.br":true,"mus.br":true,"net.br":true,"*.nom.br":true,"not.br":true,"ntr.br":true,"odo.br":true,"org.br":true,"ppg.br":true,"pro.br":true,"psc.br":true,"psi.br":true,"qsl.br":true,"radio.br":true,"rec.br":true,"slg.br":true,"srv.br":true,"taxi.br":true,"teo.br":true,"tmp.br":true,"trd.br":true,"tur.br":true,"tv.br":true,"vet.br":true,"vlog.br":true,"wiki.br":true,"zlg.br":true,"bs":true,"com.bs":true,"net.bs":true,"org.bs":true,"edu.bs":true,"gov.bs":true,"bt":true,"com.bt":true,"edu.bt":true,"gov.bt":true,"net.bt":true,"org.bt":true,"bv":true,"bw":true,"co.bw":true,"org.bw":true,"by":true,"gov.by":true,"mil.by":true,"com.by":true,"of.by":true,"bz":true,"com.bz":true,"net.bz":true,"org.bz":true,"edu.bz":true,"gov.bz":true,"ca":true,"ab.ca":true,"bc.ca":true,"mb.ca":true,"nb.ca":true,"nf.ca":true,"nl.ca":true,"ns.ca":true,"nt.ca":true,"nu.ca":true,"on.ca":true,"pe.ca":true,"qc.ca":true,"sk.ca":true,"yk.ca":true,"gc.ca":true,"cat":true,"cc":true,"cd":true,"gov.cd":true,"cf":true,"cg":true,"ch":true,"ci":true,"org.ci":true,"or.ci":true,"com.ci":true,"co.ci":true,"edu.ci":true,"ed.ci":true,"ac.ci":true,"net.ci":true,"go.ci":true,"asso.ci":true,"xn--aroport-bya.ci":true,"int.ci":true,"presse.ci":true,"md.ci":true,"gouv.ci":true,"*.ck":true,"www.ck":false,"cl":true,"gov.c
l":true,"gob.cl":true,"co.cl":true,"mil.cl":true,"cm":true,"co.cm":true,"com.cm":true,"gov.cm":true,"net.cm":true,"cn":true,"ac.cn":true,"com.cn":true,"edu.cn":true,"gov.cn":true,"net.cn":true,"org.cn":true,"mil.cn":true,"xn--55qx5d.cn":true,"xn--io0a7i.cn":true,"xn--od0alg.cn":true,"ah.cn":true,"bj.cn":true,"cq.cn":true,"fj.cn":true,"gd.cn":true,"gs.cn":true,"gz.cn":true,"gx.cn":true,"ha.cn":true,"hb.cn":true,"he.cn":true,"hi.cn":true,"hl.cn":true,"hn.cn":true,"jl.cn":true,"js.cn":true,"jx.cn":true,"ln.cn":true,"nm.cn":true,"nx.cn":true,"qh.cn":true,"sc.cn":true,"sd.cn":true,"sh.cn":true,"sn.cn":true,"sx.cn":true,"tj.cn":true,"xj.cn":true,"xz.cn":true,"yn.cn":true,"zj.cn":true,"hk.cn":true,"mo.cn":true,"tw.cn":true,"co":true,"arts.co":true,"com.co":true,"edu.co":true,"firm.co":true,"gov.co":true,"info.co":true,"int.co":true,"mil.co":true,"net.co":true,"nom.co":true,"org.co":true,"rec.co":true,"web.co":true,"com":true,"coop":true,"cr":true,"ac.cr":true,"co.cr":true,"ed.cr":true,"fi.cr":true,"go.cr":true,"or.cr":true,"sa.cr":true,"cu":true,"com.cu":true,"edu.cu":true,"org.cu":true,"net.cu":true,"gov.cu":true,"inf.cu":true,"cv":true,"cw":true,"com.cw":true,"edu.cw":true,"net.cw":true,"org.cw":true,"cx":true,"gov.cx":true,"ac.cy":true,"biz.cy":true,"com.cy":true,"ekloges.cy":true,"gov.cy":true,"ltd.cy":true,"name.cy":true,"net.cy":true,"org.cy":true,"parliament.cy":true,"press.cy":true,"pro.cy":true,"tm.cy":true,"cz":true,"de":true,"dj":true,"dk":true,"dm":true,"com.dm":true,"net.dm":true,"org.dm":true,"edu.dm":true,"gov.dm":true,"do":true,"art.do":true,"com.do":true,"edu.do":true,"gob.do":true,"gov.do":true,"mil.do":true,"net.do":true,"org.do":true,"sld.do":true,"web.do":true,"dz":true,"com.dz":true,"org.dz":true,"net.dz":true,"gov.dz":true,"edu.dz":true,"asso.dz":true,"pol.dz":true,"art.dz":true,"ec":true,"com.ec":true,"info.ec":true,"net.ec":true,"fin.ec":true,"k12.ec":true,"med.ec":true,"pro.ec":true,"org.ec":true,"edu.ec":true,"gov.ec":true,"gob.ec":true,"mil.ec":true,"edu":true,"ee":true,"edu.ee":true,"gov.ee":true,"riik.ee":true,"lib.ee":true,"med.ee":true,"com.ee":true,"pri.ee":true,"aip.ee":true,"org.ee":true,"fie.ee":true,"eg":true,"com.eg":true,"edu.eg":true,"eun.eg":true,"gov.eg":true,"mil.eg":true,"name.eg":true,"net.eg":true,"org.eg":true,"sci.eg":true,"*.er":true,"es":true,"com.es":true,"nom.es":true,"org.es":true,"gob.es":true,"edu.es":true,"et":true,"com.et":true,"gov.et":true,"org.et":true,"edu.et":true,"biz.et":true,"name.et":true,"info.et":true,"net.et":true,"eu":true,"fi":true,"aland.fi":true,"*.fj":true,"*.fk":true,"fm":true,"fo":true,"fr":true,"com.fr":true,"asso.fr":true,"nom.fr":true,"prd.fr":true,"presse.fr":true,"tm.fr":true,"aeroport.fr":true,"assedic.fr":true,"avocat.fr":true,"avoues.fr":true,"cci.fr":true,"chambagri.fr":true,"chirurgiens-dentistes.fr":true,"experts-comptables.fr":true,"geometre-expert.fr":true,"gouv.fr":true,"greta.fr":true,"huissier-justice.fr":true,"medecin.fr":true,"notaires.fr":true,"pharmacien.fr":true,"port.fr":true,"veterinaire.fr":true,"ga":true,"gb":true,"gd":true,"ge":true,"com.ge":true,"edu.ge":true,"gov.ge":true,"org.ge":true,"mil.ge":true,"net.ge":true,"pvt.ge":true,"gf":true,"gg":true,"co.gg":true,"net.gg":true,"org.gg":true,"gh":true,"com.gh":true,"edu.gh":true,"gov.gh":true,"org.gh":true,"mil.gh":true,"gi":true,"com.gi":true,"ltd.gi":true,"gov.gi":true,"mod.gi":true,"edu.gi":true,"org.gi":true,"gl":true,"co.gl":true,"com.gl":true,"edu.gl":true,"net.gl":true,"org.gl":true,"gm":true,"gn":true,"ac.gn":true,"com.gn":true,"edu.gn":true,
"gov.gn":true,"org.gn":true,"net.gn":true,"gov":true,"gp":true,"com.gp":true,"net.gp":true,"mobi.gp":true,"edu.gp":true,"org.gp":true,"asso.gp":true,"gq":true,"gr":true,"com.gr":true,"edu.gr":true,"net.gr":true,"org.gr":true,"gov.gr":true,"gs":true,"gt":true,"com.gt":true,"edu.gt":true,"gob.gt":true,"ind.gt":true,"mil.gt":true,"net.gt":true,"org.gt":true,"*.gu":true,"gw":true,"gy":true,"co.gy":true,"com.gy":true,"net.gy":true,"hk":true,"com.hk":true,"edu.hk":true,"gov.hk":true,"idv.hk":true,"net.hk":true,"org.hk":true,"xn--55qx5d.hk":true,"xn--wcvs22d.hk":true,"xn--lcvr32d.hk":true,"xn--mxtq1m.hk":true,"xn--gmqw5a.hk":true,"xn--ciqpn.hk":true,"xn--gmq050i.hk":true,"xn--zf0avx.hk":true,"xn--io0a7i.hk":true,"xn--mk0axi.hk":true,"xn--od0alg.hk":true,"xn--od0aq3b.hk":true,"xn--tn0ag.hk":true,"xn--uc0atv.hk":true,"xn--uc0ay4a.hk":true,"hm":true,"hn":true,"com.hn":true,"edu.hn":true,"org.hn":true,"net.hn":true,"mil.hn":true,"gob.hn":true,"hr":true,"iz.hr":true,"from.hr":true,"name.hr":true,"com.hr":true,"ht":true,"com.ht":true,"shop.ht":true,"firm.ht":true,"info.ht":true,"adult.ht":true,"net.ht":true,"pro.ht":true,"org.ht":true,"med.ht":true,"art.ht":true,"coop.ht":true,"pol.ht":true,"asso.ht":true,"edu.ht":true,"rel.ht":true,"gouv.ht":true,"perso.ht":true,"hu":true,"co.hu":true,"info.hu":true,"org.hu":true,"priv.hu":true,"sport.hu":true,"tm.hu":true,"2000.hu":true,"agrar.hu":true,"bolt.hu":true,"casino.hu":true,"city.hu":true,"erotica.hu":true,"erotika.hu":true,"film.hu":true,"forum.hu":true,"games.hu":true,"hotel.hu":true,"ingatlan.hu":true,"jogasz.hu":true,"konyvelo.hu":true,"lakas.hu":true,"media.hu":true,"news.hu":true,"reklam.hu":true,"sex.hu":true,"shop.hu":true,"suli.hu":true,"szex.hu":true,"tozsde.hu":true,"utazas.hu":true,"video.hu":true,"id":true,"ac.id":true,"biz.id":true,"co.id":true,"desa.id":true,"go.id":true,"mil.id":true,"my.id":true,"net.id":true,"or.id":true,"sch.id":true,"web.id":true,"ie":true,"gov.ie":true,"il":true,"ac.il":true,"co.il":true,"gov.il":true,"idf.il":true,"k12.il":true,"muni.il":true,"net.il":true,"org.il":true,"im":true,"ac.im":true,"co.im":true,"com.im":true,"ltd.co.im":true,"net.im":true,"org.im":true,"plc.co.im":true,"tt.im":true,"tv.im":true,"in":true,"co.in":true,"firm.in":true,"net.in":true,"org.in":true,"gen.in":true,"ind.in":true,"nic.in":true,"ac.in":true,"edu.in":true,"res.in":true,"gov.in":true,"mil.in":true,"info":true,"int":true,"eu.int":true,"io":true,"com.io":true,"iq":true,"gov.iq":true,"edu.iq":true,"mil.iq":true,"com.iq":true,"org.iq":true,"net.iq":true,"ir":true,"ac.ir":true,"co.ir":true,"gov.ir":true,"id.ir":true,"net.ir":true,"org.ir":true,"sch.ir":true,"xn--mgba3a4f16a.ir":true,"xn--mgba3a4fra.ir":true,"is":true,"net.is":true,"com.is":true,"edu.is":true,"gov.is":true,"org.is":true,"int.is":true,"it":true,"gov.it":true,"edu.it":true,"abr.it":true,"abruzzo.it":true,"aosta-valley.it":true,"aostavalley.it":true,"bas.it":true,"basilicata.it":true,"cal.it":true,"calabria.it":true,"cam.it":true,"campania.it":true,"emilia-romagna.it":true,"emiliaromagna.it":true,"emr.it":true,"friuli-v-giulia.it":true,"friuli-ve-giulia.it":true,"friuli-vegiulia.it":true,"friuli-venezia-giulia.it":true,"friuli-veneziagiulia.it":true,"friuli-vgiulia.it":true,"friuliv-giulia.it":true,"friulive-giulia.it":true,"friulivegiulia.it":true,"friulivenezia-giulia.it":true,"friuliveneziagiulia.it":true,"friulivgiulia.it":true,"fvg.it":true,"laz.it":true,"lazio.it":true,"lig.it":true,"liguria.it":true,"lom.it":true,"lombardia.it":true,"lombardy.it":true,"lucania.it":true,"mar
.it":true,"marche.it":true,"mol.it":true,"molise.it":true,"piedmont.it":true,"piemonte.it":true,"pmn.it":true,"pug.it":true,"puglia.it":true,"sar.it":true,"sardegna.it":true,"sardinia.it":true,"sic.it":true,"sicilia.it":true,"sicily.it":true,"taa.it":true,"tos.it":true,"toscana.it":true,"trentino-a-adige.it":true,"trentino-aadige.it":true,"trentino-alto-adige.it":true,"trentino-altoadige.it":true,"trentino-s-tirol.it":true,"trentino-stirol.it":true,"trentino-sud-tirol.it":true,"trentino-sudtirol.it":true,"trentino-sued-tirol.it":true,"trentino-suedtirol.it":true,"trentinoa-adige.it":true,"trentinoaadige.it":true,"trentinoalto-adige.it":true,"trentinoaltoadige.it":true,"trentinos-tirol.it":true,"trentinostirol.it":true,"trentinosud-tirol.it":true,"trentinosudtirol.it":true,"trentinosued-tirol.it":true,"trentinosuedtirol.it":true,"tuscany.it":true,"umb.it":true,"umbria.it":true,"val-d-aosta.it":true,"val-daosta.it":true,"vald-aosta.it":true,"valdaosta.it":true,"valle-aosta.it":true,"valle-d-aosta.it":true,"valle-daosta.it":true,"valleaosta.it":true,"valled-aosta.it":true,"valledaosta.it":true,"vallee-aoste.it":true,"valleeaoste.it":true,"vao.it":true,"vda.it":true,"ven.it":true,"veneto.it":true,"ag.it":true,"agrigento.it":true,"al.it":true,"alessandria.it":true,"alto-adige.it":true,"altoadige.it":true,"an.it":true,"ancona.it":true,"andria-barletta-trani.it":true,"andria-trani-barletta.it":true,"andriabarlettatrani.it":true,"andriatranibarletta.it":true,"ao.it":true,"aosta.it":true,"aoste.it":true,"ap.it":true,"aq.it":true,"aquila.it":true,"ar.it":true,"arezzo.it":true,"ascoli-piceno.it":true,"ascolipiceno.it":true,"asti.it":true,"at.it":true,"av.it":true,"avellino.it":true,"ba.it":true,"balsan.it":true,"bari.it":true,"barletta-trani-andria.it":true,"barlettatraniandria.it":true,"belluno.it":true,"benevento.it":true,"bergamo.it":true,"bg.it":true,"bi.it":true,"biella.it":true,"bl.it":true,"bn.it":true,"bo.it":true,"bologna.it":true,"bolzano.it":true,"bozen.it":true,"br.it":true,"brescia.it":true,"brindisi.it":true,"bs.it":true,"bt.it":true,"bz.it":true,"ca.it":true,"cagliari.it":true,"caltanissetta.it":true,"campidano-medio.it":true,"campidanomedio.it":true,"campobasso.it":true,"carbonia-iglesias.it":true,"carboniaiglesias.it":true,"carrara-massa.it":true,"carraramassa.it":true,"caserta.it":true,"catania.it":true,"catanzaro.it":true,"cb.it":true,"ce.it":true,"cesena-forli.it":true,"cesenaforli.it":true,"ch.it":true,"chieti.it":true,"ci.it":true,"cl.it":true,"cn.it":true,"co.it":true,"como.it":true,"cosenza.it":true,"cr.it":true,"cremona.it":true,"crotone.it":true,"cs.it":true,"ct.it":true,"cuneo.it":true,"cz.it":true,"dell-ogliastra.it":true,"dellogliastra.it":true,"en.it":true,"enna.it":true,"fc.it":true,"fe.it":true,"fermo.it":true,"ferrara.it":true,"fg.it":true,"fi.it":true,"firenze.it":true,"florence.it":true,"fm.it":true,"foggia.it":true,"forli-cesena.it":true,"forlicesena.it":true,"fr.it":true,"frosinone.it":true,"ge.it":true,"genoa.it":true,"genova.it":true,"go.it":true,"gorizia.it":true,"gr.it":true,"grosseto.it":true,"iglesias-carbonia.it":true,"iglesiascarbonia.it":true,"im.it":true,"imperia.it":true,"is.it":true,"isernia.it":true,"kr.it":true,"la-spezia.it":true,"laquila.it":true,"laspezia.it":true,"latina.it":true,"lc.it":true,"le.it":true,"lecce.it":true,"lecco.it":true,"li.it":true,"livorno.it":true,"lo.it":true,"lodi.it":true,"lt.it":true,"lu.it":true,"lucca.it":true,"macerata.it":true,"mantova.it":true,"massa-carrara.it":true,"massacarrara.it":true,"matera.it":true,"mb.it":true
,"mc.it":true,"me.it":true,"medio-campidano.it":true,"mediocampidano.it":true,"messina.it":true,"mi.it":true,"milan.it":true,"milano.it":true,"mn.it":true,"mo.it":true,"modena.it":true,"monza-brianza.it":true,"monza-e-della-brianza.it":true,"monza.it":true,"monzabrianza.it":true,"monzaebrianza.it":true,"monzaedellabrianza.it":true,"ms.it":true,"mt.it":true,"na.it":true,"naples.it":true,"napoli.it":true,"no.it":true,"novara.it":true,"nu.it":true,"nuoro.it":true,"og.it":true,"ogliastra.it":true,"olbia-tempio.it":true,"olbiatempio.it":true,"or.it":true,"oristano.it":true,"ot.it":true,"pa.it":true,"padova.it":true,"padua.it":true,"palermo.it":true,"parma.it":true,"pavia.it":true,"pc.it":true,"pd.it":true,"pe.it":true,"perugia.it":true,"pesaro-urbino.it":true,"pesarourbino.it":true,"pescara.it":true,"pg.it":true,"pi.it":true,"piacenza.it":true,"pisa.it":true,"pistoia.it":true,"pn.it":true,"po.it":true,"pordenone.it":true,"potenza.it":true,"pr.it":true,"prato.it":true,"pt.it":true,"pu.it":true,"pv.it":true,"pz.it":true,"ra.it":true,"ragusa.it":true,"ravenna.it":true,"rc.it":true,"re.it":true,"reggio-calabria.it":true,"reggio-emilia.it":true,"reggiocalabria.it":true,"reggioemilia.it":true,"rg.it":true,"ri.it":true,"rieti.it":true,"rimini.it":true,"rm.it":true,"rn.it":true,"ro.it":true,"roma.it":true,"rome.it":true,"rovigo.it":true,"sa.it":true,"salerno.it":true,"sassari.it":true,"savona.it":true,"si.it":true,"siena.it":true,"siracusa.it":true,"so.it":true,"sondrio.it":true,"sp.it":true,"sr.it":true,"ss.it":true,"suedtirol.it":true,"sv.it":true,"ta.it":true,"taranto.it":true,"te.it":true,"tempio-olbia.it":true,"tempioolbia.it":true,"teramo.it":true,"terni.it":true,"tn.it":true,"to.it":true,"torino.it":true,"tp.it":true,"tr.it":true,"trani-andria-barletta.it":true,"trani-barletta-andria.it":true,"traniandriabarletta.it":true,"tranibarlettaandria.it":true,"trapani.it":true,"trentino.it":true,"trento.it":true,"treviso.it":true,"trieste.it":true,"ts.it":true,"turin.it":true,"tv.it":true,"ud.it":true,"udine.it":true,"urbino-pesaro.it":true,"urbinopesaro.it":true,"va.it":true,"varese.it":true,"vb.it":true,"vc.it":true,"ve.it":true,"venezia.it":true,"venice.it":true,"verbania.it":true,"vercelli.it":true,"verona.it":true,"vi.it":true,"vibo-valentia.it":true,"vibovalentia.it":true,"vicenza.it":true,"viterbo.it":true,"vr.it":true,"vs.it":true,"vt.it":true,"vv.it":true,"je":true,"co.je":true,"net.je":true,"org.je":true,"*.jm":true,"jo":true,"com.jo":true,"org.jo":true,"net.jo":true,"edu.jo":true,"sch.jo":true,"gov.jo":true,"mil.jo":true,"name.jo":true,"jobs":true,"jp":true,"ac.jp":true,"ad.jp":true,"co.jp":true,"ed.jp":true,"go.jp":true,"gr.jp":true,"lg.jp":true,"ne.jp":true,"or.jp":true,"aichi.jp":true,"akita.jp":true,"aomori.jp":true,"chiba.jp":true,"ehime.jp":true,"fukui.jp":true,"fukuoka.jp":true,"fukushima.jp":true,"gifu.jp":true,"gunma.jp":true,"hiroshima.jp":true,"hokkaido.jp":true,"hyogo.jp":true,"ibaraki.jp":true,"ishikawa.jp":true,"iwate.jp":true,"kagawa.jp":true,"kagoshima.jp":true,"kanagawa.jp":true,"kochi.jp":true,"kumamoto.jp":true,"kyoto.jp":true,"mie.jp":true,"miyagi.jp":true,"miyazaki.jp":true,"nagano.jp":true,"nagasaki.jp":true,"nara.jp":true,"niigata.jp":true,"oita.jp":true,"okayama.jp":true,"okinawa.jp":true,"osaka.jp":true,"saga.jp":true,"saitama.jp":true,"shiga.jp":true,"shimane.jp":true,"shizuoka.jp":true,"tochigi.jp":true,"tokushima.jp":true,"tokyo.jp":true,"tottori.jp":true,"toyama.jp":true,"wakayama.jp":true,"yamagata.jp":true,"yamaguchi.jp":true,"yamanashi.jp":true,"xn--4pvxs.jp":tr
ue,"xn--vgu402c.jp":true,"xn--c3s14m.jp":true,"xn--f6qx53a.jp":true,"xn--8pvr4u.jp":true,"xn--uist22h.jp":true,"xn--djrs72d6uy.jp":true,"xn--mkru45i.jp":true,"xn--0trq7p7nn.jp":true,"xn--8ltr62k.jp":true,"xn--2m4a15e.jp":true,"xn--efvn9s.jp":true,"xn--32vp30h.jp":true,"xn--4it797k.jp":true,"xn--1lqs71d.jp":true,"xn--5rtp49c.jp":true,"xn--5js045d.jp":true,"xn--ehqz56n.jp":true,"xn--1lqs03n.jp":true,"xn--qqqt11m.jp":true,"xn--kbrq7o.jp":true,"xn--pssu33l.jp":true,"xn--ntsq17g.jp":true,"xn--uisz3g.jp":true,"xn--6btw5a.jp":true,"xn--1ctwo.jp":true,"xn--6orx2r.jp":true,"xn--rht61e.jp":true,"xn--rht27z.jp":true,"xn--djty4k.jp":true,"xn--nit225k.jp":true,"xn--rht3d.jp":true,"xn--klty5x.jp":true,"xn--kltx9a.jp":true,"xn--kltp7d.jp":true,"xn--uuwu58a.jp":true,"xn--zbx025d.jp":true,"xn--ntso0iqx3a.jp":true,"xn--elqq16h.jp":true,"xn--4it168d.jp":true,"xn--klt787d.jp":true,"xn--rny31h.jp":true,"xn--7t0a264c.jp":true,"xn--5rtq34k.jp":true,"xn--k7yn95e.jp":true,"xn--tor131o.jp":true,"xn--d5qv7z876c.jp":true,"*.kawasaki.jp":true,"*.kitakyushu.jp":true,"*.kobe.jp":true,"*.nagoya.jp":true,"*.sapporo.jp":true,"*.sendai.jp":true,"*.yokohama.jp":true,"city.kawasaki.jp":false,"city.kitakyushu.jp":false,"city.kobe.jp":false,"city.nagoya.jp":false,"city.sapporo.jp":false,"city.sendai.jp":false,"city.yokohama.jp":false,"aisai.aichi.jp":true,"ama.aichi.jp":true,"anjo.aichi.jp":true,"asuke.aichi.jp":true,"chiryu.aichi.jp":true,"chita.aichi.jp":true,"fuso.aichi.jp":true,"gamagori.aichi.jp":true,"handa.aichi.jp":true,"hazu.aichi.jp":true,"hekinan.aichi.jp":true,"higashiura.aichi.jp":true,"ichinomiya.aichi.jp":true,"inazawa.aichi.jp":true,"inuyama.aichi.jp":true,"isshiki.aichi.jp":true,"iwakura.aichi.jp":true,"kanie.aichi.jp":true,"kariya.aichi.jp":true,"kasugai.aichi.jp":true,"kira.aichi.jp":true,"kiyosu.aichi.jp":true,"komaki.aichi.jp":true,"konan.aichi.jp":true,"kota.aichi.jp":true,"mihama.aichi.jp":true,"miyoshi.aichi.jp":true,"nishio.aichi.jp":true,"nisshin.aichi.jp":true,"obu.aichi.jp":true,"oguchi.aichi.jp":true,"oharu.aichi.jp":true,"okazaki.aichi.jp":true,"owariasahi.aichi.jp":true,"seto.aichi.jp":true,"shikatsu.aichi.jp":true,"shinshiro.aichi.jp":true,"shitara.aichi.jp":true,"tahara.aichi.jp":true,"takahama.aichi.jp":true,"tobishima.aichi.jp":true,"toei.aichi.jp":true,"togo.aichi.jp":true,"tokai.aichi.jp":true,"tokoname.aichi.jp":true,"toyoake.aichi.jp":true,"toyohashi.aichi.jp":true,"toyokawa.aichi.jp":true,"toyone.aichi.jp":true,"toyota.aichi.jp":true,"tsushima.aichi.jp":true,"yatomi.aichi.jp":true,"akita.akita.jp":true,"daisen.akita.jp":true,"fujisato.akita.jp":true,"gojome.akita.jp":true,"hachirogata.akita.jp":true,"happou.akita.jp":true,"higashinaruse.akita.jp":true,"honjo.akita.jp":true,"honjyo.akita.jp":true,"ikawa.akita.jp":true,"kamikoani.akita.jp":true,"kamioka.akita.jp":true,"katagami.akita.jp":true,"kazuno.akita.jp":true,"kitaakita.akita.jp":true,"kosaka.akita.jp":true,"kyowa.akita.jp":true,"misato.akita.jp":true,"mitane.akita.jp":true,"moriyoshi.akita.jp":true,"nikaho.akita.jp":true,"noshiro.akita.jp":true,"odate.akita.jp":true,"oga.akita.jp":true,"ogata.akita.jp":true,"semboku.akita.jp":true,"yokote.akita.jp":true,"yurihonjo.akita.jp":true,"aomori.aomori.jp":true,"gonohe.aomori.jp":true,"hachinohe.aomori.jp":true,"hashikami.aomori.jp":true,"hiranai.aomori.jp":true,"hirosaki.aomori.jp":true,"itayanagi.aomori.jp":true,"kuroishi.aomori.jp":true,"misawa.aomori.jp":true,"mutsu.aomori.jp":true,"nakadomari.aomori.jp":true,"noheji.aomori.jp":true,"oirase.aomori.jp":true,"owani.aomori.jp":true,"rokunohe.
aomori.jp":true,"sannohe.aomori.jp":true,"shichinohe.aomori.jp":true,"shingo.aomori.jp":true,"takko.aomori.jp":true,"towada.aomori.jp":true,"tsugaru.aomori.jp":true,"tsuruta.aomori.jp":true,"abiko.chiba.jp":true,"asahi.chiba.jp":true,"chonan.chiba.jp":true,"chosei.chiba.jp":true,"choshi.chiba.jp":true,"chuo.chiba.jp":true,"funabashi.chiba.jp":true,"futtsu.chiba.jp":true,"hanamigawa.chiba.jp":true,"ichihara.chiba.jp":true,"ichikawa.chiba.jp":true,"ichinomiya.chiba.jp":true,"inzai.chiba.jp":true,"isumi.chiba.jp":true,"kamagaya.chiba.jp":true,"kamogawa.chiba.jp":true,"kashiwa.chiba.jp":true,"katori.chiba.jp":true,"katsuura.chiba.jp":true,"kimitsu.chiba.jp":true,"kisarazu.chiba.jp":true,"kozaki.chiba.jp":true,"kujukuri.chiba.jp":true,"kyonan.chiba.jp":true,"matsudo.chiba.jp":true,"midori.chiba.jp":true,"mihama.chiba.jp":true,"minamiboso.chiba.jp":true,"mobara.chiba.jp":true,"mutsuzawa.chiba.jp":true,"nagara.chiba.jp":true,"nagareyama.chiba.jp":true,"narashino.chiba.jp":true,"narita.chiba.jp":true,"noda.chiba.jp":true,"oamishirasato.chiba.jp":true,"omigawa.chiba.jp":true,"onjuku.chiba.jp":true,"otaki.chiba.jp":true,"sakae.chiba.jp":true,"sakura.chiba.jp":true,"shimofusa.chiba.jp":true,"shirako.chiba.jp":true,"shiroi.chiba.jp":true,"shisui.chiba.jp":true,"sodegaura.chiba.jp":true,"sosa.chiba.jp":true,"tako.chiba.jp":true,"tateyama.chiba.jp":true,"togane.chiba.jp":true,"tohnosho.chiba.jp":true,"tomisato.chiba.jp":true,"urayasu.chiba.jp":true,"yachimata.chiba.jp":true,"yachiyo.chiba.jp":true,"yokaichiba.chiba.jp":true,"yokoshibahikari.chiba.jp":true,"yotsukaido.chiba.jp":true,"ainan.ehime.jp":true,"honai.ehime.jp":true,"ikata.ehime.jp":true,"imabari.ehime.jp":true,"iyo.ehime.jp":true,"kamijima.ehime.jp":true,"kihoku.ehime.jp":true,"kumakogen.ehime.jp":true,"masaki.ehime.jp":true,"matsuno.ehime.jp":true,"matsuyama.ehime.jp":true,"namikata.ehime.jp":true,"niihama.ehime.jp":true,"ozu.ehime.jp":true,"saijo.ehime.jp":true,"seiyo.ehime.jp":true,"shikokuchuo.ehime.jp":true,"tobe.ehime.jp":true,"toon.ehime.jp":true,"uchiko.ehime.jp":true,"uwajima.ehime.jp":true,"yawatahama.ehime.jp":true,"echizen.fukui.jp":true,"eiheiji.fukui.jp":true,"fukui.fukui.jp":true,"ikeda.fukui.jp":true,"katsuyama.fukui.jp":true,"mihama.fukui.jp":true,"minamiechizen.fukui.jp":true,"obama.fukui.jp":true,"ohi.fukui.jp":true,"ono.fukui.jp":true,"sabae.fukui.jp":true,"sakai.fukui.jp":true,"takahama.fukui.jp":true,"tsuruga.fukui.jp":true,"wakasa.fukui.jp":true,"ashiya.fukuoka.jp":true,"buzen.fukuoka.jp":true,"chikugo.fukuoka.jp":true,"chikuho.fukuoka.jp":true,"chikujo.fukuoka.jp":true,"chikushino.fukuoka.jp":true,"chikuzen.fukuoka.jp":true,"chuo.fukuoka.jp":true,"dazaifu.fukuoka.jp":true,"fukuchi.fukuoka.jp":true,"hakata.fukuoka.jp":true,"higashi.fukuoka.jp":true,"hirokawa.fukuoka.jp":true,"hisayama.fukuoka.jp":true,"iizuka.fukuoka.jp":true,"inatsuki.fukuoka.jp":true,"kaho.fukuoka.jp":true,"kasuga.fukuoka.jp":true,"kasuya.fukuoka.jp":true,"kawara.fukuoka.jp":true,"keisen.fukuoka.jp":true,"koga.fukuoka.jp":true,"kurate.fukuoka.jp":true,"kurogi.fukuoka.jp":true,"kurume.fukuoka.jp":true,"minami.fukuoka.jp":true,"miyako.fukuoka.jp":true,"miyama.fukuoka.jp":true,"miyawaka.fukuoka.jp":true,"mizumaki.fukuoka.jp":true,"munakata.fukuoka.jp":true,"nakagawa.fukuoka.jp":true,"nakama.fukuoka.jp":true,"nishi.fukuoka.jp":true,"nogata.fukuoka.jp":true,"ogori.fukuoka.jp":true,"okagaki.fukuoka.jp":true,"okawa.fukuoka.jp":true,"oki.fukuoka.jp":true,"omuta.fukuoka.jp":true,"onga.fukuoka.jp":true,"onojo.fukuoka.jp":true,"oto.fukuoka.jp":true,"saigawa.fukuok
a.jp":true,"sasaguri.fukuoka.jp":true,"shingu.fukuoka.jp":true,"shinyoshitomi.fukuoka.jp":true,"shonai.fukuoka.jp":true,"soeda.fukuoka.jp":true,"sue.fukuoka.jp":true,"tachiarai.fukuoka.jp":true,"tagawa.fukuoka.jp":true,"takata.fukuoka.jp":true,"toho.fukuoka.jp":true,"toyotsu.fukuoka.jp":true,"tsuiki.fukuoka.jp":true,"ukiha.fukuoka.jp":true,"umi.fukuoka.jp":true,"usui.fukuoka.jp":true,"yamada.fukuoka.jp":true,"yame.fukuoka.jp":true,"yanagawa.fukuoka.jp":true,"yukuhashi.fukuoka.jp":true,"aizubange.fukushima.jp":true,"aizumisato.fukushima.jp":true,"aizuwakamatsu.fukushima.jp":true,"asakawa.fukushima.jp":true,"bandai.fukushima.jp":true,"date.fukushima.jp":true,"fukushima.fukushima.jp":true,"furudono.fukushima.jp":true,"futaba.fukushima.jp":true,"hanawa.fukushima.jp":true,"higashi.fukushima.jp":true,"hirata.fukushima.jp":true,"hirono.fukushima.jp":true,"iitate.fukushima.jp":true,"inawashiro.fukushima.jp":true,"ishikawa.fukushima.jp":true,"iwaki.fukushima.jp":true,"izumizaki.fukushima.jp":true,"kagamiishi.fukushima.jp":true,"kaneyama.fukushima.jp":true,"kawamata.fukushima.jp":true,"kitakata.fukushima.jp":true,"kitashiobara.fukushima.jp":true,"koori.fukushima.jp":true,"koriyama.fukushima.jp":true,"kunimi.fukushima.jp":true,"miharu.fukushima.jp":true,"mishima.fukushima.jp":true,"namie.fukushima.jp":true,"nango.fukushima.jp":true,"nishiaizu.fukushima.jp":true,"nishigo.fukushima.jp":true,"okuma.fukushima.jp":true,"omotego.fukushima.jp":true,"ono.fukushima.jp":true,"otama.fukushima.jp":true,"samegawa.fukushima.jp":true,"shimogo.fukushima.jp":true,"shirakawa.fukushima.jp":true,"showa.fukushima.jp":true,"soma.fukushima.jp":true,"sukagawa.fukushima.jp":true,"taishin.fukushima.jp":true,"tamakawa.fukushima.jp":true,"tanagura.fukushima.jp":true,"tenei.fukushima.jp":true,"yabuki.fukushima.jp":true,"yamato.fukushima.jp":true,"yamatsuri.fukushima.jp":true,"yanaizu.fukushima.jp":true,"yugawa.fukushima.jp":true,"anpachi.gifu.jp":true,"ena.gifu.jp":true,"gifu.gifu.jp":true,"ginan.gifu.jp":true,"godo.gifu.jp":true,"gujo.gifu.jp":true,"hashima.gifu.jp":true,"hichiso.gifu.jp":true,"hida.gifu.jp":true,"higashishirakawa.gifu.jp":true,"ibigawa.gifu.jp":true,"ikeda.gifu.jp":true,"kakamigahara.gifu.jp":true,"kani.gifu.jp":true,"kasahara.gifu.jp":true,"kasamatsu.gifu.jp":true,"kawaue.gifu.jp":true,"kitagata.gifu.jp":true,"mino.gifu.jp":true,"minokamo.gifu.jp":true,"mitake.gifu.jp":true,"mizunami.gifu.jp":true,"motosu.gifu.jp":true,"nakatsugawa.gifu.jp":true,"ogaki.gifu.jp":true,"sakahogi.gifu.jp":true,"seki.gifu.jp":true,"sekigahara.gifu.jp":true,"shirakawa.gifu.jp":true,"tajimi.gifu.jp":true,"takayama.gifu.jp":true,"tarui.gifu.jp":true,"toki.gifu.jp":true,"tomika.gifu.jp":true,"wanouchi.gifu.jp":true,"yamagata.gifu.jp":true,"yaotsu.gifu.jp":true,"yoro.gifu.jp":true,"annaka.gunma.jp":true,"chiyoda.gunma.jp":true,"fujioka.gunma.jp":true,"higashiagatsuma.gunma.jp":true,"isesaki.gunma.jp":true,"itakura.gunma.jp":true,"kanna.gunma.jp":true,"kanra.gunma.jp":true,"katashina.gunma.jp":true,"kawaba.gunma.jp":true,"kiryu.gunma.jp":true,"kusatsu.gunma.jp":true,"maebashi.gunma.jp":true,"meiwa.gunma.jp":true,"midori.gunma.jp":true,"minakami.gunma.jp":true,"naganohara.gunma.jp":true,"nakanojo.gunma.jp":true,"nanmoku.gunma.jp":true,"numata.gunma.jp":true,"oizumi.gunma.jp":true,"ora.gunma.jp":true,"ota.gunma.jp":true,"shibukawa.gunma.jp":true,"shimonita.gunma.jp":true,"shinto.gunma.jp":true,"showa.gunma.jp":true,"takasaki.gunma.jp":true,"takayama.gunma.jp":true,"tamamura.gunma.jp":true,"tatebayashi.gunma.jp":true,"tomioka.gunma.jp":true
,"tsukiyono.gunma.jp":true,"tsumagoi.gunma.jp":true,"ueno.gunma.jp":true,"yoshioka.gunma.jp":true,"asaminami.hiroshima.jp":true,"daiwa.hiroshima.jp":true,"etajima.hiroshima.jp":true,"fuchu.hiroshima.jp":true,"fukuyama.hiroshima.jp":true,"hatsukaichi.hiroshima.jp":true,"higashihiroshima.hiroshima.jp":true,"hongo.hiroshima.jp":true,"jinsekikogen.hiroshima.jp":true,"kaita.hiroshima.jp":true,"kui.hiroshima.jp":true,"kumano.hiroshima.jp":true,"kure.hiroshima.jp":true,"mihara.hiroshima.jp":true,"miyoshi.hiroshima.jp":true,"naka.hiroshima.jp":true,"onomichi.hiroshima.jp":true,"osakikamijima.hiroshima.jp":true,"otake.hiroshima.jp":true,"saka.hiroshima.jp":true,"sera.hiroshima.jp":true,"seranishi.hiroshima.jp":true,"shinichi.hiroshima.jp":true,"shobara.hiroshima.jp":true,"takehara.hiroshima.jp":true,"abashiri.hokkaido.jp":true,"abira.hokkaido.jp":true,"aibetsu.hokkaido.jp":true,"akabira.hokkaido.jp":true,"akkeshi.hokkaido.jp":true,"asahikawa.hokkaido.jp":true,"ashibetsu.hokkaido.jp":true,"ashoro.hokkaido.jp":true,"assabu.hokkaido.jp":true,"atsuma.hokkaido.jp":true,"bibai.hokkaido.jp":true,"biei.hokkaido.jp":true,"bifuka.hokkaido.jp":true,"bihoro.hokkaido.jp":true,"biratori.hokkaido.jp":true,"chippubetsu.hokkaido.jp":true,"chitose.hokkaido.jp":true,"date.hokkaido.jp":true,"ebetsu.hokkaido.jp":true,"embetsu.hokkaido.jp":true,"eniwa.hokkaido.jp":true,"erimo.hokkaido.jp":true,"esan.hokkaido.jp":true,"esashi.hokkaido.jp":true,"fukagawa.hokkaido.jp":true,"fukushima.hokkaido.jp":true,"furano.hokkaido.jp":true,"furubira.hokkaido.jp":true,"haboro.hokkaido.jp":true,"hakodate.hokkaido.jp":true,"hamatonbetsu.hokkaido.jp":true,"hidaka.hokkaido.jp":true,"higashikagura.hokkaido.jp":true,"higashikawa.hokkaido.jp":true,"hiroo.hokkaido.jp":true,"hokuryu.hokkaido.jp":true,"hokuto.hokkaido.jp":true,"honbetsu.hokkaido.jp":true,"horokanai.hokkaido.jp":true,"horonobe.hokkaido.jp":true,"ikeda.hokkaido.jp":true,"imakane.hokkaido.jp":true,"ishikari.hokkaido.jp":true,"iwamizawa.hokkaido.jp":true,"iwanai.hokkaido.jp":true,"kamifurano.hokkaido.jp":true,"kamikawa.hokkaido.jp":true,"kamishihoro.hokkaido.jp":true,"kamisunagawa.hokkaido.jp":true,"kamoenai.hokkaido.jp":true,"kayabe.hokkaido.jp":true,"kembuchi.hokkaido.jp":true,"kikonai.hokkaido.jp":true,"kimobetsu.hokkaido.jp":true,"kitahiroshima.hokkaido.jp":true,"kitami.hokkaido.jp":true,"kiyosato.hokkaido.jp":true,"koshimizu.hokkaido.jp":true,"kunneppu.hokkaido.jp":true,"kuriyama.hokkaido.jp":true,"kuromatsunai.hokkaido.jp":true,"kushiro.hokkaido.jp":true,"kutchan.hokkaido.jp":true,"kyowa.hokkaido.jp":true,"mashike.hokkaido.jp":true,"matsumae.hokkaido.jp":true,"mikasa.hokkaido.jp":true,"minamifurano.hokkaido.jp":true,"mombetsu.hokkaido.jp":true,"moseushi.hokkaido.jp":true,"mukawa.hokkaido.jp":true,"muroran.hokkaido.jp":true,"naie.hokkaido.jp":true,"nakagawa.hokkaido.jp":true,"nakasatsunai.hokkaido.jp":true,"nakatombetsu.hokkaido.jp":true,"nanae.hokkaido.jp":true,"nanporo.hokkaido.jp":true,"nayoro.hokkaido.jp":true,"nemuro.hokkaido.jp":true,"niikappu.hokkaido.jp":true,"niki.hokkaido.jp":true,"nishiokoppe.hokkaido.jp":true,"noboribetsu.hokkaido.jp":true,"numata.hokkaido.jp":true,"obihiro.hokkaido.jp":true,"obira.hokkaido.jp":true,"oketo.hokkaido.jp":true,"okoppe.hokkaido.jp":true,"otaru.hokkaido.jp":true,"otobe.hokkaido.jp":true,"otofuke.hokkaido.jp":true,"otoineppu.hokkaido.jp":true,"oumu.hokkaido.jp":true,"ozora.hokkaido.jp":true,"pippu.hokkaido.jp":true,"rankoshi.hokkaido.jp":true,"rebun.hokkaido.jp":true,"rikubetsu.hokkaido.jp":true,"rishiri.hokkaido.jp":true,"rishirifuji.hokka
ido.jp":true,"saroma.hokkaido.jp":true,"sarufutsu.hokkaido.jp":true,"shakotan.hokkaido.jp":true,"shari.hokkaido.jp":true,"shibecha.hokkaido.jp":true,"shibetsu.hokkaido.jp":true,"shikabe.hokkaido.jp":true,"shikaoi.hokkaido.jp":true,"shimamaki.hokkaido.jp":true,"shimizu.hokkaido.jp":true,"shimokawa.hokkaido.jp":true,"shinshinotsu.hokkaido.jp":true,"shintoku.hokkaido.jp":true,"shiranuka.hokkaido.jp":true,"shiraoi.hokkaido.jp":true,"shiriuchi.hokkaido.jp":true,"sobetsu.hokkaido.jp":true,"sunagawa.hokkaido.jp":true,"taiki.hokkaido.jp":true,"takasu.hokkaido.jp":true,"takikawa.hokkaido.jp":true,"takinoue.hokkaido.jp":true,"teshikaga.hokkaido.jp":true,"tobetsu.hokkaido.jp":true,"tohma.hokkaido.jp":true,"tomakomai.hokkaido.jp":true,"tomari.hokkaido.jp":true,"toya.hokkaido.jp":true,"toyako.hokkaido.jp":true,"toyotomi.hokkaido.jp":true,"toyoura.hokkaido.jp":true,"tsubetsu.hokkaido.jp":true,"tsukigata.hokkaido.jp":true,"urakawa.hokkaido.jp":true,"urausu.hokkaido.jp":true,"uryu.hokkaido.jp":true,"utashinai.hokkaido.jp":true,"wakkanai.hokkaido.jp":true,"wassamu.hokkaido.jp":true,"yakumo.hokkaido.jp":true,"yoichi.hokkaido.jp":true,"aioi.hyogo.jp":true,"akashi.hyogo.jp":true,"ako.hyogo.jp":true,"amagasaki.hyogo.jp":true,"aogaki.hyogo.jp":true,"asago.hyogo.jp":true,"ashiya.hyogo.jp":true,"awaji.hyogo.jp":true,"fukusaki.hyogo.jp":true,"goshiki.hyogo.jp":true,"harima.hyogo.jp":true,"himeji.hyogo.jp":true,"ichikawa.hyogo.jp":true,"inagawa.hyogo.jp":true,"itami.hyogo.jp":true,"kakogawa.hyogo.jp":true,"kamigori.hyogo.jp":true,"kamikawa.hyogo.jp":true,"kasai.hyogo.jp":true,"kasuga.hyogo.jp":true,"kawanishi.hyogo.jp":true,"miki.hyogo.jp":true,"minamiawaji.hyogo.jp":true,"nishinomiya.hyogo.jp":true,"nishiwaki.hyogo.jp":true,"ono.hyogo.jp":true,"sanda.hyogo.jp":true,"sannan.hyogo.jp":true,"sasayama.hyogo.jp":true,"sayo.hyogo.jp":true,"shingu.hyogo.jp":true,"shinonsen.hyogo.jp":true,"shiso.hyogo.jp":true,"sumoto.hyogo.jp":true,"taishi.hyogo.jp":true,"taka.hyogo.jp":true,"takarazuka.hyogo.jp":true,"takasago.hyogo.jp":true,"takino.hyogo.jp":true,"tamba.hyogo.jp":true,"tatsuno.hyogo.jp":true,"toyooka.hyogo.jp":true,"yabu.hyogo.jp":true,"yashiro.hyogo.jp":true,"yoka.hyogo.jp":true,"yokawa.hyogo.jp":true,"ami.ibaraki.jp":true,"asahi.ibaraki.jp":true,"bando.ibaraki.jp":true,"chikusei.ibaraki.jp":true,"daigo.ibaraki.jp":true,"fujishiro.ibaraki.jp":true,"hitachi.ibaraki.jp":true,"hitachinaka.ibaraki.jp":true,"hitachiomiya.ibaraki.jp":true,"hitachiota.ibaraki.jp":true,"ibaraki.ibaraki.jp":true,"ina.ibaraki.jp":true,"inashiki.ibaraki.jp":true,"itako.ibaraki.jp":true,"iwama.ibaraki.jp":true,"joso.ibaraki.jp":true,"kamisu.ibaraki.jp":true,"kasama.ibaraki.jp":true,"kashima.ibaraki.jp":true,"kasumigaura.ibaraki.jp":true,"koga.ibaraki.jp":true,"miho.ibaraki.jp":true,"mito.ibaraki.jp":true,"moriya.ibaraki.jp":true,"naka.ibaraki.jp":true,"namegata.ibaraki.jp":true,"oarai.ibaraki.jp":true,"ogawa.ibaraki.jp":true,"omitama.ibaraki.jp":true,"ryugasaki.ibaraki.jp":true,"sakai.ibaraki.jp":true,"sakuragawa.ibaraki.jp":true,"shimodate.ibaraki.jp":true,"shimotsuma.ibaraki.jp":true,"shirosato.ibaraki.jp":true,"sowa.ibaraki.jp":true,"suifu.ibaraki.jp":true,"takahagi.ibaraki.jp":true,"tamatsukuri.ibaraki.jp":true,"tokai.ibaraki.jp":true,"tomobe.ibaraki.jp":true,"tone.ibaraki.jp":true,"toride.ibaraki.jp":true,"tsuchiura.ibaraki.jp":true,"tsukuba.ibaraki.jp":true,"uchihara.ibaraki.jp":true,"ushiku.ibaraki.jp":true,"yachiyo.ibaraki.jp":true,"yamagata.ibaraki.jp":true,"yawara.ibaraki.jp":true,"yuki.ibaraki.jp":true,"anamizu.ishikawa.jp":true,"hakui.
ishikawa.jp":true,"hakusan.ishikawa.jp":true,"kaga.ishikawa.jp":true,"kahoku.ishikawa.jp":true,"kanazawa.ishikawa.jp":true,"kawakita.ishikawa.jp":true,"komatsu.ishikawa.jp":true,"nakanoto.ishikawa.jp":true,"nanao.ishikawa.jp":true,"nomi.ishikawa.jp":true,"nonoichi.ishikawa.jp":true,"noto.ishikawa.jp":true,"shika.ishikawa.jp":true,"suzu.ishikawa.jp":true,"tsubata.ishikawa.jp":true,"tsurugi.ishikawa.jp":true,"uchinada.ishikawa.jp":true,"wajima.ishikawa.jp":true,"fudai.iwate.jp":true,"fujisawa.iwate.jp":true,"hanamaki.iwate.jp":true,"hiraizumi.iwate.jp":true,"hirono.iwate.jp":true,"ichinohe.iwate.jp":true,"ichinoseki.iwate.jp":true,"iwaizumi.iwate.jp":true,"iwate.iwate.jp":true,"joboji.iwate.jp":true,"kamaishi.iwate.jp":true,"kanegasaki.iwate.jp":true,"karumai.iwate.jp":true,"kawai.iwate.jp":true,"kitakami.iwate.jp":true,"kuji.iwate.jp":true,"kunohe.iwate.jp":true,"kuzumaki.iwate.jp":true,"miyako.iwate.jp":true,"mizusawa.iwate.jp":true,"morioka.iwate.jp":true,"ninohe.iwate.jp":true,"noda.iwate.jp":true,"ofunato.iwate.jp":true,"oshu.iwate.jp":true,"otsuchi.iwate.jp":true,"rikuzentakata.iwate.jp":true,"shiwa.iwate.jp":true,"shizukuishi.iwate.jp":true,"sumita.iwate.jp":true,"tanohata.iwate.jp":true,"tono.iwate.jp":true,"yahaba.iwate.jp":true,"yamada.iwate.jp":true,"ayagawa.kagawa.jp":true,"higashikagawa.kagawa.jp":true,"kanonji.kagawa.jp":true,"kotohira.kagawa.jp":true,"manno.kagawa.jp":true,"marugame.kagawa.jp":true,"mitoyo.kagawa.jp":true,"naoshima.kagawa.jp":true,"sanuki.kagawa.jp":true,"tadotsu.kagawa.jp":true,"takamatsu.kagawa.jp":true,"tonosho.kagawa.jp":true,"uchinomi.kagawa.jp":true,"utazu.kagawa.jp":true,"zentsuji.kagawa.jp":true,"akune.kagoshima.jp":true,"amami.kagoshima.jp":true,"hioki.kagoshima.jp":true,"isa.kagoshima.jp":true,"isen.kagoshima.jp":true,"izumi.kagoshima.jp":true,"kagoshima.kagoshima.jp":true,"kanoya.kagoshima.jp":true,"kawanabe.kagoshima.jp":true,"kinko.kagoshima.jp":true,"kouyama.kagoshima.jp":true,"makurazaki.kagoshima.jp":true,"matsumoto.kagoshima.jp":true,"minamitane.kagoshima.jp":true,"nakatane.kagoshima.jp":true,"nishinoomote.kagoshima.jp":true,"satsumasendai.kagoshima.jp":true,"soo.kagoshima.jp":true,"tarumizu.kagoshima.jp":true,"yusui.kagoshima.jp":true,"aikawa.kanagawa.jp":true,"atsugi.kanagawa.jp":true,"ayase.kanagawa.jp":true,"chigasaki.kanagawa.jp":true,"ebina.kanagawa.jp":true,"fujisawa.kanagawa.jp":true,"hadano.kanagawa.jp":true,"hakone.kanagawa.jp":true,"hiratsuka.kanagawa.jp":true,"isehara.kanagawa.jp":true,"kaisei.kanagawa.jp":true,"kamakura.kanagawa.jp":true,"kiyokawa.kanagawa.jp":true,"matsuda.kanagawa.jp":true,"minamiashigara.kanagawa.jp":true,"miura.kanagawa.jp":true,"nakai.kanagawa.jp":true,"ninomiya.kanagawa.jp":true,"odawara.kanagawa.jp":true,"oi.kanagawa.jp":true,"oiso.kanagawa.jp":true,"sagamihara.kanagawa.jp":true,"samukawa.kanagawa.jp":true,"tsukui.kanagawa.jp":true,"yamakita.kanagawa.jp":true,"yamato.kanagawa.jp":true,"yokosuka.kanagawa.jp":true,"yugawara.kanagawa.jp":true,"zama.kanagawa.jp":true,"zushi.kanagawa.jp":true,"aki.kochi.jp":true,"geisei.kochi.jp":true,"hidaka.kochi.jp":true,"higashitsuno.kochi.jp":true,"ino.kochi.jp":true,"kagami.kochi.jp":true,"kami.kochi.jp":true,"kitagawa.kochi.jp":true,"kochi.kochi.jp":true,"mihara.kochi.jp":true,"motoyama.kochi.jp":true,"muroto.kochi.jp":true,"nahari.kochi.jp":true,"nakamura.kochi.jp":true,"nankoku.kochi.jp":true,"nishitosa.kochi.jp":true,"niyodogawa.kochi.jp":true,"ochi.kochi.jp":true,"okawa.kochi.jp":true,"otoyo.kochi.jp":true,"otsuki.kochi.jp":true,"sakawa.kochi.jp":true,"sukumo.kochi.jp"
:true,"susaki.kochi.jp":true,"tosa.kochi.jp":true,"tosashimizu.kochi.jp":true,"toyo.kochi.jp":true,"tsuno.kochi.jp":true,"umaji.kochi.jp":true,"yasuda.kochi.jp":true,"yusuhara.kochi.jp":true,"amakusa.kumamoto.jp":true,"arao.kumamoto.jp":true,"aso.kumamoto.jp":true,"choyo.kumamoto.jp":true,"gyokuto.kumamoto.jp":true,"hitoyoshi.kumamoto.jp":true,"kamiamakusa.kumamoto.jp":true,"kashima.kumamoto.jp":true,"kikuchi.kumamoto.jp":true,"kosa.kumamoto.jp":true,"kumamoto.kumamoto.jp":true,"mashiki.kumamoto.jp":true,"mifune.kumamoto.jp":true,"minamata.kumamoto.jp":true,"minamioguni.kumamoto.jp":true,"nagasu.kumamoto.jp":true,"nishihara.kumamoto.jp":true,"oguni.kumamoto.jp":true,"ozu.kumamoto.jp":true,"sumoto.kumamoto.jp":true,"takamori.kumamoto.jp":true,"uki.kumamoto.jp":true,"uto.kumamoto.jp":true,"yamaga.kumamoto.jp":true,"yamato.kumamoto.jp":true,"yatsushiro.kumamoto.jp":true,"ayabe.kyoto.jp":true,"fukuchiyama.kyoto.jp":true,"higashiyama.kyoto.jp":true,"ide.kyoto.jp":true,"ine.kyoto.jp":true,"joyo.kyoto.jp":true,"kameoka.kyoto.jp":true,"kamo.kyoto.jp":true,"kita.kyoto.jp":true,"kizu.kyoto.jp":true,"kumiyama.kyoto.jp":true,"kyotamba.kyoto.jp":true,"kyotanabe.kyoto.jp":true,"kyotango.kyoto.jp":true,"maizuru.kyoto.jp":true,"minami.kyoto.jp":true,"minamiyamashiro.kyoto.jp":true,"miyazu.kyoto.jp":true,"muko.kyoto.jp":true,"nagaokakyo.kyoto.jp":true,"nakagyo.kyoto.jp":true,"nantan.kyoto.jp":true,"oyamazaki.kyoto.jp":true,"sakyo.kyoto.jp":true,"seika.kyoto.jp":true,"tanabe.kyoto.jp":true,"uji.kyoto.jp":true,"ujitawara.kyoto.jp":true,"wazuka.kyoto.jp":true,"yamashina.kyoto.jp":true,"yawata.kyoto.jp":true,"asahi.mie.jp":true,"inabe.mie.jp":true,"ise.mie.jp":true,"kameyama.mie.jp":true,"kawagoe.mie.jp":true,"kiho.mie.jp":true,"kisosaki.mie.jp":true,"kiwa.mie.jp":true,"komono.mie.jp":true,"kumano.mie.jp":true,"kuwana.mie.jp":true,"matsusaka.mie.jp":true,"meiwa.mie.jp":true,"mihama.mie.jp":true,"minamiise.mie.jp":true,"misugi.mie.jp":true,"miyama.mie.jp":true,"nabari.mie.jp":true,"shima.mie.jp":true,"suzuka.mie.jp":true,"tado.mie.jp":true,"taiki.mie.jp":true,"taki.mie.jp":true,"tamaki.mie.jp":true,"toba.mie.jp":true,"tsu.mie.jp":true,"udono.mie.jp":true,"ureshino.mie.jp":true,"watarai.mie.jp":true,"yokkaichi.mie.jp":true,"furukawa.miyagi.jp":true,"higashimatsushima.miyagi.jp":true,"ishinomaki.miyagi.jp":true,"iwanuma.miyagi.jp":true,"kakuda.miyagi.jp":true,"kami.miyagi.jp":true,"kawasaki.miyagi.jp":true,"kesennuma.miyagi.jp":true,"marumori.miyagi.jp":true,"matsushima.miyagi.jp":true,"minamisanriku.miyagi.jp":true,"misato.miyagi.jp":true,"murata.miyagi.jp":true,"natori.miyagi.jp":true,"ogawara.miyagi.jp":true,"ohira.miyagi.jp":true,"onagawa.miyagi.jp":true,"osaki.miyagi.jp":true,"rifu.miyagi.jp":true,"semine.miyagi.jp":true,"shibata.miyagi.jp":true,"shichikashuku.miyagi.jp":true,"shikama.miyagi.jp":true,"shiogama.miyagi.jp":true,"shiroishi.miyagi.jp":true,"tagajo.miyagi.jp":true,"taiwa.miyagi.jp":true,"tome.miyagi.jp":true,"tomiya.miyagi.jp":true,"wakuya.miyagi.jp":true,"watari.miyagi.jp":true,"yamamoto.miyagi.jp":true,"zao.miyagi.jp":true,"aya.miyazaki.jp":true,"ebino.miyazaki.jp":true,"gokase.miyazaki.jp":true,"hyuga.miyazaki.jp":true,"kadogawa.miyazaki.jp":true,"kawaminami.miyazaki.jp":true,"kijo.miyazaki.jp":true,"kitagawa.miyazaki.jp":true,"kitakata.miyazaki.jp":true,"kitaura.miyazaki.jp":true,"kobayashi.miyazaki.jp":true,"kunitomi.miyazaki.jp":true,"kushima.miyazaki.jp":true,"mimata.miyazaki.jp":true,"miyakonojo.miyazaki.jp":true,"miyazaki.miyazaki.jp":true,"morotsuka.miyazaki.jp":true,"nichinan.miyazaki.j
p":true,"nishimera.miyazaki.jp":true,"nobeoka.miyazaki.jp":true,"saito.miyazaki.jp":true,"shiiba.miyazaki.jp":true,"shintomi.miyazaki.jp":true,"takaharu.miyazaki.jp":true,"takanabe.miyazaki.jp":true,"takazaki.miyazaki.jp":true,"tsuno.miyazaki.jp":true,"achi.nagano.jp":true,"agematsu.nagano.jp":true,"anan.nagano.jp":true,"aoki.nagano.jp":true,"asahi.nagano.jp":true,"azumino.nagano.jp":true,"chikuhoku.nagano.jp":true,"chikuma.nagano.jp":true,"chino.nagano.jp":true,"fujimi.nagano.jp":true,"hakuba.nagano.jp":true,"hara.nagano.jp":true,"hiraya.nagano.jp":true,"iida.nagano.jp":true,"iijima.nagano.jp":true,"iiyama.nagano.jp":true,"iizuna.nagano.jp":true,"ikeda.nagano.jp":true,"ikusaka.nagano.jp":true,"ina.nagano.jp":true,"karuizawa.nagano.jp":true,"kawakami.nagano.jp":true,"kiso.nagano.jp":true,"kisofukushima.nagano.jp":true,"kitaaiki.nagano.jp":true,"komagane.nagano.jp":true,"komoro.nagano.jp":true,"matsukawa.nagano.jp":true,"matsumoto.nagano.jp":true,"miasa.nagano.jp":true,"minamiaiki.nagano.jp":true,"minamimaki.nagano.jp":true,"minamiminowa.nagano.jp":true,"minowa.nagano.jp":true,"miyada.nagano.jp":true,"miyota.nagano.jp":true,"mochizuki.nagano.jp":true,"nagano.nagano.jp":true,"nagawa.nagano.jp":true,"nagiso.nagano.jp":true,"nakagawa.nagano.jp":true,"nakano.nagano.jp":true,"nozawaonsen.nagano.jp":true,"obuse.nagano.jp":true,"ogawa.nagano.jp":true,"okaya.nagano.jp":true,"omachi.nagano.jp":true,"omi.nagano.jp":true,"ookuwa.nagano.jp":true,"ooshika.nagano.jp":true,"otaki.nagano.jp":true,"otari.nagano.jp":true,"sakae.nagano.jp":true,"sakaki.nagano.jp":true,"saku.nagano.jp":true,"sakuho.nagano.jp":true,"shimosuwa.nagano.jp":true,"shinanomachi.nagano.jp":true,"shiojiri.nagano.jp":true,"suwa.nagano.jp":true,"suzaka.nagano.jp":true,"takagi.nagano.jp":true,"takamori.nagano.jp":true,"takayama.nagano.jp":true,"tateshina.nagano.jp":true,"tatsuno.nagano.jp":true,"togakushi.nagano.jp":true,"togura.nagano.jp":true,"tomi.nagano.jp":true,"ueda.nagano.jp":true,"wada.nagano.jp":true,"yamagata.nagano.jp":true,"yamanouchi.nagano.jp":true,"yasaka.nagano.jp":true,"yasuoka.nagano.jp":true,"chijiwa.nagasaki.jp":true,"futsu.nagasaki.jp":true,"goto.nagasaki.jp":true,"hasami.nagasaki.jp":true,"hirado.nagasaki.jp":true,"iki.nagasaki.jp":true,"isahaya.nagasaki.jp":true,"kawatana.nagasaki.jp":true,"kuchinotsu.nagasaki.jp":true,"matsuura.nagasaki.jp":true,"nagasaki.nagasaki.jp":true,"obama.nagasaki.jp":true,"omura.nagasaki.jp":true,"oseto.nagasaki.jp":true,"saikai.nagasaki.jp":true,"sasebo.nagasaki.jp":true,"seihi.nagasaki.jp":true,"shimabara.nagasaki.jp":true,"shinkamigoto.nagasaki.jp":true,"togitsu.nagasaki.jp":true,"tsushima.nagasaki.jp":true,"unzen.nagasaki.jp":true,"ando.nara.jp":true,"gose.nara.jp":true,"heguri.nara.jp":true,"higashiyoshino.nara.jp":true,"ikaruga.nara.jp":true,"ikoma.nara.jp":true,"kamikitayama.nara.jp":true,"kanmaki.nara.jp":true,"kashiba.nara.jp":true,"kashihara.nara.jp":true,"katsuragi.nara.jp":true,"kawai.nara.jp":true,"kawakami.nara.jp":true,"kawanishi.nara.jp":true,"koryo.nara.jp":true,"kurotaki.nara.jp":true,"mitsue.nara.jp":true,"miyake.nara.jp":true,"nara.nara.jp":true,"nosegawa.nara.jp":true,"oji.nara.jp":true,"ouda.nara.jp":true,"oyodo.nara.jp":true,"sakurai.nara.jp":true,"sango.nara.jp":true,"shimoichi.nara.jp":true,"shimokitayama.nara.jp":true,"shinjo.nara.jp":true,"soni.nara.jp":true,"takatori.nara.jp":true,"tawaramoto.nara.jp":true,"tenkawa.nara.jp":true,"tenri.nara.jp":true,"uda.nara.jp":true,"yamatokoriyama.nara.jp":true,"yamatotakada.nara.jp":true,"yamazoe.nara.jp":true,"yoshino.nara.j
p":true,"aga.niigata.jp":true,"agano.niigata.jp":true,"gosen.niigata.jp":true,"itoigawa.niigata.jp":true,"izumozaki.niigata.jp":true,"joetsu.niigata.jp":true,"kamo.niigata.jp":true,"kariwa.niigata.jp":true,"kashiwazaki.niigata.jp":true,"minamiuonuma.niigata.jp":true,"mitsuke.niigata.jp":true,"muika.niigata.jp":true,"murakami.niigata.jp":true,"myoko.niigata.jp":true,"nagaoka.niigata.jp":true,"niigata.niigata.jp":true,"ojiya.niigata.jp":true,"omi.niigata.jp":true,"sado.niigata.jp":true,"sanjo.niigata.jp":true,"seiro.niigata.jp":true,"seirou.niigata.jp":true,"sekikawa.niigata.jp":true,"shibata.niigata.jp":true,"tagami.niigata.jp":true,"tainai.niigata.jp":true,"tochio.niigata.jp":true,"tokamachi.niigata.jp":true,"tsubame.niigata.jp":true,"tsunan.niigata.jp":true,"uonuma.niigata.jp":true,"yahiko.niigata.jp":true,"yoita.niigata.jp":true,"yuzawa.niigata.jp":true,"beppu.oita.jp":true,"bungoono.oita.jp":true,"bungotakada.oita.jp":true,"hasama.oita.jp":true,"hiji.oita.jp":true,"himeshima.oita.jp":true,"hita.oita.jp":true,"kamitsue.oita.jp":true,"kokonoe.oita.jp":true,"kuju.oita.jp":true,"kunisaki.oita.jp":true,"kusu.oita.jp":true,"oita.oita.jp":true,"saiki.oita.jp":true,"taketa.oita.jp":true,"tsukumi.oita.jp":true,"usa.oita.jp":true,"usuki.oita.jp":true,"yufu.oita.jp":true,"akaiwa.okayama.jp":true,"asakuchi.okayama.jp":true,"bizen.okayama.jp":true,"hayashima.okayama.jp":true,"ibara.okayama.jp":true,"kagamino.okayama.jp":true,"kasaoka.okayama.jp":true,"kibichuo.okayama.jp":true,"kumenan.okayama.jp":true,"kurashiki.okayama.jp":true,"maniwa.okayama.jp":true,"misaki.okayama.jp":true,"nagi.okayama.jp":true,"niimi.okayama.jp":true,"nishiawakura.okayama.jp":true,"okayama.okayama.jp":true,"satosho.okayama.jp":true,"setouchi.okayama.jp":true,"shinjo.okayama.jp":true,"shoo.okayama.jp":true,"soja.okayama.jp":true,"takahashi.okayama.jp":true,"tamano.okayama.jp":true,"tsuyama.okayama.jp":true,"wake.okayama.jp":true,"yakage.okayama.jp":true,"aguni.okinawa.jp":true,"ginowan.okinawa.jp":true,"ginoza.okinawa.jp":true,"gushikami.okinawa.jp":true,"haebaru.okinawa.jp":true,"higashi.okinawa.jp":true,"hirara.okinawa.jp":true,"iheya.okinawa.jp":true,"ishigaki.okinawa.jp":true,"ishikawa.okinawa.jp":true,"itoman.okinawa.jp":true,"izena.okinawa.jp":true,"kadena.okinawa.jp":true,"kin.okinawa.jp":true,"kitadaito.okinawa.jp":true,"kitanakagusuku.okinawa.jp":true,"kumejima.okinawa.jp":true,"kunigami.okinawa.jp":true,"minamidaito.okinawa.jp":true,"motobu.okinawa.jp":true,"nago.okinawa.jp":true,"naha.okinawa.jp":true,"nakagusuku.okinawa.jp":true,"nakijin.okinawa.jp":true,"nanjo.okinawa.jp":true,"nishihara.okinawa.jp":true,"ogimi.okinawa.jp":true,"okinawa.okinawa.jp":true,"onna.okinawa.jp":true,"shimoji.okinawa.jp":true,"taketomi.okinawa.jp":true,"tarama.okinawa.jp":true,"tokashiki.okinawa.jp":true,"tomigusuku.okinawa.jp":true,"tonaki.okinawa.jp":true,"urasoe.okinawa.jp":true,"uruma.okinawa.jp":true,"yaese.okinawa.jp":true,"yomitan.okinawa.jp":true,"yonabaru.okinawa.jp":true,"yonaguni.okinawa.jp":true,"zamami.okinawa.jp":true,"abeno.osaka.jp":true,"chihayaakasaka.osaka.jp":true,"chuo.osaka.jp":true,"daito.osaka.jp":true,"fujiidera.osaka.jp":true,"habikino.osaka.jp":true,"hannan.osaka.jp":true,"higashiosaka.osaka.jp":true,"higashisumiyoshi.osaka.jp":true,"higashiyodogawa.osaka.jp":true,"hirakata.osaka.jp":true,"ibaraki.osaka.jp":true,"ikeda.osaka.jp":true,"izumi.osaka.jp":true,"izumiotsu.osaka.jp":true,"izumisano.osaka.jp":true,"kadoma.osaka.jp":true,"kaizuka.osaka.jp":true,"kanan.osaka.jp":true,"kashiwara.osaka.jp":true,"katano.osak
a.jp":true,"kawachinagano.osaka.jp":true,"kishiwada.osaka.jp":true,"kita.osaka.jp":true,"kumatori.osaka.jp":true,"matsubara.osaka.jp":true,"minato.osaka.jp":true,"minoh.osaka.jp":true,"misaki.osaka.jp":true,"moriguchi.osaka.jp":true,"neyagawa.osaka.jp":true,"nishi.osaka.jp":true,"nose.osaka.jp":true,"osakasayama.osaka.jp":true,"sakai.osaka.jp":true,"sayama.osaka.jp":true,"sennan.osaka.jp":true,"settsu.osaka.jp":true,"shijonawate.osaka.jp":true,"shimamoto.osaka.jp":true,"suita.osaka.jp":true,"tadaoka.osaka.jp":true,"taishi.osaka.jp":true,"tajiri.osaka.jp":true,"takaishi.osaka.jp":true,"takatsuki.osaka.jp":true,"tondabayashi.osaka.jp":true,"toyonaka.osaka.jp":true,"toyono.osaka.jp":true,"yao.osaka.jp":true,"ariake.saga.jp":true,"arita.saga.jp":true,"fukudomi.saga.jp":true,"genkai.saga.jp":true,"hamatama.saga.jp":true,"hizen.saga.jp":true,"imari.saga.jp":true,"kamimine.saga.jp":true,"kanzaki.saga.jp":true,"karatsu.saga.jp":true,"kashima.saga.jp":true,"kitagata.saga.jp":true,"kitahata.saga.jp":true,"kiyama.saga.jp":true,"kouhoku.saga.jp":true,"kyuragi.saga.jp":true,"nishiarita.saga.jp":true,"ogi.saga.jp":true,"omachi.saga.jp":true,"ouchi.saga.jp":true,"saga.saga.jp":true,"shiroishi.saga.jp":true,"taku.saga.jp":true,"tara.saga.jp":true,"tosu.saga.jp":true,"yoshinogari.saga.jp":true,"arakawa.saitama.jp":true,"asaka.saitama.jp":true,"chichibu.saitama.jp":true,"fujimi.saitama.jp":true,"fujimino.saitama.jp":true,"fukaya.saitama.jp":true,"hanno.saitama.jp":true,"hanyu.saitama.jp":true,"hasuda.saitama.jp":true,"hatogaya.saitama.jp":true,"hatoyama.saitama.jp":true,"hidaka.saitama.jp":true,"higashichichibu.saitama.jp":true,"higashimatsuyama.saitama.jp":true,"honjo.saitama.jp":true,"ina.saitama.jp":true,"iruma.saitama.jp":true,"iwatsuki.saitama.jp":true,"kamiizumi.saitama.jp":true,"kamikawa.saitama.jp":true,"kamisato.saitama.jp":true,"kasukabe.saitama.jp":true,"kawagoe.saitama.jp":true,"kawaguchi.saitama.jp":true,"kawajima.saitama.jp":true,"kazo.saitama.jp":true,"kitamoto.saitama.jp":true,"koshigaya.saitama.jp":true,"kounosu.saitama.jp":true,"kuki.saitama.jp":true,"kumagaya.saitama.jp":true,"matsubushi.saitama.jp":true,"minano.saitama.jp":true,"misato.saitama.jp":true,"miyashiro.saitama.jp":true,"miyoshi.saitama.jp":true,"moroyama.saitama.jp":true,"nagatoro.saitama.jp":true,"namegawa.saitama.jp":true,"niiza.saitama.jp":true,"ogano.saitama.jp":true,"ogawa.saitama.jp":true,"ogose.saitama.jp":true,"okegawa.saitama.jp":true,"omiya.saitama.jp":true,"otaki.saitama.jp":true,"ranzan.saitama.jp":true,"ryokami.saitama.jp":true,"saitama.saitama.jp":true,"sakado.saitama.jp":true,"satte.saitama.jp":true,"sayama.saitama.jp":true,"shiki.saitama.jp":true,"shiraoka.saitama.jp":true,"soka.saitama.jp":true,"sugito.saitama.jp":true,"toda.saitama.jp":true,"tokigawa.saitama.jp":true,"tokorozawa.saitama.jp":true,"tsurugashima.saitama.jp":true,"urawa.saitama.jp":true,"warabi.saitama.jp":true,"yashio.saitama.jp":true,"yokoze.saitama.jp":true,"yono.saitama.jp":true,"yorii.saitama.jp":true,"yoshida.saitama.jp":true,"yoshikawa.saitama.jp":true,"yoshimi.saitama.jp":true,"aisho.shiga.jp":true,"gamo.shiga.jp":true,"higashiomi.shiga.jp":true,"hikone.shiga.jp":true,"koka.shiga.jp":true,"konan.shiga.jp":true,"kosei.shiga.jp":true,"koto.shiga.jp":true,"kusatsu.shiga.jp":true,"maibara.shiga.jp":true,"moriyama.shiga.jp":true,"nagahama.shiga.jp":true,"nishiazai.shiga.jp":true,"notogawa.shiga.jp":true,"omihachiman.shiga.jp":true,"otsu.shiga.jp":true,"ritto.shiga.jp":true,"ryuoh.shiga.jp":true,"takashima.shiga.jp":true,"takatsuki.shiga.jp":tru
e,"torahime.shiga.jp":true,"toyosato.shiga.jp":true,"yasu.shiga.jp":true,"akagi.shimane.jp":true,"ama.shimane.jp":true,"gotsu.shimane.jp":true,"hamada.shimane.jp":true,"higashiizumo.shimane.jp":true,"hikawa.shimane.jp":true,"hikimi.shimane.jp":true,"izumo.shimane.jp":true,"kakinoki.shimane.jp":true,"masuda.shimane.jp":true,"matsue.shimane.jp":true,"misato.shimane.jp":true,"nishinoshima.shimane.jp":true,"ohda.shimane.jp":true,"okinoshima.shimane.jp":true,"okuizumo.shimane.jp":true,"shimane.shimane.jp":true,"tamayu.shimane.jp":true,"tsuwano.shimane.jp":true,"unnan.shimane.jp":true,"yakumo.shimane.jp":true,"yasugi.shimane.jp":true,"yatsuka.shimane.jp":true,"arai.shizuoka.jp":true,"atami.shizuoka.jp":true,"fuji.shizuoka.jp":true,"fujieda.shizuoka.jp":true,"fujikawa.shizuoka.jp":true,"fujinomiya.shizuoka.jp":true,"fukuroi.shizuoka.jp":true,"gotemba.shizuoka.jp":true,"haibara.shizuoka.jp":true,"hamamatsu.shizuoka.jp":true,"higashiizu.shizuoka.jp":true,"ito.shizuoka.jp":true,"iwata.shizuoka.jp":true,"izu.shizuoka.jp":true,"izunokuni.shizuoka.jp":true,"kakegawa.shizuoka.jp":true,"kannami.shizuoka.jp":true,"kawanehon.shizuoka.jp":true,"kawazu.shizuoka.jp":true,"kikugawa.shizuoka.jp":true,"kosai.shizuoka.jp":true,"makinohara.shizuoka.jp":true,"matsuzaki.shizuoka.jp":true,"minamiizu.shizuoka.jp":true,"mishima.shizuoka.jp":true,"morimachi.shizuoka.jp":true,"nishiizu.shizuoka.jp":true,"numazu.shizuoka.jp":true,"omaezaki.shizuoka.jp":true,"shimada.shizuoka.jp":true,"shimizu.shizuoka.jp":true,"shimoda.shizuoka.jp":true,"shizuoka.shizuoka.jp":true,"susono.shizuoka.jp":true,"yaizu.shizuoka.jp":true,"yoshida.shizuoka.jp":true,"ashikaga.tochigi.jp":true,"bato.tochigi.jp":true,"haga.tochigi.jp":true,"ichikai.tochigi.jp":true,"iwafune.tochigi.jp":true,"kaminokawa.tochigi.jp":true,"kanuma.tochigi.jp":true,"karasuyama.tochigi.jp":true,"kuroiso.tochigi.jp":true,"mashiko.tochigi.jp":true,"mibu.tochigi.jp":true,"moka.tochigi.jp":true,"motegi.tochigi.jp":true,"nasu.tochigi.jp":true,"nasushiobara.tochigi.jp":true,"nikko.tochigi.jp":true,"nishikata.tochigi.jp":true,"nogi.tochigi.jp":true,"ohira.tochigi.jp":true,"ohtawara.tochigi.jp":true,"oyama.tochigi.jp":true,"sakura.tochigi.jp":true,"sano.tochigi.jp":true,"shimotsuke.tochigi.jp":true,"shioya.tochigi.jp":true,"takanezawa.tochigi.jp":true,"tochigi.tochigi.jp":true,"tsuga.tochigi.jp":true,"ujiie.tochigi.jp":true,"utsunomiya.tochigi.jp":true,"yaita.tochigi.jp":true,"aizumi.tokushima.jp":true,"anan.tokushima.jp":true,"ichiba.tokushima.jp":true,"itano.tokushima.jp":true,"kainan.tokushima.jp":true,"komatsushima.tokushima.jp":true,"matsushige.tokushima.jp":true,"mima.tokushima.jp":true,"minami.tokushima.jp":true,"miyoshi.tokushima.jp":true,"mugi.tokushima.jp":true,"nakagawa.tokushima.jp":true,"naruto.tokushima.jp":true,"sanagochi.tokushima.jp":true,"shishikui.tokushima.jp":true,"tokushima.tokushima.jp":true,"wajiki.tokushima.jp":true,"adachi.tokyo.jp":true,"akiruno.tokyo.jp":true,"akishima.tokyo.jp":true,"aogashima.tokyo.jp":true,"arakawa.tokyo.jp":true,"bunkyo.tokyo.jp":true,"chiyoda.tokyo.jp":true,"chofu.tokyo.jp":true,"chuo.tokyo.jp":true,"edogawa.tokyo.jp":true,"fuchu.tokyo.jp":true,"fussa.tokyo.jp":true,"hachijo.tokyo.jp":true,"hachioji.tokyo.jp":true,"hamura.tokyo.jp":true,"higashikurume.tokyo.jp":true,"higashimurayama.tokyo.jp":true,"higashiyamato.tokyo.jp":true,"hino.tokyo.jp":true,"hinode.tokyo.jp":true,"hinohara.tokyo.jp":true,"inagi.tokyo.jp":true,"itabashi.tokyo.jp":true,"katsushika.tokyo.jp":true,"kita.tokyo.jp":true,"kiyose.tokyo.jp":true,"kodaira.tokyo.jp":tr
ue,"koganei.tokyo.jp":true,"kokubunji.tokyo.jp":true,"komae.tokyo.jp":true,"koto.tokyo.jp":true,"kouzushima.tokyo.jp":true,"kunitachi.tokyo.jp":true,"machida.tokyo.jp":true,"meguro.tokyo.jp":true,"minato.tokyo.jp":true,"mitaka.tokyo.jp":true,"mizuho.tokyo.jp":true,"musashimurayama.tokyo.jp":true,"musashino.tokyo.jp":true,"nakano.tokyo.jp":true,"nerima.tokyo.jp":true,"ogasawara.tokyo.jp":true,"okutama.tokyo.jp":true,"ome.tokyo.jp":true,"oshima.tokyo.jp":true,"ota.tokyo.jp":true,"setagaya.tokyo.jp":true,"shibuya.tokyo.jp":true,"shinagawa.tokyo.jp":true,"shinjuku.tokyo.jp":true,"suginami.tokyo.jp":true,"sumida.tokyo.jp":true,"tachikawa.tokyo.jp":true,"taito.tokyo.jp":true,"tama.tokyo.jp":true,"toshima.tokyo.jp":true,"chizu.tottori.jp":true,"hino.tottori.jp":true,"kawahara.tottori.jp":true,"koge.tottori.jp":true,"kotoura.tottori.jp":true,"misasa.tottori.jp":true,"nanbu.tottori.jp":true,"nichinan.tottori.jp":true,"sakaiminato.tottori.jp":true,"tottori.tottori.jp":true,"wakasa.tottori.jp":true,"yazu.tottori.jp":true,"yonago.tottori.jp":true,"asahi.toyama.jp":true,"fuchu.toyama.jp":true,"fukumitsu.toyama.jp":true,"funahashi.toyama.jp":true,"himi.toyama.jp":true,"imizu.toyama.jp":true,"inami.toyama.jp":true,"johana.toyama.jp":true,"kamiichi.toyama.jp":true,"kurobe.toyama.jp":true,"nakaniikawa.toyama.jp":true,"namerikawa.toyama.jp":true,"nanto.toyama.jp":true,"nyuzen.toyama.jp":true,"oyabe.toyama.jp":true,"taira.toyama.jp":true,"takaoka.toyama.jp":true,"tateyama.toyama.jp":true,"toga.toyama.jp":true,"tonami.toyama.jp":true,"toyama.toyama.jp":true,"unazuki.toyama.jp":true,"uozu.toyama.jp":true,"yamada.toyama.jp":true,"arida.wakayama.jp":true,"aridagawa.wakayama.jp":true,"gobo.wakayama.jp":true,"hashimoto.wakayama.jp":true,"hidaka.wakayama.jp":true,"hirogawa.wakayama.jp":true,"inami.wakayama.jp":true,"iwade.wakayama.jp":true,"kainan.wakayama.jp":true,"kamitonda.wakayama.jp":true,"katsuragi.wakayama.jp":true,"kimino.wakayama.jp":true,"kinokawa.wakayama.jp":true,"kitayama.wakayama.jp":true,"koya.wakayama.jp":true,"koza.wakayama.jp":true,"kozagawa.wakayama.jp":true,"kudoyama.wakayama.jp":true,"kushimoto.wakayama.jp":true,"mihama.wakayama.jp":true,"misato.wakayama.jp":true,"nachikatsuura.wakayama.jp":true,"shingu.wakayama.jp":true,"shirahama.wakayama.jp":true,"taiji.wakayama.jp":true,"tanabe.wakayama.jp":true,"wakayama.wakayama.jp":true,"yuasa.wakayama.jp":true,"yura.wakayama.jp":true,"asahi.yamagata.jp":true,"funagata.yamagata.jp":true,"higashine.yamagata.jp":true,"iide.yamagata.jp":true,"kahoku.yamagata.jp":true,"kaminoyama.yamagata.jp":true,"kaneyama.yamagata.jp":true,"kawanishi.yamagata.jp":true,"mamurogawa.yamagata.jp":true,"mikawa.yamagata.jp":true,"murayama.yamagata.jp":true,"nagai.yamagata.jp":true,"nakayama.yamagata.jp":true,"nanyo.yamagata.jp":true,"nishikawa.yamagata.jp":true,"obanazawa.yamagata.jp":true,"oe.yamagata.jp":true,"oguni.yamagata.jp":true,"ohkura.yamagata.jp":true,"oishida.yamagata.jp":true,"sagae.yamagata.jp":true,"sakata.yamagata.jp":true,"sakegawa.yamagata.jp":true,"shinjo.yamagata.jp":true,"shirataka.yamagata.jp":true,"shonai.yamagata.jp":true,"takahata.yamagata.jp":true,"tendo.yamagata.jp":true,"tozawa.yamagata.jp":true,"tsuruoka.yamagata.jp":true,"yamagata.yamagata.jp":true,"yamanobe.yamagata.jp":true,"yonezawa.yamagata.jp":true,"yuza.yamagata.jp":true,"abu.yamaguchi.jp":true,"hagi.yamaguchi.jp":true,"hikari.yamaguchi.jp":true,"hofu.yamaguchi.jp":true,"iwakuni.yamaguchi.jp":true,"kudamatsu.yamaguchi.jp":true,"mitou.yamaguchi.jp":true,"nagato.yamaguchi.jp":true,"oshima.yamaguch
i.jp":true,"shimonoseki.yamaguchi.jp":true,"shunan.yamaguchi.jp":true,"tabuse.yamaguchi.jp":true,"tokuyama.yamaguchi.jp":true,"toyota.yamaguchi.jp":true,"ube.yamaguchi.jp":true,"yuu.yamaguchi.jp":true,"chuo.yamanashi.jp":true,"doshi.yamanashi.jp":true,"fuefuki.yamanashi.jp":true,"fujikawa.yamanashi.jp":true,"fujikawaguchiko.yamanashi.jp":true,"fujiyoshida.yamanashi.jp":true,"hayakawa.yamanashi.jp":true,"hokuto.yamanashi.jp":true,"ichikawamisato.yamanashi.jp":true,"kai.yamanashi.jp":true,"kofu.yamanashi.jp":true,"koshu.yamanashi.jp":true,"kosuge.yamanashi.jp":true,"minami-alps.yamanashi.jp":true,"minobu.yamanashi.jp":true,"nakamichi.yamanashi.jp":true,"nanbu.yamanashi.jp":true,"narusawa.yamanashi.jp":true,"nirasaki.yamanashi.jp":true,"nishikatsura.yamanashi.jp":true,"oshino.yamanashi.jp":true,"otsuki.yamanashi.jp":true,"showa.yamanashi.jp":true,"tabayama.yamanashi.jp":true,"tsuru.yamanashi.jp":true,"uenohara.yamanashi.jp":true,"yamanakako.yamanashi.jp":true,"yamanashi.yamanashi.jp":true,"*.ke":true,"kg":true,"org.kg":true,"net.kg":true,"com.kg":true,"edu.kg":true,"gov.kg":true,"mil.kg":true,"*.kh":true,"ki":true,"edu.ki":true,"biz.ki":true,"net.ki":true,"org.ki":true,"gov.ki":true,"info.ki":true,"com.ki":true,"km":true,"org.km":true,"nom.km":true,"gov.km":true,"prd.km":true,"tm.km":true,"edu.km":true,"mil.km":true,"ass.km":true,"com.km":true,"coop.km":true,"asso.km":true,"presse.km":true,"medecin.km":true,"notaires.km":true,"pharmaciens.km":true,"veterinaire.km":true,"gouv.km":true,"kn":true,"net.kn":true,"org.kn":true,"edu.kn":true,"gov.kn":true,"kp":true,"com.kp":true,"edu.kp":true,"gov.kp":true,"org.kp":true,"rep.kp":true,"tra.kp":true,"kr":true,"ac.kr":true,"co.kr":true,"es.kr":true,"go.kr":true,"hs.kr":true,"kg.kr":true,"mil.kr":true,"ms.kr":true,"ne.kr":true,"or.kr":true,"pe.kr":true,"re.kr":true,"sc.kr":true,"busan.kr":true,"chungbuk.kr":true,"chungnam.kr":true,"daegu.kr":true,"daejeon.kr":true,"gangwon.kr":true,"gwangju.kr":true,"gyeongbuk.kr":true,"gyeonggi.kr":true,"gyeongnam.kr":true,"incheon.kr":true,"jeju.kr":true,"jeonbuk.kr":true,"jeonnam.kr":true,"seoul.kr":true,"ulsan.kr":true,"*.kw":true,"ky":true,"edu.ky":true,"gov.ky":true,"com.ky":true,"org.ky":true,"net.ky":true,"kz":true,"org.kz":true,"edu.kz":true,"net.kz":true,"gov.kz":true,"mil.kz":true,"com.kz":true,"la":true,"int.la":true,"net.la":true,"info.la":true,"edu.la":true,"gov.la":true,"per.la":true,"com.la":true,"org.la":true,"lb":true,"com.lb":true,"edu.lb":true,"gov.lb":true,"net.lb":true,"org.lb":true,"lc":true,"com.lc":true,"net.lc":true,"co.lc":true,"org.lc":true,"edu.lc":true,"gov.lc":true,"li":true,"lk":true,"gov.lk":true,"sch.lk":true,"net.lk":true,"int.lk":true,"com.lk":true,"org.lk":true,"edu.lk":true,"ngo.lk":true,"soc.lk":true,"web.lk":true,"ltd.lk":true,"assn.lk":true,"grp.lk":true,"hotel.lk":true,"ac.lk":true,"lr":true,"com.lr":true,"edu.lr":true,"gov.lr":true,"org.lr":true,"net.lr":true,"ls":true,"co.ls":true,"org.ls":true,"lt":true,"gov.lt":true,"lu":true,"lv":true,"com.lv":true,"edu.lv":true,"gov.lv":true,"org.lv":true,"mil.lv":true,"id.lv":true,"net.lv":true,"asn.lv":true,"conf.lv":true,"ly":true,"com.ly":true,"net.ly":true,"gov.ly":true,"plc.ly":true,"edu.ly":true,"sch.ly":true,"med.ly":true,"org.ly":true,"id.ly":true,"ma":true,"co.ma":true,"net.ma":true,"gov.ma":true,"org.ma":true,"ac.ma":true,"press.ma":true,"mc":true,"tm.mc":true,"asso.mc":true,"md":true,"me":true,"co.me":true,"net.me":true,"org.me":true,"edu.me":true,"ac.me":true,"gov.me":true,"its.me":true,"priv.me":true,"mg":true,"org.mg":true,"n
om.mg":true,"gov.mg":true,"prd.mg":true,"tm.mg":true,"edu.mg":true,"mil.mg":true,"com.mg":true,"co.mg":true,"mh":true,"mil":true,"mk":true,"com.mk":true,"org.mk":true,"net.mk":true,"edu.mk":true,"gov.mk":true,"inf.mk":true,"name.mk":true,"ml":true,"com.ml":true,"edu.ml":true,"gouv.ml":true,"gov.ml":true,"net.ml":true,"org.ml":true,"presse.ml":true,"*.mm":true,"mn":true,"gov.mn":true,"edu.mn":true,"org.mn":true,"mo":true,"com.mo":true,"net.mo":true,"org.mo":true,"edu.mo":true,"gov.mo":true,"mobi":true,"mp":true,"mq":true,"mr":true,"gov.mr":true,"ms":true,"com.ms":true,"edu.ms":true,"gov.ms":true,"net.ms":true,"org.ms":true,"mt":true,"com.mt":true,"edu.mt":true,"net.mt":true,"org.mt":true,"mu":true,"com.mu":true,"net.mu":true,"org.mu":true,"gov.mu":true,"ac.mu":true,"co.mu":true,"or.mu":true,"museum":true,"academy.museum":true,"agriculture.museum":true,"air.museum":true,"airguard.museum":true,"alabama.museum":true,"alaska.museum":true,"amber.museum":true,"ambulance.museum":true,"american.museum":true,"americana.museum":true,"americanantiques.museum":true,"americanart.museum":true,"amsterdam.museum":true,"and.museum":true,"annefrank.museum":true,"anthro.museum":true,"anthropology.museum":true,"antiques.museum":true,"aquarium.museum":true,"arboretum.museum":true,"archaeological.museum":true,"archaeology.museum":true,"architecture.museum":true,"art.museum":true,"artanddesign.museum":true,"artcenter.museum":true,"artdeco.museum":true,"arteducation.museum":true,"artgallery.museum":true,"arts.museum":true,"artsandcrafts.museum":true,"asmatart.museum":true,"assassination.museum":true,"assisi.museum":true,"association.museum":true,"astronomy.museum":true,"atlanta.museum":true,"austin.museum":true,"australia.museum":true,"automotive.museum":true,"aviation.museum":true,"axis.museum":true,"badajoz.museum":true,"baghdad.museum":true,"bahn.museum":true,"bale.museum":true,"baltimore.museum":true,"barcelona.museum":true,"baseball.museum":true,"basel.museum":true,"baths.museum":true,"bauern.museum":true,"beauxarts.museum":true,"beeldengeluid.museum":true,"bellevue.museum":true,"bergbau.museum":true,"berkeley.museum":true,"berlin.museum":true,"bern.museum":true,"bible.museum":true,"bilbao.museum":true,"bill.museum":true,"birdart.museum":true,"birthplace.museum":true,"bonn.museum":true,"boston.museum":true,"botanical.museum":true,"botanicalgarden.museum":true,"botanicgarden.museum":true,"botany.museum":true,"brandywinevalley.museum":true,"brasil.museum":true,"bristol.museum":true,"british.museum":true,"britishcolumbia.museum":true,"broadcast.museum":true,"brunel.museum":true,"brussel.museum":true,"brussels.museum":true,"bruxelles.museum":true,"building.museum":true,"burghof.museum":true,"bus.museum":true,"bushey.museum":true,"cadaques.museum":true,"california.museum":true,"cambridge.museum":true,"can.museum":true,"canada.museum":true,"capebreton.museum":true,"carrier.museum":true,"cartoonart.museum":true,"casadelamoneda.museum":true,"castle.museum":true,"castres.museum":true,"celtic.museum":true,"center.museum":true,"chattanooga.museum":true,"cheltenham.museum":true,"chesapeakebay.museum":true,"chicago.museum":true,"children.museum":true,"childrens.museum":true,"childrensgarden.museum":true,"chiropractic.museum":true,"chocolate.museum":true,"christiansburg.museum":true,"cincinnati.museum":true,"cinema.museum":true,"circus.museum":true,"civilisation.museum":true,"civilization.museum":true,"civilwar.museum":true,"clinton.museum":true,"clock.museum":true,"coal.museum":true,"coastaldefence.museum":true,"cody.museu
m":true,"coldwar.museum":true,"collection.museum":true,"colonialwilliamsburg.museum":true,"coloradoplateau.museum":true,"columbia.museum":true,"columbus.museum":true,"communication.museum":true,"communications.museum":true,"community.museum":true,"computer.museum":true,"computerhistory.museum":true,"xn--comunicaes-v6a2o.museum":true,"contemporary.museum":true,"contemporaryart.museum":true,"convent.museum":true,"copenhagen.museum":true,"corporation.museum":true,"xn--correios-e-telecomunicaes-ghc29a.museum":true,"corvette.museum":true,"costume.museum":true,"countryestate.museum":true,"county.museum":true,"crafts.museum":true,"cranbrook.museum":true,"creation.museum":true,"cultural.museum":true,"culturalcenter.museum":true,"culture.museum":true,"cyber.museum":true,"cymru.museum":true,"dali.museum":true,"dallas.museum":true,"database.museum":true,"ddr.museum":true,"decorativearts.museum":true,"delaware.museum":true,"delmenhorst.museum":true,"denmark.museum":true,"depot.museum":true,"design.museum":true,"detroit.museum":true,"dinosaur.museum":true,"discovery.museum":true,"dolls.museum":true,"donostia.museum":true,"durham.museum":true,"eastafrica.museum":true,"eastcoast.museum":true,"education.museum":true,"educational.museum":true,"egyptian.museum":true,"eisenbahn.museum":true,"elburg.museum":true,"elvendrell.museum":true,"embroidery.museum":true,"encyclopedic.museum":true,"england.museum":true,"entomology.museum":true,"environment.museum":true,"environmentalconservation.museum":true,"epilepsy.museum":true,"essex.museum":true,"estate.museum":true,"ethnology.museum":true,"exeter.museum":true,"exhibition.museum":true,"family.museum":true,"farm.museum":true,"farmequipment.museum":true,"farmers.museum":true,"farmstead.museum":true,"field.museum":true,"figueres.museum":true,"filatelia.museum":true,"film.museum":true,"fineart.museum":true,"finearts.museum":true,"finland.museum":true,"flanders.museum":true,"florida.museum":true,"force.museum":true,"fortmissoula.museum":true,"fortworth.museum":true,"foundation.museum":true,"francaise.museum":true,"frankfurt.museum":true,"franziskaner.museum":true,"freemasonry.museum":true,"freiburg.museum":true,"fribourg.museum":true,"frog.museum":true,"fundacio.museum":true,"furniture.museum":true,"gallery.museum":true,"garden.museum":true,"gateway.museum":true,"geelvinck.museum":true,"gemological.museum":true,"geology.museum":true,"georgia.museum":true,"giessen.museum":true,"glas.museum":true,"glass.museum":true,"gorge.museum":true,"grandrapids.museum":true,"graz.museum":true,"guernsey.museum":true,"halloffame.museum":true,"hamburg.museum":true,"handson.museum":true,"harvestcelebration.museum":true,"hawaii.museum":true,"health.museum":true,"heimatunduhren.museum":true,"hellas.museum":true,"helsinki.museum":true,"hembygdsforbund.museum":true,"heritage.museum":true,"histoire.museum":true,"historical.museum":true,"historicalsociety.museum":true,"historichouses.museum":true,"historisch.museum":true,"historisches.museum":true,"history.museum":true,"historyofscience.museum":true,"horology.museum":true,"house.museum":true,"humanities.museum":true,"illustration.museum":true,"imageandsound.museum":true,"indian.museum":true,"indiana.museum":true,"indianapolis.museum":true,"indianmarket.museum":true,"intelligence.museum":true,"interactive.museum":true,"iraq.museum":true,"iron.museum":true,"isleofman.museum":true,"jamison.museum":true,"jefferson.museum":true,"jerusalem.museum":true,"jewelry.museum":true,"jewish.museum":true,"jewishart.museum":true,"jfk.museum":true,"journalism.mu
seum":true,"judaica.museum":true,"judygarland.museum":true,"juedisches.museum":true,"juif.museum":true,"karate.museum":true,"karikatur.museum":true,"kids.museum":true,"koebenhavn.museum":true,"koeln.museum":true,"kunst.museum":true,"kunstsammlung.museum":true,"kunstunddesign.museum":true,"labor.museum":true,"labour.museum":true,"lajolla.museum":true,"lancashire.museum":true,"landes.museum":true,"lans.museum":true,"xn--lns-qla.museum":true,"larsson.museum":true,"lewismiller.museum":true,"lincoln.museum":true,"linz.museum":true,"living.museum":true,"livinghistory.museum":true,"localhistory.museum":true,"london.museum":true,"losangeles.museum":true,"louvre.museum":true,"loyalist.museum":true,"lucerne.museum":true,"luxembourg.museum":true,"luzern.museum":true,"mad.museum":true,"madrid.museum":true,"mallorca.museum":true,"manchester.museum":true,"mansion.museum":true,"mansions.museum":true,"manx.museum":true,"marburg.museum":true,"maritime.museum":true,"maritimo.museum":true,"maryland.museum":true,"marylhurst.museum":true,"media.museum":true,"medical.museum":true,"medizinhistorisches.museum":true,"meeres.museum":true,"memorial.museum":true,"mesaverde.museum":true,"michigan.museum":true,"midatlantic.museum":true,"military.museum":true,"mill.museum":true,"miners.museum":true,"mining.museum":true,"minnesota.museum":true,"missile.museum":true,"missoula.museum":true,"modern.museum":true,"moma.museum":true,"money.museum":true,"monmouth.museum":true,"monticello.museum":true,"montreal.museum":true,"moscow.museum":true,"motorcycle.museum":true,"muenchen.museum":true,"muenster.museum":true,"mulhouse.museum":true,"muncie.museum":true,"museet.museum":true,"museumcenter.museum":true,"museumvereniging.museum":true,"music.museum":true,"national.museum":true,"nationalfirearms.museum":true,"nationalheritage.museum":true,"nativeamerican.museum":true,"naturalhistory.museum":true,"naturalhistorymuseum.museum":true,"naturalsciences.museum":true,"nature.museum":true,"naturhistorisches.museum":true,"natuurwetenschappen.museum":true,"naumburg.museum":true,"naval.museum":true,"nebraska.museum":true,"neues.museum":true,"newhampshire.museum":true,"newjersey.museum":true,"newmexico.museum":true,"newport.museum":true,"newspaper.museum":true,"newyork.museum":true,"niepce.museum":true,"norfolk.museum":true,"north.museum":true,"nrw.museum":true,"nuernberg.museum":true,"nuremberg.museum":true,"nyc.museum":true,"nyny.museum":true,"oceanographic.museum":true,"oceanographique.museum":true,"omaha.museum":true,"online.museum":true,"ontario.museum":true,"openair.museum":true,"oregon.museum":true,"oregontrail.museum":true,"otago.museum":true,"oxford.museum":true,"pacific.museum":true,"paderborn.museum":true,"palace.museum":true,"paleo.museum":true,"palmsprings.museum":true,"panama.museum":true,"paris.museum":true,"pasadena.museum":true,"pharmacy.museum":true,"philadelphia.museum":true,"philadelphiaarea.museum":true,"philately.museum":true,"phoenix.museum":true,"photography.museum":true,"pilots.museum":true,"pittsburgh.museum":true,"planetarium.museum":true,"plantation.museum":true,"plants.museum":true,"plaza.museum":true,"portal.museum":true,"portland.museum":true,"portlligat.museum":true,"posts-and-telecommunications.museum":true,"preservation.museum":true,"presidio.museum":true,"press.museum":true,"project.museum":true,"public.museum":true,"pubol.museum":true,"quebec.museum":true,"railroad.museum":true,"railway.museum":true,"research.museum":true,"resistance.museum":true,"riodejaneiro.museum":true,"rochester.museum":true,"rockart.mu
seum":true,"roma.museum":true,"russia.museum":true,"saintlouis.museum":true,"salem.museum":true,"salvadordali.museum":true,"salzburg.museum":true,"sandiego.museum":true,"sanfrancisco.museum":true,"santabarbara.museum":true,"santacruz.museum":true,"santafe.museum":true,"saskatchewan.museum":true,"satx.museum":true,"savannahga.museum":true,"schlesisches.museum":true,"schoenbrunn.museum":true,"schokoladen.museum":true,"school.museum":true,"schweiz.museum":true,"science.museum":true,"scienceandhistory.museum":true,"scienceandindustry.museum":true,"sciencecenter.museum":true,"sciencecenters.museum":true,"science-fiction.museum":true,"sciencehistory.museum":true,"sciences.museum":true,"sciencesnaturelles.museum":true,"scotland.museum":true,"seaport.museum":true,"settlement.museum":true,"settlers.museum":true,"shell.museum":true,"sherbrooke.museum":true,"sibenik.museum":true,"silk.museum":true,"ski.museum":true,"skole.museum":true,"society.museum":true,"sologne.museum":true,"soundandvision.museum":true,"southcarolina.museum":true,"southwest.museum":true,"space.museum":true,"spy.museum":true,"square.museum":true,"stadt.museum":true,"stalbans.museum":true,"starnberg.museum":true,"state.museum":true,"stateofdelaware.museum":true,"station.museum":true,"steam.museum":true,"steiermark.museum":true,"stjohn.museum":true,"stockholm.museum":true,"stpetersburg.museum":true,"stuttgart.museum":true,"suisse.museum":true,"surgeonshall.museum":true,"surrey.museum":true,"svizzera.museum":true,"sweden.museum":true,"sydney.museum":true,"tank.museum":true,"tcm.museum":true,"technology.museum":true,"telekommunikation.museum":true,"television.museum":true,"texas.museum":true,"textile.museum":true,"theater.museum":true,"time.museum":true,"timekeeping.museum":true,"topology.museum":true,"torino.museum":true,"touch.museum":true,"town.museum":true,"transport.museum":true,"tree.museum":true,"trolley.museum":true,"trust.museum":true,"trustee.museum":true,"uhren.museum":true,"ulm.museum":true,"undersea.museum":true,"university.museum":true,"usa.museum":true,"usantiques.museum":true,"usarts.museum":true,"uscountryestate.museum":true,"usculture.museum":true,"usdecorativearts.museum":true,"usgarden.museum":true,"ushistory.museum":true,"ushuaia.museum":true,"uslivinghistory.museum":true,"utah.museum":true,"uvic.museum":true,"valley.museum":true,"vantaa.museum":true,"versailles.museum":true,"viking.museum":true,"village.museum":true,"virginia.museum":true,"virtual.museum":true,"virtuel.museum":true,"vlaanderen.museum":true,"volkenkunde.museum":true,"wales.museum":true,"wallonie.museum":true,"war.museum":true,"washingtondc.museum":true,"watchandclock.museum":true,"watch-and-clock.museum":true,"western.museum":true,"westfalen.museum":true,"whaling.museum":true,"wildlife.museum":true,"williamsburg.museum":true,"windmill.museum":true,"workshop.museum":true,"york.museum":true,"yorkshire.museum":true,"yosemite.museum":true,"youth.museum":true,"zoological.museum":true,"zoology.museum":true,"xn--9dbhblg6di.museum":true,"xn--h1aegh.museum":true,"mv":true,"aero.mv":true,"biz.mv":true,"com.mv":true,"coop.mv":true,"edu.mv":true,"gov.mv":true,"info.mv":true,"int.mv":true,"mil.mv":true,"museum.mv":true,"name.mv":true,"net.mv":true,"org.mv":true,"pro.mv":true,"mw":true,"ac.mw":true,"biz.mw":true,"co.mw":true,"com.mw":true,"coop.mw":true,"edu.mw":true,"gov.mw":true,"int.mw":true,"museum.mw":true,"net.mw":true,"org.mw":true,"mx":true,"com.mx":true,"org.mx":true,"gob.mx":true,"edu.mx":true,"net.mx":true,"my":true,"com.my":true,"net.my":true,"org.my
":true,"gov.my":true,"edu.my":true,"mil.my":true,"name.my":true,"*.mz":true,"teledata.mz":false,"na":true,"info.na":true,"pro.na":true,"name.na":true,"school.na":true,"or.na":true,"dr.na":true,"us.na":true,"mx.na":true,"ca.na":true,"in.na":true,"cc.na":true,"tv.na":true,"ws.na":true,"mobi.na":true,"co.na":true,"com.na":true,"org.na":true,"name":true,"nc":true,"asso.nc":true,"ne":true,"net":true,"nf":true,"com.nf":true,"net.nf":true,"per.nf":true,"rec.nf":true,"web.nf":true,"arts.nf":true,"firm.nf":true,"info.nf":true,"other.nf":true,"store.nf":true,"ng":true,"com.ng":true,"edu.ng":true,"name.ng":true,"net.ng":true,"org.ng":true,"sch.ng":true,"gov.ng":true,"mil.ng":true,"mobi.ng":true,"*.ni":true,"nl":true,"bv.nl":true,"no":true,"fhs.no":true,"vgs.no":true,"fylkesbibl.no":true,"folkebibl.no":true,"museum.no":true,"idrett.no":true,"priv.no":true,"mil.no":true,"stat.no":true,"dep.no":true,"kommune.no":true,"herad.no":true,"aa.no":true,"ah.no":true,"bu.no":true,"fm.no":true,"hl.no":true,"hm.no":true,"jan-mayen.no":true,"mr.no":true,"nl.no":true,"nt.no":true,"of.no":true,"ol.no":true,"oslo.no":true,"rl.no":true,"sf.no":true,"st.no":true,"svalbard.no":true,"tm.no":true,"tr.no":true,"va.no":true,"vf.no":true,"gs.aa.no":true,"gs.ah.no":true,"gs.bu.no":true,"gs.fm.no":true,"gs.hl.no":true,"gs.hm.no":true,"gs.jan-mayen.no":true,"gs.mr.no":true,"gs.nl.no":true,"gs.nt.no":true,"gs.of.no":true,"gs.ol.no":true,"gs.oslo.no":true,"gs.rl.no":true,"gs.sf.no":true,"gs.st.no":true,"gs.svalbard.no":true,"gs.tm.no":true,"gs.tr.no":true,"gs.va.no":true,"gs.vf.no":true,"akrehamn.no":true,"xn--krehamn-dxa.no":true,"algard.no":true,"xn--lgrd-poac.no":true,"arna.no":true,"brumunddal.no":true,"bryne.no":true,"bronnoysund.no":true,"xn--brnnysund-m8ac.no":true,"drobak.no":true,"xn--drbak-wua.no":true,"egersund.no":true,"fetsund.no":true,"floro.no":true,"xn--flor-jra.no":true,"fredrikstad.no":true,"hokksund.no":true,"honefoss.no":true,"xn--hnefoss-q1a.no":true,"jessheim.no":true,"jorpeland.no":true,"xn--jrpeland-54a.no":true,"kirkenes.no":true,"kopervik.no":true,"krokstadelva.no":true,"langevag.no":true,"xn--langevg-jxa.no":true,"leirvik.no":true,"mjondalen.no":true,"xn--mjndalen-64a.no":true,"mo-i-rana.no":true,"mosjoen.no":true,"xn--mosjen-eya.no":true,"nesoddtangen.no":true,"orkanger.no":true,"osoyro.no":true,"xn--osyro-wua.no":true,"raholt.no":true,"xn--rholt-mra.no":true,"sandnessjoen.no":true,"xn--sandnessjen-ogb.no":true,"skedsmokorset.no":true,"slattum.no":true,"spjelkavik.no":true,"stathelle.no":true,"stavern.no":true,"stjordalshalsen.no":true,"xn--stjrdalshalsen-sqb.no":true,"tananger.no":true,"tranby.no":true,"vossevangen.no":true,"afjord.no":true,"xn--fjord-lra.no":true,"agdenes.no":true,"al.no":true,"xn--l-1fa.no":true,"alesund.no":true,"xn--lesund-hua.no":true,"alstahaug.no":true,"alta.no":true,"xn--lt-liac.no":true,"alaheadju.no":true,"xn--laheadju-7ya.no":true,"alvdal.no":true,"amli.no":true,"xn--mli-tla.no":true,"amot.no":true,"xn--mot-tla.no":true,"andebu.no":true,"andoy.no":true,"xn--andy-ira.no":true,"andasuolo.no":true,"ardal.no":true,"xn--rdal-poa.no":true,"aremark.no":true,"arendal.no":true,"xn--s-1fa.no":true,"aseral.no":true,"xn--seral-lra.no":true,"asker.no":true,"askim.no":true,"askvoll.no":true,"askoy.no":true,"xn--asky-ira.no":true,"asnes.no":true,"xn--snes-poa.no":true,"audnedaln.no":true,"aukra.no":true,"aure.no":true,"aurland.no":true,"aurskog-holand.no":true,"xn--aurskog-hland-jnb.no":true,"austevoll.no":true,"austrheim.no":true,"averoy.no":true,"xn--avery-yua.no":true,"balestrand.no":tru
e,"ballangen.no":true,"balat.no":true,"xn--blt-elab.no":true,"balsfjord.no":true,"bahccavuotna.no":true,"xn--bhccavuotna-k7a.no":true,"bamble.no":true,"bardu.no":true,"beardu.no":true,"beiarn.no":true,"bajddar.no":true,"xn--bjddar-pta.no":true,"baidar.no":true,"xn--bidr-5nac.no":true,"berg.no":true,"bergen.no":true,"berlevag.no":true,"xn--berlevg-jxa.no":true,"bearalvahki.no":true,"xn--bearalvhki-y4a.no":true,"bindal.no":true,"birkenes.no":true,"bjarkoy.no":true,"xn--bjarky-fya.no":true,"bjerkreim.no":true,"bjugn.no":true,"bodo.no":true,"xn--bod-2na.no":true,"badaddja.no":true,"xn--bdddj-mrabd.no":true,"budejju.no":true,"bokn.no":true,"bremanger.no":true,"bronnoy.no":true,"xn--brnny-wuac.no":true,"bygland.no":true,"bykle.no":true,"barum.no":true,"xn--brum-voa.no":true,"bo.telemark.no":true,"xn--b-5ga.telemark.no":true,"bo.nordland.no":true,"xn--b-5ga.nordland.no":true,"bievat.no":true,"xn--bievt-0qa.no":true,"bomlo.no":true,"xn--bmlo-gra.no":true,"batsfjord.no":true,"xn--btsfjord-9za.no":true,"bahcavuotna.no":true,"xn--bhcavuotna-s4a.no":true,"dovre.no":true,"drammen.no":true,"drangedal.no":true,"dyroy.no":true,"xn--dyry-ira.no":true,"donna.no":true,"xn--dnna-gra.no":true,"eid.no":true,"eidfjord.no":true,"eidsberg.no":true,"eidskog.no":true,"eidsvoll.no":true,"eigersund.no":true,"elverum.no":true,"enebakk.no":true,"engerdal.no":true,"etne.no":true,"etnedal.no":true,"evenes.no":true,"evenassi.no":true,"xn--eveni-0qa01ga.no":true,"evje-og-hornnes.no":true,"farsund.no":true,"fauske.no":true,"fuossko.no":true,"fuoisku.no":true,"fedje.no":true,"fet.no":true,"finnoy.no":true,"xn--finny-yua.no":true,"fitjar.no":true,"fjaler.no":true,"fjell.no":true,"flakstad.no":true,"flatanger.no":true,"flekkefjord.no":true,"flesberg.no":true,"flora.no":true,"fla.no":true,"xn--fl-zia.no":true,"folldal.no":true,"forsand.no":true,"fosnes.no":true,"frei.no":true,"frogn.no":true,"froland.no":true,"frosta.no":true,"frana.no":true,"xn--frna-woa.no":true,"froya.no":true,"xn--frya-hra.no":true,"fusa.no":true,"fyresdal.no":true,"forde.no":true,"xn--frde-gra.no":true,"gamvik.no":true,"gangaviika.no":true,"xn--ggaviika-8ya47h.no":true,"gaular.no":true,"gausdal.no":true,"gildeskal.no":true,"xn--gildeskl-g0a.no":true,"giske.no":true,"gjemnes.no":true,"gjerdrum.no":true,"gjerstad.no":true,"gjesdal.no":true,"gjovik.no":true,"xn--gjvik-wua.no":true,"gloppen.no":true,"gol.no":true,"gran.no":true,"grane.no":true,"granvin.no":true,"gratangen.no":true,"grimstad.no":true,"grong.no":true,"kraanghke.no":true,"xn--kranghke-b0a.no":true,"grue.no":true,"gulen.no":true,"hadsel.no":true,"halden.no":true,"halsa.no":true,"hamar.no":true,"hamaroy.no":true,"habmer.no":true,"xn--hbmer-xqa.no":true,"hapmir.no":true,"xn--hpmir-xqa.no":true,"hammerfest.no":true,"hammarfeasta.no":true,"xn--hmmrfeasta-s4ac.no":true,"haram.no":true,"hareid.no":true,"harstad.no":true,"hasvik.no":true,"aknoluokta.no":true,"xn--koluokta-7ya57h.no":true,"hattfjelldal.no":true,"aarborte.no":true,"haugesund.no":true,"hemne.no":true,"hemnes.no":true,"hemsedal.no":true,"heroy.more-og-romsdal.no":true,"xn--hery-ira.xn--mre-og-romsdal-qqb.no":true,"heroy.nordland.no":true,"xn--hery-ira.nordland.no":true,"hitra.no":true,"hjartdal.no":true,"hjelmeland.no":true,"hobol.no":true,"xn--hobl-ira.no":true,"hof.no":true,"hol.no":true,"hole.no":true,"holmestrand.no":true,"holtalen.no":true,"xn--holtlen-hxa.no":true,"hornindal.no":true,"horten.no":true,"hurdal.no":true,"hurum.no":true,"hvaler.no":true,"hyllestad.no":true,"hagebostad.no":true,"xn--hgebostad-g3a.no":true,"hoyanger.no":true,"
xn--hyanger-q1a.no":true,"hoylandet.no":true,"xn--hylandet-54a.no":true,"ha.no":true,"xn--h-2fa.no":true,"ibestad.no":true,"inderoy.no":true,"xn--indery-fya.no":true,"iveland.no":true,"jevnaker.no":true,"jondal.no":true,"jolster.no":true,"xn--jlster-bya.no":true,"karasjok.no":true,"karasjohka.no":true,"xn--krjohka-hwab49j.no":true,"karlsoy.no":true,"galsa.no":true,"xn--gls-elac.no":true,"karmoy.no":true,"xn--karmy-yua.no":true,"kautokeino.no":true,"guovdageaidnu.no":true,"klepp.no":true,"klabu.no":true,"xn--klbu-woa.no":true,"kongsberg.no":true,"kongsvinger.no":true,"kragero.no":true,"xn--krager-gya.no":true,"kristiansand.no":true,"kristiansund.no":true,"krodsherad.no":true,"xn--krdsherad-m8a.no":true,"kvalsund.no":true,"rahkkeravju.no":true,"xn--rhkkervju-01af.no":true,"kvam.no":true,"kvinesdal.no":true,"kvinnherad.no":true,"kviteseid.no":true,"kvitsoy.no":true,"xn--kvitsy-fya.no":true,"kvafjord.no":true,"xn--kvfjord-nxa.no":true,"giehtavuoatna.no":true,"kvanangen.no":true,"xn--kvnangen-k0a.no":true,"navuotna.no":true,"xn--nvuotna-hwa.no":true,"kafjord.no":true,"xn--kfjord-iua.no":true,"gaivuotna.no":true,"xn--givuotna-8ya.no":true,"larvik.no":true,"lavangen.no":true,"lavagis.no":true,"loabat.no":true,"xn--loabt-0qa.no":true,"lebesby.no":true,"davvesiida.no":true,"leikanger.no":true,"leirfjord.no":true,"leka.no":true,"leksvik.no":true,"lenvik.no":true,"leangaviika.no":true,"xn--leagaviika-52b.no":true,"lesja.no":true,"levanger.no":true,"lier.no":true,"lierne.no":true,"lillehammer.no":true,"lillesand.no":true,"lindesnes.no":true,"lindas.no":true,"xn--linds-pra.no":true,"lom.no":true,"loppa.no":true,"lahppi.no":true,"xn--lhppi-xqa.no":true,"lund.no":true,"lunner.no":true,"luroy.no":true,"xn--lury-ira.no":true,"luster.no":true,"lyngdal.no":true,"lyngen.no":true,"ivgu.no":true,"lardal.no":true,"lerdal.no":true,"xn--lrdal-sra.no":true,"lodingen.no":true,"xn--ldingen-q1a.no":true,"lorenskog.no":true,"xn--lrenskog-54a.no":true,"loten.no":true,"xn--lten-gra.no":true,"malvik.no":true,"masoy.no":true,"xn--msy-ula0h.no":true,"muosat.no":true,"xn--muost-0qa.no":true,"mandal.no":true,"marker.no":true,"marnardal.no":true,"masfjorden.no":true,"meland.no":true,"meldal.no":true,"melhus.no":true,"meloy.no":true,"xn--mely-ira.no":true,"meraker.no":true,"xn--merker-kua.no":true,"moareke.no":true,"xn--moreke-jua.no":true,"midsund.no":true,"midtre-gauldal.no":true,"modalen.no":true,"modum.no":true,"molde.no":true,"moskenes.no":true,"moss.no":true,"mosvik.no":true,"malselv.no":true,"xn--mlselv-iua.no":true,"malatvuopmi.no":true,"xn--mlatvuopmi-s4a.no":true,"namdalseid.no":true,"aejrie.no":true,"namsos.no":true,"namsskogan.no":true,"naamesjevuemie.no":true,"xn--nmesjevuemie-tcba.no":true,"laakesvuemie.no":true,"nannestad.no":true,"narvik.no":true,"narviika.no":true,"naustdal.no":true,"nedre-eiker.no":true,"nes.akershus.no":true,"nes.buskerud.no":true,"nesna.no":true,"nesodden.no":true,"nesseby.no":true,"unjarga.no":true,"xn--unjrga-rta.no":true,"nesset.no":true,"nissedal.no":true,"nittedal.no":true,"nord-aurdal.no":true,"nord-fron.no":true,"nord-odal.no":true,"norddal.no":true,"nordkapp.no":true,"davvenjarga.no":true,"xn--davvenjrga-y4a.no":true,"nordre-land.no":true,"nordreisa.no":true,"raisa.no":true,"xn--risa-5na.no":true,"nore-og-uvdal.no":true,"notodden.no":true,"naroy.no":true,"xn--nry-yla5g.no":true,"notteroy.no":true,"xn--nttery-byae.no":true,"odda.no":true,"oksnes.no":true,"xn--ksnes-uua.no":true,"oppdal.no":true,"oppegard.no":true,"xn--oppegrd-ixa.no":true,"orkdal.no":true,"orland.no":true,"xn--rland-uu
a.no":true,"orskog.no":true,"xn--rskog-uua.no":true,"orsta.no":true,"xn--rsta-fra.no":true,"os.hedmark.no":true,"os.hordaland.no":true,"osen.no":true,"osteroy.no":true,"xn--ostery-fya.no":true,"ostre-toten.no":true,"xn--stre-toten-zcb.no":true,"overhalla.no":true,"ovre-eiker.no":true,"xn--vre-eiker-k8a.no":true,"oyer.no":true,"xn--yer-zna.no":true,"oygarden.no":true,"xn--ygarden-p1a.no":true,"oystre-slidre.no":true,"xn--ystre-slidre-ujb.no":true,"porsanger.no":true,"porsangu.no":true,"xn--porsgu-sta26f.no":true,"porsgrunn.no":true,"radoy.no":true,"xn--rady-ira.no":true,"rakkestad.no":true,"rana.no":true,"ruovat.no":true,"randaberg.no":true,"rauma.no":true,"rendalen.no":true,"rennebu.no":true,"rennesoy.no":true,"xn--rennesy-v1a.no":true,"rindal.no":true,"ringebu.no":true,"ringerike.no":true,"ringsaker.no":true,"rissa.no":true,"risor.no":true,"xn--risr-ira.no":true,"roan.no":true,"rollag.no":true,"rygge.no":true,"ralingen.no":true,"xn--rlingen-mxa.no":true,"rodoy.no":true,"xn--rdy-0nab.no":true,"romskog.no":true,"xn--rmskog-bya.no":true,"roros.no":true,"xn--rros-gra.no":true,"rost.no":true,"xn--rst-0na.no":true,"royken.no":true,"xn--ryken-vua.no":true,"royrvik.no":true,"xn--ryrvik-bya.no":true,"rade.no":true,"xn--rde-ula.no":true,"salangen.no":true,"siellak.no":true,"saltdal.no":true,"salat.no":true,"xn--slt-elab.no":true,"xn--slat-5na.no":true,"samnanger.no":true,"sande.more-og-romsdal.no":true,"sande.xn--mre-og-romsdal-qqb.no":true,"sande.vestfold.no":true,"sandefjord.no":true,"sandnes.no":true,"sandoy.no":true,"xn--sandy-yua.no":true,"sarpsborg.no":true,"sauda.no":true,"sauherad.no":true,"sel.no":true,"selbu.no":true,"selje.no":true,"seljord.no":true,"sigdal.no":true,"siljan.no":true,"sirdal.no":true,"skaun.no":true,"skedsmo.no":true,"ski.no":true,"skien.no":true,"skiptvet.no":true,"skjervoy.no":true,"xn--skjervy-v1a.no":true,"skierva.no":true,"xn--skierv-uta.no":true,"skjak.no":true,"xn--skjk-soa.no":true,"skodje.no":true,"skanland.no":true,"xn--sknland-fxa.no":true,"skanit.no":true,"xn--sknit-yqa.no":true,"smola.no":true,"xn--smla-hra.no":true,"snillfjord.no":true,"snasa.no":true,"xn--snsa-roa.no":true,"snoasa.no":true,"snaase.no":true,"xn--snase-nra.no":true,"sogndal.no":true,"sokndal.no":true,"sola.no":true,"solund.no":true,"songdalen.no":true,"sortland.no":true,"spydeberg.no":true,"stange.no":true,"stavanger.no":true,"steigen.no":true,"steinkjer.no":true,"stjordal.no":true,"xn--stjrdal-s1a.no":true,"stokke.no":true,"stor-elvdal.no":true,"stord.no":true,"stordal.no":true,"storfjord.no":true,"omasvuotna.no":true,"strand.no":true,"stranda.no":true,"stryn.no":true,"sula.no":true,"suldal.no":true,"sund.no":true,"sunndal.no":true,"surnadal.no":true,"sveio.no":true,"svelvik.no":true,"sykkylven.no":true,"sogne.no":true,"xn--sgne-gra.no":true,"somna.no":true,"xn--smna-gra.no":true,"sondre-land.no":true,"xn--sndre-land-0cb.no":true,"sor-aurdal.no":true,"xn--sr-aurdal-l8a.no":true,"sor-fron.no":true,"xn--sr-fron-q1a.no":true,"sor-odal.no":true,"xn--sr-odal-q1a.no":true,"sor-varanger.no":true,"xn--sr-varanger-ggb.no":true,"matta-varjjat.no":true,"xn--mtta-vrjjat-k7af.no":true,"sorfold.no":true,"xn--srfold-bya.no":true,"sorreisa.no":true,"xn--srreisa-q1a.no":true,"sorum.no":true,"xn--srum-gra.no":true,"tana.no":true,"deatnu.no":true,"time.no":true,"tingvoll.no":true,"tinn.no":true,"tjeldsund.no":true,"dielddanuorri.no":true,"tjome.no":true,"xn--tjme-hra.no":true,"tokke.no":true,"tolga.no":true,"torsken.no":true,"tranoy.no":true,"xn--trany-yua.no":true,"tromso.no":true,"xn--troms-zua.no":true,"trom
sa.no":true,"romsa.no":true,"trondheim.no":true,"troandin.no":true,"trysil.no":true,"trana.no":true,"xn--trna-woa.no":true,"trogstad.no":true,"xn--trgstad-r1a.no":true,"tvedestrand.no":true,"tydal.no":true,"tynset.no":true,"tysfjord.no":true,"divtasvuodna.no":true,"divttasvuotna.no":true,"tysnes.no":true,"tysvar.no":true,"xn--tysvr-vra.no":true,"tonsberg.no":true,"xn--tnsberg-q1a.no":true,"ullensaker.no":true,"ullensvang.no":true,"ulvik.no":true,"utsira.no":true,"vadso.no":true,"xn--vads-jra.no":true,"cahcesuolo.no":true,"xn--hcesuolo-7ya35b.no":true,"vaksdal.no":true,"valle.no":true,"vang.no":true,"vanylven.no":true,"vardo.no":true,"xn--vard-jra.no":true,"varggat.no":true,"xn--vrggt-xqad.no":true,"vefsn.no":true,"vaapste.no":true,"vega.no":true,"vegarshei.no":true,"xn--vegrshei-c0a.no":true,"vennesla.no":true,"verdal.no":true,"verran.no":true,"vestby.no":true,"vestnes.no":true,"vestre-slidre.no":true,"vestre-toten.no":true,"vestvagoy.no":true,"xn--vestvgy-ixa6o.no":true,"vevelstad.no":true,"vik.no":true,"vikna.no":true,"vindafjord.no":true,"volda.no":true,"voss.no":true,"varoy.no":true,"xn--vry-yla5g.no":true,"vagan.no":true,"xn--vgan-qoa.no":true,"voagat.no":true,"vagsoy.no":true,"xn--vgsy-qoa0j.no":true,"vaga.no":true,"xn--vg-yiab.no":true,"valer.ostfold.no":true,"xn--vler-qoa.xn--stfold-9xa.no":true,"valer.hedmark.no":true,"xn--vler-qoa.hedmark.no":true,"*.np":true,"nr":true,"biz.nr":true,"info.nr":true,"gov.nr":true,"edu.nr":true,"org.nr":true,"net.nr":true,"com.nr":true,"nu":true,"nz":true,"ac.nz":true,"co.nz":true,"cri.nz":true,"geek.nz":true,"gen.nz":true,"govt.nz":true,"health.nz":true,"iwi.nz":true,"kiwi.nz":true,"maori.nz":true,"mil.nz":true,"xn--mori-qsa.nz":true,"net.nz":true,"org.nz":true,"parliament.nz":true,"school.nz":true,"om":true,"co.om":true,"com.om":true,"edu.om":true,"gov.om":true,"med.om":true,"museum.om":true,"net.om":true,"org.om":true,"pro.om":true,"org":true,"pa":true,"ac.pa":true,"gob.pa":true,"com.pa":true,"org.pa":true,"sld.pa":true,"edu.pa":true,"net.pa":true,"ing.pa":true,"abo.pa":true,"med.pa":true,"nom.pa":true,"pe":true,"edu.pe":true,"gob.pe":true,"nom.pe":true,"mil.pe":true,"org.pe":true,"com.pe":true,"net.pe":true,"pf":true,"com.pf":true,"org.pf":true,"edu.pf":true,"*.pg":true,"ph":true,"com.ph":true,"net.ph":true,"org.ph":true,"gov.ph":true,"edu.ph":true,"ngo.ph":true,"mil.ph":true,"i.ph":true,"pk":true,"com.pk":true,"net.pk":true,"edu.pk":true,"org.pk":true,"fam.pk":true,"biz.pk":true,"web.pk":true,"gov.pk":true,"gob.pk":true,"gok.pk":true,"gon.pk":true,"gop.pk":true,"gos.pk":true,"info.pk":true,"pl":true,"com.pl":true,"net.pl":true,"org.pl":true,"aid.pl":true,"agro.pl":true,"atm.pl":true,"auto.pl":true,"biz.pl":true,"edu.pl":true,"gmina.pl":true,"gsm.pl":true,"info.pl":true,"mail.pl":true,"miasta.pl":true,"media.pl":true,"mil.pl":true,"nieruchomosci.pl":true,"nom.pl":true,"pc.pl":true,"powiat.pl":true,"priv.pl":true,"realestate.pl":true,"rel.pl":true,"sex.pl":true,"shop.pl":true,"sklep.pl":true,"sos.pl":true,"szkola.pl":true,"targi.pl":true,"tm.pl":true,"tourism.pl":true,"travel.pl":true,"turystyka.pl":true,"gov.pl":true,"ap.gov.pl":true,"ic.gov.pl":true,"is.gov.pl":true,"us.gov.pl":true,"kmpsp.gov.pl":true,"kppsp.gov.pl":true,"kwpsp.gov.pl":true,"psp.gov.pl":true,"wskr.gov.pl":true,"kwp.gov.pl":true,"mw.gov.pl":true,"ug.gov.pl":true,"um.gov.pl":true,"umig.gov.pl":true,"ugim.gov.pl":true,"upow.gov.pl":true,"uw.gov.pl":true,"starostwo.gov.pl":true,"pa.gov.pl":true,"po.gov.pl":true,"psse.gov.pl":true,"pup.gov.pl":true,"rzgw.gov.pl":true,"sa.gov.pl":tru
e,"so.gov.pl":true,"sr.gov.pl":true,"wsa.gov.pl":true,"sko.gov.pl":true,"uzs.gov.pl":true,"wiih.gov.pl":true,"winb.gov.pl":true,"pinb.gov.pl":true,"wios.gov.pl":true,"witd.gov.pl":true,"wzmiuw.gov.pl":true,"piw.gov.pl":true,"wiw.gov.pl":true,"griw.gov.pl":true,"wif.gov.pl":true,"oum.gov.pl":true,"sdn.gov.pl":true,"zp.gov.pl":true,"uppo.gov.pl":true,"mup.gov.pl":true,"wuoz.gov.pl":true,"konsulat.gov.pl":true,"oirm.gov.pl":true,"augustow.pl":true,"babia-gora.pl":true,"bedzin.pl":true,"beskidy.pl":true,"bialowieza.pl":true,"bialystok.pl":true,"bielawa.pl":true,"bieszczady.pl":true,"boleslawiec.pl":true,"bydgoszcz.pl":true,"bytom.pl":true,"cieszyn.pl":true,"czeladz.pl":true,"czest.pl":true,"dlugoleka.pl":true,"elblag.pl":true,"elk.pl":true,"glogow.pl":true,"gniezno.pl":true,"gorlice.pl":true,"grajewo.pl":true,"ilawa.pl":true,"jaworzno.pl":true,"jelenia-gora.pl":true,"jgora.pl":true,"kalisz.pl":true,"kazimierz-dolny.pl":true,"karpacz.pl":true,"kartuzy.pl":true,"kaszuby.pl":true,"katowice.pl":true,"kepno.pl":true,"ketrzyn.pl":true,"klodzko.pl":true,"kobierzyce.pl":true,"kolobrzeg.pl":true,"konin.pl":true,"konskowola.pl":true,"kutno.pl":true,"lapy.pl":true,"lebork.pl":true,"legnica.pl":true,"lezajsk.pl":true,"limanowa.pl":true,"lomza.pl":true,"lowicz.pl":true,"lubin.pl":true,"lukow.pl":true,"malbork.pl":true,"malopolska.pl":true,"mazowsze.pl":true,"mazury.pl":true,"mielec.pl":true,"mielno.pl":true,"mragowo.pl":true,"naklo.pl":true,"nowaruda.pl":true,"nysa.pl":true,"olawa.pl":true,"olecko.pl":true,"olkusz.pl":true,"olsztyn.pl":true,"opoczno.pl":true,"opole.pl":true,"ostroda.pl":true,"ostroleka.pl":true,"ostrowiec.pl":true,"ostrowwlkp.pl":true,"pila.pl":true,"pisz.pl":true,"podhale.pl":true,"podlasie.pl":true,"polkowice.pl":true,"pomorze.pl":true,"pomorskie.pl":true,"prochowice.pl":true,"pruszkow.pl":true,"przeworsk.pl":true,"pulawy.pl":true,"radom.pl":true,"rawa-maz.pl":true,"rybnik.pl":true,"rzeszow.pl":true,"sanok.pl":true,"sejny.pl":true,"slask.pl":true,"slupsk.pl":true,"sosnowiec.pl":true,"stalowa-wola.pl":true,"skoczow.pl":true,"starachowice.pl":true,"stargard.pl":true,"suwalki.pl":true,"swidnica.pl":true,"swiebodzin.pl":true,"swinoujscie.pl":true,"szczecin.pl":true,"szczytno.pl":true,"tarnobrzeg.pl":true,"tgory.pl":true,"turek.pl":true,"tychy.pl":true,"ustka.pl":true,"walbrzych.pl":true,"warmia.pl":true,"warszawa.pl":true,"waw.pl":true,"wegrow.pl":true,"wielun.pl":true,"wlocl.pl":true,"wloclawek.pl":true,"wodzislaw.pl":true,"wolomin.pl":true,"wroclaw.pl":true,"zachpomor.pl":true,"zagan.pl":true,"zarow.pl":true,"zgora.pl":true,"zgorzelec.pl":true,"pm":true,"pn":true,"gov.pn":true,"co.pn":true,"org.pn":true,"edu.pn":true,"net.pn":true,"post":true,"pr":true,"com.pr":true,"net.pr":true,"org.pr":true,"gov.pr":true,"edu.pr":true,"isla.pr":true,"pro.pr":true,"biz.pr":true,"info.pr":true,"name.pr":true,"est.pr":true,"prof.pr":true,"ac.pr":true,"pro":true,"aca.pro":true,"bar.pro":true,"cpa.pro":true,"jur.pro":true,"law.pro":true,"med.pro":true,"eng.pro":true,"ps":true,"edu.ps":true,"gov.ps":true,"sec.ps":true,"plo.ps":true,"com.ps":true,"org.ps":true,"net.ps":true,"pt":true,"net.pt":true,"gov.pt":true,"org.pt":true,"edu.pt":true,"int.pt":true,"publ.pt":true,"com.pt":true,"nome.pt":true,"pw":true,"co.pw":true,"ne.pw":true,"or.pw":true,"ed.pw":true,"go.pw":true,"belau.pw":true,"py":true,"com.py":true,"coop.py":true,"edu.py":true,"gov.py":true,"mil.py":true,"net.py":true,"org.py":true,"qa":true,"com.qa":true,"edu.qa":true,"gov.qa":true,"mil.qa":true,"name.qa":true,"net.qa":true,"org.qa":true,"sch.qa":tru
e,"re":true,"com.re":true,"asso.re":true,"nom.re":true,"ro":true,"com.ro":true,"org.ro":true,"tm.ro":true,"nt.ro":true,"nom.ro":true,"info.ro":true,"rec.ro":true,"arts.ro":true,"firm.ro":true,"store.ro":true,"www.ro":true,"rs":true,"co.rs":true,"org.rs":true,"edu.rs":true,"ac.rs":true,"gov.rs":true,"in.rs":true,"ru":true,"ac.ru":true,"com.ru":true,"edu.ru":true,"int.ru":true,"net.ru":true,"org.ru":true,"pp.ru":true,"adygeya.ru":true,"altai.ru":true,"amur.ru":true,"arkhangelsk.ru":true,"astrakhan.ru":true,"bashkiria.ru":true,"belgorod.ru":true,"bir.ru":true,"bryansk.ru":true,"buryatia.ru":true,"cbg.ru":true,"chel.ru":true,"chelyabinsk.ru":true,"chita.ru":true,"chukotka.ru":true,"chuvashia.ru":true,"dagestan.ru":true,"dudinka.ru":true,"e-burg.ru":true,"grozny.ru":true,"irkutsk.ru":true,"ivanovo.ru":true,"izhevsk.ru":true,"jar.ru":true,"joshkar-ola.ru":true,"kalmykia.ru":true,"kaluga.ru":true,"kamchatka.ru":true,"karelia.ru":true,"kazan.ru":true,"kchr.ru":true,"kemerovo.ru":true,"khabarovsk.ru":true,"khakassia.ru":true,"khv.ru":true,"kirov.ru":true,"koenig.ru":true,"komi.ru":true,"kostroma.ru":true,"krasnoyarsk.ru":true,"kuban.ru":true,"kurgan.ru":true,"kursk.ru":true,"lipetsk.ru":true,"magadan.ru":true,"mari.ru":true,"mari-el.ru":true,"marine.ru":true,"mordovia.ru":true,"msk.ru":true,"murmansk.ru":true,"nalchik.ru":true,"nnov.ru":true,"nov.ru":true,"novosibirsk.ru":true,"nsk.ru":true,"omsk.ru":true,"orenburg.ru":true,"oryol.ru":true,"palana.ru":true,"penza.ru":true,"perm.ru":true,"ptz.ru":true,"rnd.ru":true,"ryazan.ru":true,"sakhalin.ru":true,"samara.ru":true,"saratov.ru":true,"simbirsk.ru":true,"smolensk.ru":true,"spb.ru":true,"stavropol.ru":true,"stv.ru":true,"surgut.ru":true,"tambov.ru":true,"tatarstan.ru":true,"tom.ru":true,"tomsk.ru":true,"tsaritsyn.ru":true,"tsk.ru":true,"tula.ru":true,"tuva.ru":true,"tver.ru":true,"tyumen.ru":true,"udm.ru":true,"udmurtia.ru":true,"ulan-ude.ru":true,"vladikavkaz.ru":true,"vladimir.ru":true,"vladivostok.ru":true,"volgograd.ru":true,"vologda.ru":true,"voronezh.ru":true,"vrn.ru":true,"vyatka.ru":true,"yakutia.ru":true,"yamal.ru":true,"yaroslavl.ru":true,"yekaterinburg.ru":true,"yuzhno-sakhalinsk.ru":true,"amursk.ru":true,"baikal.ru":true,"cmw.ru":true,"fareast.ru":true,"jamal.ru":true,"kms.ru":true,"k-uralsk.ru":true,"kustanai.ru":true,"kuzbass.ru":true,"magnitka.ru":true,"mytis.ru":true,"nakhodka.ru":true,"nkz.ru":true,"norilsk.ru":true,"oskol.ru":true,"pyatigorsk.ru":true,"rubtsovsk.ru":true,"snz.ru":true,"syzran.ru":true,"vdonsk.ru":true,"zgrad.ru":true,"gov.ru":true,"mil.ru":true,"test.ru":true,"rw":true,"gov.rw":true,"net.rw":true,"edu.rw":true,"ac.rw":true,"com.rw":true,"co.rw":true,"int.rw":true,"mil.rw":true,"gouv.rw":true,"sa":true,"com.sa":true,"net.sa":true,"org.sa":true,"gov.sa":true,"med.sa":true,"pub.sa":true,"edu.sa":true,"sch.sa":true,"sb":true,"com.sb":true,"edu.sb":true,"gov.sb":true,"net.sb":true,"org.sb":true,"sc":true,"com.sc":true,"gov.sc":true,"net.sc":true,"org.sc":true,"edu.sc":true,"sd":true,"com.sd":true,"net.sd":true,"org.sd":true,"edu.sd":true,"med.sd":true,"tv.sd":true,"gov.sd":true,"info.sd":true,"se":true,"a.se":true,"ac.se":true,"b.se":true,"bd.se":true,"brand.se":true,"c.se":true,"d.se":true,"e.se":true,"f.se":true,"fh.se":true,"fhsk.se":true,"fhv.se":true,"g.se":true,"h.se":true,"i.se":true,"k.se":true,"komforb.se":true,"kommunalforbund.se":true,"komvux.se":true,"l.se":true,"lanbib.se":true,"m.se":true,"n.se":true,"naturbruksgymn.se":true,"o.se":true,"org.se":true,"p.se":true,"parti.se":true,"pp.se":true,"press.se":true,"
r.se":true,"s.se":true,"t.se":true,"tm.se":true,"u.se":true,"w.se":true,"x.se":true,"y.se":true,"z.se":true,"sg":true,"com.sg":true,"net.sg":true,"org.sg":true,"gov.sg":true,"edu.sg":true,"per.sg":true,"sh":true,"com.sh":true,"net.sh":true,"gov.sh":true,"org.sh":true,"mil.sh":true,"si":true,"sj":true,"sk":true,"sl":true,"com.sl":true,"net.sl":true,"edu.sl":true,"gov.sl":true,"org.sl":true,"sm":true,"sn":true,"art.sn":true,"com.sn":true,"edu.sn":true,"gouv.sn":true,"org.sn":true,"perso.sn":true,"univ.sn":true,"so":true,"com.so":true,"net.so":true,"org.so":true,"sr":true,"st":true,"co.st":true,"com.st":true,"consulado.st":true,"edu.st":true,"embaixada.st":true,"gov.st":true,"mil.st":true,"net.st":true,"org.st":true,"principe.st":true,"saotome.st":true,"store.st":true,"su":true,"adygeya.su":true,"arkhangelsk.su":true,"balashov.su":true,"bashkiria.su":true,"bryansk.su":true,"dagestan.su":true,"grozny.su":true,"ivanovo.su":true,"kalmykia.su":true,"kaluga.su":true,"karelia.su":true,"khakassia.su":true,"krasnodar.su":true,"kurgan.su":true,"lenug.su":true,"mordovia.su":true,"msk.su":true,"murmansk.su":true,"nalchik.su":true,"nov.su":true,"obninsk.su":true,"penza.su":true,"pokrovsk.su":true,"sochi.su":true,"spb.su":true,"togliatti.su":true,"troitsk.su":true,"tula.su":true,"tuva.su":true,"vladikavkaz.su":true,"vladimir.su":true,"vologda.su":true,"sv":true,"com.sv":true,"edu.sv":true,"gob.sv":true,"org.sv":true,"red.sv":true,"sx":true,"gov.sx":true,"sy":true,"edu.sy":true,"gov.sy":true,"net.sy":true,"mil.sy":true,"com.sy":true,"org.sy":true,"sz":true,"co.sz":true,"ac.sz":true,"org.sz":true,"tc":true,"td":true,"tel":true,"tf":true,"tg":true,"th":true,"ac.th":true,"co.th":true,"go.th":true,"in.th":true,"mi.th":true,"net.th":true,"or.th":true,"tj":true,"ac.tj":true,"biz.tj":true,"co.tj":true,"com.tj":true,"edu.tj":true,"go.tj":true,"gov.tj":true,"int.tj":true,"mil.tj":true,"name.tj":true,"net.tj":true,"nic.tj":true,"org.tj":true,"test.tj":true,"web.tj":true,"tk":true,"tl":true,"gov.tl":true,"tm":true,"com.tm":true,"co.tm":true,"org.tm":true,"net.tm":true,"nom.tm":true,"gov.tm":true,"mil.tm":true,"edu.tm":true,"tn":true,"com.tn":true,"ens.tn":true,"fin.tn":true,"gov.tn":true,"ind.tn":true,"intl.tn":true,"nat.tn":true,"net.tn":true,"org.tn":true,"info.tn":true,"perso.tn":true,"tourism.tn":true,"edunet.tn":true,"rnrt.tn":true,"rns.tn":true,"rnu.tn":true,"mincom.tn":true,"agrinet.tn":true,"defense.tn":true,"turen.tn":true,"to":true,"com.to":true,"gov.to":true,"net.to":true,"org.to":true,"edu.to":true,"mil.to":true,"tp":true,"tr":true,"com.tr":true,"info.tr":true,"biz.tr":true,"net.tr":true,"org.tr":true,"web.tr":true,"gen.tr":true,"tv.tr":true,"av.tr":true,"dr.tr":true,"bbs.tr":true,"name.tr":true,"tel.tr":true,"gov.tr":true,"bel.tr":true,"pol.tr":true,"mil.tr":true,"k12.tr":true,"edu.tr":true,"kep.tr":true,"nc.tr":true,"gov.nc.tr":true,"travel":true,"tt":true,"co.tt":true,"com.tt":true,"org.tt":true,"net.tt":true,"biz.tt":true,"info.tt":true,"pro.tt":true,"int.tt":true,"coop.tt":true,"jobs.tt":true,"mobi.tt":true,"travel.tt":true,"museum.tt":true,"aero.tt":true,"name.tt":true,"gov.tt":true,"edu.tt":true,"tv":true,"tw":true,"edu.tw":true,"gov.tw":true,"mil.tw":true,"com.tw":true,"net.tw":true,"org.tw":true,"idv.tw":true,"game.tw":true,"ebiz.tw":true,"club.tw":true,"xn--zf0ao64a.tw":true,"xn--uc0atv.tw":true,"xn--czrw28b.tw":true,"tz":true,"ac.tz":true,"co.tz":true,"go.tz":true,"hotel.tz":true,"info.tz":true,"me.tz":true,"mil.tz":true,"mobi.tz":true,"ne.tz":true,"or.tz":true,"sc.tz":true,"tv.tz":true,"ua":tru
e,"com.ua":true,"edu.ua":true,"gov.ua":true,"in.ua":true,"net.ua":true,"org.ua":true,"cherkassy.ua":true,"cherkasy.ua":true,"chernigov.ua":true,"chernihiv.ua":true,"chernivtsi.ua":true,"chernovtsy.ua":true,"ck.ua":true,"cn.ua":true,"cr.ua":true,"crimea.ua":true,"cv.ua":true,"dn.ua":true,"dnepropetrovsk.ua":true,"dnipropetrovsk.ua":true,"dominic.ua":true,"donetsk.ua":true,"dp.ua":true,"if.ua":true,"ivano-frankivsk.ua":true,"kh.ua":true,"kharkiv.ua":true,"kharkov.ua":true,"kherson.ua":true,"khmelnitskiy.ua":true,"khmelnytskyi.ua":true,"kiev.ua":true,"kirovograd.ua":true,"km.ua":true,"kr.ua":true,"krym.ua":true,"ks.ua":true,"kv.ua":true,"kyiv.ua":true,"lg.ua":true,"lt.ua":true,"lugansk.ua":true,"lutsk.ua":true,"lv.ua":true,"lviv.ua":true,"mk.ua":true,"mykolaiv.ua":true,"nikolaev.ua":true,"od.ua":true,"odesa.ua":true,"odessa.ua":true,"pl.ua":true,"poltava.ua":true,"rivne.ua":true,"rovno.ua":true,"rv.ua":true,"sb.ua":true,"sebastopol.ua":true,"sevastopol.ua":true,"sm.ua":true,"sumy.ua":true,"te.ua":true,"ternopil.ua":true,"uz.ua":true,"uzhgorod.ua":true,"vinnica.ua":true,"vinnytsia.ua":true,"vn.ua":true,"volyn.ua":true,"yalta.ua":true,"zaporizhzhe.ua":true,"zaporizhzhia.ua":true,"zhitomir.ua":true,"zhytomyr.ua":true,"zp.ua":true,"zt.ua":true,"ug":true,"co.ug":true,"or.ug":true,"ac.ug":true,"sc.ug":true,"go.ug":true,"ne.ug":true,"com.ug":true,"org.ug":true,"uk":true,"ac.uk":true,"co.uk":true,"gov.uk":true,"ltd.uk":true,"me.uk":true,"net.uk":true,"nhs.uk":true,"org.uk":true,"plc.uk":true,"police.uk":true,"*.sch.uk":true,"us":true,"dni.us":true,"fed.us":true,"isa.us":true,"kids.us":true,"nsn.us":true,"ak.us":true,"al.us":true,"ar.us":true,"as.us":true,"az.us":true,"ca.us":true,"co.us":true,"ct.us":true,"dc.us":true,"de.us":true,"fl.us":true,"ga.us":true,"gu.us":true,"hi.us":true,"ia.us":true,"id.us":true,"il.us":true,"in.us":true,"ks.us":true,"ky.us":true,"la.us":true,"ma.us":true,"md.us":true,"me.us":true,"mi.us":true,"mn.us":true,"mo.us":true,"ms.us":true,"mt.us":true,"nc.us":true,"nd.us":true,"ne.us":true,"nh.us":true,"nj.us":true,"nm.us":true,"nv.us":true,"ny.us":true,"oh.us":true,"ok.us":true,"or.us":true,"pa.us":true,"pr.us":true,"ri.us":true,"sc.us":true,"sd.us":true,"tn.us":true,"tx.us":true,"ut.us":true,"vi.us":true,"vt.us":true,"va.us":true,"wa.us":true,"wi.us":true,"wv.us":true,"wy.us":true,"k12.ak.us":true,"k12.al.us":true,"k12.ar.us":true,"k12.as.us":true,"k12.az.us":true,"k12.ca.us":true,"k12.co.us":true,"k12.ct.us":true,"k12.dc.us":true,"k12.de.us":true,"k12.fl.us":true,"k12.ga.us":true,"k12.gu.us":true,"k12.ia.us":true,"k12.id.us":true,"k12.il.us":true,"k12.in.us":true,"k12.ks.us":true,"k12.ky.us":true,"k12.la.us":true,"k12.ma.us":true,"k12.md.us":true,"k12.me.us":true,"k12.mi.us":true,"k12.mn.us":true,"k12.mo.us":true,"k12.ms.us":true,"k12.mt.us":true,"k12.nc.us":true,"k12.ne.us":true,"k12.nh.us":true,"k12.nj.us":true,"k12.nm.us":true,"k12.nv.us":true,"k12.ny.us":true,"k12.oh.us":true,"k12.ok.us":true,"k12.or.us":true,"k12.pa.us":true,"k12.pr.us":true,"k12.ri.us":true,"k12.sc.us":true,"k12.tn.us":true,"k12.tx.us":true,"k12.ut.us":true,"k12.vi.us":true,"k12.vt.us":true,"k12.va.us":true,"k12.wa.us":true,"k12.wi.us":true,"k12.wy.us":true,"cc.ak.us":true,"cc.al.us":true,"cc.ar.us":true,"cc.as.us":true,"cc.az.us":true,"cc.ca.us":true,"cc.co.us":true,"cc.ct.us":true,"cc.dc.us":true,"cc.de.us":true,"cc.fl.us":true,"cc.ga.us":true,"cc.gu.us":true,"cc.hi.us":true,"cc.ia.us":true,"cc.id.us":true,"cc.il.us":true,"cc.in.us":true,"cc.ks.us":true,"cc.ky.us":true,"cc.la.us":true,"cc.ma.us":true,"
cc.md.us":true,"cc.me.us":true,"cc.mi.us":true,"cc.mn.us":true,"cc.mo.us":true,"cc.ms.us":true,"cc.mt.us":true,"cc.nc.us":true,"cc.nd.us":true,"cc.ne.us":true,"cc.nh.us":true,"cc.nj.us":true,"cc.nm.us":true,"cc.nv.us":true,"cc.ny.us":true,"cc.oh.us":true,"cc.ok.us":true,"cc.or.us":true,"cc.pa.us":true,"cc.pr.us":true,"cc.ri.us":true,"cc.sc.us":true,"cc.sd.us":true,"cc.tn.us":true,"cc.tx.us":true,"cc.ut.us":true,"cc.vi.us":true,"cc.vt.us":true,"cc.va.us":true,"cc.wa.us":true,"cc.wi.us":true,"cc.wv.us":true,"cc.wy.us":true,"lib.ak.us":true,"lib.al.us":true,"lib.ar.us":true,"lib.as.us":true,"lib.az.us":true,"lib.ca.us":true,"lib.co.us":true,"lib.ct.us":true,"lib.dc.us":true,"lib.de.us":true,"lib.fl.us":true,"lib.ga.us":true,"lib.gu.us":true,"lib.hi.us":true,"lib.ia.us":true,"lib.id.us":true,"lib.il.us":true,"lib.in.us":true,"lib.ks.us":true,"lib.ky.us":true,"lib.la.us":true,"lib.ma.us":true,"lib.md.us":true,"lib.me.us":true,"lib.mi.us":true,"lib.mn.us":true,"lib.mo.us":true,"lib.ms.us":true,"lib.mt.us":true,"lib.nc.us":true,"lib.nd.us":true,"lib.ne.us":true,"lib.nh.us":true,"lib.nj.us":true,"lib.nm.us":true,"lib.nv.us":true,"lib.ny.us":true,"lib.oh.us":true,"lib.ok.us":true,"lib.or.us":true,"lib.pa.us":true,"lib.pr.us":true,"lib.ri.us":true,"lib.sc.us":true,"lib.sd.us":true,"lib.tn.us":true,"lib.tx.us":true,"lib.ut.us":true,"lib.vi.us":true,"lib.vt.us":true,"lib.va.us":true,"lib.wa.us":true,"lib.wi.us":true,"lib.wy.us":true,"pvt.k12.ma.us":true,"chtr.k12.ma.us":true,"paroch.k12.ma.us":true,"uy":true,"com.uy":true,"edu.uy":true,"gub.uy":true,"mil.uy":true,"net.uy":true,"org.uy":true,"uz":true,"co.uz":true,"com.uz":true,"net.uz":true,"org.uz":true,"va":true,"vc":true,"com.vc":true,"net.vc":true,"org.vc":true,"gov.vc":true,"mil.vc":true,"edu.vc":true,"ve":true,"arts.ve":true,"co.ve":true,"com.ve":true,"e12.ve":true,"edu.ve":true,"firm.ve":true,"gob.ve":true,"gov.ve":true,"info.ve":true,"int.ve":true,"mil.ve":true,"net.ve":true,"org.ve":true,"rec.ve":true,"store.ve":true,"tec.ve":true,"web.ve":true,"vg":true,"vi":true,"co.vi":true,"com.vi":true,"k12.vi":true,"net.vi":true,"org.vi":true,"vn":true,"com.vn":true,"net.vn":true,"org.vn":true,"edu.vn":true,"gov.vn":true,"int.vn":true,"ac.vn":true,"biz.vn":true,"info.vn":true,"name.vn":true,"pro.vn":true,"health.vn":true,"vu":true,"com.vu":true,"edu.vu":true,"net.vu":true,"org.vu":true,"wf":true,"ws":true,"com.ws":true,"net.ws":true,"org.ws":true,"gov.ws":true,"edu.ws":true,"yt":true,"xn--mgbaam7a8h":true,"xn--y9a3aq":true,"xn--54b7fta0cc":true,"xn--90ais":true,"xn--fiqs8s":true,"xn--fiqz9s":true,"xn--lgbbat1ad8j":true,"xn--wgbh1c":true,"xn--node":true,"xn--qxam":true,"xn--j6w193g":true,"xn--h2brj9c":true,"xn--mgbbh1a71e":true,"xn--fpcrj9c3d":true,"xn--gecrj9c":true,"xn--s9brj9c":true,"xn--45brj9c":true,"xn--xkc2dl3a5ee0h":true,"xn--mgba3a4f16a":true,"xn--mgba3a4fra":true,"xn--mgbtx2b":true,"xn--mgbayh7gpa":true,"xn--3e0b707e":true,"xn--80ao21a":true,"xn--fzc2c9e2c":true,"xn--xkc2al3hye2a":true,"xn--mgbc0a9azcg":true,"xn--d1alf":true,"xn--l1acc":true,"xn--mix891f":true,"xn--mix082f":true,"xn--mgbx4cd0ab":true,"xn--mgb9awbf":true,"xn--mgbai9azgqp6j":true,"xn--mgbai9a5eva00b":true,"xn--ygbi2ammx":true,"xn--90a3ac":true,"xn--o1ac.xn--90a3ac":true,"xn--c1avg.xn--90a3ac":true,"xn--90azh.xn--90a3ac":true,"xn--d1at.xn--90a3ac":true,"xn--o1ach.xn--90a3ac":true,"xn--80au.xn--90a3ac":true,"xn--p1ai":true,"xn--wgbl6a":true,"xn--mgberp4a5d4ar":true,"xn--mgberp4a5d4a87g":true,"xn--mgbqly7c0a67fbc":true,"xn--mgbqly7cvafr":true,"xn--mgbpl2fh":true,"xn--yfro4i67o":true,
"xn--clchc0ea0b2g2a9gcd":true,"xn--ogbpf8fl":true,"xn--mgbtf8fl":true,"xn--o3cw4h":true,"xn--pgbs0dh":true,"xn--kpry57d":true,"xn--kprw13d":true,"xn--nnx388a":true,"xn--j1amh":true,"xn--mgb2ddes":true,"xxx":true,"*.ye":true,"ac.za":true,"agrica.za":true,"alt.za":true,"co.za":true,"edu.za":true,"gov.za":true,"grondar.za":true,"law.za":true,"mil.za":true,"net.za":true,"ngo.za":true,"nis.za":true,"nom.za":true,"org.za":true,"school.za":true,"tm.za":true,"web.za":true,"*.zm":true,"*.zw":true,"aaa":true,"aarp":true,"abarth":true,"abb":true,"abbott":true,"abbvie":true,"abc":true,"able":true,"abogado":true,"abudhabi":true,"academy":true,"accenture":true,"accountant":true,"accountants":true,"aco":true,"active":true,"actor":true,"adac":true,"ads":true,"adult":true,"aeg":true,"aetna":true,"afamilycompany":true,"afl":true,"africa":true,"africamagic":true,"agakhan":true,"agency":true,"aig":true,"aigo":true,"airbus":true,"airforce":true,"airtel":true,"akdn":true,"alfaromeo":true,"alibaba":true,"alipay":true,"allfinanz":true,"allstate":true,"ally":true,"alsace":true,"alstom":true,"americanexpress":true,"americanfamily":true,"amex":true,"amfam":true,"amica":true,"amsterdam":true,"analytics":true,"android":true,"anquan":true,"anz":true,"aol":true,"apartments":true,"app":true,"apple":true,"aquarelle":true,"aramco":true,"archi":true,"army":true,"arte":true,"asda":true,"associates":true,"athleta":true,"attorney":true,"auction":true,"audi":true,"audible":true,"audio":true,"auspost":true,"author":true,"auto":true,"autos":true,"avianca":true,"aws":true,"axa":true,"azure":true,"baby":true,"baidu":true,"banamex":true,"bananarepublic":true,"band":true,"bank":true,"bar":true,"barcelona":true,"barclaycard":true,"barclays":true,"barefoot":true,"bargains":true,"basketball":true,"bauhaus":true,"bayern":true,"bbc":true,"bbt":true,"bbva":true,"bcg":true,"bcn":true,"beats":true,"beer":true,"bentley":true,"berlin":true,"best":true,"bestbuy":true,"bet":true,"bharti":true,"bible":true,"bid":true,"bike":true,"bing":true,"bingo":true,"bio":true,"black":true,"blackfriday":true,"blanco":true,"blockbuster":true,"blog":true,"bloomberg":true,"blue":true,"bms":true,"bmw":true,"bnl":true,"bnpparibas":true,"boats":true,"boehringer":true,"bofa":true,"bom":true,"bond":true,"boo":true,"book":true,"booking":true,"boots":true,"bosch":true,"bostik":true,"bot":true,"boutique":true,"bradesco":true,"bridgestone":true,"broadway":true,"broker":true,"brother":true,"brussels":true,"budapest":true,"bugatti":true,"build":true,"builders":true,"business":true,"buy":true,"buzz":true,"bzh":true,"cab":true,"cafe":true,"cal":true,"call":true,"calvinklein":true,"camera":true,"camp":true,"cancerresearch":true,"canon":true,"capetown":true,"capital":true,"capitalone":true,"car":true,"caravan":true,"cards":true,"care":true,"career":true,"careers":true,"cars":true,"cartier":true,"casa":true,"case":true,"caseih":true,"cash":true,"casino":true,"catering":true,"cba":true,"cbn":true,"cbre":true,"cbs":true,"ceb":true,"center":true,"ceo":true,"cern":true,"cfa":true,"cfd":true,"chanel":true,"channel":true,"chase":true,"chat":true,"cheap":true,"chintai":true,"chloe":true,"christmas":true,"chrome":true,"chrysler":true,"church":true,"cipriani":true,"circle":true,"cisco":true,"citadel":true,"citi":true,"citic":true,"city":true,"cityeats":true,"claims":true,"cleaning":true,"click":true,"clinic":true,"clothing":true,"cloud":true,"club":true,"clubmed":true,"coach":true,"codes":true,"coffee":true,"college":true,"cologne":true,"comcast":true,"commbank":true,"community":true,"co
mpany":true,"computer":true,"comsec":true,"condos":true,"construction":true,"consulting":true,"contact":true,"contractors":true,"cooking":true,"cookingchannel":true,"cool":true,"corsica":true,"country":true,"coupon":true,"coupons":true,"courses":true,"credit":true,"creditcard":true,"creditunion":true,"cricket":true,"crown":true,"crs":true,"cruises":true,"csc":true,"cuisinella":true,"cymru":true,"cyou":true,"dabur":true,"dad":true,"dance":true,"date":true,"dating":true,"datsun":true,"day":true,"dclk":true,"dds":true,"deal":true,"dealer":true,"deals":true,"degree":true,"delivery":true,"dell":true,"deloitte":true,"delta":true,"democrat":true,"dental":true,"dentist":true,"desi":true,"design":true,"dev":true,"dhl":true,"diamonds":true,"diet":true,"digital":true,"direct":true,"directory":true,"discount":true,"discover":true,"dish":true,"dnp":true,"docs":true,"dodge":true,"dog":true,"doha":true,"domains":true,"doosan":true,"dot":true,"download":true,"drive":true,"dstv":true,"dtv":true,"dubai":true,"duck":true,"dunlop":true,"duns":true,"dupont":true,"durban":true,"dvag":true,"dwg":true,"earth":true,"eat":true,"edeka":true,"education":true,"email":true,"emerck":true,"emerson":true,"energy":true,"engineer":true,"engineering":true,"enterprises":true,"epost":true,"epson":true,"equipment":true,"ericsson":true,"erni":true,"esq":true,"estate":true,"esurance":true,"etisalat":true,"eurovision":true,"eus":true,"events":true,"everbank":true,"exchange":true,"expert":true,"exposed":true,"express":true,"extraspace":true,"fage":true,"fail":true,"fairwinds":true,"faith":true,"family":true,"fan":true,"fans":true,"farm":true,"farmers":true,"fashion":true,"fast":true,"fedex":true,"feedback":true,"ferrari":true,"ferrero":true,"fiat":true,"fidelity":true,"fido":true,"film":true,"final":true,"finance":true,"financial":true,"fire":true,"firestone":true,"firmdale":true,"fish":true,"fishing":true,"fit":true,"fitness":true,"flickr":true,"flights":true,"flir":true,"florist":true,"flowers":true,"flsmidth":true,"fly":true,"foo":true,"foodnetwork":true,"football":true,"ford":true,"forex":true,"forsale":true,"forum":true,"foundation":true,"fox":true,"fresenius":true,"frl":true,"frogans":true,"frontdoor":true,"frontier":true,"ftr":true,"fujitsu":true,"fujixerox":true,"fund":true,"furniture":true,"futbol":true,"fyi":true,"gal":true,"gallery":true,"gallo":true,"gallup":true,"game":true,"games":true,"gap":true,"garden":true,"gbiz":true,"gdn":true,"gea":true,"gent":true,"genting":true,"george":true,"ggee":true,"gift":true,"gifts":true,"gives":true,"giving":true,"glade":true,"glass":true,"gle":true,"global":true,"globo":true,"gmail":true,"gmo":true,"gmx":true,"godaddy":true,"gold":true,"goldpoint":true,"golf":true,"goo":true,"goodhands":true,"goodyear":true,"goog":true,"google":true,"gop":true,"got":true,"gotv":true,"grainger":true,"graphics":true,"gratis":true,"green":true,"gripe":true,"group":true,"guardian":true,"gucci":true,"guge":true,"guide":true,"guitars":true,"guru":true,"hamburg":true,"hangout":true,"haus":true,"hbo":true,"hdfc":true,"hdfcbank":true,"health":true,"healthcare":true,"help":true,"helsinki":true,"here":true,"hermes":true,"hgtv":true,"hiphop":true,"hisamitsu":true,"hitachi":true,"hiv":true,"hkt":true,"hockey":true,"holdings":true,"holiday":true,"homedepot":true,"homegoods":true,"homes":true,"homesense":true,"honda":true,"honeywell":true,"horse":true,"host":true,"hosting":true,"hot":true,"hoteles":true,"hotmail":true,"house":true,"how":true,"hsbc":true,"htc":true,"hughes":true,"hyatt":true,"hyundai":true,"ibm":true
,"icbc":true,"ice":true,"icu":true,"ieee":true,"ifm":true,"iinet":true,"ikano":true,"imamat":true,"imdb":true,"immo":true,"immobilien":true,"industries":true,"infiniti":true,"ing":true,"ink":true,"institute":true,"insurance":true,"insure":true,"intel":true,"international":true,"intuit":true,"investments":true,"ipiranga":true,"irish":true,"iselect":true,"ismaili":true,"ist":true,"istanbul":true,"itau":true,"itv":true,"iveco":true,"iwc":true,"jaguar":true,"java":true,"jcb":true,"jcp":true,"jeep":true,"jetzt":true,"jewelry":true,"jio":true,"jlc":true,"jll":true,"jmp":true,"jnj":true,"joburg":true,"jot":true,"joy":true,"jpmorgan":true,"jprs":true,"juegos":true,"juniper":true,"kaufen":true,"kddi":true,"kerryhotels":true,"kerrylogistics":true,"kerryproperties":true,"kfh":true,"kia":true,"kim":true,"kinder":true,"kindle":true,"kitchen":true,"kiwi":true,"koeln":true,"komatsu":true,"kosher":true,"kpmg":true,"kpn":true,"krd":true,"kred":true,"kuokgroup":true,"kyknet":true,"kyoto":true,"lacaixa":true,"ladbrokes":true,"lamborghini":true,"lancaster":true,"lancia":true,"lancome":true,"land":true,"landrover":true,"lanxess":true,"lasalle":true,"lat":true,"latino":true,"latrobe":true,"law":true,"lawyer":true,"lds":true,"lease":true,"leclerc":true,"lefrak":true,"legal":true,"lego":true,"lexus":true,"lgbt":true,"liaison":true,"lidl":true,"life":true,"lifeinsurance":true,"lifestyle":true,"lighting":true,"like":true,"lilly":true,"limited":true,"limo":true,"lincoln":true,"linde":true,"link":true,"lipsy":true,"live":true,"living":true,"lixil":true,"loan":true,"loans":true,"locker":true,"locus":true,"loft":true,"lol":true,"london":true,"lotte":true,"lotto":true,"love":true,"lpl":true,"lplfinancial":true,"ltd":true,"ltda":true,"lundbeck":true,"lupin":true,"luxe":true,"luxury":true,"macys":true,"madrid":true,"maif":true,"maison":true,"makeup":true,"man":true,"management":true,"mango":true,"market":true,"marketing":true,"markets":true,"marriott":true,"marshalls":true,"maserati":true,"mattel":true,"mba":true,"mcd":true,"mcdonalds":true,"mckinsey":true,"med":true,"media":true,"meet":true,"melbourne":true,"meme":true,"memorial":true,"men":true,"menu":true,"meo":true,"metlife":true,"miami":true,"microsoft":true,"mini":true,"mint":true,"mit":true,"mitsubishi":true,"mlb":true,"mls":true,"mma":true,"mnet":true,"mobily":true,"moda":true,"moe":true,"moi":true,"mom":true,"monash":true,"money":true,"monster":true,"montblanc":true,"mopar":true,"mormon":true,"mortgage":true,"moscow":true,"moto":true,"motorcycles":true,"mov":true,"movie":true,"movistar":true,"msd":true,"mtn":true,"mtpc":true,"mtr":true,"multichoice":true,"mutual":true,"mutuelle":true,"mzansimagic":true,"nab":true,"nadex":true,"nagoya":true,"naspers":true,"nationwide":true,"natura":true,"navy":true,"nba":true,"nec":true,"netbank":true,"netflix":true,"network":true,"neustar":true,"new":true,"newholland":true,"news":true,"next":true,"nextdirect":true,"nexus":true,"nfl":true,"ngo":true,"nhk":true,"nico":true,"nike":true,"nikon":true,"ninja":true,"nissan":true,"nokia":true,"northwesternmutual":true,"norton":true,"now":true,"nowruz":true,"nowtv":true,"nra":true,"nrw":true,"ntt":true,"nyc":true,"obi":true,"observer":true,"off":true,"office":true,"okinawa":true,"olayan":true,"olayangroup":true,"oldnavy":true,"ollo":true,"omega":true,"one":true,"ong":true,"onl":true,"online":true,"onyourside":true,"ooo":true,"open":true,"oracle":true,"orange":true,"organic":true,"orientexpress":true,"osaka":true,"otsuka":true,"ott":true,"ovh":true,"page":true,"pamperedchef":true,"panasonic
":true,"panerai":true,"paris":true,"pars":true,"partners":true,"parts":true,"party":true,"passagens":true,"pay":true,"payu":true,"pccw":true,"pet":true,"pfizer":true,"pharmacy":true,"philips":true,"photo":true,"photography":true,"photos":true,"physio":true,"piaget":true,"pics":true,"pictet":true,"pictures":true,"pid":true,"pin":true,"ping":true,"pink":true,"pioneer":true,"pizza":true,"place":true,"play":true,"playstation":true,"plumbing":true,"plus":true,"pnc":true,"pohl":true,"poker":true,"politie":true,"porn":true,"pramerica":true,"praxi":true,"press":true,"prime":true,"prod":true,"productions":true,"prof":true,"progressive":true,"promo":true,"properties":true,"property":true,"protection":true,"pru":true,"prudential":true,"pub":true,"qpon":true,"quebec":true,"quest":true,"qvc":true,"racing":true,"raid":true,"read":true,"realestate":true,"realtor":true,"realty":true,"recipes":true,"red":true,"redstone":true,"redumbrella":true,"rehab":true,"reise":true,"reisen":true,"reit":true,"reliance":true,"ren":true,"rent":true,"rentals":true,"repair":true,"report":true,"republican":true,"rest":true,"restaurant":true,"review":true,"reviews":true,"rexroth":true,"rich":true,"richardli":true,"ricoh":true,"rightathome":true,"ril":true,"rio":true,"rip":true,"rocher":true,"rocks":true,"rodeo":true,"rogers":true,"room":true,"rsvp":true,"ruhr":true,"run":true,"rwe":true,"ryukyu":true,"saarland":true,"safe":true,"safety":true,"sakura":true,"sale":true,"salon":true,"samsclub":true,"samsung":true,"sandvik":true,"sandvikcoromant":true,"sanofi":true,"sap":true,"sapo":true,"sarl":true,"sas":true,"save":true,"saxo":true,"sbi":true,"sbs":true,"sca":true,"scb":true,"schaeffler":true,"schmidt":true,"scholarships":true,"school":true,"schule":true,"schwarz":true,"science":true,"scjohnson":true,"scor":true,"scot":true,"seat":true,"secure":true,"security":true,"seek":true,"sener":true,"services":true,"ses":true,"seven":true,"sew":true,"sex":true,"sexy":true,"sfr":true,"shangrila":true,"sharp":true,"shaw":true,"shell":true,"shia":true,"shiksha":true,"shoes":true,"shouji":true,"show":true,"showtime":true,"shriram":true,"silk":true,"sina":true,"singles":true,"site":true,"ski":true,"skin":true,"sky":true,"skype":true,"sling":true,"smart":true,"smile":true,"sncf":true,"soccer":true,"social":true,"softbank":true,"software":true,"sohu":true,"solar":true,"solutions":true,"song":true,"sony":true,"soy":true,"space":true,"spiegel":true,"spot":true,"spreadbetting":true,"srl":true,"srt":true,"stada":true,"staples":true,"star":true,"starhub":true,"statebank":true,"statefarm":true,"statoil":true,"stc":true,"stcgroup":true,"stockholm":true,"storage":true,"store":true,"studio":true,"study":true,"style":true,"sucks":true,"supersport":true,"supplies":true,"supply":true,"support":true,"surf":true,"surgery":true,"suzuki":true,"swatch":true,"swiftcover":true,"swiss":true,"sydney":true,"symantec":true,"systems":true,"tab":true,"taipei":true,"talk":true,"taobao":true,"target":true,"tatamotors":true,"tatar":true,"tattoo":true,"tax":true,"taxi":true,"tci":true,"tdk":true,"team":true,"tech":true,"technology":true,"telecity":true,"telefonica":true,"temasek":true,"tennis":true,"teva":true,"thd":true,"theater":true,"theatre":true,"theguardian":true,"tiaa":true,"tickets":true,"tienda":true,"tiffany":true,"tips":true,"tires":true,"tirol":true,"tjmaxx":true,"tjx":true,"tkmaxx":true,"tmall":true,"today":true,"tokyo":true,"tools":true,"top":true,"toray":true,"toshiba":true,"total":true,"tours":true,"town":true,"toyota":true,"toys":true,"trade":true,"trading"
:true,"training":true,"travelchannel":true,"travelers":true,"travelersinsurance":true,"trust":true,"trv":true,"tube":true,"tui":true,"tunes":true,"tushu":true,"tvs":true,"ubank":true,"ubs":true,"uconnect":true,"university":true,"uno":true,"uol":true,"ups":true,"vacations":true,"vana":true,"vanguard":true,"vegas":true,"ventures":true,"verisign":true,"versicherung":true,"vet":true,"viajes":true,"video":true,"vig":true,"viking":true,"villas":true,"vin":true,"vip":true,"virgin":true,"visa":true,"vision":true,"vista":true,"vistaprint":true,"viva":true,"vivo":true,"vlaanderen":true,"vodka":true,"volkswagen":true,"vote":true,"voting":true,"voto":true,"voyage":true,"vuelos":true,"wales":true,"walmart":true,"walter":true,"wang":true,"wanggou":true,"warman":true,"watch":true,"watches":true,"weather":true,"weatherchannel":true,"webcam":true,"weber":true,"website":true,"wed":true,"wedding":true,"weibo":true,"weir":true,"whoswho":true,"wien":true,"wiki":true,"williamhill":true,"win":true,"windows":true,"wine":true,"winners":true,"wme":true,"wolterskluwer":true,"woodside":true,"work":true,"works":true,"world":true,"wtc":true,"wtf":true,"xbox":true,"xerox":true,"xfinity":true,"xihuan":true,"xin":true,"xn--11b4c3d":true,"xn--1ck2e1b":true,"xn--1qqw23a":true,"xn--30rr7y":true,"xn--3bst00m":true,"xn--3ds443g":true,"xn--3oq18vl8pn36a":true,"xn--3pxu8k":true,"xn--42c2d9a":true,"xn--45q11c":true,"xn--4gbrim":true,"xn--4gq48lf9j":true,"xn--55qw42g":true,"xn--55qx5d":true,"xn--5su34j936bgsg":true,"xn--5tzm5g":true,"xn--6frz82g":true,"xn--6qq986b3xl":true,"xn--80adxhks":true,"xn--80asehdb":true,"xn--80aswg":true,"xn--8y0a063a":true,"xn--9dbq2a":true,"xn--9et52u":true,"xn--9krt00a":true,"xn--b4w605ferd":true,"xn--bck1b9a5dre4c":true,"xn--c1avg":true,"xn--c2br7g":true,"xn--cck2b3b":true,"xn--cg4bki":true,"xn--czr694b":true,"xn--czrs0t":true,"xn--czru2d":true,"xn--d1acj3b":true,"xn--eckvdtc9d":true,"xn--efvy88h":true,"xn--estv75g":true,"xn--fct429k":true,"xn--fhbei":true,"xn--fiq228c5hs":true,"xn--fiq64b":true,"xn--fjq720a":true,"xn--flw351e":true,"xn--fzys8d69uvgm":true,"xn--g2xx48c":true,"xn--gckr3f0f":true,"xn--hxt814e":true,"xn--i1b6b1a6a2e":true,"xn--imr513n":true,"xn--io0a7i":true,"xn--j1aef":true,"xn--jlq61u9w7b":true,"xn--jvr189m":true,"xn--kcrx77d1x4a":true,"xn--kpu716f":true,"xn--kput3i":true,"xn--mgba3a3ejt":true,"xn--mgba7c0bbn0a":true,"xn--mgbaakc7dvf":true,"xn--mgbab2bd":true,"xn--mgbb9fbpob":true,"xn--mgbca7dzdo":true,"xn--mgbt3dhd":true,"xn--mk1bu44c":true,"xn--mxtq1m":true,"xn--ngbc5azd":true,"xn--ngbe9e0a":true,"xn--nqv7f":true,"xn--nqv7fs00ema":true,"xn--nyqy26a":true,"xn--p1acf":true,"xn--pbt977c":true,"xn--pssy2u":true,"xn--q9jyb4c":true,"xn--qcka1pmc":true,"xn--rhqv96g":true,"xn--rovu88b":true,"xn--ses554g":true,"xn--t60b56a":true,"xn--tckwe":true,"xn--unup4y":true,"xn--vermgensberater-ctb":true,"xn--vermgensberatung-pwb":true,"xn--vhquv":true,"xn--vuq861b":true,"xn--w4r85el8fhu5dnra":true,"xn--w4rs40l":true,"xn--xhq521b":true,"xn--zfr164b":true,"xperia":true,"xyz":true,"yachts":true,"yahoo":true,"yamaxun":true,"yandex":true,"yodobashi":true,"yoga":true,"yokohama":true,"you":true,"youtube":true,"yun":true,"zappos":true,"zara":true,"zero":true,"zip":true,"zippo":true,"zone":true,"zuerich":true,"cloudfront.net":true,"ap-northeast-1.compute.amazonaws.com":true,"ap-southeast-1.compute.amazonaws.com":true,"ap-southeast-2.compute.amazonaws.com":true,"cn-north-1.compute.amazonaws.cn":true,"compute.amazonaws.cn":true,"compute.amazonaws.com":true,"compute-1.amazonaws.com":true,"eu-west-1.compute.amazonaw
s.com":true,"eu-central-1.compute.amazonaws.com":true,"sa-east-1.compute.amazonaws.com":true,"us-east-1.amazonaws.com":true,"us-gov-west-1.compute.amazonaws.com":true,"us-west-1.compute.amazonaws.com":true,"us-west-2.compute.amazonaws.com":true,"z-1.compute-1.amazonaws.com":true,"z-2.compute-1.amazonaws.com":true,"elasticbeanstalk.com":true,"elb.amazonaws.com":true,"s3.amazonaws.com":true,"s3-ap-northeast-1.amazonaws.com":true,"s3-ap-southeast-1.amazonaws.com":true,"s3-ap-southeast-2.amazonaws.com":true,"s3-external-1.amazonaws.com":true,"s3-external-2.amazonaws.com":true,"s3-fips-us-gov-west-1.amazonaws.com":true,"s3-eu-central-1.amazonaws.com":true,"s3-eu-west-1.amazonaws.com":true,"s3-sa-east-1.amazonaws.com":true,"s3-us-gov-west-1.amazonaws.com":true,"s3-us-west-1.amazonaws.com":true,"s3-us-west-2.amazonaws.com":true,"s3.cn-north-1.amazonaws.com.cn":true,"s3.eu-central-1.amazonaws.com":true,"betainabox.com":true,"ae.org":true,"ar.com":true,"br.com":true,"cn.com":true,"com.de":true,"com.se":true,"de.com":true,"eu.com":true,"gb.com":true,"gb.net":true,"hu.com":true,"hu.net":true,"jp.net":true,"jpn.com":true,"kr.com":true,"mex.com":true,"no.com":true,"qc.com":true,"ru.com":true,"sa.com":true,"se.com":true,"se.net":true,"uk.com":true,"uk.net":true,"us.com":true,"uy.com":true,"za.bz":true,"za.com":true,"africa.com":true,"gr.com":true,"in.net":true,"us.org":true,"co.com":true,"c.la":true,"cloudcontrolled.com":true,"cloudcontrolapp.com":true,"co.ca":true,"c.cdn77.org":true,"cdn77-ssl.net":true,"r.cdn77.net":true,"rsc.cdn77.org":true,"ssl.origin.cdn77-secure.org":true,"co.nl":true,"co.no":true,"*.platform.sh":true,"cupcake.is":true,"dreamhosters.com":true,"duckdns.org":true,"dyndns-at-home.com":true,"dyndns-at-work.com":true,"dyndns-blog.com":true,"dyndns-free.com":true,"dyndns-home.com":true,"dyndns-ip.com":true,"dyndns-mail.com":true,"dyndns-office.com":true,"dyndns-pics.com":true,"dyndns-remote.com":true,"dyndns-server.com":true,"dyndns-web.com":true,"dyndns-wiki.com":true,"dyndns-work.com":true,"dyndns.biz":true,"dyndns.info":true,"dyndns.org":true,"dyndns.tv":true,"at-band-camp.net":true,"ath.cx":true,"barrel-of-knowledge.info":true,"barrell-of-knowledge.info":true,"better-than.tv":true,"blogdns.com":true,"blogdns.net":true,"blogdns.org":true,"blogsite.org":true,"boldlygoingnowhere.org":true,"broke-it.net":true,"buyshouses.net":true,"cechire.com":true,"dnsalias.com":true,"dnsalias.net":true,"dnsalias.org":true,"dnsdojo.com":true,"dnsdojo.net":true,"dnsdojo.org":true,"does-it.net":true,"doesntexist.com":true,"doesntexist.org":true,"dontexist.com":true,"dontexist.net":true,"dontexist.org":true,"doomdns.com":true,"doomdns.org":true,"dvrdns.org":true,"dyn-o-saur.com":true,"dynalias.com":true,"dynalias.net":true,"dynalias.org":true,"dynathome.net":true,"dyndns.ws":true,"endofinternet.net":true,"endofinternet.org":true,"endoftheinternet.org":true,"est-a-la-maison.com":true,"est-a-la-masion.com":true,"est-le-patron.com":true,"est-mon-blogueur.com":true,"for-better.biz":true,"for-more.biz":true,"for-our.info":true,"for-some.biz":true,"for-the.biz":true,"forgot.her.name":true,"forgot.his.name":true,"from-ak.com":true,"from-al.com":true,"from-ar.com":true,"from-az.net":true,"from-ca.com":true,"from-co.net":true,"from-ct.com":true,"from-dc.com":true,"from-de.com":true,"from-fl.com":true,"from-ga.com":true,"from-hi.com":true,"from-ia.com":true,"from-id.com":true,"from-il.com":true,"from-in.com":true,"from-ks.com":true,"from-ky.com":true,"from-la.net":true,"from-ma.com":true,"from-md.com":true,"from-me
.org":true,"from-mi.com":true,"from-mn.com":true,"from-mo.com":true,"from-ms.com":true,"from-mt.com":true,"from-nc.com":true,"from-nd.com":true,"from-ne.com":true,"from-nh.com":true,"from-nj.com":true,"from-nm.com":true,"from-nv.com":true,"from-ny.net":true,"from-oh.com":true,"from-ok.com":true,"from-or.com":true,"from-pa.com":true,"from-pr.com":true,"from-ri.com":true,"from-sc.com":true,"from-sd.com":true,"from-tn.com":true,"from-tx.com":true,"from-ut.com":true,"from-va.com":true,"from-vt.com":true,"from-wa.com":true,"from-wi.com":true,"from-wv.com":true,"from-wy.com":true,"ftpaccess.cc":true,"fuettertdasnetz.de":true,"game-host.org":true,"game-server.cc":true,"getmyip.com":true,"gets-it.net":true,"go.dyndns.org":true,"gotdns.com":true,"gotdns.org":true,"groks-the.info":true,"groks-this.info":true,"ham-radio-op.net":true,"here-for-more.info":true,"hobby-site.com":true,"hobby-site.org":true,"home.dyndns.org":true,"homedns.org":true,"homeftp.net":true,"homeftp.org":true,"homeip.net":true,"homelinux.com":true,"homelinux.net":true,"homelinux.org":true,"homeunix.com":true,"homeunix.net":true,"homeunix.org":true,"iamallama.com":true,"in-the-band.net":true,"is-a-anarchist.com":true,"is-a-blogger.com":true,"is-a-bookkeeper.com":true,"is-a-bruinsfan.org":true,"is-a-bulls-fan.com":true,"is-a-candidate.org":true,"is-a-caterer.com":true,"is-a-celticsfan.org":true,"is-a-chef.com":true,"is-a-chef.net":true,"is-a-chef.org":true,"is-a-conservative.com":true,"is-a-cpa.com":true,"is-a-cubicle-slave.com":true,"is-a-democrat.com":true,"is-a-designer.com":true,"is-a-doctor.com":true,"is-a-financialadvisor.com":true,"is-a-geek.com":true,"is-a-geek.net":true,"is-a-geek.org":true,"is-a-green.com":true,"is-a-guru.com":true,"is-a-hard-worker.com":true,"is-a-hunter.com":true,"is-a-knight.org":true,"is-a-landscaper.com":true,"is-a-lawyer.com":true,"is-a-liberal.com":true,"is-a-libertarian.com":true,"is-a-linux-user.org":true,"is-a-llama.com":true,"is-a-musician.com":true,"is-a-nascarfan.com":true,"is-a-nurse.com":true,"is-a-painter.com":true,"is-a-patsfan.org":true,"is-a-personaltrainer.com":true,"is-a-photographer.com":true,"is-a-player.com":true,"is-a-republican.com":true,"is-a-rockstar.com":true,"is-a-socialist.com":true,"is-a-soxfan.org":true,"is-a-student.com":true,"is-a-teacher.com":true,"is-a-techie.com":true,"is-a-therapist.com":true,"is-an-accountant.com":true,"is-an-actor.com":true,"is-an-actress.com":true,"is-an-anarchist.com":true,"is-an-artist.com":true,"is-an-engineer.com":true,"is-an-entertainer.com":true,"is-by.us":true,"is-certified.com":true,"is-found.org":true,"is-gone.com":true,"is-into-anime.com":true,"is-into-cars.com":true,"is-into-cartoons.com":true,"is-into-games.com":true,"is-leet.com":true,"is-lost.org":true,"is-not-certified.com":true,"is-saved.org":true,"is-slick.com":true,"is-uberleet.com":true,"is-very-bad.org":true,"is-very-evil.org":true,"is-very-good.org":true,"is-very-nice.org":true,"is-very-sweet.org":true,"is-with-theband.com":true,"isa-geek.com":true,"isa-geek.net":true,"isa-geek.org":true,"isa-hockeynut.com":true,"issmarterthanyou.com":true,"isteingeek.de":true,"istmein.de":true,"kicks-ass.net":true,"kicks-ass.org":true,"knowsitall.info":true,"land-4-sale.us":true,"lebtimnetz.de":true,"leitungsen.de":true,"likes-pie.com":true,"likescandy.com":true,"merseine.nu":true,"mine.nu":true,"misconfused.org":true,"mypets.ws":true,"myphotos.cc":true,"neat-url.com":true,"office-on-the.net":true,"on-the-web.tv":true,"podzone.net":true,"podzone.org":true,"readmyblog.org":true,"saves-the-whale
s.com":true,"scrapper-site.net":true,"scrapping.cc":true,"selfip.biz":true,"selfip.com":true,"selfip.info":true,"selfip.net":true,"selfip.org":true,"sells-for-less.com":true,"sells-for-u.com":true,"sells-it.net":true,"sellsyourhome.org":true,"servebbs.com":true,"servebbs.net":true,"servebbs.org":true,"serveftp.net":true,"serveftp.org":true,"servegame.org":true,"shacknet.nu":true,"simple-url.com":true,"space-to-rent.com":true,"stuff-4-sale.org":true,"stuff-4-sale.us":true,"teaches-yoga.com":true,"thruhere.net":true,"traeumtgerade.de":true,"webhop.biz":true,"webhop.info":true,"webhop.net":true,"webhop.org":true,"worse-than.tv":true,"writesthisblog.com":true,"eu.org":true,"al.eu.org":true,"asso.eu.org":true,"at.eu.org":true,"au.eu.org":true,"be.eu.org":true,"bg.eu.org":true,"ca.eu.org":true,"cd.eu.org":true,"ch.eu.org":true,"cn.eu.org":true,"cy.eu.org":true,"cz.eu.org":true,"de.eu.org":true,"dk.eu.org":true,"edu.eu.org":true,"ee.eu.org":true,"es.eu.org":true,"fi.eu.org":true,"fr.eu.org":true,"gr.eu.org":true,"hr.eu.org":true,"hu.eu.org":true,"ie.eu.org":true,"il.eu.org":true,"in.eu.org":true,"int.eu.org":true,"is.eu.org":true,"it.eu.org":true,"jp.eu.org":true,"kr.eu.org":true,"lt.eu.org":true,"lu.eu.org":true,"lv.eu.org":true,"mc.eu.org":true,"me.eu.org":true,"mk.eu.org":true,"mt.eu.org":true,"my.eu.org":true,"net.eu.org":true,"ng.eu.org":true,"nl.eu.org":true,"no.eu.org":true,"nz.eu.org":true,"paris.eu.org":true,"pl.eu.org":true,"pt.eu.org":true,"q-a.eu.org":true,"ro.eu.org":true,"ru.eu.org":true,"se.eu.org":true,"si.eu.org":true,"sk.eu.org":true,"tr.eu.org":true,"uk.eu.org":true,"us.eu.org":true,"a.ssl.fastly.net":true,"b.ssl.fastly.net":true,"global.ssl.fastly.net":true,"a.prod.fastly.net":true,"global.prod.fastly.net":true,"firebaseapp.com":true,"flynnhub.com":true,"service.gov.uk":true,"github.io":true,"githubusercontent.com":true,"ro.com":true,"appspot.com":true,"blogspot.ae":true,"blogspot.al":true,"blogspot.am":true,"blogspot.ba":true,"blogspot.be":true,"blogspot.bg":true,"blogspot.bj":true,"blogspot.ca":true,"blogspot.cf":true,"blogspot.ch":true,"blogspot.cl":true,"blogspot.co.at":true,"blogspot.co.id":true,"blogspot.co.il":true,"blogspot.co.ke":true,"blogspot.co.nz":true,"blogspot.co.uk":true,"blogspot.co.za":true,"blogspot.com":true,"blogspot.com.ar":true,"blogspot.com.au":true,"blogspot.com.br":true,"blogspot.com.by":true,"blogspot.com.co":true,"blogspot.com.cy":true,"blogspot.com.ee":true,"blogspot.com.eg":true,"blogspot.com.es":true,"blogspot.com.mt":true,"blogspot.com.ng":true,"blogspot.com.tr":true,"blogspot.com.uy":true,"blogspot.cv":true,"blogspot.cz":true,"blogspot.de":true,"blogspot.dk":true,"blogspot.fi":true,"blogspot.fr":true,"blogspot.gr":true,"blogspot.hk":true,"blogspot.hr":true,"blogspot.hu":true,"blogspot.ie":true,"blogspot.in":true,"blogspot.is":true,"blogspot.it":true,"blogspot.jp":true,"blogspot.kr":true,"blogspot.li":true,"blogspot.lt":true,"blogspot.lu":true,"blogspot.md":true,"blogspot.mk":true,"blogspot.mr":true,"blogspot.mx":true,"blogspot.my":true,"blogspot.nl":true,"blogspot.no":true,"blogspot.pe":true,"blogspot.pt":true,"blogspot.qa":true,"blogspot.re":true,"blogspot.ro":true,"blogspot.rs":true,"blogspot.ru":true,"blogspot.se":true,"blogspot.sg":true,"blogspot.si":true,"blogspot.sk":true,"blogspot.sn":true,"blogspot.td":true,"blogspot.tw":true,"blogspot.ug":true,"blogspot.vn":true,"codespot.com":true,"googleapis.com":true,"googlecode.com":true,"pagespeedmobilizer.com":true,"withgoogle.com":true,"withyoutube.com":true,"herokuapp.com":true,"herokussl.com":t
rue,"iki.fi":true,"biz.at":true,"info.at":true,"co.pl":true,"azurewebsites.net":true,"azure-mobile.net":true,"cloudapp.net":true,"bmoattachments.org":true,"4u.com":true,"nfshost.com":true,"nyc.mn":true,"nid.io":true,"operaunite.com":true,"outsystemscloud.com":true,"art.pl":true,"gliwice.pl":true,"krakow.pl":true,"poznan.pl":true,"wroc.pl":true,"zakopane.pl":true,"pantheon.io":true,"gotpantheon.com":true,"priv.at":true,"qa2.com":true,"rhcloud.com":true,"sandcats.io":true,"biz.ua":true,"co.ua":true,"pp.ua":true,"sinaapp.com":true,"vipsinaapp.com":true,"1kapp.com":true,"gda.pl":true,"gdansk.pl":true,"gdynia.pl":true,"med.pl":true,"sopot.pl":true,"hk.com":true,"hk.org":true,"ltd.hk":true,"inc.hk":true,"yolasite.com":true,"za.net":true,"za.org":true}); + +// END of automatically generated file diff --git a/node_modules/fsevents/node_modules/tough-cookie/lib/store.js b/node_modules/fsevents/node_modules/tough-cookie/lib/store.js new file mode 100644 index 0000000..bce5292 --- /dev/null +++ b/node_modules/fsevents/node_modules/tough-cookie/lib/store.js @@ -0,0 +1,71 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +'use strict'; +/*jshint unused:false */ + +function Store() { +} +exports.Store = Store; + +// Stores may be synchronous, but are still required to use a +// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync" +// API that converts from synchronous-callbacks to imperative style. 
+Store.prototype.synchronous = false; + +Store.prototype.findCookie = function(domain, path, key, cb) { + throw new Error('findCookie is not implemented'); +}; + +Store.prototype.findCookies = function(domain, path, cb) { + throw new Error('findCookies is not implemented'); +}; + +Store.prototype.putCookie = function(cookie, cb) { + throw new Error('putCookie is not implemented'); +}; + +Store.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error('updateCookie is not implemented'); +}; + +Store.prototype.removeCookie = function(domain, path, key, cb) { + throw new Error('removeCookie is not implemented'); +}; + +Store.prototype.removeCookies = function(domain, path, cb) { + throw new Error('removeCookies is not implemented'); +}; + +Store.prototype.getAllCookies = function(cb) { + throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)'); +}; diff --git a/node_modules/fsevents/node_modules/tough-cookie/package.json b/node_modules/fsevents/node_modules/tough-cookie/package.json new file mode 100644 index 0000000..4a9dfb8 --- /dev/null +++ b/node_modules/fsevents/node_modules/tough-cookie/package.json @@ -0,0 +1,120 @@ +{ + "_args": [ + [ + "tough-cookie@~2.2.0", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "tough-cookie@>=2.2.0 <2.3.0", + "_id": "tough-cookie@2.2.2", + "_inCache": true, + "_installable": true, + "_location": "/tough-cookie", + "_nodeVersion": "5.1.1", + "_npmOperationalInternal": { + "host": "packages-13-west.internal.npmjs.com", + "tmp": "tmp/tough-cookie-2.2.2.tgz_1457564639182_0.5129188685677946" + }, + "_npmUser": { + "email": "jstash@gmail.com", + "name": "jstash" + }, + "_npmVersion": "3.3.12", + "_phantomChildren": {}, + "_requested": { + "name": "tough-cookie", + "raw": "tough-cookie@~2.2.0", + "rawSpec": "~2.2.0", + "scope": null, + "spec": ">=2.2.0 <2.3.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.2.tgz", + "_shasum": "c83a1830f4e5ef0b93ef2a3488e724f8de016ac7", + "_shrinkwrap": null, + "_spec": "tough-cookie@~2.2.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "jstashewsky@salesforce.com", + "name": "Jeremy Stashewsky" + }, + "bugs": { + "url": "https://github.com/SalesforceEng/tough-cookie/issues" + }, + "contributors": [ + { + "name": "Alexander Savin" + }, + { + "name": "Ian Livingstone" + }, + { + "name": "Ivan Nikulin" + }, + { + "name": "Lalit Kapoor" + }, + { + "name": "Sam Thompson" + }, + { + "name": "Sebastian Mayr" + } + ], + "dependencies": {}, + "description": "RFC6265 Cookies and Cookie Jar for node.js", + "devDependencies": { + "async": "^1.4.2", + "vows": "^0.8.1" + }, + "directories": {}, + "dist": { + "shasum": "c83a1830f4e5ef0b93ef2a3488e724f8de016ac7", + "tarball": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.2.tgz" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "lib" + ], + "gitHead": "cc46628c4d7d2e8c372ecba29293ca8a207ec192", + "homepage": "https://github.com/SalesforceEng/tough-cookie", + "keywords": [ + "HTTP", + "RFC2965", + "RFC6265", + "cookie", + "cookiejar", + "cookies", + "jar", + "set-cookie" + ], + "license": "BSD-3-Clause", + "main": "./lib/cookie", + "maintainers": [ + { + "name": "jstash", + "email": "jeremy@goinstant.com" + }, + { + "name": "goinstant", + "email": "services@goinstant.com" + } 
+ ], + "name": "tough-cookie", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/SalesforceEng/tough-cookie.git" + }, + "scripts": { + "suffixup": "curl -o public_suffix_list.dat https://publicsuffix.org/list/public_suffix_list.dat && ./generate-pubsuffix.js", + "test": "vows test/*_test.js" + }, + "version": "2.2.2" +} diff --git a/node_modules/fsevents/node_modules/tunnel-agent/LICENSE b/node_modules/fsevents/node_modules/tunnel-agent/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/fsevents/node_modules/tunnel-agent/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tunnel-agent/README.md b/node_modules/fsevents/node_modules/tunnel-agent/README.md new file mode 100644 index 0000000..bb533d5 --- /dev/null +++ b/node_modules/fsevents/node_modules/tunnel-agent/README.md @@ -0,0 +1,4 @@ +tunnel-agent +============ + +HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module. diff --git a/node_modules/fsevents/node_modules/tunnel-agent/index.js b/node_modules/fsevents/node_modules/tunnel-agent/index.js new file mode 100644 index 0000000..68013ac --- /dev/null +++ b/node_modules/fsevents/node_modules/tunnel-agent/index.js @@ -0,0 +1,243 @@ +'use strict' + +var net = require('net') + , tls = require('tls') + , http = require('http') + , https = require('https') + , events = require('events') + , assert = require('assert') + , util = require('util') + ; + +exports.httpOverHttp = httpOverHttp +exports.httpsOverHttp = httpsOverHttp +exports.httpOverHttps = httpOverHttps +exports.httpsOverHttps = httpsOverHttps + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + return agent +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + return agent +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + + +function TunnelingAgent(options) { + var self = this + self.options = options || {} + self.proxyOptions = self.options.proxy || {} + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets + self.requests = [] + self.sockets = [] + + self.on('free', function onFree(socket, host, port) { + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i] + if (pending.host === host && pending.port === port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1) + pending.request.onSocket(socket) + return + } + } + socket.destroy() + self.removeSocket(socket) + }) +} +util.inherits(TunnelingAgent, events.EventEmitter) + +TunnelingAgent.prototype.addRequest = function addRequest(req, options) { + var self = this + + // Legacy API: addRequest(req, host, port, path) + if (typeof options === 'string') { + options = { + host: options, + port: arguments[2], + path: arguments[3] + }; + } + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push({host: options.host, port: options.port, request: req}) + return + } + + // If we are under maxSockets create a new one. 
+ self.createConnection({host: options.host, port: options.port, request: req}) +} + +TunnelingAgent.prototype.createConnection = function createConnection(pending) { + var self = this + + self.createSocket(pending, function(socket) { + socket.on('free', onFree) + socket.on('close', onCloseOrRemove) + socket.on('agentRemove', onCloseOrRemove) + pending.request.onSocket(socket) + + function onFree() { + self.emit('free', socket, pending.host, pending.port) + } + + function onCloseOrRemove(err) { + self.removeSocket(socket) + socket.removeListener('free', onFree) + socket.removeListener('close', onCloseOrRemove) + socket.removeListener('agentRemove', onCloseOrRemove) + } + }) +} + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this + var placeholder = {} + self.sockets.push(placeholder) + + var connectOptions = mergeOptions({}, self.proxyOptions, + { method: 'CONNECT' + , path: options.host + ':' + options.port + , agent: false + } + ) + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {} + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64') + } + + debug('making CONNECT request') + var connectReq = self.request(connectOptions) + connectReq.useChunkedEncodingByDefault = false // for v0.6 + connectReq.once('response', onResponse) // for v0.6 + connectReq.once('upgrade', onUpgrade) // for v0.6 + connectReq.once('connect', onConnect) // for v0.7 or later + connectReq.once('error', onError) + connectReq.end() + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head) + }) + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners() + socket.removeAllListeners() + + if (res.statusCode === 200) { + assert.equal(head.length, 0) + debug('tunneling connection has established') + self.sockets[self.sockets.indexOf(placeholder)] = socket + cb(socket) + } else { + debug('tunneling socket could not be established, statusCode=%d', res.statusCode) + var error = new Error('tunneling socket could not be established, ' + 'statusCode=' + res.statusCode) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } + } + + function onError(cause) { + connectReq.removeAllListeners() + + debug('tunneling socket could not be established, cause=%s\n', cause.message, cause.stack) + var error = new Error('tunneling socket could not be established, ' + 'cause=' + cause.message) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } +} + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) return + + this.sockets.splice(pos, 1) + + var pending = this.requests.shift() + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createConnection(pending) + } +} + +function createSecureSocket(options, cb) { + var self = this + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, mergeOptions({}, self.options, + { servername: options.host + , socket: socket + } + )) + self.sockets[self.sockets.indexOf(socket)] = secureSocket + cb(secureSocket) + }) +} + + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i] + if (typeof overrides === 'object') { + var keys = Object.keys(overrides) + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j] + if (overrides[k] !== undefined) { + target[k] = overrides[k] + } + } + } + } + return target +} + + +var debug +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments) + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0] + } else { + args.unshift('TUNNEL:') + } + console.error.apply(console, args) + } +} else { + debug = function() {} +} +exports.debug = debug // for test diff --git a/node_modules/fsevents/node_modules/tunnel-agent/package.json b/node_modules/fsevents/node_modules/tunnel-agent/package.json new file mode 100644 index 0000000..a7ec6ab --- /dev/null +++ b/node_modules/fsevents/node_modules/tunnel-agent/package.json @@ -0,0 +1,92 @@ +{ + "_args": [ + [ + "tunnel-agent@~0.4.1", + "/Users/eshanker/Code/fsevents/node_modules/request" + ] + ], + "_from": "tunnel-agent@>=0.4.1 <0.5.0", + "_id": "tunnel-agent@0.4.3", + "_inCache": true, + "_installable": true, + "_location": "/tunnel-agent", + "_nodeVersion": "5.9.0", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/tunnel-agent-0.4.3.tgz_1462396470295_0.23639482469297945" + }, + "_npmUser": { + "email": "simeonvelichkov@gmail.com", + "name": "simov" + }, + "_npmVersion": "2.15.3", + "_phantomChildren": {}, + "_requested": { + "name": "tunnel-agent", + "raw": "tunnel-agent@~0.4.1", + "rawSpec": "~0.4.1", + "scope": null, + "spec": ">=0.4.1 <0.5.0", + "type": "range" + }, + "_requiredBy": [ + "/request" + ], + "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz", + "_shasum": "6373db76909fe570e08d73583365ed828a74eeeb", + "_shrinkwrap": null, + "_spec": "tunnel-agent@~0.4.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/request", + "author": { + "email": "mikeal.rogers@gmail.com", + "name": "Mikeal Rogers", + "url": "http://www.futurealoof.com" + }, + "bugs": { + "url": "https://github.com/mikeal/tunnel-agent/issues" + }, + "dependencies": {}, + "description": "HTTP proxy tunneling agent. 
Formerly part of mikeal/request, now a standalone module.", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "6373db76909fe570e08d73583365ed828a74eeeb", + "tarball": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz" + }, + "engines": { + "node": "*" + }, + "files": [ + "index.js" + ], + "gitHead": "e72d830f5ed388a2a71d37ce062c38e3fb34bdde", + "homepage": "https://github.com/mikeal/tunnel-agent#readme", + "license": "Apache-2.0", + "main": "index.js", + "maintainers": [ + { + "name": "mikeal", + "email": "mikeal.rogers@gmail.com" + }, + { + "name": "nylen", + "email": "jnylen@gmail.com" + }, + { + "name": "fredkschott", + "email": "fkschott@gmail.com" + }, + { + "name": "simov", + "email": "simeonvelichkov@gmail.com" + } + ], + "name": "tunnel-agent", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "url": "git+https://github.com/mikeal/tunnel-agent.git" + }, + "scripts": {}, + "version": "0.4.3" +} diff --git a/node_modules/fsevents/node_modules/tweetnacl/.npmignore b/node_modules/fsevents/node_modules/tweetnacl/.npmignore new file mode 100644 index 0000000..7d98dcb --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/.npmignore @@ -0,0 +1,4 @@ +.eslintrc +.travis.yml +bower.json +test diff --git a/node_modules/fsevents/node_modules/tweetnacl/CHANGELOG.md b/node_modules/fsevents/node_modules/tweetnacl/CHANGELOG.md new file mode 100644 index 0000000..77c69bd --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/CHANGELOG.md @@ -0,0 +1,128 @@ +TweetNaCl.js Changelog +====================== + + +v0.13.2 +------- + +* Fixed undefined variable bug in fast version of Poly1305. No worries, this + bug was *never* triggered. + +* Specified CC0 public domain dedication. + +* Updated development dependencies. + + +v0.13.1 +------- + +* Exclude `crypto` and `buffer` modules from browserify builds. + + +v0.13.0 +------- + +* Made `nacl-fast` the default version in NPM package. Now + `require("tweetnacl")` will use fast version; to get the original version, + use `require("tweetnacl/nacl.js")`. + +* Cleanup temporary array after generating random bytes. + + +v0.12.2 +------- + +* Improved performance of curve operations, making `nacl.scalarMult`, `nacl.box`, + `nacl.sign` and related functions up to 3x faster in `nacl-fast` version. + + +v0.12.1 +------- + +* Significantly improved performance of Salsa20 (~1.5x faster) and + Poly1305 (~3.5x faster) in `nacl-fast` version. + + +v0.12.0 +------- + +* Instead of using the given secret key directly, TweetNaCl.js now copies it to + a new array in `nacl.box.keyPair.fromSecretKey` and + `nacl.sign.keyPair.fromSecretKey`. + + +v0.11.2 +------- + +* Added new constant: `nacl.sign.seedLength`. + + +v0.11.1 +------- + +* Even faster hash for both short and long inputs (in `nacl-fast`). + + +v0.11.0 +------- + +* Implement `nacl.sign.keyPair.fromSeed` to enable creation of sign key pairs + deterministically from a 32-byte seed. (It behaves like + [libsodium's](http://doc.libsodium.org/public-key_cryptography/public-key_signatures.html) + `crypto_sign_seed_keypair`: the seed becomes a secret part of the secret key.) + +* Fast version now has an improved hash implementation that is 2x-5x faster. + +* Fixed benchmarks, which may have produced incorrect measurements. + + +v0.10.1 +------- + +* Exported undocumented `nacl.lowlevel.crypto_core_hsalsa20`. 
+ + +v0.10.0 +------- + +* **Signature API breaking change!** `nacl.sign` and `nacl.sign.open` now deal + with signed messages, and new `nacl.sign.detached` and + `nacl.sign.detached.verify` are available. + + Previously, `nacl.sign` returned a signature, and `nacl.sign.open` accepted a + message and "detached" signature. This was unlike NaCl's API, which dealt with + signed messages (concatenation of signature and message). + + The new API is: + + nacl.sign(message, secretKey) -> signedMessage + nacl.sign.open(signedMessage, publicKey) -> message | null + + Since detached signatures are common, two new API functions were introduced: + + nacl.sign.detached(message, secretKey) -> signature + nacl.sign.detached.verify(message, signature, publicKey) -> true | false + + (Note that it's `verify`, not `open`, and it returns a boolean value, unlike + `open`, which returns an "unsigned" message.) + +* NPM package now comes without `test` directory to keep it small. + + +v0.9.2 +------ + +* Improved documentation. +* Fast version: increased theoretical message size limit from 2^32-1 to 2^52 + bytes in Poly1305 (and thus, secretbox and box). However this has no impact + in practice since JavaScript arrays or ArrayBuffers are limited to 32-bit + indexes, and most implementations won't allocate more than a gigabyte or so. + (Obviously, there are no tests for the correctness of implementation.) Also, + it's not recommended to use messages that large without splitting them into + smaller packets anyway. + + +v0.9.1 +------ + +* Initial release diff --git a/node_modules/fsevents/node_modules/tweetnacl/README.md b/node_modules/fsevents/node_modules/tweetnacl/README.md new file mode 100644 index 0000000..11bd347 --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/README.md @@ -0,0 +1,463 @@ +TweetNaCl.js +============ + +Port of [TweetNaCl](http://tweetnacl.cr.yp.to) / [NaCl](http://nacl.cr.yp.to/) +to JavaScript for modern browsers and Node.js. Public domain. + +[![Build Status](https://travis-ci.org/dchest/tweetnacl-js.svg?branch=master) +](https://travis-ci.org/dchest/tweetnacl-js) + +[Demo](https://dchest.github.io/tweetnacl-js/) + +**:warning: Beta version. The library is stable and API is frozen, however +it has not been independently reviewed. If you can help reviewing it, please +[contact me](mailto:dmitry@codingrobots.com).** + +Documentation +============= + +* [Overview](#overview) +* [Installation](#installation) +* [Usage](#usage) + * [Public-key authenticated encryption (box)](#public-key-authenticated-encryption-box) + * [Secret-key authenticated encryption (secretbox)](#secret-key-authenticated-encryption-secretbox) + * [Scalar multiplication](#scalar-multiplication) + * [Signatures](#signatures) + * [Hashing](#hashing) + * [Random bytes generation](#random-bytes-generation) + * [Constant-time comparison](#constant-time-comparison) + * [Utilities](#utilities) +* [Examples](#examples) +* [System requirements](#system-requirements) +* [Development and testing](#development-and-testing) +* [Contributors](#contributors) +* [Who uses it](#who-uses-it) + + +Overview +-------- + +The primary goal of this project is to produce a translation of TweetNaCl to +JavaScript which is as close as possible to the original C implementation, plus +a thin layer of idiomatic high-level API on top of it. + +There are two versions, you can use either of them: + +* `nacl.js` is the port of TweetNaCl with minimum differences from the + original + high-level API. 
+ +* `nacl-fast.js` is like `nacl.js`, but with some functions replaced with + faster versions. + + +Installation +------------ + +You can install TweetNaCl.js via a package manager: + +[Bower](http://bower.io): + + $ bower install tweetnacl + +[NPM](https://www.npmjs.org/): + + $ npm install tweetnacl + +or [download source code](https://github.com/dchest/tweetnacl-js/releases). + + +Usage +------ + +All API functions accept and return bytes as `Uint8Array`s. If you need to +encode or decode strings, use functions from `nacl.util` namespace. + +### Public-key authenticated encryption (box) + +Implements *curve25519-xsalsa20-poly1305*. + +#### nacl.box.keyPair() + +Generates a new random key pair for box and returns it as an object with +`publicKey` and `secretKey` members: + + { + publicKey: ..., // Uint8Array with 32-byte public key + secretKey: ... // Uint8Array with 32-byte secret key + } + + +#### nacl.box.keyPair.fromSecretKey(secretKey) + +Returns a key pair for box with public key corresponding to the given secret +key. + +#### nacl.box(message, nonce, theirPublicKey, mySecretKey) + +Encrypt and authenticates message using peer's public key, our secret key, and +the given nonce, which must be unique for each distinct message for a key pair. + +Returns an encrypted and authenticated message, which is +`nacl.box.overheadLength` longer than the original message. + +#### nacl.box.open(box, nonce, theirPublicKey, mySecretKey) + +Authenticates and decrypts the given box with peer's public key, our secret +key, and the given nonce. + +Returns the original message, or `false` if authentication fails. + +#### nacl.box.before(theirPublicKey, mySecretKey) + +Returns a precomputed shared key which can be used in `nacl.box.after` and +`nacl.box.open.after`. + +#### nacl.box.after(message, nonce, sharedKey) + +Same as `nacl.box`, but uses a shared key precomputed with `nacl.box.before`. + +#### nacl.box.open.after(box, nonce, sharedKey) + +Same as `nacl.box.open`, but uses a shared key precomputed with `nacl.box.before`. + +#### nacl.box.publicKeyLength = 32 + +Length of public key in bytes. + +#### nacl.box.secretKeyLength = 32 + +Length of secret key in bytes. + +#### nacl.box.sharedKeyLength = 32 + +Length of precomputed shared key in bytes. + +#### nacl.box.nonceLength = 24 + +Length of nonce in bytes. + +#### nacl.box.overheadLength = 16 + +Length of overhead added to box compared to original message. + + +### Secret-key authenticated encryption (secretbox) + +Implements *xsalsa20-poly1305*. + +#### nacl.secretbox(message, nonce, key) + +Encrypt and authenticates message using the key and the nonce. The nonce must +be unique for each distinct message for this key. + +Returns an encrypted and authenticated message, which is +`nacl.secretbox.overheadLength` longer than the original message. + +#### nacl.secretbox.open(box, nonce, key) + +Authenticates and decrypts the given secret box using the key and the nonce. + +Returns the original message, or `false` if authentication fails. + +#### nacl.secretbox.keyLength = 32 + +Length of key in bytes. + +#### nacl.secretbox.nonceLength = 24 + +Length of nonce in bytes. + +#### nacl.secretbox.overheadLength = 16 + +Length of overhead added to secret box compared to original message. + + +### Scalar multiplication + +Implements *curve25519*. + +#### nacl.scalarMult(n, p) + +Multiplies an integer `n` by a group element `p` and returns the resulting +group element. 
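For orientation, a minimal sketch of the call above (illustrative only; assumes Node.js with `require('tweetnacl')`, and key pairs generated with `nacl.box.keyPair` as documented earlier):

    var nacl = require('tweetnacl');

    var alice = nacl.box.keyPair();   // Curve25519 key pair (32-byte keys)
    var bob   = nacl.box.keyPair();

    // Each side multiplies its own secret scalar by the peer's public group
    // element; both arrive at the same 32-byte shared group element.
    var sharedA = nacl.scalarMult(alice.secretKey, bob.publicKey);
    var sharedB = nacl.scalarMult(bob.secretKey, alice.publicKey);

    console.log(nacl.verify(sharedA, sharedB)); // true (constant-time comparison)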
+ +#### nacl.scalarMult.base(n) + +Multiplies an integer `n` by a standard group element and returns the resulting +group element. + +#### nacl.scalarMult.scalarLength = 32 + +Length of scalar in bytes. + +#### nacl.scalarMult.groupElementLength = 32 + +Length of group element in bytes. + + +### Signatures + +Implements [ed25519](http://ed25519.cr.yp.to). + +#### nacl.sign.keyPair() + +Generates new random key pair for signing and returns it as an object with +`publicKey` and `secretKey` members: + + { + publicKey: ..., // Uint8Array with 32-byte public key + secretKey: ... // Uint8Array with 64-byte secret key + } + +#### nacl.sign.keyPair.fromSecretKey(secretKey) + +Returns a signing key pair with public key corresponding to the given +64-byte secret key. The secret key must have been generated by +`nacl.sign.keyPair` or `nacl.sign.keyPair.fromSeed`. + +#### nacl.sign.keyPair.fromSeed(seed) + +Returns a new signing key pair generated deterministically from a 32-byte seed. +The seed must contain enough entropy to be secure. This method is not +recommended for general use: instead, use `nacl.sign.keyPair` to generate a new +key pair from a random seed. + +#### nacl.sign(message, secretKey) + +Signs the message using the secret key and returns a signed message. + +#### nacl.sign.open(signedMessage, publicKey) + +Verifies the signed message and returns the message without signature. + +Returns `null` if verification failed. + +#### nacl.sign.detached(message, secretKey) + +Signs the message using the secret key and returns a signature. + +#### nacl.sign.detached.verify(message, signature, publicKey) + +Verifies the signature for the message and returns `true` if verification +succeeded or `false` if it failed. + +#### nacl.sign.publicKeyLength = 32 + +Length of signing public key in bytes. + +#### nacl.sign.secretKeyLength = 64 + +Length of signing secret key in bytes. + +#### nacl.sign.seedLength = 32 + +Length of seed for `nacl.sign.keyPair.fromSeed` in bytes. + +#### nacl.sign.signatureLength = 64 + +Length of signature in bytes. + + +### Hashing + +Implements *SHA-512*. + +#### nacl.hash(message) + +Returns SHA-512 hash of the message. + +#### nacl.hash.hashLength = 64 + +Length of hash in bytes. + + +### Random bytes generation + +#### nacl.randomBytes(length) + +Returns a `Uint8Array` of the given length containing random bytes of +cryptographic quality. + +**Implementation note** + +TweetNaCl.js uses the following methods to generate random bytes, +depending on the platform it runs on: + +* `window.crypto.getRandomValues` (WebCrypto standard) +* `window.msCrypto.getRandomValues` (Internet Explorer 11) +* `crypto.randomBytes` (Node.js) + +Note that browsers are required to throw `QuotaExceededError` exception if +requested `length` is more than 65536, so do not ask for more than 65536 bytes +in *one call* (multiple calls to get as many bytes as you like are okay: +browsers can generate infinite amount of random bytes without any bad +consequences). + +If the platform doesn't provide a suitable PRNG, the following functions, +which require random numbers, will throw exception: + +* `nacl.randomBytes` +* `nacl.box.keyPair` +* `nacl.sign.keyPair` + +Other functions are deterministic and will continue working. 
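As a concrete reference for the signing and hashing calls documented above, a minimal sketch (illustrative only; assumes Node.js with `require('tweetnacl')` and the bundled `nacl.util` helpers):

    var nacl = require('tweetnacl');

    var keys    = nacl.sign.keyPair();             // 32-byte public / 64-byte secret key
    var message = nacl.util.decodeUTF8('hello');   // string -> Uint8Array

    // Detached signature: 64 bytes, verified separately from the message.
    var sig = nacl.sign.detached(message, keys.secretKey);
    console.log(nacl.sign.detached.verify(message, sig, keys.publicKey)); // true

    // SHA-512 digest of the same message (64 bytes).
    var digest = nacl.hash(message);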
+ +If a platform you are targeting doesn't implement secure random number +generator, but you somehow have a cryptographically-strong source of entropy +(not `Math.random`!), and you know what you are doing, you can plug it into +TweetNaCl.js like this: + + nacl.setPRNG(function(x, n) { + // ... copy n random bytes into x ... + }); + +Note that `nacl.setPRNG` *completely replaces* internal random byte generator +with the one provided. + + +### Constant-time comparison + +#### nacl.verify(x, y) + +Compares `x` and `y` in constant time and returns `true` if their lengths are +non-zero and equal, and their contents are equal. + +Returns `false` if either of the arguments has zero length, or arguments have +different lengths, or their contents differ. + + +### Utilities + +Encoding/decoding functions are provided for convenience. They are correct, +however their performance and wide compatibility with uncommon runtimes is not +something that is considered important compared to the simplicity and size of +implementation. You can use third-party libraries if you need to. + +#### nacl.util.decodeUTF8(string) + +Decodes string and returns `Uint8Array` of bytes. + +#### nacl.util.encodeUTF8(array) + +Encodes `Uint8Array` or `Array` of bytes into string. + +#### nacl.util.decodeBase64(string) + +Decodes Base-64 encoded string and returns `Uint8Array` of bytes. + +#### nacl.util.encodeBase64(array) + +Encodes `Uint8Array` or `Array` of bytes into string using Base-64 encoding. + + +System requirements +------------------- + +TweetNaCl.js supports modern browsers that have a cryptographically secure +pseudorandom number generator and typed arrays, including the latest versions +of: + +* Chrome +* Firefox +* Safari (Mac, iOS) +* Internet Explorer 11 + +Other systems: + +* Node.js (we test on 0.10 and later) + + +Development and testing +------------------------ + +Install NPM modules needed for development: + + $ npm install + +To build minified versions: + + $ npm run build + +Tests use minified version, so make sure to rebuild it every time you change +`nacl.js` or `nacl-fast.js`. + +### Testing + +To run tests in Node.js: + + $ npm test + +By default all tests described here work on `nacl.min.js`. To test other +versions, set environment variable `NACL_SRC` to the file name you want to test. +For example, the following command will test fast minified version: + + $ NACL_SRC=nacl-fast.min.js npm test + +To run full suite of tests in Node.js, including comparing outputs of +JavaScript port to outputs of the original C version: + + $ npm run testall + +To prepare tests for browsers: + + $ npm run browser + +and then open `test/browser/test.html` (or `test/browser/test-fast.html`) to +run them. + +To run headless browser tests with `testling`: + + $ npm run testling + +(If you get `Error: spawn ENOENT`, install *xvfb*: `sudo apt-get install xvfb`.) + +### Benchmarking + +To run benchmarks in Node.js: + + $ npm run bench + $ NACL_SRC=nacl-fast.min.js npm run bench + +To run benchmarks in a browser, open `test/benchmark/bench.html` (or +`test/benchmark/bench-fast.html`). 
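Tying the pieces documented above together, a minimal secret-key round trip (illustrative sketch; assumes Node.js with `require('tweetnacl')` and the bundled `nacl.util` helpers):

    var nacl = require('tweetnacl');

    var key     = nacl.randomBytes(nacl.secretbox.keyLength);    // 32 bytes
    var nonce   = nacl.randomBytes(nacl.secretbox.nonceLength);  // 24 bytes, unique per message
    var message = nacl.util.decodeUTF8('attack at dawn');

    var box   = nacl.secretbox(message, nonce, key);   // message plus 16-byte overhead
    var plain = nacl.secretbox.open(box, nonce, key);  // Uint8Array, or false if authentication fails

    console.log(nacl.util.encodeUTF8(plain));          // 'attack at dawn'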
+ + +Contributors +------------ + +JavaScript port: + + * [Dmitry Chestnykh](http://github.com/dchest) (ported xsalsa20, poly1305, curve25519) + * [Devi Mandiri](https://github.com/devi) (ported curve25519, ed25519, sha512) + +Original authors of [NaCl](http://nacl.cr.yp.to), [TweetNaCl](http://tweetnacl.cr.yp.to) +and [Poly1305-donna](https://github.com/floodyberry/poly1305-donna) +(who are *not* responsible for any errors in this implementation): + + * [Daniel J. Bernstein](http://cr.yp.to/djb.html) + * Wesley Janssen + * [Tanja Lange](http://hyperelliptic.org/tanja) + * [Peter Schwabe](http://www.cryptojedi.org/users/peter/) + * [Matthew Dempsky](https://github.com/mdempsky) + * [Andrew Moon](https://github.com/floodyberry) + +Contributors have dedicated their work to the public domain. + +This software is distributed without any warranty. + + +Third-party libraries based on TweetNaCl.js +------------------------------------------- + +* [forward-secrecy](https://github.com/alax/forward-secrecy) — Axolotl ratchet implementation +* [nacl-stream](https://github.com/dchest/nacl-stream-js) - streaming encryption +* [tweetnacl-auth-js](https://github.com/dchest/tweetnacl-auth-js) — implementation of [`crypto_auth`](http://nacl.cr.yp.to/auth.html) + + +Who uses it +----------- + +Some notable users of TweetNaCl.js: + +* [miniLock](http://minilock.io/) +* [Stellar](https://www.stellar.org/) diff --git a/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.js b/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.js new file mode 100644 index 0000000..6c49958 --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.js @@ -0,0 +1,2418 @@ +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. 
+var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function ts64(x, i, h, l) { + x[i] = (h >> 24) & 0xff; + x[i+1] = (h >> 16) & 0xff; + x[i+2] = (h >> 8) & 0xff; + x[i+3] = h & 0xff; + x[i+4] = (l >> 24) & 0xff; + x[i+5] = (l >> 16) & 0xff; + x[i+6] = (l >> 8) & 0xff; + x[i+7] = l & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core_salsa20(o, p, k, c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | 
u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + x0 = x0 + j0 | 0; + x1 = x1 + j1 | 0; + x2 = x2 + j2 | 0; + x3 = x3 + j3 | 0; + x4 = x4 + j4 | 0; + x5 = x5 + j5 | 0; + x6 = x6 + j6 | 0; + x7 = x7 + j7 | 0; + x8 = x8 + j8 | 0; + x9 = x9 + j9 | 0; + x10 = x10 + j10 | 0; + x11 = x11 + j11 | 0; + x12 = x12 + j12 | 0; + x13 = x13 + j13 | 0; + x14 = x14 + j14 | 0; + x15 = x15 + j15 | 0; + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x1 >>> 0 & 0xff; + o[ 5] = x1 >>> 8 & 0xff; + o[ 6] = x1 >>> 16 & 0xff; + o[ 7] = x1 >>> 24 & 0xff; + + o[ 8] = x2 >>> 0 & 0xff; + o[ 9] = x2 >>> 8 & 0xff; + o[10] = x2 >>> 16 & 0xff; + o[11] = x2 >>> 24 & 0xff; + + o[12] = x3 >>> 0 & 0xff; + o[13] = x3 >>> 8 & 0xff; + o[14] = x3 >>> 16 & 0xff; + o[15] = x3 >>> 24 & 0xff; + + o[16] = x4 >>> 0 & 0xff; + o[17] = x4 >>> 8 & 0xff; + o[18] = x4 >>> 16 & 0xff; + o[19] = x4 >>> 24 & 0xff; + + o[20] = x5 >>> 0 & 0xff; + o[21] = x5 >>> 8 & 0xff; + o[22] = x5 >>> 16 & 0xff; + o[23] = x5 >>> 24 & 0xff; + + o[24] = x6 >>> 0 & 0xff; + o[25] = x6 >>> 8 & 0xff; + o[26] = x6 >>> 16 & 0xff; + o[27] = x6 >>> 24 & 0xff; + + o[28] = x7 >>> 0 & 0xff; + o[29] = x7 >>> 8 & 0xff; + o[30] = x7 >>> 16 & 0xff; + o[31] = x7 >>> 24 & 0xff; + + o[32] = x8 >>> 0 & 0xff; + o[33] = x8 >>> 8 & 0xff; + o[34] = x8 >>> 16 & 0xff; + o[35] = x8 >>> 24 & 0xff; + + o[36] = x9 >>> 0 & 0xff; + o[37] = x9 >>> 8 & 0xff; + o[38] = x9 >>> 16 & 0xff; + o[39] = x9 >>> 24 & 0xff; + + o[40] = x10 >>> 0 & 0xff; + o[41] = x10 >>> 8 & 0xff; + o[42] = x10 >>> 16 & 0xff; + o[43] = x10 >>> 24 & 0xff; + + o[44] = x11 >>> 0 & 0xff; + o[45] = x11 >>> 8 & 0xff; + o[46] = x11 >>> 16 & 0xff; + o[47] = x11 >>> 24 & 0xff; + + o[48] = x12 >>> 0 & 0xff; + o[49] = x12 >>> 8 & 0xff; + o[50] = x12 >>> 16 & 0xff; + o[51] = x12 >>> 24 & 0xff; + + o[52] = x13 >>> 0 & 0xff; + o[53] = x13 >>> 8 & 0xff; + o[54] = x13 >>> 16 & 0xff; + o[55] = x13 >>> 24 & 0xff; + + o[56] = x14 >>> 0 & 0xff; + o[57] = x14 >>> 8 & 0xff; + o[58] = x14 >>> 16 & 0xff; + o[59] = x14 >>> 24 & 0xff; + + o[60] = x15 >>> 0 & 0xff; + o[61] = x15 >>> 8 & 0xff; + o[62] = x15 >>> 16 & 0xff; + o[63] = x15 >>> 24 & 0xff; +} + +function core_hsalsa20(o,p,k,c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | 
(k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x5 >>> 0 & 0xff; + o[ 5] = x5 >>> 8 & 0xff; + o[ 6] = x5 >>> 16 & 0xff; + o[ 7] = x5 >>> 24 & 0xff; + + o[ 8] = x10 >>> 0 & 0xff; + o[ 9] = x10 >>> 8 & 0xff; + o[10] = x10 >>> 16 & 0xff; + o[11] = x10 >>> 24 & 0xff; + + o[12] = x15 >>> 0 & 0xff; + o[13] = x15 >>> 8 & 0xff; + o[14] = x15 >>> 16 & 0xff; + o[15] = x15 >>> 24 & 0xff; + + o[16] = x6 >>> 0 & 0xff; + o[17] = x6 >>> 8 & 0xff; + o[18] = x6 >>> 16 & 0xff; + o[19] = x6 >>> 24 & 0xff; + + o[20] = x7 >>> 0 & 0xff; + o[21] = x7 >>> 8 & 0xff; + o[22] = x7 >>> 16 & 0xff; + o[23] = x7 >>> 24 & 0xff; + + o[24] = x8 >>> 0 & 0xff; + o[25] = x8 >>> 8 & 0xff; + o[26] = x8 >>> 16 & 0xff; + 
o[27] = x8 >>> 24 & 0xff; + + o[28] = x9 >>> 0 & 0xff; + o[29] = x9 >>> 8 & 0xff; + o[30] = x9 >>> 16 & 0xff; + o[31] = x9 >>> 24 & 0xff; +} + +function crypto_core_salsa20(out,inp,k,c) { + core_salsa20(out,inp,k,c); +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core_hsalsa20(out,inp,k,c); +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = x[i]; + } + return 0; +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20(c,cpos,d,sn,s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s); +} + +/* +* Port of Andrew Moon's Poly1305-donna-16. Public domain. 
+* https://github.com/floodyberry/poly1305-donna +*/ + +var poly1305 = function(key) { + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.leftover = 0; + this.fin = 0; + + var t0, t1, t2, t3, t4, t5, t6, t7; + + t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff; + t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + this.r[5] = ((t4 >>> 1)) & 0x1ffe; + t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = ((t7 >>> 5)) & 0x007f; + + this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8; + this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 8; + this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8; + this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8; + this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8; + this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8; + this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8; + this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8; +}; + +poly1305.prototype.blocks = function(m, mpos, bytes) { + var hibit = this.fin ? 0 : (1 << 11); + var t0, t1, t2, t3, t4, t5, t6, t7, c; + var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9; + + var h0 = this.h[0], + h1 = this.h[1], + h2 = this.h[2], + h3 = this.h[3], + h4 = this.h[4], + h5 = this.h[5], + h6 = this.h[6], + h7 = this.h[7], + h8 = this.h[8], + h9 = this.h[9]; + + var r0 = this.r[0], + r1 = this.r[1], + r2 = this.r[2], + r3 = this.r[3], + r4 = this.r[4], + r5 = this.r[5], + r6 = this.r[6], + r7 = this.r[7], + r8 = this.r[8], + r9 = this.r[9]; + + while (bytes >= 16) { + t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff; + t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff; + t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff; + h5 += ((t4 >>> 1)) & 0x1fff; + t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff; + t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + h9 += ((t7 >>> 5)) | hibit; + + c = 0; + + d0 = c; + d0 += h0 * r0; + d0 += h1 * (5 * r9); + d0 += h2 * (5 * r8); + d0 += h3 * (5 * r7); + d0 += h4 * (5 * r6); + c = (d0 >>> 13); d0 &= 0x1fff; + d0 += h5 * (5 * r5); + d0 += h6 * (5 * r4); + d0 += h7 * (5 * r3); + d0 += h8 * (5 * r2); + d0 += h9 * (5 * r1); + c += (d0 >>> 13); d0 &= 0x1fff; + + d1 = c; + d1 += h0 * r1; + d1 += h1 * r0; + d1 += h2 * (5 * r9); + d1 += h3 * (5 * r8); + d1 += h4 * (5 * r7); + c = (d1 >>> 13); d1 &= 0x1fff; + d1 += h5 * (5 * r6); + d1 += h6 * (5 * r5); + d1 += h7 * (5 * r4); + d1 += h8 * (5 * r3); + d1 += h9 * (5 * r2); + c += (d1 >>> 13); d1 &= 
0x1fff; + + d2 = c; + d2 += h0 * r2; + d2 += h1 * r1; + d2 += h2 * r0; + d2 += h3 * (5 * r9); + d2 += h4 * (5 * r8); + c = (d2 >>> 13); d2 &= 0x1fff; + d2 += h5 * (5 * r7); + d2 += h6 * (5 * r6); + d2 += h7 * (5 * r5); + d2 += h8 * (5 * r4); + d2 += h9 * (5 * r3); + c += (d2 >>> 13); d2 &= 0x1fff; + + d3 = c; + d3 += h0 * r3; + d3 += h1 * r2; + d3 += h2 * r1; + d3 += h3 * r0; + d3 += h4 * (5 * r9); + c = (d3 >>> 13); d3 &= 0x1fff; + d3 += h5 * (5 * r8); + d3 += h6 * (5 * r7); + d3 += h7 * (5 * r6); + d3 += h8 * (5 * r5); + d3 += h9 * (5 * r4); + c += (d3 >>> 13); d3 &= 0x1fff; + + d4 = c; + d4 += h0 * r4; + d4 += h1 * r3; + d4 += h2 * r2; + d4 += h3 * r1; + d4 += h4 * r0; + c = (d4 >>> 13); d4 &= 0x1fff; + d4 += h5 * (5 * r9); + d4 += h6 * (5 * r8); + d4 += h7 * (5 * r7); + d4 += h8 * (5 * r6); + d4 += h9 * (5 * r5); + c += (d4 >>> 13); d4 &= 0x1fff; + + d5 = c; + d5 += h0 * r5; + d5 += h1 * r4; + d5 += h2 * r3; + d5 += h3 * r2; + d5 += h4 * r1; + c = (d5 >>> 13); d5 &= 0x1fff; + d5 += h5 * r0; + d5 += h6 * (5 * r9); + d5 += h7 * (5 * r8); + d5 += h8 * (5 * r7); + d5 += h9 * (5 * r6); + c += (d5 >>> 13); d5 &= 0x1fff; + + d6 = c; + d6 += h0 * r6; + d6 += h1 * r5; + d6 += h2 * r4; + d6 += h3 * r3; + d6 += h4 * r2; + c = (d6 >>> 13); d6 &= 0x1fff; + d6 += h5 * r1; + d6 += h6 * r0; + d6 += h7 * (5 * r9); + d6 += h8 * (5 * r8); + d6 += h9 * (5 * r7); + c += (d6 >>> 13); d6 &= 0x1fff; + + d7 = c; + d7 += h0 * r7; + d7 += h1 * r6; + d7 += h2 * r5; + d7 += h3 * r4; + d7 += h4 * r3; + c = (d7 >>> 13); d7 &= 0x1fff; + d7 += h5 * r2; + d7 += h6 * r1; + d7 += h7 * r0; + d7 += h8 * (5 * r9); + d7 += h9 * (5 * r8); + c += (d7 >>> 13); d7 &= 0x1fff; + + d8 = c; + d8 += h0 * r8; + d8 += h1 * r7; + d8 += h2 * r6; + d8 += h3 * r5; + d8 += h4 * r4; + c = (d8 >>> 13); d8 &= 0x1fff; + d8 += h5 * r3; + d8 += h6 * r2; + d8 += h7 * r1; + d8 += h8 * r0; + d8 += h9 * (5 * r9); + c += (d8 >>> 13); d8 &= 0x1fff; + + d9 = c; + d9 += h0 * r9; + d9 += h1 * r8; + d9 += h2 * r7; + d9 += h3 * r6; + d9 += h4 * r5; + c = (d9 >>> 13); d9 &= 0x1fff; + d9 += h5 * r4; + d9 += h6 * r3; + d9 += h7 * r2; + d9 += h8 * r1; + d9 += h9 * r0; + c += (d9 >>> 13); d9 &= 0x1fff; + + c = (((c << 2) + c)) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = (c >>> 13); + d1 += c; + + h0 = d0; + h1 = d1; + h2 = d2; + h3 = d3; + h4 = d4; + h5 = d5; + h6 = d6; + h7 = d7; + h8 = d8; + h9 = d9; + + mpos += 16; + bytes -= 16; + } + this.h[0] = h0; + this.h[1] = h1; + this.h[2] = h2; + this.h[3] = h3; + this.h[4] = h4; + this.h[5] = h5; + this.h[6] = h6; + this.h[7] = h7; + this.h[8] = h8; + this.h[9] = h9; +}; + +poly1305.prototype.finish = function(mac, macpos) { + var g = new Uint16Array(10); + var c, mask, f, i; + + if (this.leftover) { + i = this.leftover; + this.buffer[i++] = 1; + for (; i < 16; i++) this.buffer[i] = 0; + this.fin = 1; + this.blocks(this.buffer, 0, 16); + } + + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + for (i = 2; i < 10; i++) { + this.h[i] += c; + c = this.h[i] >>> 13; + this.h[i] &= 0x1fff; + } + this.h[0] += (c * 5); + c = this.h[0] >>> 13; + this.h[0] &= 0x1fff; + this.h[1] += c; + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + this.h[2] += c; + + g[0] = this.h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (i = 1; i < 10; i++) { + g[i] = this.h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= (1 << 13); + + mask = (g[9] >>> ((2 * 8) - 1)) - 1; + for (i = 0; i < 10; i++) g[i] &= mask; + mask = ~mask; + for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i]; + + this.h[0] = ((this.h[0] ) | 
(this.h[1] << 13) ) & 0xffff; + this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff; + this.h[2] = ((this.h[2] >>> 6) | (this.h[3] << 7) ) & 0xffff; + this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff; + this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff; + this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff; + this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff; + this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff; + + f = this.h[0] + this.pad[0]; + this.h[0] = f & 0xffff; + for (i = 1; i < 8; i++) { + f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0; + this.h[i] = f & 0xffff; + } + + mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff; + mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff; + mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff; + mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff; + mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff; + mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff; + mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff; + mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff; + mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff; + mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff; + mac[macpos+10] = (this.h[5] >>> 0) & 0xff; + mac[macpos+11] = (this.h[5] >>> 8) & 0xff; + mac[macpos+12] = (this.h[6] >>> 0) & 0xff; + mac[macpos+13] = (this.h[6] >>> 8) & 0xff; + mac[macpos+14] = (this.h[7] >>> 0) & 0xff; + mac[macpos+15] = (this.h[7] >>> 8) & 0xff; +}; + +poly1305.prototype.update = function(m, mpos, bytes) { + var i, want; + + if (this.leftover) { + want = (16 - this.leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + bytes -= want; + mpos += want; + this.leftover += want; + if (this.leftover < 16) + return; + this.blocks(this.buffer, 0, 16); + this.leftover = 0; + } + + if (bytes >= 16) { + want = bytes - (bytes % 16); + this.blocks(m, mpos, want); + mpos += want; + bytes -= want; + } + + if (bytes) { + for (i = 0; i < bytes; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + this.leftover += bytes; + } +}; + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s = new poly1305(k); + s.update(m, mpos, n); + s.finish(out, outpos); + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var i, v, c = 1; + for (i = 0; i < 16; i++) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c-1 + 37 * (c-1); +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - 
((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] + b[i]; +} + +function Z(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] - b[i]; +} + +function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 += v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; + t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + 
t21 += v * b14; + t22 += v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 += v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 
65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + 
var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +var K = [ + 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, + 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 +]; + +function crypto_hashblocks_hl(hh, hl, m, n) { + var wh = new Int32Array(16), wl = new Int32Array(16), + bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7, + bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7, + th, tl, i, j, h, l, a, b, c, d; + + var ah0 = hh[0], + ah1 = hh[1], + ah2 = hh[2], + ah3 = hh[3], + ah4 = hh[4], + ah5 = hh[5], + ah6 = hh[6], + ah7 = hh[7], + + al0 = hl[0], + al1 = hl[1], + al2 = hl[2], + al3 = hl[3], + al4 = hl[4], + al5 = hl[5], + al6 = hl[6], + al7 = hl[7]; + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) { + j = 8 * i + pos; + wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3]; + wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7]; + } + for (i = 0; i < 80; i++) { + bh0 = ah0; + bh1 = ah1; + bh2 = ah2; + bh3 = ah3; + bh4 = ah4; + bh5 = ah5; + bh6 = ah6; + bh7 = ah7; + + bl0 = al0; + bl1 = al1; + bl2 = al2; + bl3 = al3; + bl4 = al4; + bl5 = al5; + bl6 = al6; + bl7 = al7; + + // add + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma1 + h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32)))); + l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) 
| (al4 << (32-(41-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Ch + h = (ah4 & ah5) ^ (~ah4 & ah6); + l = (al4 & al5) ^ (~al4 & al6); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // K + h = K[i*2]; + l = K[i*2+1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // w + h = wh[i%16]; + l = wl[i%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + th = c & 0xffff | d << 16; + tl = a & 0xffff | b << 16; + + // add + h = th; + l = tl; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma0 + h = ((ah0 >>> 28) | (al0 << (32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32)))); + l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Maj + h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2); + l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh7 = (c & 0xffff) | (d << 16); + bl7 = (a & 0xffff) | (b << 16); + + // add + h = bh3; + l = bl3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = th; + l = tl; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh3 = (c & 0xffff) | (d << 16); + bl3 = (a & 0xffff) | (b << 16); + + ah1 = bh0; + ah2 = bh1; + ah3 = bh2; + ah4 = bh3; + ah5 = bh4; + ah6 = bh5; + ah7 = bh6; + ah0 = bh7; + + al1 = bl0; + al2 = bl1; + al3 = bl2; + al4 = bl3; + al5 = bl4; + al6 = bl5; + al7 = bl6; + al0 = bl7; + + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + // add + h = wh[j]; + l = wl[j]; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = wh[(j+9)%16]; + l = wl[(j+9)%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma0 + th = wh[(j+1)%16]; + tl = wl[(j+1)%16]; + h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7); + l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl >>> 7) | (th << (32-7))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma1 + th = wh[(j+14)%16]; + tl = wl[(j+14)%16]; + h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6); + l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + wh[j] = (c & 0xffff) | (d << 16); + wl[j] = (a & 0xffff) | (b << 16); + } + } + } + + // add + h = ah0; + l = al0; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[0]; + l = hl[0]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[0] = ah0 = (c & 0xffff) | (d << 16); + hl[0] = al0 = (a & 0xffff) | (b << 16); + + h = ah1; + l = al1; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[1]; + l = hl[1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[1] = ah1 = (c & 0xffff) | (d << 16); + hl[1] = al1 = (a & 0xffff) | (b 
<< 16); + + h = ah2; + l = al2; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[2]; + l = hl[2]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[2] = ah2 = (c & 0xffff) | (d << 16); + hl[2] = al2 = (a & 0xffff) | (b << 16); + + h = ah3; + l = al3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[3]; + l = hl[3]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[3] = ah3 = (c & 0xffff) | (d << 16); + hl[3] = al3 = (a & 0xffff) | (b << 16); + + h = ah4; + l = al4; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[4]; + l = hl[4]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[4] = ah4 = (c & 0xffff) | (d << 16); + hl[4] = al4 = (a & 0xffff) | (b << 16); + + h = ah5; + l = al5; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[5]; + l = hl[5]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[5] = ah5 = (c & 0xffff) | (d << 16); + hl[5] = al5 = (a & 0xffff) | (b << 16); + + h = ah6; + l = al6; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[6]; + l = hl[6]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[6] = ah6 = (c & 0xffff) | (d << 16); + hl[6] = al6 = (a & 0xffff) | (b << 16); + + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[7]; + l = hl[7]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[7] = ah7 = (c & 0xffff) | (d << 16); + hl[7] = al7 = (a & 0xffff) | (b << 16); + + pos += 128; + n -= 128; + } + + return n; +} + +function crypto_hash(out, m, n) { + var hh = new Int32Array(8), + hl = new Int32Array(8), + x = new Uint8Array(256), + i, b = n; + + hh[0] = 0x6a09e667; + hh[1] = 0xbb67ae85; + hh[2] = 0x3c6ef372; + hh[3] = 0xa54ff53a; + hh[4] = 0x510e527f; + hh[5] = 0x9b05688c; + hh[6] = 0x1f83d9ab; + hh[7] = 0x5be0cd19; + + hl[0] = 0xf3bcc908; + hl[1] = 0x84caa73b; + hl[2] = 0xfe94f82b; + hl[3] = 0x5f1d36f1; + hl[4] = 0xade682d1; + hl[5] = 0x2b3e6c1f; + hl[6] = 0xfb41bd6b; + hl[7] = 0x137e2179; + + crypto_hashblocks_hl(hh, hl, m, n); + n %= 128; + + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, (b / 0x20000000) | 0, b << 3); + crypto_hashblocks_hl(hh, hl, x, n); + + for (i = 0; i < 8; i++) ts64(out, 8*i, hh[i], hl[i]); + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, 
q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = (x[j] + 128) >> 8; + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i, mlen; + var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + mlen = -1; + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + 
scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + mlen = n; + return mlen; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + var t, i; + for (i = 0; i < arguments.length; i++) { + if ((t = Object.prototype.toString.call(arguments[i])) !== '[object Uint8Array]') + throw new TypeError('unexpected type ' + t + ', use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +nacl.util = {}; + +nacl.util.decodeUTF8 = function(s) { + var i, d = 
unescape(encodeURIComponent(s)), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; +}; + +nacl.util.encodeUTF8 = function(arr) { + var i, s = []; + for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i])); + return decodeURIComponent(escape(s.join(''))); +}; + +nacl.util.encodeBase64 = function(arr) { + if (typeof btoa === 'undefined') { + return (new Buffer(arr)).toString('base64'); + } else { + var i, s = [], len = arr.length; + for (i = 0; i < len; i++) s.push(String.fromCharCode(arr[i])); + return btoa(s.join('')); + } +}; + +nacl.util.decodeBase64 = function(s) { + if (typeof atob === 'undefined') { + return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0)); + } else { + var i, d = atob(s), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + } +}; + +nacl.randomBytes = function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return false; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return false; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); 
+ return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; +nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + if (arguments.length !== 2) + throw new Error('nacl.sign.open accepts 2 arguments; did you mean to use nacl.sign.detached.verify?'); + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; 
+nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. + // If not, methods calling randombytes will throw. + var crypto; + if (typeof window !== 'undefined') { + // Browser. + if (window.crypto && window.crypto.getRandomValues) { + crypto = window.crypto; // Standard + } else if (window.msCrypto && window.msCrypto.getRandomValues) { + crypto = window.msCrypto; // Internet Explorer 11+ + } + if (crypto) { + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + crypto.getRandomValues(v); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } else if (typeof require !== 'undefined') { + // Node.js. + crypto = require('crypto'); + if (crypto) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})(typeof module !== 'undefined' && module.exports ? module.exports : (window.nacl = window.nacl || {})); diff --git a/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.min.js b/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.min.js new file mode 100644 index 0000000..7072c2a --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/nacl-fast.min.js @@ -0,0 +1,2 @@ +!function(r){"use strict";function t(r,t,n,e){r[t]=n>>24&255,r[t+1]=n>>16&255,r[t+2]=n>>8&255,r[t+3]=255&n,r[t+4]=e>>24&255,r[t+5]=e>>16&255,r[t+6]=e>>8&255,r[t+7]=255&e}function n(r,t,n,e,o){var i,h=0;for(i=0;o>i;i++)h|=r[t+i]^n[e+i];return(1&h-1>>>8)-1}function e(r,t,e,o){return n(r,t,e,o,16)}function o(r,t,e,o){return n(r,t,e,o,32)}function i(r,t,n,e){for(var 
o,i=255&e[0]|(255&e[1])<<8|(255&e[2])<<16|(255&e[3])<<24,h=255&n[0]|(255&n[1])<<8|(255&n[2])<<16|(255&n[3])<<24,a=255&n[4]|(255&n[5])<<8|(255&n[6])<<16|(255&n[7])<<24,f=255&n[8]|(255&n[9])<<8|(255&n[10])<<16|(255&n[11])<<24,s=255&n[12]|(255&n[13])<<8|(255&n[14])<<16|(255&n[15])<<24,u=255&e[4]|(255&e[5])<<8|(255&e[6])<<16|(255&e[7])<<24,c=255&t[0]|(255&t[1])<<8|(255&t[2])<<16|(255&t[3])<<24,y=255&t[4]|(255&t[5])<<8|(255&t[6])<<16|(255&t[7])<<24,l=255&t[8]|(255&t[9])<<8|(255&t[10])<<16|(255&t[11])<<24,w=255&t[12]|(255&t[13])<<8|(255&t[14])<<16|(255&t[15])<<24,p=255&e[8]|(255&e[9])<<8|(255&e[10])<<16|(255&e[11])<<24,g=255&n[16]|(255&n[17])<<8|(255&n[18])<<16|(255&n[19])<<24,v=255&n[20]|(255&n[21])<<8|(255&n[22])<<16|(255&n[23])<<24,b=255&n[24]|(255&n[25])<<8|(255&n[26])<<16|(255&n[27])<<24,d=255&n[28]|(255&n[29])<<8|(255&n[30])<<16|(255&n[31])<<24,A=255&e[12]|(255&e[13])<<8|(255&e[14])<<16|(255&e[15])<<24,_=i,U=h,E=a,x=f,M=s,m=u,B=c,S=y,K=l,T=w,Y=p,k=g,L=v,C=b,R=d,z=A,P=0;20>P;P+=2)o=_+L|0,M^=o<<7|o>>>25,o=M+_|0,K^=o<<9|o>>>23,o=K+M|0,L^=o<<13|o>>>19,o=L+K|0,_^=o<<18|o>>>14,o=m+U|0,T^=o<<7|o>>>25,o=T+m|0,C^=o<<9|o>>>23,o=C+T|0,U^=o<<13|o>>>19,o=U+C|0,m^=o<<18|o>>>14,o=Y+B|0,R^=o<<7|o>>>25,o=R+Y|0,E^=o<<9|o>>>23,o=E+R|0,B^=o<<13|o>>>19,o=B+E|0,Y^=o<<18|o>>>14,o=z+k|0,x^=o<<7|o>>>25,o=x+z|0,S^=o<<9|o>>>23,o=S+x|0,k^=o<<13|o>>>19,o=k+S|0,z^=o<<18|o>>>14,o=_+x|0,U^=o<<7|o>>>25,o=U+_|0,E^=o<<9|o>>>23,o=E+U|0,x^=o<<13|o>>>19,o=x+E|0,_^=o<<18|o>>>14,o=m+M|0,B^=o<<7|o>>>25,o=B+m|0,S^=o<<9|o>>>23,o=S+B|0,M^=o<<13|o>>>19,o=M+S|0,m^=o<<18|o>>>14,o=Y+T|0,k^=o<<7|o>>>25,o=k+Y|0,K^=o<<9|o>>>23,o=K+k|0,T^=o<<13|o>>>19,o=T+K|0,Y^=o<<18|o>>>14,o=z+R|0,L^=o<<7|o>>>25,o=L+z|0,C^=o<<9|o>>>23,o=C+L|0,R^=o<<13|o>>>19,o=R+C|0,z^=o<<18|o>>>14;_=_+i|0,U=U+h|0,E=E+a|0,x=x+f|0,M=M+s|0,m=m+u|0,B=B+c|0,S=S+y|0,K=K+l|0,T=T+w|0,Y=Y+p|0,k=k+g|0,L=L+v|0,C=C+b|0,R=R+d|0,z=z+A|0,r[0]=_>>>0&255,r[1]=_>>>8&255,r[2]=_>>>16&255,r[3]=_>>>24&255,r[4]=U>>>0&255,r[5]=U>>>8&255,r[6]=U>>>16&255,r[7]=U>>>24&255,r[8]=E>>>0&255,r[9]=E>>>8&255,r[10]=E>>>16&255,r[11]=E>>>24&255,r[12]=x>>>0&255,r[13]=x>>>8&255,r[14]=x>>>16&255,r[15]=x>>>24&255,r[16]=M>>>0&255,r[17]=M>>>8&255,r[18]=M>>>16&255,r[19]=M>>>24&255,r[20]=m>>>0&255,r[21]=m>>>8&255,r[22]=m>>>16&255,r[23]=m>>>24&255,r[24]=B>>>0&255,r[25]=B>>>8&255,r[26]=B>>>16&255,r[27]=B>>>24&255,r[28]=S>>>0&255,r[29]=S>>>8&255,r[30]=S>>>16&255,r[31]=S>>>24&255,r[32]=K>>>0&255,r[33]=K>>>8&255,r[34]=K>>>16&255,r[35]=K>>>24&255,r[36]=T>>>0&255,r[37]=T>>>8&255,r[38]=T>>>16&255,r[39]=T>>>24&255,r[40]=Y>>>0&255,r[41]=Y>>>8&255,r[42]=Y>>>16&255,r[43]=Y>>>24&255,r[44]=k>>>0&255,r[45]=k>>>8&255,r[46]=k>>>16&255,r[47]=k>>>24&255,r[48]=L>>>0&255,r[49]=L>>>8&255,r[50]=L>>>16&255,r[51]=L>>>24&255,r[52]=C>>>0&255,r[53]=C>>>8&255,r[54]=C>>>16&255,r[55]=C>>>24&255,r[56]=R>>>0&255,r[57]=R>>>8&255,r[58]=R>>>16&255,r[59]=R>>>24&255,r[60]=z>>>0&255,r[61]=z>>>8&255,r[62]=z>>>16&255,r[63]=z>>>24&255}function h(r,t,n,e){for(var 
o,i=255&e[0]|(255&e[1])<<8|(255&e[2])<<16|(255&e[3])<<24,h=255&n[0]|(255&n[1])<<8|(255&n[2])<<16|(255&n[3])<<24,a=255&n[4]|(255&n[5])<<8|(255&n[6])<<16|(255&n[7])<<24,f=255&n[8]|(255&n[9])<<8|(255&n[10])<<16|(255&n[11])<<24,s=255&n[12]|(255&n[13])<<8|(255&n[14])<<16|(255&n[15])<<24,u=255&e[4]|(255&e[5])<<8|(255&e[6])<<16|(255&e[7])<<24,c=255&t[0]|(255&t[1])<<8|(255&t[2])<<16|(255&t[3])<<24,y=255&t[4]|(255&t[5])<<8|(255&t[6])<<16|(255&t[7])<<24,l=255&t[8]|(255&t[9])<<8|(255&t[10])<<16|(255&t[11])<<24,w=255&t[12]|(255&t[13])<<8|(255&t[14])<<16|(255&t[15])<<24,p=255&e[8]|(255&e[9])<<8|(255&e[10])<<16|(255&e[11])<<24,g=255&n[16]|(255&n[17])<<8|(255&n[18])<<16|(255&n[19])<<24,v=255&n[20]|(255&n[21])<<8|(255&n[22])<<16|(255&n[23])<<24,b=255&n[24]|(255&n[25])<<8|(255&n[26])<<16|(255&n[27])<<24,d=255&n[28]|(255&n[29])<<8|(255&n[30])<<16|(255&n[31])<<24,A=255&e[12]|(255&e[13])<<8|(255&e[14])<<16|(255&e[15])<<24,_=i,U=h,E=a,x=f,M=s,m=u,B=c,S=y,K=l,T=w,Y=p,k=g,L=v,C=b,R=d,z=A,P=0;20>P;P+=2)o=_+L|0,M^=o<<7|o>>>25,o=M+_|0,K^=o<<9|o>>>23,o=K+M|0,L^=o<<13|o>>>19,o=L+K|0,_^=o<<18|o>>>14,o=m+U|0,T^=o<<7|o>>>25,o=T+m|0,C^=o<<9|o>>>23,o=C+T|0,U^=o<<13|o>>>19,o=U+C|0,m^=o<<18|o>>>14,o=Y+B|0,R^=o<<7|o>>>25,o=R+Y|0,E^=o<<9|o>>>23,o=E+R|0,B^=o<<13|o>>>19,o=B+E|0,Y^=o<<18|o>>>14,o=z+k|0,x^=o<<7|o>>>25,o=x+z|0,S^=o<<9|o>>>23,o=S+x|0,k^=o<<13|o>>>19,o=k+S|0,z^=o<<18|o>>>14,o=_+x|0,U^=o<<7|o>>>25,o=U+_|0,E^=o<<9|o>>>23,o=E+U|0,x^=o<<13|o>>>19,o=x+E|0,_^=o<<18|o>>>14,o=m+M|0,B^=o<<7|o>>>25,o=B+m|0,S^=o<<9|o>>>23,o=S+B|0,M^=o<<13|o>>>19,o=M+S|0,m^=o<<18|o>>>14,o=Y+T|0,k^=o<<7|o>>>25,o=k+Y|0,K^=o<<9|o>>>23,o=K+k|0,T^=o<<13|o>>>19,o=T+K|0,Y^=o<<18|o>>>14,o=z+R|0,L^=o<<7|o>>>25,o=L+z|0,C^=o<<9|o>>>23,o=C+L|0,R^=o<<13|o>>>19,o=R+C|0,z^=o<<18|o>>>14;r[0]=_>>>0&255,r[1]=_>>>8&255,r[2]=_>>>16&255,r[3]=_>>>24&255,r[4]=m>>>0&255,r[5]=m>>>8&255,r[6]=m>>>16&255,r[7]=m>>>24&255,r[8]=Y>>>0&255,r[9]=Y>>>8&255,r[10]=Y>>>16&255,r[11]=Y>>>24&255,r[12]=z>>>0&255,r[13]=z>>>8&255,r[14]=z>>>16&255,r[15]=z>>>24&255,r[16]=B>>>0&255,r[17]=B>>>8&255,r[18]=B>>>16&255,r[19]=B>>>24&255,r[20]=S>>>0&255,r[21]=S>>>8&255,r[22]=S>>>16&255,r[23]=S>>>24&255,r[24]=K>>>0&255,r[25]=K>>>8&255,r[26]=K>>>16&255,r[27]=K>>>24&255,r[28]=T>>>0&255,r[29]=T>>>8&255,r[30]=T>>>16&255,r[31]=T>>>24&255}function a(r,t,n,e){i(r,t,n,e)}function f(r,t,n,e){h(r,t,n,e)}function s(r,t,n,e,o,i,h){var f,s,u=new Uint8Array(16),c=new Uint8Array(64);for(s=0;16>s;s++)u[s]=0;for(s=0;8>s;s++)u[s]=i[s];for(;o>=64;){for(a(c,u,h,cr),s=0;64>s;s++)r[t+s]=n[e+s]^c[s];for(f=1,s=8;16>s;s++)f=f+(255&u[s])|0,u[s]=255&f,f>>>=8;o-=64,t+=64,e+=64}if(o>0)for(a(c,u,h,cr),s=0;o>s;s++)r[t+s]=n[e+s]^c[s];return 0}function u(r,t,n,e,o){var i,h,f=new Uint8Array(16),s=new Uint8Array(64);for(h=0;16>h;h++)f[h]=0;for(h=0;8>h;h++)f[h]=e[h];for(;n>=64;){for(a(s,f,o,cr),h=0;64>h;h++)r[t+h]=s[h];for(i=1,h=8;16>h;h++)i=i+(255&f[h])|0,f[h]=255&i,i>>>=8;n-=64,t+=64}if(n>0)for(a(s,f,o,cr),h=0;n>h;h++)r[t+h]=s[h];return 0}function c(r,t,n,e,o){var i=new Uint8Array(32);f(i,e,o,cr);for(var h=new Uint8Array(8),a=0;8>a;a++)h[a]=e[a+16];return u(r,t,n,h,i)}function y(r,t,n,e,o,i,h){var a=new Uint8Array(32);f(a,i,h,cr);for(var u=new Uint8Array(8),c=0;8>c;c++)u[c]=i[c+16];return s(r,t,n,e,o,u,a)}function l(r,t,n,e,o,i){var h=new yr(i);return h.update(n,e,o),h.finish(r,t),0}function w(r,t,n,o,i,h){var a=new Uint8Array(16);return l(a,0,n,o,i,h),e(r,t,a,0)}function p(r,t,n,e,o){var i;if(32>n)return-1;for(y(r,0,t,0,n,e,o),l(r,16,r,32,n-32,r),i=0;16>i;i++)r[i]=0;return 0}function g(r,t,n,e,o){var i,h=new 
Uint8Array(32);if(32>n)return-1;if(c(h,0,32,e,o),0!==w(t,16,t,32,n-32,h))return-1;for(y(r,0,t,0,n,e,o),i=0;32>i;i++)r[i]=0;return 0}function v(r,t){var n;for(n=0;16>n;n++)r[n]=0|t[n]}function b(r){var t,n,e=1;for(t=0;16>t;t++)n=r[t]+e+65535,e=Math.floor(n/65536),r[t]=n-65536*e;r[0]+=e-1+37*(e-1)}function d(r,t,n){for(var e,o=~(n-1),i=0;16>i;i++)e=o&(r[i]^t[i]),r[i]^=e,t[i]^=e}function A(r,t){var n,e,o,i=$(),h=$();for(n=0;16>n;n++)h[n]=t[n];for(b(h),b(h),b(h),e=0;2>e;e++){for(i[0]=h[0]-65517,n=1;15>n;n++)i[n]=h[n]-65535-(i[n-1]>>16&1),i[n-1]&=65535;i[15]=h[15]-32767-(i[14]>>16&1),o=i[15]>>16&1,i[14]&=65535,d(h,i,1-o)}for(n=0;16>n;n++)r[2*n]=255&h[n],r[2*n+1]=h[n]>>8}function _(r,t){var n=new Uint8Array(32),e=new Uint8Array(32);return A(n,r),A(e,t),o(n,0,e,0)}function U(r){var t=new Uint8Array(32);return A(t,r),1&t[0]}function E(r,t){var n;for(n=0;16>n;n++)r[n]=t[2*n]+(t[2*n+1]<<8);r[15]&=32767}function x(r,t,n){for(var e=0;16>e;e++)r[e]=t[e]+n[e]}function M(r,t,n){for(var e=0;16>e;e++)r[e]=t[e]-n[e]}function m(r,t,n){var e,o,i=0,h=0,a=0,f=0,s=0,u=0,c=0,y=0,l=0,w=0,p=0,g=0,v=0,b=0,d=0,A=0,_=0,U=0,E=0,x=0,M=0,m=0,B=0,S=0,K=0,T=0,Y=0,k=0,L=0,C=0,R=0,z=n[0],P=n[1],O=n[2],N=n[3],F=n[4],I=n[5],j=n[6],G=n[7],Z=n[8],V=n[9],q=n[10],X=n[11],D=n[12],H=n[13],J=n[14],Q=n[15];e=t[0],i+=e*z,h+=e*P,a+=e*O,f+=e*N,s+=e*F,u+=e*I,c+=e*j,y+=e*G,l+=e*Z,w+=e*V,p+=e*q,g+=e*X,v+=e*D,b+=e*H,d+=e*J,A+=e*Q,e=t[1],h+=e*z,a+=e*P,f+=e*O,s+=e*N,u+=e*F,c+=e*I,y+=e*j,l+=e*G,w+=e*Z,p+=e*V,g+=e*q,v+=e*X,b+=e*D,d+=e*H,A+=e*J,_+=e*Q,e=t[2],a+=e*z,f+=e*P,s+=e*O,u+=e*N,c+=e*F,y+=e*I,l+=e*j,w+=e*G,p+=e*Z,g+=e*V,v+=e*q,b+=e*X,d+=e*D,A+=e*H,_+=e*J,U+=e*Q,e=t[3],f+=e*z,s+=e*P,u+=e*O,c+=e*N,y+=e*F,l+=e*I,w+=e*j,p+=e*G,g+=e*Z,v+=e*V,b+=e*q,d+=e*X,A+=e*D,_+=e*H,U+=e*J,E+=e*Q,e=t[4],s+=e*z,u+=e*P,c+=e*O,y+=e*N,l+=e*F,w+=e*I,p+=e*j,g+=e*G,v+=e*Z,b+=e*V,d+=e*q,A+=e*X,_+=e*D,U+=e*H,E+=e*J,x+=e*Q,e=t[5],u+=e*z,c+=e*P,y+=e*O,l+=e*N,w+=e*F,p+=e*I,g+=e*j,v+=e*G,b+=e*Z,d+=e*V,A+=e*q,_+=e*X,U+=e*D,E+=e*H,x+=e*J,M+=e*Q,e=t[6],c+=e*z,y+=e*P,l+=e*O,w+=e*N,p+=e*F,g+=e*I,v+=e*j,b+=e*G,d+=e*Z,A+=e*V,_+=e*q,U+=e*X,E+=e*D,x+=e*H,M+=e*J,m+=e*Q,e=t[7],y+=e*z,l+=e*P,w+=e*O,p+=e*N,g+=e*F,v+=e*I,b+=e*j,d+=e*G,A+=e*Z,_+=e*V,U+=e*q,E+=e*X,x+=e*D,M+=e*H,m+=e*J,B+=e*Q,e=t[8],l+=e*z,w+=e*P,p+=e*O,g+=e*N,v+=e*F,b+=e*I,d+=e*j,A+=e*G,_+=e*Z,U+=e*V,E+=e*q,x+=e*X,M+=e*D,m+=e*H,B+=e*J,S+=e*Q,e=t[9],w+=e*z,p+=e*P,g+=e*O,v+=e*N,b+=e*F,d+=e*I,A+=e*j,_+=e*G,U+=e*Z,E+=e*V,x+=e*q,M+=e*X,m+=e*D,B+=e*H,S+=e*J,K+=e*Q,e=t[10],p+=e*z,g+=e*P,v+=e*O,b+=e*N,d+=e*F,A+=e*I,_+=e*j,U+=e*G,E+=e*Z,x+=e*V,M+=e*q,m+=e*X,B+=e*D,S+=e*H,K+=e*J,T+=e*Q,e=t[11],g+=e*z,v+=e*P,b+=e*O,d+=e*N,A+=e*F,_+=e*I,U+=e*j,E+=e*G,x+=e*Z,M+=e*V,m+=e*q,B+=e*X,S+=e*D,K+=e*H,T+=e*J,Y+=e*Q,e=t[12],v+=e*z,b+=e*P,d+=e*O,A+=e*N,_+=e*F,U+=e*I,E+=e*j,x+=e*G,M+=e*Z,m+=e*V,B+=e*q,S+=e*X,K+=e*D,T+=e*H,Y+=e*J,k+=e*Q,e=t[13],b+=e*z,d+=e*P,A+=e*O,_+=e*N,U+=e*F,E+=e*I,x+=e*j,M+=e*G,m+=e*Z,B+=e*V,S+=e*q,K+=e*X,T+=e*D,Y+=e*H,k+=e*J,L+=e*Q,e=t[14],d+=e*z,A+=e*P,_+=e*O,U+=e*N,E+=e*F,x+=e*I,M+=e*j,m+=e*G,B+=e*Z,S+=e*V,K+=e*q,T+=e*X,Y+=e*D,k+=e*H,L+=e*J,C+=e*Q,e=t[15],A+=e*z,_+=e*P,U+=e*O,E+=e*N,x+=e*F,M+=e*I,m+=e*j,B+=e*G,S+=e*Z,K+=e*V,T+=e*q,Y+=e*X,k+=e*D,L+=e*H,C+=e*J,R+=e*Q,i+=38*_,h+=38*U,a+=38*E,f+=38*x,s+=38*M,u+=38*m,c+=38*B,y+=38*S,l+=38*K,w+=38*T,p+=38*Y,g+=38*k,v+=38*L,b+=38*C,d+=38*R,o=1,e=i+o+65535,o=Math.floor(e/65536),i=e-65536*o,e=h+o+65535,o=Math.floor(e/65536),h=e-65536*o,e=a+o+65535,o=Math.floor(e/65536),a=e-65536*o,e=f+o+65535,o=Math.floor(e/65536),f=e-65536*o,e=s+o+65535,o=Math.floor(e/65536),s=e-65536*o,e=u+o+655
35,o=Math.floor(e/65536),u=e-65536*o,e=c+o+65535,o=Math.floor(e/65536),c=e-65536*o,e=y+o+65535,o=Math.floor(e/65536),y=e-65536*o,e=l+o+65535,o=Math.floor(e/65536),l=e-65536*o,e=w+o+65535,o=Math.floor(e/65536),w=e-65536*o,e=p+o+65535,o=Math.floor(e/65536),p=e-65536*o,e=g+o+65535,o=Math.floor(e/65536),g=e-65536*o,e=v+o+65535,o=Math.floor(e/65536),v=e-65536*o,e=b+o+65535,o=Math.floor(e/65536),b=e-65536*o,e=d+o+65535,o=Math.floor(e/65536),d=e-65536*o,e=A+o+65535,o=Math.floor(e/65536),A=e-65536*o,i+=o-1+37*(o-1),o=1,e=i+o+65535,o=Math.floor(e/65536),i=e-65536*o,e=h+o+65535,o=Math.floor(e/65536),h=e-65536*o,e=a+o+65535,o=Math.floor(e/65536),a=e-65536*o,e=f+o+65535,o=Math.floor(e/65536),f=e-65536*o,e=s+o+65535,o=Math.floor(e/65536),s=e-65536*o,e=u+o+65535,o=Math.floor(e/65536),u=e-65536*o,e=c+o+65535,o=Math.floor(e/65536),c=e-65536*o,e=y+o+65535,o=Math.floor(e/65536),y=e-65536*o,e=l+o+65535,o=Math.floor(e/65536),l=e-65536*o,e=w+o+65535,o=Math.floor(e/65536),w=e-65536*o,e=p+o+65535,o=Math.floor(e/65536),p=e-65536*o,e=g+o+65535,o=Math.floor(e/65536),g=e-65536*o,e=v+o+65535,o=Math.floor(e/65536),v=e-65536*o,e=b+o+65535,o=Math.floor(e/65536),b=e-65536*o,e=d+o+65535,o=Math.floor(e/65536),d=e-65536*o,e=A+o+65535,o=Math.floor(e/65536),A=e-65536*o,i+=o-1+37*(o-1),r[0]=i,r[1]=h,r[2]=a,r[3]=f,r[4]=s,r[5]=u,r[6]=c,r[7]=y,r[8]=l,r[9]=w,r[10]=p,r[11]=g,r[12]=v,r[13]=b,r[14]=d,r[15]=A}function B(r,t){m(r,t,t)}function S(r,t){var n,e=$();for(n=0;16>n;n++)e[n]=t[n];for(n=253;n>=0;n--)B(e,e),2!==n&&4!==n&&m(e,e,t);for(n=0;16>n;n++)r[n]=e[n]}function K(r,t){var n,e=$();for(n=0;16>n;n++)e[n]=t[n];for(n=250;n>=0;n--)B(e,e),1!==n&&m(e,e,t);for(n=0;16>n;n++)r[n]=e[n]}function T(r,t,n){var e,o,i=new Uint8Array(32),h=new Float64Array(80),a=$(),f=$(),s=$(),u=$(),c=$(),y=$();for(o=0;31>o;o++)i[o]=t[o];for(i[31]=127&t[31]|64,i[0]&=248,E(h,n),o=0;16>o;o++)f[o]=h[o],u[o]=a[o]=s[o]=0;for(a[0]=u[0]=1,o=254;o>=0;--o)e=i[o>>>3]>>>(7&o)&1,d(a,f,e),d(s,u,e),x(c,a,s),M(a,a,s),x(s,f,u),M(f,f,u),B(u,c),B(y,a),m(a,s,a),m(s,f,c),x(c,a,s),M(a,a,s),B(f,a),M(s,u,y),m(a,s,ir),x(a,a,u),m(s,s,a),m(a,u,y),m(u,f,h),B(f,c),d(a,f,e),d(s,u,e);for(o=0;16>o;o++)h[o+16]=a[o],h[o+32]=s[o],h[o+48]=f[o],h[o+64]=u[o];var l=h.subarray(32),w=h.subarray(16);return S(l,l),m(w,w,l),A(r,w),0}function Y(r,t){return T(r,t,nr)}function k(r,t){return rr(t,32),Y(r,t)}function L(r,t,n){var e=new Uint8Array(32);return T(e,n,t),f(r,tr,e,cr)}function C(r,t,n,e,o,i){var h=new Uint8Array(32);return L(h,o,i),lr(r,t,n,e,h)}function R(r,t,n,e,o,i){var h=new Uint8Array(32);return L(h,o,i),wr(r,t,n,e,h)}function z(r,t,n,e){for(var o,i,h,a,f,s,u,c,y,l,w,p,g,v,b,d,A,_,U,E,x,M,m,B,S,K,T=new Int32Array(16),Y=new 
Int32Array(16),k=r[0],L=r[1],C=r[2],R=r[3],z=r[4],P=r[5],O=r[6],N=r[7],F=t[0],I=t[1],j=t[2],G=t[3],Z=t[4],V=t[5],q=t[6],X=t[7],D=0;e>=128;){for(U=0;16>U;U++)E=8*U+D,T[U]=n[E+0]<<24|n[E+1]<<16|n[E+2]<<8|n[E+3],Y[U]=n[E+4]<<24|n[E+5]<<16|n[E+6]<<8|n[E+7];for(U=0;80>U;U++)if(o=k,i=L,h=C,a=R,f=z,s=P,u=O,c=N,y=F,l=I,w=j,p=G,g=Z,v=V,b=q,d=X,x=N,M=X,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=(z>>>14|Z<<18)^(z>>>18|Z<<14)^(Z>>>9|z<<23),M=(Z>>>14|z<<18)^(Z>>>18|z<<14)^(z>>>9|Z<<23),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=z&P^~z&O,M=Z&V^~Z&q,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=pr[2*U],M=pr[2*U+1],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=T[U%16],M=Y[U%16],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,A=65535&S|K<<16,_=65535&m|B<<16,x=A,M=_,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=(k>>>28|F<<4)^(F>>>2|k<<30)^(F>>>7|k<<25),M=(F>>>28|k<<4)^(k>>>2|F<<30)^(k>>>7|F<<25),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=k&L^k&C^L&C,M=F&I^F&j^I&j,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,c=65535&S|K<<16,d=65535&m|B<<16,x=a,M=p,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=A,M=_,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,a=65535&S|K<<16,p=65535&m|B<<16,L=o,C=i,R=h,z=a,P=f,O=s,N=u,k=c,I=y,j=l,G=w,Z=p,V=g,q=v,X=b,F=d,U%16===15)for(E=0;16>E;E++)x=T[E],M=Y[E],m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=T[(E+9)%16],M=Y[(E+9)%16],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,A=T[(E+1)%16],_=Y[(E+1)%16],x=(A>>>1|_<<31)^(A>>>8|_<<24)^A>>>7,M=(_>>>1|A<<31)^(_>>>8|A<<24)^(_>>>7|A<<25),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,A=T[(E+14)%16],_=Y[(E+14)%16],x=(A>>>19|_<<13)^(_>>>29|A<<3)^A>>>6,M=(_>>>19|A<<13)^(A>>>29|_<<3)^(_>>>6|A<<26),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,T[E]=65535&S|K<<16,Y[E]=65535&m|B<<16;x=k,M=F,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[0],M=t[0],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[0]=k=65535&S|K<<16,t[0]=F=65535&m|B<<16,x=L,M=I,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[1],M=t[1],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[1]=L=65535&S|K<<16,t[1]=I=65535&m|B<<16,x=C,M=j,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[2],M=t[2],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[2]=C=65535&S|K<<16,t[2]=j=65535&m|B<<16,x=R,M=G,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[3],M=t[3],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[3]=R=65535&S|K<<16,t[3]=G=65535&m|B<<16,x=z,M=Z,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[4],M=t[4],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[4]=z=65535&S|K<<16,t[4]=Z=65535&m|B<<16,x=P,M=V,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[5],M=t[5],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[5]=P=65535&S|K<<16,t[5]=V=65535&m|B<<16,x=O,M=q,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[6],M=t[6],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[6]=O=65535&S|K<<16,t[6]=q=65535&m|B<<16,x=N,M=X,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[7],M=t[7],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[7]=N=65535&S|K<<16,t[7]=X=65535&m|B<<16,D+=128,e-=128}return e}function P(r,n,e){var o,i=new Int32Array(8),h=new Int32Array(8),a=new 
Uint8Array(256),f=e;for(i[0]=1779033703,i[1]=3144134277,i[2]=1013904242,i[3]=2773480762,i[4]=1359893119,i[5]=2600822924,i[6]=528734635,i[7]=1541459225,h[0]=4089235720,h[1]=2227873595,h[2]=4271175723,h[3]=1595750129,h[4]=2917565137,h[5]=725511199,h[6]=4215389547,h[7]=327033209,z(i,h,n,e),e%=128,o=0;e>o;o++)a[o]=n[f-e+o];for(a[e]=128,e=256-128*(112>e?1:0),a[e-9]=0,t(a,e-8,f/536870912|0,f<<3),z(i,h,a,e),o=0;8>o;o++)t(r,8*o,i[o],h[o]);return 0}function O(r,t){var n=$(),e=$(),o=$(),i=$(),h=$(),a=$(),f=$(),s=$(),u=$();M(n,r[1],r[0]),M(u,t[1],t[0]),m(n,n,u),x(e,r[0],r[1]),x(u,t[0],t[1]),m(e,e,u),m(o,r[3],t[3]),m(o,o,ar),m(i,r[2],t[2]),x(i,i,i),M(h,e,n),M(a,i,o),x(f,i,o),x(s,e,n),m(r[0],h,a),m(r[1],s,f),m(r[2],f,a),m(r[3],h,s)}function N(r,t,n){var e;for(e=0;4>e;e++)d(r[e],t[e],n)}function F(r,t){var n=$(),e=$(),o=$();S(o,t[2]),m(n,t[0],o),m(e,t[1],o),A(r,e),r[31]^=U(n)<<7}function I(r,t,n){var e,o;for(v(r[0],er),v(r[1],or),v(r[2],or),v(r[3],er),o=255;o>=0;--o)e=n[o/8|0]>>(7&o)&1,N(r,t,e),O(t,r),O(r,r),N(r,t,e)}function j(r,t){var n=[$(),$(),$(),$()];v(n[0],fr),v(n[1],sr),v(n[2],or),m(n[3],fr,sr),I(r,n,t)}function G(r,t,n){var e,o=new Uint8Array(64),i=[$(),$(),$(),$()];for(n||rr(t,32),P(o,t,32),o[0]&=248,o[31]&=127,o[31]|=64,j(i,o),F(r,i),e=0;32>e;e++)t[e+32]=r[e];return 0}function Z(r,t){var n,e,o,i;for(e=63;e>=32;--e){for(n=0,o=e-32,i=e-12;i>o;++o)t[o]+=n-16*t[e]*gr[o-(e-32)],n=t[o]+128>>8,t[o]-=256*n;t[o]+=n,t[e]=0}for(n=0,o=0;32>o;o++)t[o]+=n-(t[31]>>4)*gr[o],n=t[o]>>8,t[o]&=255;for(o=0;32>o;o++)t[o]-=n*gr[o];for(e=0;32>e;e++)t[e+1]+=t[e]>>8,r[e]=255&t[e]}function V(r){var t,n=new Float64Array(64);for(t=0;64>t;t++)n[t]=r[t];for(t=0;64>t;t++)r[t]=0;Z(r,n)}function q(r,t,n,e){var o,i,h=new Uint8Array(64),a=new Uint8Array(64),f=new Uint8Array(64),s=new Float64Array(64),u=[$(),$(),$(),$()];P(h,e,32),h[0]&=248,h[31]&=127,h[31]|=64;var c=n+64;for(o=0;n>o;o++)r[64+o]=t[o];for(o=0;32>o;o++)r[32+o]=h[32+o];for(P(f,r.subarray(32),n+32),V(f),j(u,f),F(r,u),o=32;64>o;o++)r[o]=e[o];for(P(a,r,n+64),V(a),o=0;64>o;o++)s[o]=0;for(o=0;32>o;o++)s[o]=f[o];for(o=0;32>o;o++)for(i=0;32>i;i++)s[o+i]+=a[o]*h[i];return Z(r.subarray(32),s),c}function X(r,t){var n=$(),e=$(),o=$(),i=$(),h=$(),a=$(),f=$();return v(r[2],or),E(r[1],t),B(o,r[1]),m(i,o,hr),M(o,o,r[2]),x(i,r[2],i),B(h,i),B(a,h),m(f,a,h),m(n,f,o),m(n,n,i),K(n,n),m(n,n,o),m(n,n,i),m(n,n,i),m(r[0],n,i),B(e,r[0]),m(e,e,i),_(e,o)&&m(r[0],r[0],ur),B(e,r[0]),m(e,e,i),_(e,o)?-1:(U(r[0])===t[31]>>7&&M(r[0],er,r[0]),m(r[3],r[0],r[1]),0)}function D(r,t,n,e){var i,h,a=new Uint8Array(32),f=new Uint8Array(64),s=[$(),$(),$(),$()],u=[$(),$(),$(),$()];if(h=-1,64>n)return-1;if(X(u,e))return-1;for(i=0;n>i;i++)r[i]=t[i];for(i=0;32>i;i++)r[i+32]=e[i];if(P(f,r,n),V(f),I(s,u,f),j(u,t.subarray(32)),O(s,u),F(a,s),n-=64,o(t,0,a,0)){for(i=0;n>i;i++)r[i]=0;return-1}for(i=0;n>i;i++)r[i]=t[i+64];return h=n}function H(r,t){if(r.length!==vr)throw new Error("bad key size");if(t.length!==br)throw new Error("bad nonce size")}function J(r,t){if(r.length!==Er)throw new Error("bad public key size");if(t.length!==xr)throw new Error("bad secret key size")}function Q(){var 
r,t;for(t=0;t>>13|n<<3),e=255&r[4]|(255&r[5])<<8,this.r[2]=7939&(n>>>10|e<<6),o=255&r[6]|(255&r[7])<<8,this.r[3]=8191&(e>>>7|o<<9),i=255&r[8]|(255&r[9])<<8,this.r[4]=255&(o>>>4|i<<12),this.r[5]=i>>>1&8190,h=255&r[10]|(255&r[11])<<8,this.r[6]=8191&(i>>>14|h<<2),a=255&r[12]|(255&r[13])<<8,this.r[7]=8065&(h>>>11|a<<5),f=255&r[14]|(255&r[15])<<8,this.r[8]=8191&(a>>>8|f<<8),this.r[9]=f>>>5&127,this.pad[0]=255&r[16]|(255&r[17])<<8,this.pad[1]=255&r[18]|(255&r[19])<<8,this.pad[2]=255&r[20]|(255&r[21])<<8,this.pad[3]=255&r[22]|(255&r[23])<<8,this.pad[4]=255&r[24]|(255&r[25])<<8,this.pad[5]=255&r[26]|(255&r[27])<<8,this.pad[6]=255&r[28]|(255&r[29])<<8,this.pad[7]=255&r[30]|(255&r[31])<<8};yr.prototype.blocks=function(r,t,n){for(var e,o,i,h,a,f,s,u,c,y,l,w,p,g,v,b,d,A,_,U=this.fin?0:2048,E=this.h[0],x=this.h[1],M=this.h[2],m=this.h[3],B=this.h[4],S=this.h[5],K=this.h[6],T=this.h[7],Y=this.h[8],k=this.h[9],L=this.r[0],C=this.r[1],R=this.r[2],z=this.r[3],P=this.r[4],O=this.r[5],N=this.r[6],F=this.r[7],I=this.r[8],j=this.r[9];n>=16;)e=255&r[t+0]|(255&r[t+1])<<8,E+=8191&e,o=255&r[t+2]|(255&r[t+3])<<8,x+=8191&(e>>>13|o<<3),i=255&r[t+4]|(255&r[t+5])<<8,M+=8191&(o>>>10|i<<6),h=255&r[t+6]|(255&r[t+7])<<8,m+=8191&(i>>>7|h<<9),a=255&r[t+8]|(255&r[t+9])<<8,B+=8191&(h>>>4|a<<12),S+=a>>>1&8191,f=255&r[t+10]|(255&r[t+11])<<8,K+=8191&(a>>>14|f<<2),s=255&r[t+12]|(255&r[t+13])<<8,T+=8191&(f>>>11|s<<5),u=255&r[t+14]|(255&r[t+15])<<8,Y+=8191&(s>>>8|u<<8),k+=u>>>5|U,c=0,y=c,y+=E*L,y+=5*x*j,y+=5*M*I,y+=5*m*F,y+=5*B*N,c=y>>>13,y&=8191,y+=5*S*O,y+=5*K*P,y+=5*T*z,y+=5*Y*R,y+=5*k*C,c+=y>>>13,y&=8191,l=c,l+=E*C,l+=x*L,l+=5*M*j,l+=5*m*I,l+=5*B*F,c=l>>>13,l&=8191,l+=5*S*N,l+=5*K*O,l+=5*T*P,l+=5*Y*z,l+=5*k*R,c+=l>>>13,l&=8191,w=c,w+=E*R,w+=x*C,w+=M*L,w+=5*m*j,w+=5*B*I,c=w>>>13,w&=8191,w+=5*S*F,w+=5*K*N,w+=5*T*O,w+=5*Y*P,w+=5*k*z,c+=w>>>13,w&=8191,p=c,p+=E*z,p+=x*R,p+=M*C,p+=m*L,p+=5*B*j,c=p>>>13,p&=8191,p+=5*S*I,p+=5*K*F,p+=5*T*N,p+=5*Y*O,p+=5*k*P,c+=p>>>13,p&=8191,g=c,g+=E*P,g+=x*z,g+=M*R,g+=m*C,g+=B*L,c=g>>>13,g&=8191,g+=5*S*j,g+=5*K*I,g+=5*T*F,g+=5*Y*N,g+=5*k*O,c+=g>>>13,g&=8191,v=c,v+=E*O,v+=x*P,v+=M*z,v+=m*R,v+=B*C,c=v>>>13,v&=8191,v+=S*L,v+=5*K*j,v+=5*T*I,v+=5*Y*F,v+=5*k*N,c+=v>>>13,v&=8191,b=c,b+=E*N,b+=x*O,b+=M*P,b+=m*z,b+=B*R,c=b>>>13,b&=8191,b+=S*C,b+=K*L,b+=5*T*j,b+=5*Y*I,b+=5*k*F,c+=b>>>13,b&=8191,d=c,d+=E*F,d+=x*N,d+=M*O,d+=m*P,d+=B*z,c=d>>>13,d&=8191,d+=S*R,d+=K*C,d+=T*L,d+=5*Y*j,d+=5*k*I,c+=d>>>13,d&=8191,A=c,A+=E*I,A+=x*F,A+=M*N,A+=m*O,A+=B*P,c=A>>>13,A&=8191,A+=S*z,A+=K*R,A+=T*C,A+=Y*L,A+=5*k*j,c+=A>>>13,A&=8191,_=c,_+=E*j,_+=x*I,_+=M*F,_+=m*N,_+=B*O,c=_>>>13,_&=8191,_+=S*P,_+=K*z,_+=T*R,_+=Y*C,_+=k*L,c+=_>>>13,_&=8191,c=(c<<2)+c|0,c=c+y|0,y=8191&c,c>>>=13,l+=c,E=y,x=l,M=w,m=p,B=g,S=v,K=b,T=d,Y=A,k=_,t+=16,n-=16;this.h[0]=E,this.h[1]=x,this.h[2]=M,this.h[3]=m,this.h[4]=B,this.h[5]=S,this.h[6]=K,this.h[7]=T,this.h[8]=Y,this.h[9]=k},yr.prototype.finish=function(r,t){var n,e,o,i,h=new 
Uint16Array(10);if(this.leftover){for(i=this.leftover,this.buffer[i++]=1;16>i;i++)this.buffer[i]=0;this.fin=1,this.blocks(this.buffer,0,16)}for(n=this.h[1]>>>13,this.h[1]&=8191,i=2;10>i;i++)this.h[i]+=n,n=this.h[i]>>>13,this.h[i]&=8191;for(this.h[0]+=5*n,n=this.h[0]>>>13,this.h[0]&=8191,this.h[1]+=n,n=this.h[1]>>>13,this.h[1]&=8191,this.h[2]+=n,h[0]=this.h[0]+5,n=h[0]>>>13,h[0]&=8191,i=1;10>i;i++)h[i]=this.h[i]+n,n=h[i]>>>13,h[i]&=8191;for(h[9]-=8192,e=(h[9]>>>15)-1,i=0;10>i;i++)h[i]&=e;for(e=~e,i=0;10>i;i++)this.h[i]=this.h[i]&e|h[i];for(this.h[0]=65535&(this.h[0]|this.h[1]<<13),this.h[1]=65535&(this.h[1]>>>3|this.h[2]<<10),this.h[2]=65535&(this.h[2]>>>6|this.h[3]<<7),this.h[3]=65535&(this.h[3]>>>9|this.h[4]<<4),this.h[4]=65535&(this.h[4]>>>12|this.h[5]<<1|this.h[6]<<14),this.h[5]=65535&(this.h[6]>>>2|this.h[7]<<11),this.h[6]=65535&(this.h[7]>>>5|this.h[8]<<8),this.h[7]=65535&(this.h[8]>>>8|this.h[9]<<5),o=this.h[0]+this.pad[0],this.h[0]=65535&o,i=1;8>i;i++)o=(this.h[i]+this.pad[i]|0)+(o>>>16)|0,this.h[i]=65535&o;r[t+0]=this.h[0]>>>0&255,r[t+1]=this.h[0]>>>8&255,r[t+2]=this.h[1]>>>0&255,r[t+3]=this.h[1]>>>8&255,r[t+4]=this.h[2]>>>0&255,r[t+5]=this.h[2]>>>8&255,r[t+6]=this.h[3]>>>0&255,r[t+7]=this.h[3]>>>8&255,r[t+8]=this.h[4]>>>0&255,r[t+9]=this.h[4]>>>8&255,r[t+10]=this.h[5]>>>0&255,r[t+11]=this.h[5]>>>8&255,r[t+12]=this.h[6]>>>0&255,r[t+13]=this.h[6]>>>8&255,r[t+14]=this.h[7]>>>0&255,r[t+15]=this.h[7]>>>8&255},yr.prototype.update=function(r,t,n){var e,o;if(this.leftover){for(o=16-this.leftover,o>n&&(o=n),e=0;o>e;e++)this.buffer[this.leftover+e]=r[t+e];if(n-=o,t+=o,this.leftover+=o,this.leftover<16)return;this.blocks(this.buffer,0,16),this.leftover=0}if(n>=16&&(o=n-n%16,this.blocks(r,t,o),t+=o,n-=o),n){for(e=0;n>e;e++)this.buffer[this.leftover+e]=r[t+e];this.leftover+=n}};var lr=p,wr=g,pr=[1116352408,3609767458,1899447441,602891725,3049323471,3964484399,3921009573,2173295548,961987163,4081628472,1508970993,3053834265,2453635748,2937671579,2870763221,3664609560,3624381080,2734883394,310598401,1164996542,607225278,1323610764,1426881987,3590304994,1925078388,4068182383,2162078206,991336113,2614888103,633803317,3248222580,3479774868,3835390401,2666613458,4022224774,944711139,264347078,2341262773,604807628,2007800933,770255983,1495990901,1249150122,1856431235,1555081692,3175218132,1996064986,2198950837,2554220882,3999719339,2821834349,766784016,2952996808,2566594879,3210313671,3203337956,3336571891,1034457026,3584528711,2466948901,113926993,3758326383,338241895,168717936,666307205,1188179964,773529912,1546045734,1294757372,1522805485,1396182291,2643833823,1695183700,2343527390,1986661051,1014477480,2177026350,1206759142,2456956037,344077627,2730485921,1290863460,2820302411,3158454273,3259730800,3505952657,3345764771,106217008,3516065817,3606008344,3600352804,1432725776,4094571909,1467031594,275423344,851169720,430227734,3100823752,506948616,1363258195,659060556,3750685593,883997877,3785050280,958139571,3318307427,1322822218,3812723403,1537002063,2003034995,1747873779,3602036899,1955562222,1575990012,2024104815,1125592928,2227730452,2716904306,2361852424,442776044,2428436474,593698344,2756734187,3733110249,3204031479,2999351573,3329325298,3815920427,3391569614,3928383900,3515267271,566280711,3940187606,3454069534,4118630271,4000239992,116418474,1914138554,174292421,2731055270,289380356,3203993006,460393269,320620315,685471733,587496836,852142971,1086792851,1017036298,365543100,1126000580,2618297676,1288033470,3409855158,1501505948,4234509866,1607167915,987167468,1816402316,1246189591],gr=new 
Float64Array([237,211,245,92,26,99,18,88,214,156,247,162,222,249,222,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16]),vr=32,br=24,dr=32,Ar=16,_r=32,Ur=32,Er=32,xr=32,Mr=32,mr=br,Br=dr,Sr=Ar,Kr=64,Tr=32,Yr=64,kr=32,Lr=64;r.lowlevel={crypto_core_hsalsa20:f,crypto_stream_xor:y,crypto_stream:c,crypto_stream_salsa20_xor:s,crypto_stream_salsa20:u,crypto_onetimeauth:l,crypto_onetimeauth_verify:w,crypto_verify_16:e,crypto_verify_32:o,crypto_secretbox:p,crypto_secretbox_open:g,crypto_scalarmult:T,crypto_scalarmult_base:Y,crypto_box_beforenm:L,crypto_box_afternm:lr,crypto_box:C,crypto_box_open:R,crypto_box_keypair:k,crypto_hash:P,crypto_sign:q,crypto_sign_keypair:G,crypto_sign_open:D,crypto_secretbox_KEYBYTES:vr,crypto_secretbox_NONCEBYTES:br,crypto_secretbox_ZEROBYTES:dr,crypto_secretbox_BOXZEROBYTES:Ar,crypto_scalarmult_BYTES:_r,crypto_scalarmult_SCALARBYTES:Ur,crypto_box_PUBLICKEYBYTES:Er,crypto_box_SECRETKEYBYTES:xr,crypto_box_BEFORENMBYTES:Mr,crypto_box_NONCEBYTES:mr,crypto_box_ZEROBYTES:Br,crypto_box_BOXZEROBYTES:Sr,crypto_sign_BYTES:Kr,crypto_sign_PUBLICKEYBYTES:Tr,crypto_sign_SECRETKEYBYTES:Yr,crypto_sign_SEEDBYTES:kr,crypto_hash_BYTES:Lr},r.util={},r.util.decodeUTF8=function(r){var t,n=unescape(encodeURIComponent(r)),e=new Uint8Array(n.length);for(t=0;tt;t++)n.push(String.fromCharCode(r[t]));return btoa(n.join(""))},r.util.decodeBase64=function(r){if("undefined"==typeof atob)return new Uint8Array(Array.prototype.slice.call(new Buffer(r,"base64"),0));var t,n=atob(r),e=new Uint8Array(n.length);for(t=0;te)return null;for(var o=new Uint8Array(e),i=0;ie;e++)o[e]=t[e];for(e=0;e=0},r.sign.keyPair=function(){var r=new Uint8Array(Tr),t=new Uint8Array(Yr);return G(r,t),{publicKey:r,secretKey:t}},r.sign.keyPair.fromSecretKey=function(r){if(Q(r),r.length!==Yr)throw new Error("bad secret key size");for(var t=new Uint8Array(Tr),n=0;ne;e++)n[e]=r[e];return G(t,n,!0),{publicKey:t,secretKey:n}},r.sign.publicKeyLength=Tr,r.sign.secretKeyLength=Yr,r.sign.seedLength=kr,r.sign.signatureLength=Kr,r.hash=function(r){Q(r);var t=new Uint8Array(Lr);return P(t,r,r.length),t},r.hash.hashLength=Lr,r.verify=function(r,t){return Q(r,t),0===r.length||0===t.length?!1:r.length!==t.length?!1:0===n(r,0,t,0,r.length)?!0:!1},r.setPRNG=function(r){rr=r},function(){var t;"undefined"!=typeof window?(window.crypto&&window.crypto.getRandomValues?t=window.crypto:window.msCrypto&&window.msCrypto.getRandomValues&&(t=window.msCrypto),t&&r.setPRNG(function(r,n){var e,o=new Uint8Array(n);for(t.getRandomValues(o),e=0;n>e;e++)r[e]=o[e];W(o)})):"undefined"!=typeof require&&(t=require("crypto"),t&&r.setPRNG(function(r,n){var e,o=t.randomBytes(n);for(e=0;n>e;e++)r[e]=o[e];W(o)}))}()}("undefined"!=typeof module&&module.exports?module.exports:window.nacl=window.nacl||{}); \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tweetnacl/nacl.js b/node_modules/fsevents/node_modules/tweetnacl/nacl.js new file mode 100644 index 0000000..b8edbbe --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/nacl.js @@ -0,0 +1,1205 @@ +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var u64 = function(h, l) { this.hi = h|0 >>> 0; this.lo = l|0 >>> 0; }; +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. 
+var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function L32(x, c) { return (x << c) | (x >>> (32 - c)); } + +function ld32(x, i) { + var u = x[i+3] & 0xff; + u = (u<<8)|(x[i+2] & 0xff); + u = (u<<8)|(x[i+1] & 0xff); + return (u<<8)|(x[i+0] & 0xff); +} + +function dl64(x, i) { + var h = (x[i] << 24) | (x[i+1] << 16) | (x[i+2] << 8) | x[i+3]; + var l = (x[i+4] << 24) | (x[i+5] << 16) | (x[i+6] << 8) | x[i+7]; + return new u64(h, l); +} + +function st32(x, j, u) { + var i; + for (i = 0; i < 4; i++) { x[j+i] = u & 255; u >>>= 8; } +} + +function ts64(x, i, u) { + x[i] = (u.hi >> 24) & 0xff; + x[i+1] = (u.hi >> 16) & 0xff; + x[i+2] = (u.hi >> 8) & 0xff; + x[i+3] = u.hi & 0xff; + x[i+4] = (u.lo >> 24) & 0xff; + x[i+5] = (u.lo >> 16) & 0xff; + x[i+6] = (u.lo >> 8) & 0xff; + x[i+7] = u.lo & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core(out,inp,k,c,h) { + var w = new Uint32Array(16), x = new Uint32Array(16), + y = new Uint32Array(16), t = new Uint32Array(4); + var i, j, m; + + for (i = 0; i < 4; i++) { + x[5*i] = ld32(c, 4*i); + x[1+i] = ld32(k, 4*i); + x[6+i] = ld32(inp, 4*i); + x[11+i] = ld32(k, 16+4*i); + } + + for (i = 0; i < 16; i++) y[i] = x[i]; + + for (i = 0; i < 20; i++) { + for (j = 0; j < 4; j++) { + for (m = 0; m < 4; m++) t[m] = x[(5*j+4*m)%16]; + t[1] ^= L32((t[0]+t[3])|0, 7); + t[2] ^= L32((t[1]+t[0])|0, 9); + t[3] ^= L32((t[2]+t[1])|0,13); + t[0] ^= L32((t[3]+t[2])|0,18); + for (m = 0; m < 4; m++) w[4*j+(j+m)%4] = t[m]; + } + for (m = 0; m < 16; m++) x[m] = w[m]; + } + + if (h) { + for (i = 0; i < 16; i++) x[i] = (x[i] + y[i]) | 0; + for (i = 0; i < 4; i++) { + x[5*i] = (x[5*i] - ld32(c, 4*i)) | 0; + x[6+i] = (x[6+i] - ld32(inp, 4*i)) | 0; + } + for (i = 0; i < 4; i++) { + st32(out,4*i,x[5*i]); + st32(out,16+4*i,x[6+i]); + } + } else { + for (i = 0; i < 16; i++) st32(out, 4 * i, (x[i] + y[i]) | 0); + } +} + +function crypto_core_salsa20(out,inp,k,c) { + core(out,inp,k,c,false); + return 0; +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core(out,inp,k,c,true); + return 0; +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + if (!b) return 0; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + 
crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = (m?m[mpos+i]:0) ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + if (m) mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = (m?m[mpos+i]:0) ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,d,n,k) { + return crypto_stream_salsa20_xor(c,cpos,null,0,d,n,k); +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + return crypto_stream_salsa20(c,cpos,d,n.subarray(16),s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,n.subarray(16),s); +} + +function add1305(h, c) { + var j, u = 0; + for (j = 0; j < 17; j++) { + u = (u + ((h[j] + c[j]) | 0)) | 0; + h[j] = u & 255; + u >>>= 8; + } +} + +var minusp = new Uint32Array([ + 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252 +]); + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s, i, j, u; + var x = new Uint32Array(17), r = new Uint32Array(17), + h = new Uint32Array(17), c = new Uint32Array(17), + g = new Uint32Array(17); + for (j = 0; j < 17; j++) r[j]=h[j]=0; + for (j = 0; j < 16; j++) r[j]=k[j]; + r[3]&=15; + r[4]&=252; + r[7]&=15; + r[8]&=252; + r[11]&=15; + r[12]&=252; + r[15]&=15; + + while (n > 0) { + for (j = 0; j < 17; j++) c[j] = 0; + for (j = 0; (j < 16) && (j < n); ++j) c[j] = m[mpos+j]; + c[j] = 1; + mpos += j; n -= j; + add1305(h,c); + for (i = 0; i < 17; i++) { + x[i] = 0; + for (j = 0; j < 17; j++) x[i] = (x[i] + (h[j] * ((j <= i) ? r[i - j] : ((320 * r[i + 17 - j])|0))) | 0) | 0; + } + for (i = 0; i < 17; i++) h[i] = x[i]; + u = 0; + for (j = 0; j < 16; j++) { + u = (u + h[j]) | 0; + h[j] = u & 255; + u >>>= 8; + } + u = (u + h[16]) | 0; h[16] = u & 3; + u = (5 * (u >>> 2)) | 0; + for (j = 0; j < 16; j++) { + u = (u + h[j]) | 0; + h[j] = u & 255; + u >>>= 8; + } + u = (u + h[16]) | 0; h[16] = u; + } + + for (j = 0; j < 17; j++) g[j] = h[j]; + add1305(h,minusp); + s = (-(h[16] >>> 7) | 0); + for (j = 0; j < 17; j++) h[j] ^= s & (g[j] ^ h[j]); + + for (j = 0; j < 16; j++) c[j] = k[j + 16]; + c[16] = 0; + add1305(h,c); + for (j = 0; j < 16; j++) out[outpos+j] = h[j]; + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var c; + var i; + for (i = 0; i < 16; i++) { + o[i] += 65536; + c = Math.floor(o[i] / 65536); + o[(i+1)*(i<15?1:0)] += c - 1 + 37 * (c-1) * (i===15?1:0); + o[i] -= (c * 65536); + } +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, 
n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + var i; + for (i = 0; i < 16; i++) o[i] = (a[i] + b[i])|0; +} + +function Z(o, a, b) { + var i; + for (i = 0; i < 16; i++) o[i] = (a[i] - b[i])|0; +} + +function M(o, a, b) { + var i, j, t = new Float64Array(31); + for (i = 0; i < 31; i++) t[i] = 0; + for (i = 0; i < 16; i++) { + for (j = 0; j < 16; j++) { + t[i+j] += a[i] * b[j]; + } + } + for (i = 0; i < 15; i++) { + t[i] += 38 * t[i+16]; + } + for (i = 0; i < 16; i++) o[i] = t[i]; + car25519(o); + car25519(o); +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +function add64() { + var a = 0, b = 0, c = 
0, d = 0, m16 = 65535, l, h, i; + for (i = 0; i < arguments.length; i++) { + l = arguments[i].lo; + h = arguments[i].hi; + a += (l & m16); b += (l >>> 16); + c += (h & m16); d += (h >>> 16); + } + + b += (a >>> 16); + c += (b >>> 16); + d += (c >>> 16); + + return new u64((c & m16) | (d << 16), (a & m16) | (b << 16)); +} + +function shr64(x, c) { + return new u64((x.hi >>> c), (x.lo >>> c) | (x.hi << (32 - c))); +} + +function xor64() { + var l = 0, h = 0, i; + for (i = 0; i < arguments.length; i++) { + l ^= arguments[i].lo; + h ^= arguments[i].hi; + } + return new u64(h, l); +} + +function R(x, c) { + var h, l, c1 = 32 - c; + if (c < 32) { + h = (x.hi >>> c) | (x.lo << c1); + l = (x.lo >>> c) | (x.hi << c1); + } else if (c < 64) { + h = (x.lo >>> c) | (x.hi << c1); + l = (x.hi >>> c) | (x.lo << c1); + } + return new u64(h, l); +} + +function Ch(x, y, z) { + var h = (x.hi & y.hi) ^ (~x.hi & z.hi), + l = (x.lo & y.lo) ^ (~x.lo & z.lo); + return new u64(h, l); +} + +function Maj(x, y, z) { + var h = (x.hi & y.hi) ^ (x.hi & z.hi) ^ (y.hi & z.hi), + l = (x.lo & y.lo) ^ (x.lo & z.lo) ^ (y.lo & z.lo); + return new u64(h, l); +} + +function Sigma0(x) { return xor64(R(x,28), R(x,34), R(x,39)); } +function Sigma1(x) { return xor64(R(x,14), R(x,18), R(x,41)); } +function sigma0(x) { return xor64(R(x, 1), R(x, 8), shr64(x,7)); } +function sigma1(x) { return xor64(R(x,19), R(x,61), shr64(x,6)); } + +var K = [ + new u64(0x428a2f98, 0xd728ae22), new u64(0x71374491, 0x23ef65cd), + new u64(0xb5c0fbcf, 0xec4d3b2f), new u64(0xe9b5dba5, 0x8189dbbc), + new u64(0x3956c25b, 0xf348b538), new u64(0x59f111f1, 0xb605d019), + new u64(0x923f82a4, 0xaf194f9b), new u64(0xab1c5ed5, 0xda6d8118), + new u64(0xd807aa98, 0xa3030242), new u64(0x12835b01, 0x45706fbe), + new u64(0x243185be, 0x4ee4b28c), new u64(0x550c7dc3, 0xd5ffb4e2), + new u64(0x72be5d74, 0xf27b896f), new u64(0x80deb1fe, 0x3b1696b1), + new u64(0x9bdc06a7, 0x25c71235), new u64(0xc19bf174, 0xcf692694), + new u64(0xe49b69c1, 0x9ef14ad2), new u64(0xefbe4786, 0x384f25e3), + new u64(0x0fc19dc6, 0x8b8cd5b5), new u64(0x240ca1cc, 0x77ac9c65), + new u64(0x2de92c6f, 0x592b0275), new u64(0x4a7484aa, 0x6ea6e483), + new u64(0x5cb0a9dc, 0xbd41fbd4), new u64(0x76f988da, 0x831153b5), + new u64(0x983e5152, 0xee66dfab), new u64(0xa831c66d, 0x2db43210), + new u64(0xb00327c8, 0x98fb213f), new u64(0xbf597fc7, 0xbeef0ee4), + new u64(0xc6e00bf3, 0x3da88fc2), new u64(0xd5a79147, 0x930aa725), + new u64(0x06ca6351, 0xe003826f), new u64(0x14292967, 0x0a0e6e70), + new u64(0x27b70a85, 0x46d22ffc), new u64(0x2e1b2138, 0x5c26c926), + new u64(0x4d2c6dfc, 0x5ac42aed), new u64(0x53380d13, 0x9d95b3df), + new u64(0x650a7354, 0x8baf63de), new u64(0x766a0abb, 0x3c77b2a8), + new u64(0x81c2c92e, 0x47edaee6), new u64(0x92722c85, 0x1482353b), + new u64(0xa2bfe8a1, 0x4cf10364), new u64(0xa81a664b, 0xbc423001), + new u64(0xc24b8b70, 0xd0f89791), new u64(0xc76c51a3, 0x0654be30), + new u64(0xd192e819, 0xd6ef5218), new u64(0xd6990624, 0x5565a910), + new u64(0xf40e3585, 0x5771202a), new u64(0x106aa070, 0x32bbd1b8), + new u64(0x19a4c116, 0xb8d2d0c8), new u64(0x1e376c08, 0x5141ab53), + new u64(0x2748774c, 0xdf8eeb99), new u64(0x34b0bcb5, 0xe19b48a8), + new u64(0x391c0cb3, 0xc5c95a63), new u64(0x4ed8aa4a, 0xe3418acb), + new u64(0x5b9cca4f, 0x7763e373), new u64(0x682e6ff3, 0xd6b2b8a3), + new u64(0x748f82ee, 0x5defb2fc), new u64(0x78a5636f, 0x43172f60), + new u64(0x84c87814, 0xa1f0ab72), new u64(0x8cc70208, 0x1a6439ec), + new u64(0x90befffa, 0x23631e28), new u64(0xa4506ceb, 0xde82bde9), + new u64(0xbef9a3f7, 
0xb2c67915), new u64(0xc67178f2, 0xe372532b), + new u64(0xca273ece, 0xea26619c), new u64(0xd186b8c7, 0x21c0c207), + new u64(0xeada7dd6, 0xcde0eb1e), new u64(0xf57d4f7f, 0xee6ed178), + new u64(0x06f067aa, 0x72176fba), new u64(0x0a637dc5, 0xa2c898a6), + new u64(0x113f9804, 0xbef90dae), new u64(0x1b710b35, 0x131c471b), + new u64(0x28db77f5, 0x23047d84), new u64(0x32caab7b, 0x40c72493), + new u64(0x3c9ebe0a, 0x15c9bebc), new u64(0x431d67c4, 0x9c100d4c), + new u64(0x4cc5d4be, 0xcb3e42b6), new u64(0x597f299c, 0xfc657e2a), + new u64(0x5fcb6fab, 0x3ad6faec), new u64(0x6c44198c, 0x4a475817) +]; + +function crypto_hashblocks(x, m, n) { + var z = [], b = [], a = [], w = [], t, i, j; + + for (i = 0; i < 8; i++) z[i] = a[i] = dl64(x, 8*i); + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) w[i] = dl64(m, 8*i+pos); + for (i = 0; i < 80; i++) { + for (j = 0; j < 8; j++) b[j] = a[j]; + t = add64(a[7], Sigma1(a[4]), Ch(a[4], a[5], a[6]), K[i], w[i%16]); + b[7] = add64(t, Sigma0(a[0]), Maj(a[0], a[1], a[2])); + b[3] = add64(b[3], t); + for (j = 0; j < 8; j++) a[(j+1)%8] = b[j]; + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + w[j] = add64(w[j], w[(j+9)%16], sigma0(w[(j+1)%16]), sigma1(w[(j+14)%16])); + } + } + } + + for (i = 0; i < 8; i++) { + a[i] = add64(a[i], z[i]); + z[i] = a[i]; + } + + pos += 128; + n -= 128; + } + + for (i = 0; i < 8; i++) ts64(x, 8*i, z[i]); + return n; +} + +var iv = new Uint8Array([ + 0x6a,0x09,0xe6,0x67,0xf3,0xbc,0xc9,0x08, + 0xbb,0x67,0xae,0x85,0x84,0xca,0xa7,0x3b, + 0x3c,0x6e,0xf3,0x72,0xfe,0x94,0xf8,0x2b, + 0xa5,0x4f,0xf5,0x3a,0x5f,0x1d,0x36,0xf1, + 0x51,0x0e,0x52,0x7f,0xad,0xe6,0x82,0xd1, + 0x9b,0x05,0x68,0x8c,0x2b,0x3e,0x6c,0x1f, + 0x1f,0x83,0xd9,0xab,0xfb,0x41,0xbd,0x6b, + 0x5b,0xe0,0xcd,0x19,0x13,0x7e,0x21,0x79 +]); + +function crypto_hash(out, m, n) { + var h = new Uint8Array(64), x = new Uint8Array(256); + var i, b = n; + + for (i = 0; i < 64; i++) h[i] = iv[i]; + + crypto_hashblocks(h, m, n); + n %= 128; + + for (i = 0; i < 256; i++) x[i] = 0; + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, new u64((b / 0x20000000) | 0, b << 3)); + crypto_hashblocks(h, x, n); + + for (i = 0; i < 64; i++) out[i] = h[i]; + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if 
(!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = (x[j] + 128) >> 8; + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i, mlen; + var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + mlen = -1; + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + mlen = n; + return mlen; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + 
crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + var t, i; + for (i = 0; i < arguments.length; i++) { + if ((t = Object.prototype.toString.call(arguments[i])) !== '[object Uint8Array]') + throw new TypeError('unexpected type ' + t + ', use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +nacl.util = {}; + +nacl.util.decodeUTF8 = function(s) { + var i, d = unescape(encodeURIComponent(s)), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; +}; + +nacl.util.encodeUTF8 = function(arr) { + var i, s = []; + for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i])); + return decodeURIComponent(escape(s.join(''))); +}; + +nacl.util.encodeBase64 = function(arr) { + if (typeof btoa === 'undefined') { + return (new Buffer(arr)).toString('base64'); + } else { + var i, s = [], len = arr.length; + for (i = 0; i < len; i++) 
s.push(String.fromCharCode(arr[i])); + return btoa(s.join('')); + } +}; + +nacl.util.decodeBase64 = function(s) { + if (typeof atob === 'undefined') { + return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0)); + } else { + var i, d = atob(s), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + } +}; + +nacl.randomBytes = function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return false; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return false; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; 
+nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + if (arguments.length !== 2) + throw new Error('nacl.sign.open accepts 2 arguments; did you mean to use nacl.sign.detached.verify?'); + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; +nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? 
true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. + // If not, methods calling randombytes will throw. + var crypto; + if (typeof window !== 'undefined') { + // Browser. + if (window.crypto && window.crypto.getRandomValues) { + crypto = window.crypto; // Standard + } else if (window.msCrypto && window.msCrypto.getRandomValues) { + crypto = window.msCrypto; // Internet Explorer 11+ + } + if (crypto) { + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + crypto.getRandomValues(v); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } else if (typeof require !== 'undefined') { + // Node.js. + crypto = require('crypto'); + if (crypto) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})(typeof module !== 'undefined' && module.exports ? module.exports : (window.nacl = window.nacl || {})); diff --git a/node_modules/fsevents/node_modules/tweetnacl/nacl.min.js b/node_modules/fsevents/node_modules/tweetnacl/nacl.min.js new file mode 100644 index 0000000..95d8695 --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/nacl.min.js @@ -0,0 +1 @@ +!function(r){"use strict";function n(r,n){return r<>>32-n}function e(r,n){var e=255&r[n+3];return e=e<<8|255&r[n+2],e=e<<8|255&r[n+1],e<<8|255&r[n+0]}function t(r,n){var e=r[n]<<24|r[n+1]<<16|r[n+2]<<8|r[n+3],t=r[n+4]<<24|r[n+5]<<16|r[n+6]<<8|r[n+7];return new lr(e,t)}function o(r,n,e){var t;for(t=0;4>t;t++)r[n+t]=255&e,e>>>=8}function i(r,n,e){r[n]=e.hi>>24&255,r[n+1]=e.hi>>16&255,r[n+2]=e.hi>>8&255,r[n+3]=255&e.hi,r[n+4]=e.lo>>24&255,r[n+5]=e.lo>>16&255,r[n+6]=e.lo>>8&255,r[n+7]=255&e.lo}function a(r,n,e,t,o){var i,a=0;for(i=0;o>i;i++)a|=r[n+i]^e[t+i];return(1&a-1>>>8)-1}function f(r,n,e,t){return a(r,n,e,t,16)}function u(r,n,e,t){return a(r,n,e,t,32)}function c(r,t,i,a,f){var u,c,w,y=new Uint32Array(16),s=new Uint32Array(16),l=new Uint32Array(16),h=new Uint32Array(4);for(u=0;4>u;u++)s[5*u]=e(a,4*u),s[1+u]=e(i,4*u),s[6+u]=e(t,4*u),s[11+u]=e(i,16+4*u);for(u=0;16>u;u++)l[u]=s[u];for(u=0;20>u;u++){for(c=0;4>c;c++){for(w=0;4>w;w++)h[w]=s[(5*c+4*w)%16];for(h[1]^=n(h[0]+h[3]|0,7),h[2]^=n(h[1]+h[0]|0,9),h[3]^=n(h[2]+h[1]|0,13),h[0]^=n(h[3]+h[2]|0,18),w=0;4>w;w++)y[4*c+(c+w)%4]=h[w]}for(w=0;16>w;w++)s[w]=y[w]}if(f){for(u=0;16>u;u++)s[u]=s[u]+l[u]|0;for(u=0;4>u;u++)s[5*u]=s[5*u]-e(a,4*u)|0,s[6+u]=s[6+u]-e(t,4*u)|0;for(u=0;4>u;u++)o(r,4*u,s[5*u]),o(r,16+4*u,s[6+u])}else for(u=0;16>u;u++)o(r,4*u,s[u]+l[u]|0)}function w(r,n,e,t){return c(r,n,e,t,!1),0}function y(r,n,e,t){return c(r,n,e,t,!0),0}function s(r,n,e,t,o,i,a){var f,u,c=new Uint8Array(16),y=new Uint8Array(64);if(!o)return 0;for(u=0;16>u;u++)c[u]=0;for(u=0;8>u;u++)c[u]=i[u];for(;o>=64;){for(w(y,c,a,Br),u=0;64>u;u++)r[n+u]=(e?e[t+u]:0)^y[u];for(f=1,u=8;16>u;u++)f=f+(255&c[u])|0,c[u]=255&f,f>>>=8;o-=64,n+=64,e&&(t+=64)}if(o>0)for(w(y,c,a,Br),u=0;o>u;u++)r[n+u]=(e?e[t+u]:0)^y[u];return 0}function l(r,n,e,t,o){return s(r,n,null,0,e,t,o)}function h(r,n,e,t,o){var i=new Uint8Array(32);return y(i,t,o,Br),l(r,n,e,t.subarray(16),i)}function g(r,n,e,t,o,i,a){var f=new Uint8Array(32);return y(f,i,a,Br),s(r,n,e,t,o,i.subarray(16),f)}function p(r,n){var e,t=0;for(e=0;17>e;e++)t=t+(r[e]+n[e]|0)|0,r[e]=255&t,t>>>=8}function v(r,n,e,t,o,i){var a,f,u,c,w=new Uint32Array(17),y=new Uint32Array(17),s=new Uint32Array(17),l=new Uint32Array(17),h=new 
Uint32Array(17);for(u=0;17>u;u++)y[u]=s[u]=0;for(u=0;16>u;u++)y[u]=i[u];for(y[3]&=15,y[4]&=252,y[7]&=15,y[8]&=252,y[11]&=15,y[12]&=252,y[15]&=15;o>0;){for(u=0;17>u;u++)l[u]=0;for(u=0;16>u&&o>u;++u)l[u]=e[t+u];for(l[u]=1,t+=u,o-=u,p(s,l),f=0;17>f;f++)for(w[f]=0,u=0;17>u;u++)w[f]=w[f]+s[u]*(f>=u?y[f-u]:320*y[f+17-u]|0)|0|0;for(f=0;17>f;f++)s[f]=w[f];for(c=0,u=0;16>u;u++)c=c+s[u]|0,s[u]=255&c,c>>>=8;for(c=c+s[16]|0,s[16]=3&c,c=5*(c>>>2)|0,u=0;16>u;u++)c=c+s[u]|0,s[u]=255&c,c>>>=8;c=c+s[16]|0,s[16]=c}for(u=0;17>u;u++)h[u]=s[u];for(p(s,Sr),a=0|-(s[16]>>>7),u=0;17>u;u++)s[u]^=a&(h[u]^s[u]);for(u=0;16>u;u++)l[u]=i[u+16];for(l[16]=0,p(s,l),u=0;16>u;u++)r[n+u]=s[u];return 0}function b(r,n,e,t,o,i){var a=new Uint8Array(16);return v(a,0,e,t,o,i),f(r,n,a,0)}function A(r,n,e,t,o){var i;if(32>e)return-1;for(g(r,0,n,0,e,t,o),v(r,16,r,32,e-32,r),i=0;16>i;i++)r[i]=0;return 0}function U(r,n,e,t,o){var i,a=new Uint8Array(32);if(32>e)return-1;if(h(a,0,32,t,o),0!==b(n,16,n,32,e-32,a))return-1;for(g(r,0,n,0,e,t,o),i=0;32>i;i++)r[i]=0;return 0}function _(r,n){var e;for(e=0;16>e;e++)r[e]=0|n[e]}function d(r){var n,e;for(e=0;16>e;e++)r[e]+=65536,n=Math.floor(r[e]/65536),r[(e+1)*(15>e?1:0)]+=n-1+37*(n-1)*(15===e?1:0),r[e]-=65536*n}function E(r,n,e){for(var t,o=~(e-1),i=0;16>i;i++)t=o&(r[i]^n[i]),r[i]^=t,n[i]^=t}function x(r,n){var e,t,o,i=hr(),a=hr();for(e=0;16>e;e++)a[e]=n[e];for(d(a),d(a),d(a),t=0;2>t;t++){for(i[0]=a[0]-65517,e=1;15>e;e++)i[e]=a[e]-65535-(i[e-1]>>16&1),i[e-1]&=65535;i[15]=a[15]-32767-(i[14]>>16&1),o=i[15]>>16&1,i[14]&=65535,E(a,i,1-o)}for(e=0;16>e;e++)r[2*e]=255&a[e],r[2*e+1]=a[e]>>8}function m(r,n){var e=new Uint8Array(32),t=new Uint8Array(32);return x(e,r),x(t,n),u(e,0,t,0)}function B(r){var n=new Uint8Array(32);return x(n,r),1&n[0]}function S(r,n){var e;for(e=0;16>e;e++)r[e]=n[2*e]+(n[2*e+1]<<8);r[15]&=32767}function K(r,n,e){var t;for(t=0;16>t;t++)r[t]=n[t]+e[t]|0}function T(r,n,e){var t;for(t=0;16>t;t++)r[t]=n[t]-e[t]|0}function Y(r,n,e){var t,o,i=new Float64Array(31);for(t=0;31>t;t++)i[t]=0;for(t=0;16>t;t++)for(o=0;16>o;o++)i[t+o]+=n[t]*e[o];for(t=0;15>t;t++)i[t]+=38*i[t+16];for(t=0;16>t;t++)r[t]=i[t];d(r),d(r)}function L(r,n){Y(r,n,n)}function C(r,n){var e,t=hr();for(e=0;16>e;e++)t[e]=n[e];for(e=253;e>=0;e--)L(t,t),2!==e&&4!==e&&Y(t,t,n);for(e=0;16>e;e++)r[e]=t[e]}function R(r,n){var e,t=hr();for(e=0;16>e;e++)t[e]=n[e];for(e=250;e>=0;e--)L(t,t),1!==e&&Y(t,t,n);for(e=0;16>e;e++)r[e]=t[e]}function k(r,n,e){var t,o,i=new Uint8Array(32),a=new Float64Array(80),f=hr(),u=hr(),c=hr(),w=hr(),y=hr(),s=hr();for(o=0;31>o;o++)i[o]=n[o];for(i[31]=127&n[31]|64,i[0]&=248,S(a,e),o=0;16>o;o++)u[o]=a[o],w[o]=f[o]=c[o]=0;for(f[0]=w[0]=1,o=254;o>=0;--o)t=i[o>>>3]>>>(7&o)&1,E(f,u,t),E(c,w,t),K(y,f,c),T(f,f,c),K(c,u,w),T(u,u,w),L(w,y),L(s,f),Y(f,c,f),Y(c,u,y),K(y,f,c),T(f,f,c),L(u,f),T(c,w,s),Y(f,c,Ur),K(f,f,w),Y(c,c,f),Y(f,w,s),Y(w,u,a),L(u,y),E(f,u,t),E(c,w,t);for(o=0;16>o;o++)a[o+16]=f[o],a[o+32]=c[o],a[o+48]=u[o],a[o+64]=w[o];var l=a.subarray(32),h=a.subarray(16);return C(l,l),Y(h,h,l),x(r,h),0}function z(r,n){return k(r,n,vr)}function P(r,n){return gr(n,32),z(r,n)}function O(r,n,e){var t=new Uint8Array(32);return k(t,e,n),y(r,pr,t,Br)}function F(r,n,e,t,o,i){var a=new Uint8Array(32);return O(a,o,i),Kr(r,n,e,t,a)}function N(r,n,e,t,o,i){var a=new Uint8Array(32);return O(a,o,i),Tr(r,n,e,t,a)}function M(){var r,n,e,t=0,o=0,i=0,a=0,f=65535;for(e=0;e>>16,i+=n&f,a+=n>>>16;return o+=t>>>16,i+=o>>>16,a+=i>>>16,new lr(i&f|a<<16,t&f|o<<16)}function j(r,n){return new lr(r.hi>>>n,r.lo>>>n|r.hi<<32-n)}function G(){var 
r,n=0,e=0;for(r=0;rn?(e=r.hi>>>n|r.lo<>>n|r.hi<n&&(e=r.lo>>>n|r.hi<>>n|r.lo<a;a++)u[a]=w[a]=t(r,8*a);for(var s=0;e>=128;){for(a=0;16>a;a++)y[a]=t(n,8*a+s);for(a=0;80>a;a++){for(f=0;8>f;f++)c[f]=w[f];for(o=M(w[7],X(w[4]),Z(w[4],w[5],w[6]),Yr[a],y[a%16]),c[7]=M(o,q(w[0]),V(w[0],w[1],w[2])),c[3]=M(c[3],o),f=0;8>f;f++)w[(f+1)%8]=c[f];if(a%16===15)for(f=0;16>f;f++)y[f]=M(y[f],y[(f+9)%16],D(y[(f+1)%16]),H(y[(f+14)%16]))}for(a=0;8>a;a++)w[a]=M(w[a],u[a]),u[a]=w[a];s+=128,e-=128}for(a=0;8>a;a++)i(r,8*a,u[a]);return e}function Q(r,n,e){var t,o=new Uint8Array(64),a=new Uint8Array(256),f=e;for(t=0;64>t;t++)o[t]=Lr[t];for(J(o,n,e),e%=128,t=0;256>t;t++)a[t]=0;for(t=0;e>t;t++)a[t]=n[f-e+t];for(a[e]=128,e=256-128*(112>e?1:0),a[e-9]=0,i(a,e-8,new lr(f/536870912|0,f<<3)),J(o,a,e),t=0;64>t;t++)r[t]=o[t];return 0}function W(r,n){var e=hr(),t=hr(),o=hr(),i=hr(),a=hr(),f=hr(),u=hr(),c=hr(),w=hr();T(e,r[1],r[0]),T(w,n[1],n[0]),Y(e,e,w),K(t,r[0],r[1]),K(w,n[0],n[1]),Y(t,t,w),Y(o,r[3],n[3]),Y(o,o,dr),Y(i,r[2],n[2]),K(i,i,i),T(a,t,e),T(f,i,o),K(u,i,o),K(c,t,e),Y(r[0],a,f),Y(r[1],c,u),Y(r[2],u,f),Y(r[3],a,c)}function $(r,n,e){var t;for(t=0;4>t;t++)E(r[t],n[t],e)}function rr(r,n){var e=hr(),t=hr(),o=hr();C(o,n[2]),Y(e,n[0],o),Y(t,n[1],o),x(r,t),r[31]^=B(e)<<7}function nr(r,n,e){var t,o;for(_(r[0],br),_(r[1],Ar),_(r[2],Ar),_(r[3],br),o=255;o>=0;--o)t=e[o/8|0]>>(7&o)&1,$(r,n,t),W(n,r),W(r,r),$(r,n,t)}function er(r,n){var e=[hr(),hr(),hr(),hr()];_(e[0],Er),_(e[1],xr),_(e[2],Ar),Y(e[3],Er,xr),nr(r,e,n)}function tr(r,n,e){var t,o=new Uint8Array(64),i=[hr(),hr(),hr(),hr()];for(e||gr(n,32),Q(o,n,32),o[0]&=248,o[31]&=127,o[31]|=64,er(i,o),rr(r,i),t=0;32>t;t++)n[t+32]=r[t];return 0}function or(r,n){var e,t,o,i;for(t=63;t>=32;--t){for(e=0,o=t-32,i=t-12;i>o;++o)n[o]+=e-16*n[t]*Cr[o-(t-32)],e=n[o]+128>>8,n[o]-=256*e;n[o]+=e,n[t]=0}for(e=0,o=0;32>o;o++)n[o]+=e-(n[31]>>4)*Cr[o],e=n[o]>>8,n[o]&=255;for(o=0;32>o;o++)n[o]-=e*Cr[o];for(t=0;32>t;t++)n[t+1]+=n[t]>>8,r[t]=255&n[t]}function ir(r){var n,e=new Float64Array(64);for(n=0;64>n;n++)e[n]=r[n];for(n=0;64>n;n++)r[n]=0;or(r,e)}function ar(r,n,e,t){var o,i,a=new Uint8Array(64),f=new Uint8Array(64),u=new Uint8Array(64),c=new Float64Array(64),w=[hr(),hr(),hr(),hr()];Q(a,t,32),a[0]&=248,a[31]&=127,a[31]|=64;var y=e+64;for(o=0;e>o;o++)r[64+o]=n[o];for(o=0;32>o;o++)r[32+o]=a[32+o];for(Q(u,r.subarray(32),e+32),ir(u),er(w,u),rr(r,w),o=32;64>o;o++)r[o]=t[o];for(Q(f,r,e+64),ir(f),o=0;64>o;o++)c[o]=0;for(o=0;32>o;o++)c[o]=u[o];for(o=0;32>o;o++)for(i=0;32>i;i++)c[o+i]+=f[o]*a[i];return or(r.subarray(32),c),y}function fr(r,n){var e=hr(),t=hr(),o=hr(),i=hr(),a=hr(),f=hr(),u=hr();return _(r[2],Ar),S(r[1],n),L(o,r[1]),Y(i,o,_r),T(o,o,r[2]),K(i,r[2],i),L(a,i),L(f,a),Y(u,f,a),Y(e,u,o),Y(e,e,i),R(e,e),Y(e,e,o),Y(e,e,i),Y(e,e,i),Y(r[0],e,i),L(t,r[0]),Y(t,t,i),m(t,o)&&Y(r[0],r[0],mr),L(t,r[0]),Y(t,t,i),m(t,o)?-1:(B(r[0])===n[31]>>7&&T(r[0],br,r[0]),Y(r[3],r[0],r[1]),0)}function ur(r,n,e,t){var o,i,a=new Uint8Array(32),f=new Uint8Array(64),c=[hr(),hr(),hr(),hr()],w=[hr(),hr(),hr(),hr()];if(i=-1,64>e)return-1;if(fr(w,t))return-1;for(o=0;e>o;o++)r[o]=n[o];for(o=0;32>o;o++)r[o+32]=t[o];if(Q(f,r,e),ir(f),nr(c,w,f),er(w,n.subarray(32)),W(c,w),rr(a,c),e-=64,u(n,0,a,0)){for(o=0;e>o;o++)r[o]=0;return-1}for(o=0;e>o;o++)r[o]=n[o+64];return i=e}function cr(r,n){if(r.length!==Rr)throw new Error("bad key size");if(n.length!==kr)throw new Error("bad nonce size")}function wr(r,n){if(r.length!==Nr)throw new Error("bad public key size");if(n.length!==Mr)throw new Error("bad secret key size")}function yr(){var 
r,n;for(n=0;nn;n++)e.push(String.fromCharCode(r[n]));return btoa(e.join(""))},r.util.decodeBase64=function(r){if("undefined"==typeof atob)return new Uint8Array(Array.prototype.slice.call(new Buffer(r,"base64"),0));var n,e=atob(r),t=new Uint8Array(e.length);for(n=0;nt)return null;for(var o=new Uint8Array(t),i=0;it;t++)o[t]=n[t];for(t=0;t=0},r.sign.keyPair=function(){var r=new Uint8Array(qr),n=new Uint8Array(Xr);return tr(r,n),{publicKey:r,secretKey:n}},r.sign.keyPair.fromSecretKey=function(r){if(yr(r),r.length!==Xr)throw new Error("bad secret key size");for(var n=new Uint8Array(qr),e=0;et;t++)e[t]=r[t];return tr(n,e,!0),{publicKey:n,secretKey:e}},r.sign.publicKeyLength=qr,r.sign.secretKeyLength=Xr,r.sign.seedLength=Dr,r.sign.signatureLength=Vr,r.hash=function(r){yr(r);var n=new Uint8Array(Hr);return Q(n,r,r.length),n},r.hash.hashLength=Hr,r.verify=function(r,n){return yr(r,n),0===r.length||0===n.length?!1:r.length!==n.length?!1:0===a(r,0,n,0,r.length)?!0:!1},r.setPRNG=function(r){gr=r},function(){var n;"undefined"!=typeof window?(window.crypto&&window.crypto.getRandomValues?n=window.crypto:window.msCrypto&&window.msCrypto.getRandomValues&&(n=window.msCrypto),n&&r.setPRNG(function(r,e){var t,o=new Uint8Array(e);for(n.getRandomValues(o),t=0;e>t;t++)r[t]=o[t];sr(o)})):"undefined"!=typeof require&&(n=require("crypto"),n&&r.setPRNG(function(r,e){var t,o=n.randomBytes(e);for(t=0;e>t;t++)r[t]=o[t];sr(o)}))}()}("undefined"!=typeof module&&module.exports?module.exports:window.nacl=window.nacl||{}); \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/tweetnacl/package.json b/node_modules/fsevents/node_modules/tweetnacl/package.json new file mode 100644 index 0000000..2e7b73b --- /dev/null +++ b/node_modules/fsevents/node_modules/tweetnacl/package.json @@ -0,0 +1,120 @@ +{ + "_args": [ + [ + "tweetnacl@~0.13.0", + "/Users/eshanker/Code/fsevents/node_modules/sshpk" + ] + ], + "_from": "tweetnacl@>=0.13.0 <0.14.0", + "_id": "tweetnacl@0.13.3", + "_inCache": true, + "_installable": true, + "_location": "/tweetnacl", + "_nodeVersion": "4.2.3", + "_npmUser": { + "email": "dmitry@codingrobots.com", + "name": "dchest" + }, + "_npmVersion": "2.14.7", + "_phantomChildren": {}, + "_requested": { + "name": "tweetnacl", + "raw": "tweetnacl@~0.13.0", + "rawSpec": "~0.13.0", + "scope": null, + "spec": ">=0.13.0 <0.14.0", + "type": "range" + }, + "_requiredBy": [ + "/sshpk" + ], + "_resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.13.3.tgz", + "_shasum": "d628b56f3bcc3d5ae74ba9d4c1a704def5ab4b56", + "_shrinkwrap": null, + "_spec": "tweetnacl@~0.13.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/sshpk", + "author": { + "name": "TweetNaCl-js contributors" + }, + "browser": { + "buffer": false, + "crypto": false + }, + "bugs": { + "url": "https://github.com/dchest/tweetnacl-js/issues" + }, + "dependencies": {}, + "description": "Port of TweetNaCl cryptographic library to JavaScript", + "devDependencies": { + "browserify": "^10.1.3", + "eslint": "^1.4.3", + "faucet": "0.0.1", + "tap-browser-color": "^0.1.2", + "tape": "^4.0.0", + "testling": "^1.7.1", + "uglify-js": "^2.4.21" + }, + "directories": { + "test": "test" + }, + "dist": { + "shasum": "d628b56f3bcc3d5ae74ba9d4c1a704def5ab4b56", + "tarball": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.13.3.tgz" + }, + "gitHead": "2bb422cb707fba4a5ec9654688564a4fb861b068", + "homepage": "https://dchest.github.io/tweetnacl-js", + "keywords": [ + "crypto", + "cryptography", + "curve25519", + "ed25519", + "encrypt", + 
"hash", + "key", + "nacl", + "poly1305", + "public", + "salsa20", + "signatures" + ], + "license": "Public domain", + "main": "nacl-fast.js", + "maintainers": [ + { + "name": "dchest", + "email": "dmitry@codingrobots.com" + } + ], + "name": "tweetnacl", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/dchest/tweetnacl-js.git" + }, + "scripts": { + "bench": "node test/benchmark/bench.js", + "browser": "browserify test/browser/init.js test/*.js | uglifyjs -c -m -o test/browser/_bundle.js 2>/dev/null", + "browser-quick": "browserify test/browser/init.js test/*.quick.js | uglifyjs -c -m -o test/browser/_bundle-quick.js 2>/dev/null", + "build": "uglifyjs nacl.js -c -m -o nacl.min.js && uglifyjs nacl-fast.js -c -m -o nacl-fast.min.js", + "chrome": "browserify test/browser/testling_init.js test/*.js | testling -x google-chrome | faucet", + "firefox": "browserify test/browser/testling_init.js test/*.js | testling -x firefox | faucet", + "lint": "eslint nacl.js nacl-fast.js test/*.js test/benchmark/*.js", + "test": "tape test/*.js | faucet", + "testall": "make -C test/c && tape test/*.js test/c/*.js | faucet", + "testling": "browserify test/browser/testling_init.js test/*.js | testling | faucet" + }, + "testling": { + "browsers": [ + "android-browser/latest", + "chrome/22..latest", + "firefox/16..latest", + "ipad/6..latest", + "iphone/6..latest", + "opera/11.0..latest", + "safari/latest" + ], + "files": "test/*.js" + }, + "version": "0.13.3" +} diff --git a/node_modules/fsevents/node_modules/uid-number/LICENSE b/node_modules/fsevents/node_modules/uid-number/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/fsevents/node_modules/uid-number/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fsevents/node_modules/uid-number/README.md b/node_modules/fsevents/node_modules/uid-number/README.md new file mode 100644 index 0000000..8116675 --- /dev/null +++ b/node_modules/fsevents/node_modules/uid-number/README.md @@ -0,0 +1,17 @@ +Use this module to convert a username/groupname to a uid/gid number. + +Usage: + +``` +npm install uid-number +``` + +Then, in your node program: + +```javascript +var uidNumber = require("uid-number") +uidNumber("isaacs", function (er, uid, gid) { + // gid is null because we didn't ask for a group name + // uid === 24561 because that's my number. 
+}) +``` diff --git a/node_modules/fsevents/node_modules/uid-number/get-uid-gid.js b/node_modules/fsevents/node_modules/uid-number/get-uid-gid.js new file mode 100755 index 0000000..0b39174 --- /dev/null +++ b/node_modules/fsevents/node_modules/uid-number/get-uid-gid.js @@ -0,0 +1,24 @@ +if (module !== require.main) { + throw new Error("This file should not be loaded with require()") +} + +if (!process.getuid || !process.getgid) { + throw new Error("this file should not be called without uid/gid support") +} + +var argv = process.argv.slice(2) + , user = argv[0] || process.getuid() + , group = argv[1] || process.getgid() + +if (!isNaN(user)) user = +user +if (!isNaN(group)) group = +group + +console.error([user, group]) + +try { + process.setgid(group) + process.setuid(user) + console.log(JSON.stringify({uid:+process.getuid(), gid:+process.getgid()})) +} catch (ex) { + console.log(JSON.stringify({error:ex.message,errno:ex.errno})) +} diff --git a/node_modules/fsevents/node_modules/uid-number/package.json b/node_modules/fsevents/node_modules/uid-number/package.json new file mode 100644 index 0000000..0547581 --- /dev/null +++ b/node_modules/fsevents/node_modules/uid-number/package.json @@ -0,0 +1,74 @@ +{ + "_args": [ + [ + "uid-number@~0.0.6", + "/Users/eshanker/Code/fsevents/node_modules/tar-pack" + ] + ], + "_from": "uid-number@>=0.0.6 <0.1.0", + "_id": "uid-number@0.0.6", + "_inCache": true, + "_installable": true, + "_location": "/uid-number", + "_nodeVersion": "0.10.31", + "_npmUser": { + "email": "i@izs.me", + "name": "isaacs" + }, + "_npmVersion": "2.1.3", + "_phantomChildren": {}, + "_requested": { + "name": "uid-number", + "raw": "uid-number@~0.0.6", + "rawSpec": "~0.0.6", + "scope": null, + "spec": ">=0.0.6 <0.1.0", + "type": "range" + }, + "_requiredBy": [ + "/tar-pack" + ], + "_resolved": "https://registry.npmjs.org/uid-number/-/uid-number-0.0.6.tgz", + "_shasum": "0ea10e8035e8eb5b8e4449f06da1c730663baa81", + "_shrinkwrap": null, + "_spec": "uid-number@~0.0.6", + "_where": "/Users/eshanker/Code/fsevents/node_modules/tar-pack", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. Schlueter", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/uid-number/issues" + }, + "dependencies": {}, + "description": "Convert a username/group name to a uid/gid number", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "0ea10e8035e8eb5b8e4449f06da1c730663baa81", + "tarball": "https://registry.npmjs.org/uid-number/-/uid-number-0.0.6.tgz" + }, + "engines": { + "node": "*" + }, + "gitHead": "aab48f5d6bda85794946b26d945d2ee452e0e9ab", + "homepage": "https://github.com/isaacs/uid-number", + "license": "ISC", + "main": "uid-number.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "uid-number", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/uid-number.git" + }, + "scripts": {}, + "version": "0.0.6" +} diff --git a/node_modules/fsevents/node_modules/uid-number/uid-number.js b/node_modules/fsevents/node_modules/uid-number/uid-number.js new file mode 100644 index 0000000..bd62184 --- /dev/null +++ b/node_modules/fsevents/node_modules/uid-number/uid-number.js @@ -0,0 +1,59 @@ +module.exports = uidNumber + +// This module calls into get-uid-gid.js, which sets the +// uid and gid to the supplied argument, in order to find out their +// numeric value. 
This can't be done in the main node process, +// because otherwise node would be running as that user from this +// point on. + +var child_process = require("child_process") + , path = require("path") + , uidSupport = process.getuid && process.setuid + , uidCache = {} + , gidCache = {} + +function uidNumber (uid, gid, cb) { + if (!uidSupport) return cb() + if (typeof cb !== "function") cb = gid, gid = null + if (typeof cb !== "function") cb = uid, uid = null + if (gid == null) gid = process.getgid() + if (uid == null) uid = process.getuid() + if (!isNaN(gid)) gid = gidCache[gid] = +gid + if (!isNaN(uid)) uid = uidCache[uid] = +uid + + if (uidCache.hasOwnProperty(uid)) uid = uidCache[uid] + if (gidCache.hasOwnProperty(gid)) gid = gidCache[gid] + + if (typeof gid === "number" && typeof uid === "number") { + return process.nextTick(cb.bind(null, null, uid, gid)) + } + + var getter = require.resolve("./get-uid-gid.js") + + child_process.execFile( process.execPath + , [getter, uid, gid] + , function (code, out, stderr) { + if (code) { + var er = new Error("could not get uid/gid\n" + stderr) + er.code = code + return cb(er) + } + + try { + out = JSON.parse(out+"") + } catch (ex) { + return cb(ex) + } + + if (out.error) { + var er = new Error(out.error) + er.errno = out.errno + return cb(er) + } + + if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error( + "Could not get uid/gid: "+JSON.stringify(out))) + + cb(null, uidCache[uid] = +out.uid, gidCache[gid] = +out.gid) + }) +} diff --git a/node_modules/fsevents/node_modules/util-deprecate/History.md b/node_modules/fsevents/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/fsevents/node_modules/util-deprecate/LICENSE b/node_modules/fsevents/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/util-deprecate/README.md b/node_modules/fsevents/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/util-deprecate/browser.js b/node_modules/fsevents/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. 
+ * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/fsevents/node_modules/util-deprecate/node.js b/node_modules/fsevents/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/node_modules/fsevents/node_modules/util-deprecate/package.json b/node_modules/fsevents/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..67c5e56 --- /dev/null +++ b/node_modules/fsevents/node_modules/util-deprecate/package.json @@ -0,0 +1,82 @@ +{ + "_args": [ + [ + "util-deprecate@~1.0.1", + "/Users/eshanker/Code/fsevents/node_modules/readable-stream" + ] + ], + "_from": "util-deprecate@>=1.0.1 <1.1.0", + "_id": "util-deprecate@1.0.2", + "_inCache": true, + "_installable": true, + "_location": "/util-deprecate", + "_nodeVersion": "4.1.2", + "_npmUser": { + "email": "nathan@tootallnate.net", + "name": "tootallnate" + }, + "_npmVersion": "2.14.4", + "_phantomChildren": {}, + "_requested": { + "name": "util-deprecate", + "raw": "util-deprecate@~1.0.1", + "rawSpec": "~1.0.1", + "scope": null, + "spec": ">=1.0.1 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/bl/readable-stream", + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "_shrinkwrap": null, + "_spec": "util-deprecate@~1.0.1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/readable-stream", + "author": { + "email": "nathan@tootallnate.net", + "name": "Nathan Rajlich", + "url": "http://n8.io/" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "dependencies": {}, + "description": "The Node.js `util.deprecate()` function with browser support", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "tarball": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + }, + "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", + "homepage": "https://github.com/TooTallNate/util-deprecate", + "keywords": [ + "browser", + "browserify", + "deprecate", + "node", + "util" + ], + "license": "MIT", + "main": "node.js", + "maintainers": [ + { + "name": "tootallnate", + "email": "nathan@tootallnate.net" + } + ], + "name": "util-deprecate", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.2" +} diff --git a/node_modules/fsevents/node_modules/verror/.gitmodules b/node_modules/fsevents/node_modules/verror/.gitmodules new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/fsevents/node_modules/verror/.npmignore b/node_modules/fsevents/node_modules/verror/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ 
b/node_modules/fsevents/node_modules/verror/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/fsevents/node_modules/verror/LICENSE b/node_modules/fsevents/node_modules/verror/LICENSE new file mode 100644 index 0000000..cbc0bb3 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Joyent, Inc. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/fsevents/node_modules/verror/Makefile b/node_modules/fsevents/node_modules/verror/Makefile new file mode 100644 index 0000000..00faa97 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/Makefile @@ -0,0 +1,35 @@ +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile: top-level Makefile +# +# This Makefile contains only repo-specific logic and uses included makefiles +# to supply common targets (javascriptlint, jsstyle, restdown, etc.), which are +# used by other repos as well. +# + +# +# Tools +# +NPM = npm + +# +# Files +# +JS_FILES := $(shell find lib examples tests -name '*.js') +JSL_FILES_NODE = $(JS_FILES) +JSSTYLE_FILES = $(JS_FILES) +JSL_CONF_NODE = jsl.node.conf + +.PHONY: all +all: + $(NPM) install + +.PHONY: test +test: + node tests/tst.inherit.js + node tests/tst.verror.js + node tests/tst.werror.js + @echo all tests passed + +include ./Makefile.targ diff --git a/node_modules/fsevents/node_modules/verror/Makefile.targ b/node_modules/fsevents/node_modules/verror/Makefile.targ new file mode 100644 index 0000000..2a64fe7 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/Makefile.targ @@ -0,0 +1,285 @@ +# -*- mode: makefile -*- +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile.targ: common targets. +# +# NOTE: This makefile comes from the "eng" repo. It's designed to be dropped +# into other repos as-is without requiring any modifications. If you find +# yourself changing this file, you should instead update the original copy in +# eng.git and then update your repo to use the new version. +# +# This Makefile defines several useful targets and rules. You can use it by +# including it from a Makefile that specifies some of the variables below. 
+# +# Targets defined in this Makefile: +# +# check Checks JavaScript files for lint and style +# Checks bash scripts for syntax +# Checks SMF manifests for validity against the SMF DTD +# +# clean Removes built files +# +# docs Builds restdown documentation in docs/ +# +# prepush Depends on "check" and "test" +# +# test Does nothing (you should override this) +# +# xref Generates cscope (source cross-reference index) +# +# For details on what these targets are supposed to do, see the Joyent +# Engineering Guide. +# +# To make use of these targets, you'll need to set some of these variables. Any +# variables left unset will simply not be used. +# +# BASH_FILES Bash scripts to check for syntax +# (paths relative to top-level Makefile) +# +# CLEAN_FILES Files to remove as part of the "clean" target. Note +# that files generated by targets in this Makefile are +# automatically included in CLEAN_FILES. These include +# restdown-generated HTML and JSON files. +# +# DOC_FILES Restdown (documentation source) files. These are +# assumed to be contained in "docs/", and must NOT +# contain the "docs/" prefix. +# +# JSL_CONF_NODE Specify JavaScriptLint configuration files +# JSL_CONF_WEB (paths relative to top-level Makefile) +# +# Node.js and Web configuration files are separate +# because you'll usually want different global variable +# configurations. If no file is specified, none is given +# to jsl, which causes it to use a default configuration, +# which probably isn't what you want. +# +# JSL_FILES_NODE JavaScript files to check with Node config file. +# JSL_FILES_WEB JavaScript files to check with Web config file. +# +# You can also override these variables: +# +# BASH Path to bash (default: bash) +# +# CSCOPE_DIRS Directories to search for source files for the cscope +# index. (default: ".") +# +# JSL Path to JavaScriptLint (default: "jsl") +# +# JSL_FLAGS_NODE Additional flags to pass through to JSL +# JSL_FLAGS_WEB +# JSL_FLAGS +# +# JSSTYLE Path to jsstyle (default: jsstyle) +# +# JSSTYLE_FLAGS Additional flags to pass through to jsstyle +# + +# +# Defaults for the various tools we use. +# +BASH ?= bash +BASHSTYLE ?= tools/bashstyle +CP ?= cp +CSCOPE ?= cscope +CSCOPE_DIRS ?= . +JSL ?= jsl +JSSTYLE ?= jsstyle +MKDIR ?= mkdir -p +MV ?= mv +RESTDOWN_FLAGS ?= +RMTREE ?= rm -rf +JSL_FLAGS ?= --nologo --nosummary + +ifeq ($(shell uname -s),SunOS) + TAR ?= gtar +else + TAR ?= tar +endif + + +# +# Defaults for other fixed values. +# +BUILD = build +DISTCLEAN_FILES += $(BUILD) +DOC_BUILD = $(BUILD)/docs/public + +# +# Configure JSL_FLAGS_{NODE,WEB} based on JSL_CONF_{NODE,WEB}. +# +ifneq ($(origin JSL_CONF_NODE), undefined) + JSL_FLAGS_NODE += --conf=$(JSL_CONF_NODE) +endif + +ifneq ($(origin JSL_CONF_WEB), undefined) + JSL_FLAGS_WEB += --conf=$(JSL_CONF_WEB) +endif + +# +# Targets. For descriptions on what these are supposed to do, see the +# Joyent Engineering Guide. +# + +# +# Instruct make to keep around temporary files. We have rules below that +# automatically update git submodules as needed, but they employ a deps/*/.git +# temporary file. Without this directive, make tries to remove these .git +# directories after the build has completed. +# +.SECONDARY: $($(wildcard deps/*):%=%/.git) + +# +# This rule enables other rules that use files from a git submodule to have +# those files depend on deps/module/.git and have "make" automatically check +# out the submodule as needed. 
+# +deps/%/.git: + git submodule update --init deps/$* + +# +# These recipes make heavy use of dynamically-created phony targets. The parent +# Makefile defines a list of input files like BASH_FILES. We then say that each +# of these files depends on a fake target called filename.bashchk, and then we +# define a pattern rule for those targets that runs bash in check-syntax-only +# mode. This mechanism has the nice properties that if you specify zero files, +# the rule becomes a noop (unlike a single rule to check all bash files, which +# would invoke bash with zero files), and you can check individual files from +# the command line with "make filename.bashchk". +# +.PHONY: check-bash +check-bash: $(BASH_FILES:%=%.bashchk) $(BASH_FILES:%=%.bashstyle) + +%.bashchk: % + $(BASH) -n $^ + +%.bashstyle: % + $(BASHSTYLE) $^ + +.PHONY: check-jsl check-jsl-node check-jsl-web +check-jsl: check-jsl-node check-jsl-web + +check-jsl-node: $(JSL_FILES_NODE:%=%.jslnodechk) + +check-jsl-web: $(JSL_FILES_WEB:%=%.jslwebchk) + +%.jslnodechk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_NODE) $< + +%.jslwebchk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_WEB) $< + +.PHONY: check-jsstyle +check-jsstyle: $(JSSTYLE_FILES:%=%.jsstylechk) + +%.jsstylechk: % $(JSSTYLE_EXEC) + $(JSSTYLE) $(JSSTYLE_FLAGS) $< + +.PHONY: check +check: check-jsl check-jsstyle check-bash + @echo check ok + +.PHONY: clean +clean:: + -$(RMTREE) $(CLEAN_FILES) + +.PHONY: distclean +distclean:: clean + -$(RMTREE) $(DISTCLEAN_FILES) + +CSCOPE_FILES = cscope.in.out cscope.out cscope.po.out +CLEAN_FILES += $(CSCOPE_FILES) + +.PHONY: xref +xref: cscope.files + $(CSCOPE) -bqR + +.PHONY: cscope.files +cscope.files: + find $(CSCOPE_DIRS) -name '*.c' -o -name '*.h' -o -name '*.cc' \ + -o -name '*.js' -o -name '*.s' -o -name '*.cpp' > $@ + +# +# The "docs" target is complicated because we do several things here: +# +# (1) Use restdown to build HTML and JSON files from each of DOC_FILES. +# +# (2) Copy these files into $(DOC_BUILD) (build/docs/public), which +# functions as a complete copy of the documentation that could be +# mirrored or served over HTTP. +# +# (3) Then copy any directories and media from docs/media into +# $(DOC_BUILD)/media. This allows projects to include their own media, +# including files that will override same-named files provided by +# restdown. +# +# Step (3) is the surprisingly complex part: in order to do this, we need to +# identify the subdirectories in docs/media, recreate them in +# $(DOC_BUILD)/media, then do the same with the files. +# +DOC_MEDIA_DIRS := $(shell find docs/media -type d 2>/dev/null | grep -v "^docs/media$$") +DOC_MEDIA_DIRS := $(DOC_MEDIA_DIRS:docs/media/%=%) +DOC_MEDIA_DIRS_BUILD := $(DOC_MEDIA_DIRS:%=$(DOC_BUILD)/media/%) + +DOC_MEDIA_FILES := $(shell find docs/media -type f 2>/dev/null) +DOC_MEDIA_FILES := $(DOC_MEDIA_FILES:docs/media/%=%) +DOC_MEDIA_FILES_BUILD := $(DOC_MEDIA_FILES:%=$(DOC_BUILD)/media/%) + +# +# Like the other targets, "docs" just depends on the final files we want to +# create in $(DOC_BUILD), leveraging other targets and recipes to define how +# to get there. +# +.PHONY: docs +docs: \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.html) \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.json) \ + $(DOC_MEDIA_FILES_BUILD) + +# +# We keep the intermediate files so that the next build can see whether the +# files in DOC_BUILD are up to date. 
+# +.PRECIOUS: \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%json) + +# +# We do clean those intermediate files, as well as all of DOC_BUILD. +# +CLEAN_FILES += \ + $(DOC_BUILD) \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%.json) + +# +# Before installing the files, we must make sure the directories exist. The | +# syntax tells make that the dependency need only exist, not be up to date. +# Otherwise, it might try to rebuild spuriously because the directory itself +# appears out of date. +# +$(DOC_MEDIA_FILES_BUILD): | $(DOC_MEDIA_DIRS_BUILD) + +$(DOC_BUILD)/%: docs/% | $(DOC_BUILD) + $(CP) $< $@ + +docs/%.json docs/%.html: docs/%.restdown | $(DOC_BUILD) $(RESTDOWN_EXEC) + $(RESTDOWN) $(RESTDOWN_FLAGS) -m $(DOC_BUILD) $< + +$(DOC_BUILD): + $(MKDIR) $@ + +$(DOC_MEDIA_DIRS_BUILD): + $(MKDIR) $@ + +# +# The default "test" target does nothing. This should usually be overridden by +# the parent Makefile. It's included here so we can define "prepush" without +# requiring the repo to define "test". +# +.PHONY: test +test: + +.PHONY: prepush +prepush: check test diff --git a/node_modules/fsevents/node_modules/verror/README.md b/node_modules/fsevents/node_modules/verror/README.md new file mode 100644 index 0000000..e9b5497 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/README.md @@ -0,0 +1,120 @@ +# verror: richer JavaScript errors + +This module provides two classes: VError, for accretive errors, and WError, for +wrapping errors. Both support printf-style error messages using extsprintf. + +## Printf-style errors + +At the most basic level, VError is just like JavaScript's Error class, but with +printf-style arguments: + + var verror = require('verror'); + + var opname = 'read'; + var err = new verror.VError('"%s" operation failed', opname); + console.log(err.message); + console.log(err.stack); + +This prints: + + "read" operation failed + "read" operation failed + at Object. (/Users/dap/node-verror/examples/varargs.js:4:11) + at Module._compile (module.js:449:26) + at Object.Module._extensions..js (module.js:467:10) + at Module.load (module.js:356:32) + at Function.Module._load (module.js:312:12) + at Module.runMain (module.js:492:10) + at process.startup.processNextTick.process._tickCallback (node.js:244:9) + + +## VError for accretive error messages + +More interestingly, you can use VError to build up an error describing what +happened at various levels in the stack. For example, suppose you have a +request handler that stats a file and fails if it doesn't exist: + + var fs = require('fs'); + var verror = require('verror'); + + function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + }); + } + + function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) { + /* Annotate the "checkFile" error with what we were doing. */ + return (callback(new verror.VError(err, 'request failed'))); + } + + /* ... */ + }); + } + + handleRequest('/nonexistent', function (err) { + if (err) + console.log(err.message); + /* ... 
*/ + }); + +Since the file "/nonexistent" doesn't exist, this prints out: + + request failed: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + +The idea here is that the lowest level (Node's "fs.stat" function) generates an +arbitrary error, and each higher level (request handler and stat callback) +creates a new VError that annotates the previous error with what it was doing, +so that the result is a clear message explaining what failed at each level. + +This plays nicely with extsprintf's "%r" specifier, which prints out a +Java-style stacktrace with the whole chain of exceptions: + + EXCEPTION: VError: request failed: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + at /Users/dap/work/node-verror/examples/levels.js:21:21 + at /Users/dap/work/node-verror/examples/levels.js:9:12 + at Object.oncomplete (fs.js:297:15) + Caused by: EXCEPTION: VError: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + at /Users/dap/work/node-verror/examples/levels.js:9:21 + at Object.oncomplete (fs.js:297:15) + Caused by: EXCEPTION: Error: Error: ENOENT, stat '/nonexistent' + + +## WError for wrapped errors + +Sometimes you don't want an Error's "message" field to include the details of +all of the low-level errors, but you still want to be able to get at them +programmatically. For example, in an HTTP server, you probably don't want to +spew all of the low-level errors back to the client, but you do want to include +them in the audit log entry for the request. In that case, you can use a +WError, which is created exactly like VError (and also supports both +printf-style arguments and an optional cause), but the resulting "message" only +contains the top-level error. It's also more verbose, including the class +associated with each error in the cause chain. Using the same example above, +but replacing the VError in handleRequest with WError, we get this output: + + request failed + +That's what we wanted -- just a high-level summary for the client. But we can +get the object's toString() for the full details: + + WError: request failed; caused by WError: failed to check "/nonexistent"; + caused by Error: ENOENT, stat '/nonexistent' + +# Contributing + +Contributions welcome. Code should be "make check" clean. To run "make check", +you'll need these tools: + +* https://github.com/davepacheco/jsstyle +* https://github.com/davepacheco/javascriptlint + +If you're changing something non-trivial or user-facing, you may want to submit +an issue first. diff --git a/node_modules/fsevents/node_modules/verror/examples/levels-verror.js b/node_modules/fsevents/node_modules/verror/examples/levels-verror.js new file mode 100644 index 0000000..53a7022 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/examples/levels-verror.js @@ -0,0 +1,36 @@ +var extsprintf = require('extsprintf'); +var fs = require('fs'); +var verror = require('../lib/verror'); + +function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + return (callback()); + }); +} + +function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) + /* Annotate the "checkFile" error. */ + return (callback(new verror.VError( + err, 'request failed'))); + + /* ... 
*/ + return (callback()); + }); +} + +handleRequest('/nonexistent', function (err) { + if (err) { + console.log(err.message); + console.log(extsprintf.sprintf('%r', err)); + } + + /* ... */ +}); diff --git a/node_modules/fsevents/node_modules/verror/examples/levels-werror.js b/node_modules/fsevents/node_modules/verror/examples/levels-werror.js new file mode 100644 index 0000000..7e57075 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/examples/levels-werror.js @@ -0,0 +1,34 @@ +var extsprintf = require('extsprintf'); +var fs = require('fs'); +var verror = require('../lib/verror'); + +function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + return (callback()); + }); +} + +function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) + /* Wrap the "checkFile" error. */ + return (callback(new verror.WError( + err, 'request failed'))); + + /* ... */ + return (callback()); + }); +} + +handleRequest('/nonexistent', function (err) { + if (err) { + console.log(err.message); + console.log(err.toString()); + } +}); diff --git a/node_modules/fsevents/node_modules/verror/examples/varargs.js b/node_modules/fsevents/node_modules/verror/examples/varargs.js new file mode 100644 index 0000000..2e14ee4 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/examples/varargs.js @@ -0,0 +1,6 @@ +var verror = require('../lib/verror'); + +var opname = 'read'; +var err = new verror.VError('"%s" operation failed', opname); +console.log(err.message); +console.log(err.stack); diff --git a/node_modules/fsevents/node_modules/verror/examples/verror.js b/node_modules/fsevents/node_modules/verror/examples/verror.js new file mode 100644 index 0000000..887b181 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/examples/verror.js @@ -0,0 +1,13 @@ +var mod_fs = require('fs'); +var mod_verror = require('../lib/verror'); + +var filename = '/nonexistent'; + +mod_fs.stat(filename, function (err1) { + var err2 = new mod_verror.VError(err1, 'failed to stat "%s"', filename); + + /* The following would normally be higher up the stack. */ + var err3 = new mod_verror.VError(err2, 'failed to handle request'); + console.log(err3.message); + console.log(err3.stack); +}); diff --git a/node_modules/fsevents/node_modules/verror/examples/werror.js b/node_modules/fsevents/node_modules/verror/examples/werror.js new file mode 100644 index 0000000..f55e532 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/examples/werror.js @@ -0,0 +1,14 @@ +var mod_fs = require('fs'); +var mod_verror = require('../lib/verror'); + +var filename = '/nonexistent'; + +mod_fs.stat(filename, function (err1) { + var err2 = new mod_verror.WError(err1, 'failed to stat "%s"', filename); + + /* The following would normally be higher up the stack. 
*/ + var err3 = new mod_verror.WError(err2, 'failed to handle request'); + console.log(err3.message); + console.log(err3.toString()); + console.log(err3.stack); +}); diff --git a/node_modules/fsevents/node_modules/verror/jsl.node.conf b/node_modules/fsevents/node_modules/verror/jsl.node.conf new file mode 100644 index 0000000..bd724a2 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/jsl.node.conf @@ -0,0 +1,139 @@ +# +# Configuration File for JavaScript Lint +# +# This configuration file can be used to lint a collection of scripts, or to enable +# or disable warnings for scripts that are linted via the command line. +# + +### Warnings +# Enable or disable warnings based on requirements. +# Use "+WarningName" to display or "-WarningName" to suppress. +# ++ambiguous_else_stmt # the else statement could be matched with one of multiple if statements (use curly braces to indicate intent ++ambiguous_nested_stmt # block statements containing block statements should use curly braces to resolve ambiguity ++ambiguous_newline # unexpected end of line; it is ambiguous whether these lines are part of the same statement ++anon_no_return_value # anonymous function does not always return value ++assign_to_function_call # assignment to a function call +-block_without_braces # block statement without curly braces ++comma_separated_stmts # multiple statements separated by commas (use semicolons?) ++comparison_type_conv # comparisons against null, 0, true, false, or an empty string allowing implicit type conversion (use === or !==) ++default_not_at_end # the default case is not at the end of the switch statement ++dup_option_explicit # duplicate "option explicit" control comment ++duplicate_case_in_switch # duplicate case in switch statement ++duplicate_formal # duplicate formal argument {name} ++empty_statement # empty statement or extra semicolon ++identifier_hides_another # identifer {name} hides an identifier in a parent scope +-inc_dec_within_stmt # increment (++) and decrement (--) operators used as part of greater statement ++incorrect_version # Expected /*jsl:content-type*/ control comment. The script was parsed with the wrong version. ++invalid_fallthru # unexpected "fallthru" control comment ++invalid_pass # unexpected "pass" control comment ++jsl_cc_not_understood # couldn't understand control comment using /*jsl:keyword*/ syntax ++leading_decimal_point # leading decimal point may indicate a number or an object member ++legacy_cc_not_understood # couldn't understand control comment using /*@keyword@*/ syntax ++meaningless_block # meaningless block; curly braces have no impact ++mismatch_ctrl_comments # mismatched control comment; "ignore" and "end" control comments must have a one-to-one correspondence ++misplaced_regex # regular expressions should be preceded by a left parenthesis, assignment, colon, or comma ++missing_break # missing break statement ++missing_break_for_last_case # missing break statement for last case in switch ++missing_default_case # missing default case in switch statement ++missing_option_explicit # the "option explicit" control comment is missing ++missing_semicolon # missing semicolon ++missing_semicolon_for_lambda # missing semicolon for lambda assignment ++multiple_plus_minus # unknown order of operations for successive plus (e.g. x+++y) or minus (e.g. 
x---y) signs ++nested_comment # nested comment ++no_return_value # function {name} does not always return a value ++octal_number # leading zeros make an octal number ++parseint_missing_radix # parseInt missing radix parameter ++partial_option_explicit # the "option explicit" control comment, if used, must be in the first script tag ++redeclared_var # redeclaration of {name} ++trailing_comma_in_array # extra comma is not recommended in array initializers ++trailing_decimal_point # trailing decimal point may indicate a number or an object member ++undeclared_identifier # undeclared identifier: {name} ++unreachable_code # unreachable code +-unreferenced_argument # argument declared but never referenced: {name} +-unreferenced_function # function is declared but never referenced: {name} ++unreferenced_variable # variable is declared but never referenced: {name} ++unsupported_version # JavaScript {version} is not supported ++use_of_label # use of label ++useless_assign # useless assignment ++useless_comparison # useless comparison; comparing identical expressions +-useless_quotes # the quotation marks are unnecessary ++useless_void # use of the void type may be unnecessary (void is always undefined) ++var_hides_arg # variable {name} hides argument ++want_assign_or_call # expected an assignment or function call ++with_statement # with statement hides undeclared variables; use temporary variable instead + + +### Output format +# Customize the format of the error message. +# __FILE__ indicates current file path +# __FILENAME__ indicates current file name +# __LINE__ indicates current line +# __COL__ indicates current column +# __ERROR__ indicates error message (__ERROR_PREFIX__: __ERROR_MSG__) +# __ERROR_NAME__ indicates error name (used in configuration file) +# __ERROR_PREFIX__ indicates error prefix +# __ERROR_MSG__ indicates error message +# +# For machine-friendly output, the output format can be prefixed with +# "encode:". If specified, all items will be encoded with C-slashes. +# +# Visual Studio syntax (default): ++output-format __FILE__(__LINE__): __ERROR__ +# Alternative syntax: +#+output-format __FILE__:__LINE__: __ERROR__ + + +### Context +# Show the in-line position of the error. +# Use "+context" to display or "-context" to suppress. +# ++context + + +### Control Comments +# Both JavaScript Lint and the JScript interpreter confuse each other with the syntax for +# the /*@keyword@*/ control comments and JScript conditional comments. (The latter is +# enabled in JScript with @cc_on@). The /*jsl:keyword*/ syntax is preferred for this reason, +# although legacy control comments are enabled by default for backward compatibility. +# +-legacy_control_comments + + +### Defining identifiers +# By default, "option explicit" is enabled on a per-file basis. +# To enable this for all files, use "+always_use_option_explicit" +-always_use_option_explicit + +# Define certain identifiers of which the lint is not aware. +# (Use this in conjunction with the "undeclared identifier" warning.) 
+# +# Common uses for webpages might be: ++define __dirname ++define clearInterval ++define clearTimeout ++define console ++define exports ++define global ++define process ++define require ++define setInterval ++define setTimeout ++define Buffer ++define JSON ++define Math ++define __dirname ++define __filename + +### JavaScript Version +# To change the default JavaScript version: +#+default-type text/javascript;version=1.5 +#+default-type text/javascript;e4x=1 + +### Files +# Specify which files to lint +# Use "+recurse" to enable recursion (disabled by default). +# To add a set of files, use "+process FileName", "+process Folder\Path\*.js", +# or "+process Folder\Path\*.htm". +# + diff --git a/node_modules/fsevents/node_modules/verror/lib/verror.js b/node_modules/fsevents/node_modules/verror/lib/verror.js new file mode 100644 index 0000000..9ca087b --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/lib/verror.js @@ -0,0 +1,157 @@ +/* + * verror.js: richer JavaScript errors + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_extsprintf = require('extsprintf'); + +/* + * Public interface + */ +exports.VError = VError; +exports.WError = WError; +exports.MultiError = MultiError; + +/* + * Like JavaScript's built-in Error class, but supports a "cause" argument and a + * printf-style message. The cause argument can be null. + */ +function VError(options) +{ + var args, causedBy, ctor, tailmsg; + + if (options instanceof Error || typeof (options) === 'object') { + args = Array.prototype.slice.call(arguments, 1); + } else { + args = Array.prototype.slice.call(arguments, 0); + options = undefined; + } + + tailmsg = args.length > 0 ? + mod_extsprintf.sprintf.apply(null, args) : ''; + this.jse_shortmsg = tailmsg; + this.jse_summary = tailmsg; + + if (options) { + causedBy = options.cause; + + if (!causedBy || !(options.cause instanceof Error)) + causedBy = options; + + if (causedBy && (causedBy instanceof Error)) { + this.jse_cause = causedBy; + this.jse_summary += ': ' + causedBy.message; + } + } + + this.message = this.jse_summary; + Error.call(this, this.jse_summary); + + if (Error.captureStackTrace) { + ctor = options ? options.constructorOpt : undefined; + ctor = ctor || arguments.callee; + Error.captureStackTrace(this, ctor); + } +} + +mod_util.inherits(VError, Error); +VError.prototype.name = 'VError'; + +VError.prototype.toString = function ve_toString() +{ + var str = (this.hasOwnProperty('name') && this.name || + this.constructor.name || this.constructor.prototype.name); + if (this.message) + str += ': ' + this.message; + + return (str); +}; + +VError.prototype.cause = function ve_cause() +{ + return (this.jse_cause); +}; + + +/* + * Represents a collection of errors for the purpose of consumers that generally + * only deal with one error. Callers can extract the individual errors + * contained in this object, but may also just treat it as a normal single + * error, in which case a summary message will be printed. + */ +function MultiError(errors) +{ + mod_assert.ok(errors.length > 0); + this.ase_errors = errors; + + VError.call(this, errors[0], 'first of %d error%s', + errors.length, errors.length == 1 ? '' : 's'); +} + +mod_util.inherits(MultiError, VError); + + + +/* + * Like JavaScript's built-in Error class, but supports a "cause" argument which + * is wrapped, not "folded in" as with VError. Accepts a printf-style message. + * The cause argument can be null. 
+ */ +function WError(options) +{ + Error.call(this); + + var args, cause, ctor; + if (typeof (options) === 'object') { + args = Array.prototype.slice.call(arguments, 1); + } else { + args = Array.prototype.slice.call(arguments, 0); + options = undefined; + } + + if (args.length > 0) { + this.message = mod_extsprintf.sprintf.apply(null, args); + } else { + this.message = ''; + } + + if (options) { + if (options instanceof Error) { + cause = options; + } else { + cause = options.cause; + ctor = options.constructorOpt; + } + } + + Error.captureStackTrace(this, ctor || this.constructor); + if (cause) + this.cause(cause); + +} + +mod_util.inherits(WError, Error); +WError.prototype.name = 'WError'; + + +WError.prototype.toString = function we_toString() +{ + var str = (this.hasOwnProperty('name') && this.name || + this.constructor.name || this.constructor.prototype.name); + if (this.message) + str += ': ' + this.message; + if (this.we_cause && this.we_cause.message) + str += '; caused by ' + this.we_cause.toString(); + + return (str); +}; + +WError.prototype.cause = function we_cause(c) +{ + if (c instanceof Error) + this.we_cause = c; + + return (this.we_cause); +}; diff --git a/node_modules/fsevents/node_modules/verror/package.json b/node_modules/fsevents/node_modules/verror/package.json new file mode 100644 index 0000000..bb6cd90 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/package.json @@ -0,0 +1,70 @@ +{ + "_args": [ + [ + "verror@1.3.6", + "/Users/eshanker/Code/fsevents/node_modules/jsprim" + ] + ], + "_from": "verror@1.3.6", + "_id": "verror@1.3.6", + "_inCache": true, + "_installable": true, + "_location": "/verror", + "_npmUser": { + "email": "dap@cs.brown.edu", + "name": "dap" + }, + "_npmVersion": "1.1.65", + "_phantomChildren": {}, + "_requested": { + "name": "verror", + "raw": "verror@1.3.6", + "rawSpec": "1.3.6", + "scope": null, + "spec": "1.3.6", + "type": "version" + }, + "_requiredBy": [ + "/jsprim" + ], + "_resolved": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz", + "_shasum": "cff5df12946d297d2baaefaa2689e25be01c005c", + "_shrinkwrap": null, + "_spec": "verror@1.3.6", + "_where": "/Users/eshanker/Code/fsevents/node_modules/jsprim", + "bugs": { + "url": "https://github.com/davepacheco/node-verror/issues" + }, + "dependencies": { + "extsprintf": "1.0.2" + }, + "description": "richer JavaScript errors", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "cff5df12946d297d2baaefaa2689e25be01c005c", + "tarball": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz" + }, + "engines": [ + "node >=0.6.0" + ], + "homepage": "https://github.com/davepacheco/node-verror#readme", + "main": "./lib/verror.js", + "maintainers": [ + { + "name": "dap", + "email": "dap@cs.brown.edu" + } + ], + "name": "verror", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/davepacheco/node-verror.git" + }, + "scripts": { + "test": "make test" + }, + "version": "1.3.6" +} diff --git a/node_modules/fsevents/node_modules/verror/tests/tst.inherit.js b/node_modules/fsevents/node_modules/verror/tests/tst.inherit.js new file mode 100644 index 0000000..0f0d70b --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/tests/tst.inherit.js @@ -0,0 +1,100 @@ +/* + * tst.inherit.js: test that inheriting from VError and WError work as expected. 
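+ * Exercises subclasses created with util.inherits: they should keep cause() chaining, + * message composition, and toString() naming via the constructor name, prototype.name, + * or an instance-level "name" override.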
+ */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; +var err, suberr; + +function VErrorChild() +{ + VError.apply(this, Array.prototype.slice.call(arguments)); +} + +mod_util.inherits(VErrorChild, VError); +VErrorChild.prototype.name = 'VErrorChild'; + + +function WErrorChild() +{ + WError.apply(this, Array.prototype.slice.call(arguments)); +} + +mod_util.inherits(WErrorChild, WError); +WErrorChild.prototype.name = 'WErrorChild'; + + +suberr = new Error('root cause'); +err = new VErrorChild(suberr, 'top'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof VError); +mod_assert.ok(err instanceof VErrorChild); +mod_assert.equal(err.cause(), suberr); +mod_assert.equal(err.message, 'top: root cause'); +mod_assert.equal(err.toString(), 'VErrorChild: top: root cause'); +mod_assert.equal(err.stack.split('\n')[0], 'VErrorChild: top: root cause'); + +suberr = new Error('root cause'); +err = new WErrorChild(suberr, 'top'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof WError); +mod_assert.ok(err instanceof WErrorChild); +mod_assert.equal(err.cause(), suberr); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), + 'WErrorChild: top; caused by Error: root cause'); +mod_assert.equal(err.stack.split('\n')[0], + 'WErrorChild: top; caused by Error: root cause'); + + +// Test that `.toString()` uses the ctor name. I.e. setting +// `.prototype.name` isn't necessary. +function VErrorChildNoName() { + VError.apply(this, Array.prototype.slice.call(arguments)); +} +mod_util.inherits(VErrorChildNoName, VError); +err = new VErrorChildNoName('top'); +mod_assert.equal(err.toString(), 'VErrorChildNoName: top'); + +function WErrorChildNoName() { + WError.apply(this, Array.prototype.slice.call(arguments)); +} +mod_util.inherits(WErrorChildNoName, WError); +err = new WErrorChildNoName('top'); +mod_assert.equal(err.toString(), 'WErrorChildNoName: top'); + + +// Test that `.prototype.name` can be used for the `.toString()` +// when the ctor is anonymous. +var VErrorChildAnon = function () { + VError.apply(this, Array.prototype.slice.call(arguments)); +}; +mod_util.inherits(VErrorChildAnon, VError); +VErrorChildAnon.prototype.name = 'VErrorChildAnon'; +err = new VErrorChildAnon('top'); +mod_assert.equal(err.toString(), 'VErrorChildAnon: top'); + +var WErrorChildAnon = function () { + WError.apply(this, Array.prototype.slice.call(arguments)); +}; +mod_util.inherits(WErrorChildAnon, WError); +WErrorChildAnon.prototype.name = 'WErrorChildAnon'; +err = new WErrorChildAnon('top'); +mod_assert.equal(err.toString(), 'WErrorChildAnon: top'); + + +// Test get appropriate exception name in `.toString()` when reconstituting +// an error instance a la: +// https://github.com/mcavage/node-fast/blob/master/lib/client.js#L215 +err = new VError('top'); +err.name = 'CustomNameError'; +mod_assert.equal(err.toString(), 'CustomNameError: top'); + +err = new WError('top'); +err.name = 'CustomNameError'; +mod_assert.equal(err.toString(), 'CustomNameError: top'); diff --git a/node_modules/fsevents/node_modules/verror/tests/tst.verror.js b/node_modules/fsevents/node_modules/verror/tests/tst.verror.js new file mode 100644 index 0000000..ee937cd --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/tests/tst.verror.js @@ -0,0 +1,156 @@ +/* + * tst.verror.js: tests basic functionality of the VError class. 
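+ * Covers printf-style message formatting, cause chaining (including nested VError/WError + * causes and null causes), and constructorOpt-based trimming of stack traces.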
+ */ + +var mod_assert = require('assert'); +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; + +var err, suberr, stack, substack; + +/* + * Remove full paths and relative line numbers from stack traces so that we can + * compare against "known-good" output. + */ +function cleanStack(stacktxt) +{ + var re = new RegExp(__filename + ':\\d+:\\d+', 'gm'); + stacktxt = stacktxt.replace(re, 'tst.verror.js'); + return (stacktxt); +} + +/* + * Save the generic parts of all stack traces so we can avoid hardcoding + * Node-specific implementation details in our testing of stack traces. + */ +var nodestack = new Error().stack.split('\n').slice(2).join('\n'); + +/* no arguments */ +err = new VError(); +mod_assert.equal(err.name, 'VError'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof VError); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* options-argument form */ +err = new VError({}); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); + +/* simple message */ +err = new VError('my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: my error', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({}, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); + +/* printf-style message */ +err = new VError('%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +err = new VError({}, '%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +/* caused by another error, with no additional message */ +suberr = new Error('root cause'); +err = new VError(suberr); +mod_assert.equal(err.message, ': root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new VError({ 'cause': suberr }); +mod_assert.equal(err.message, ': root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by another error, with annotation */ +err = new VError(suberr, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: proximate cause: 3 issues: root cause', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({ 'cause': suberr }, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: proximate cause: 3 issues: root cause', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* caused by another VError, with annotation. 
*/ +suberr = err; +err = new VError(suberr, 'top'); +mod_assert.equal(err.message, 'top: proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new VError({ 'cause': suberr }, 'top'); +mod_assert.equal(err.message, 'top: proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by a WError */ +suberr = new WError(new Error('root cause'), 'mid'); +err = new VError(suberr, 'top'); +mod_assert.equal(err.message, 'top: mid'); +mod_assert.ok(err.cause() === suberr); + +/* null cause (for backwards compatibility with older versions) */ +err = new VError(null, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: my error', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({ 'cause': null }, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); + +err = new VError(null); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* constructorOpt */ +function makeErr(options) { + return (new VError(options, 'test error')); +} +err = makeErr({}); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: test error', + ' at makeErr (tst.verror.js)', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = makeErr({ 'constructorOpt': makeErr }); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: test error', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); diff --git a/node_modules/fsevents/node_modules/verror/tests/tst.werror.js b/node_modules/fsevents/node_modules/verror/tests/tst.werror.js new file mode 100644 index 0000000..c8cdc61 --- /dev/null +++ b/node_modules/fsevents/node_modules/verror/tests/tst.werror.js @@ -0,0 +1,179 @@ +/* + * tst.werror.js: tests basic functionality of the WError class. + */ + +var mod_assert = require('assert'); +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; + +var err, suberr, stack, substack; + +/* + * Remove full paths and relative line numbers from stack traces so that we can + * compare against "known-good" output. + */ +function cleanStack(stacktxt) +{ + var re = new RegExp(__filename + ':\\d+:\\d+', 'gm'); + stacktxt = stacktxt.replace(re, 'tst.werror.js'); + return (stacktxt); +} + +/* + * Save the generic parts of all stack traces so we can avoid hardcoding + * Node-specific implementation details in our testing of stack traces. + */ +var nodestack = new Error().stack.split('\n').slice(2).join('\n'); + +/* no arguments */ +err = new WError(); +mod_assert.equal(err.name, 'WError'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof WError); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError', + ' at Object. 
(tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* options-argument form */ +err = new WError({}); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); + +/* simple message */ +err = new WError('my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: my error', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({}, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); + +/* printf-style message */ +err = new WError('%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.equal(err.toString(), 'WError: very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +err = new WError({}, '%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.equal(err.toString(), 'WError: very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +/* caused by another error, with no additional message */ +suberr = new Error('root cause'); +err = new WError(suberr); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new WError({ 'cause': suberr }); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by another error, with annotation */ +err = new WError(suberr, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues'); +mod_assert.equal(err.toString(), 'WError: proximate cause: 3 issues; ' + + 'caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: proximate cause: 3 issues; caused by Error: root cause', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({ 'cause': suberr }, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues'); +mod_assert.equal(err.toString(), 'WError: proximate cause: 3 issues; ' + + 'caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: proximate cause: 3 issues; caused by Error: root cause', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* caused by another WError, with annotation. 
*/ +suberr = err; +err = new WError(suberr, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), 'WError: top; caused by WError: ' + + 'proximate cause: 3 issues; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new WError({ 'cause': suberr }, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), 'WError: top; caused by WError: ' + + 'proximate cause: 3 issues; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by a VError */ +suberr = new VError(new Error('root cause'), 'mid'); +err = new WError(suberr, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), + 'WError: top; caused by VError: mid: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* null cause (for backwards compatibility with older versions) */ +err = new WError(null, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: my error', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({ 'cause': null }, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); + +err = new WError(null); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* constructorOpt */ +function makeErr(options) { + return (new WError(options, 'test error')); +} +err = makeErr({}); +mod_assert.equal(err.toString(), 'WError: test error'); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: test error', + ' at makeErr (tst.werror.js)', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = makeErr({ 'constructorOpt': makeErr }); +mod_assert.equal(err.toString(), 'WError: test error'); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: test error', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); diff --git a/node_modules/fsevents/node_modules/wide-align/.npmignore b/node_modules/fsevents/node_modules/wide-align/.npmignore new file mode 100644 index 0000000..d1a6b00 --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/.npmignore @@ -0,0 +1,5 @@ +*~ +/node_modules +.#* +/.nyc_output +/coverage diff --git a/node_modules/fsevents/node_modules/wide-align/LICENSE b/node_modules/fsevents/node_modules/wide-align/LICENSE new file mode 100644 index 0000000..f4be44d --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/fsevents/node_modules/wide-align/README.md b/node_modules/fsevents/node_modules/wide-align/README.md new file mode 100644 index 0000000..32f1be0 --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/README.md @@ -0,0 +1,47 @@ +wide-align +---------- + +A wide-character aware text alignment function for use in terminals / on the +console. + +### Usage + +``` +var align = require('wide-align') + +// Note that if you view this on a unicode console, all of the slashes are +// aligned. This is because on a console, all narrow characters are +// an en wide and all wide characters are an em. In browsers, this isn't +// held to and wide characters like "古" can be less than two narrow +// characters even with a fixed width font. + +console.log(align.center('abc', 10)) // ' abc ' +console.log(align.center('古古古', 10)) // ' 古古古 ' +console.log(align.left('abc', 10)) // 'abc ' +console.log(align.left('古古古', 10)) // '古古古 ' +console.log(align.right('abc', 10)) // ' abc' +console.log(align.right('古古古', 10)) // ' 古古古' +``` + +### Functions + +#### `align.center(str, length)` → `str` + +Returns *str* with spaces added to both sides such that it is *length* +chars long and centered in the spaces. + +#### `align.left(str, length)` → `str` + +Returns *str* with spaces to the right such that it is *length* chars long. + +#### `align.right(str, length)` → `str` + +Returns *str* with spaces to the left such that it is *length* chars long. + +### Origins + +These functions were originally taken from +[cliui](https://npmjs.com/package/cliui). Changes include switching to the +MUCH faster pad generation function from +[lodash](https://npmjs.com/package/lodash), making center alignment pad +both sides and adding left alignment. diff --git a/node_modules/fsevents/node_modules/wide-align/align.js b/node_modules/fsevents/node_modules/wide-align/align.js new file mode 100644 index 0000000..4f94ca4 --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/align.js @@ -0,0 +1,65 @@ +'use strict' +var stringWidth = require('string-width') + +exports.center = alignCenter +exports.left = alignLeft +exports.right = alignRight + +// lodash's way of generating pad characters.
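+// Builds a run of `width` spaces in O(log width) string operations: the seed string +// doubles on each pass and is appended whenever the remaining count is odd, i.e. the +// binary decomposition of `width`.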
+ +function createPadding (width) { + var result = '' + var string = ' ' + var n = width + do { + if (n % 2) { + result += string; + } + n = Math.floor(n / 2); + string += string; + } while (n); + + return result; +} + +function alignLeft (str, width) { + var trimmed = str.trimRight() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return trimmed + padding +} + +function alignRight (str, width) { + var trimmed = str.trimLeft() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return padding + trimmed +} + +function alignCenter (str, width) { + var trimmed = str.trim() + if (trimmed.length === 0 && str.length >= width) return str + var padLeft = '' + var padRight = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + var padLeftBy = parseInt((width - strWidth) / 2, 10) + padLeft = createPadding(padLeftBy) + padRight = createPadding(width - (strWidth + padLeftBy)) + } + + return padLeft + trimmed + padRight +} diff --git a/node_modules/fsevents/node_modules/wide-align/package.json b/node_modules/fsevents/node_modules/wide-align/package.json new file mode 100644 index 0000000..10bbbb1 --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/package.json @@ -0,0 +1,85 @@ +{ + "_args": [ + [ + "wide-align@^1.1.0", + "/Users/eshanker/Code/fsevents/node_modules/gauge" + ] + ], + "_from": "wide-align@>=1.1.0 <2.0.0", + "_id": "wide-align@1.1.0", + "_inCache": true, + "_installable": true, + "_location": "/wide-align", + "_nodeVersion": "4.2.2", + "_npmUser": { + "email": "me@re-becca.org", + "name": "iarna" + }, + "_npmVersion": "3.5.3", + "_phantomChildren": {}, + "_requested": { + "name": "wide-align", + "raw": "wide-align@^1.1.0", + "rawSpec": "^1.1.0", + "scope": null, + "spec": ">=1.1.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/gauge" + ], + "_resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.0.tgz", + "_shasum": "40edde802a71fea1f070da3e62dcda2e7add96ad", + "_shrinkwrap": null, + "_spec": "wide-align@^1.1.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/gauge", + "author": { + "email": "me@re-becca.org", + "name": "Rebecca Turner", + "url": "http://re-becca.org/" + }, + "bugs": { + "url": "https://github.com/iarna/wide-align/issues" + }, + "dependencies": { + "string-width": "^1.0.1" + }, + "description": "A wide-character aware text alignment function for use on the console or with fixed width fonts.", + "devDependencies": { + "tap": "^2.3.2" + }, + "directories": {}, + "dist": { + "shasum": "40edde802a71fea1f070da3e62dcda2e7add96ad", + "tarball": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.0.tgz" + }, + "gitHead": "fe3f7f210650913d5bee702d7e19938f6977bc8a", + "homepage": "https://github.com/iarna/wide-align#readme", + "keywords": [ + "align", + "cjkv", + "double", + "pad", + "unicode", + "wide" + ], + "license": "ISC", + "main": "align.js", + "maintainers": [ + { + "name": "iarna", + "email": "me@re-becca.org" + } + ], + "name": "wide-align", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/iarna/wide-align.git" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "version": "1.1.0" +} diff --git 
a/node_modules/fsevents/node_modules/wide-align/test/align.js b/node_modules/fsevents/node_modules/wide-align/test/align.js new file mode 100644 index 0000000..64e9f9d --- /dev/null +++ b/node_modules/fsevents/node_modules/wide-align/test/align.js @@ -0,0 +1,37 @@ +'use strict' +var test = require('tap').test +var align = require('..') + +test('align', function (t) { + t.is(align.center('abc', 10), ' abc ', 'center narrow') + t.is(align.center('古古古', 10), ' 古古古 ', 'center wide') + t.is(align.left('abc', 10), 'abc ', 'left narrow') + t.is(align.left('古古古', 10), '古古古 ', 'left wide') + t.is(align.right('abc', 10), ' abc', 'right narrow') + t.is(align.right('古古古', 10), ' 古古古', 'right wide') + + t.is(align.center('abc', 2), 'abc', 'center narrow overflow') + t.is(align.center('古古古', 4), '古古古', 'center wide overflow') + t.is(align.left('abc', 2), 'abc', 'left narrow overflow') + t.is(align.left('古古古', 4), '古古古', 'left wide overflow') + t.is(align.right('abc', 2), 'abc', 'right narrow overflow') + t.is(align.right('古古古', 4), '古古古', 'right wide overflow') + + t.is(align.left('', 5), ' ', 'left align nothing') + t.is(align.center('', 5), ' ', 'center align nothing') + t.is(align.right('', 5), ' ', 'right align nothing') + + t.is(align.left(' ', 5), ' ', 'left align whitespace') + t.is(align.center(' ', 5), ' ', 'center align whitespace') + t.is(align.right(' ', 5), ' ', 'right align whitespace') + + t.is(align.left(' ', 2), ' ', 'left align whitespace overflow') + t.is(align.center(' ', 2), ' ', 'center align whitespace overflow') + t.is(align.right(' ', 2), ' ', 'right align whitespace overflow') + + t.is(align.left('x ', 10), 'x ', 'left align whitespace mix') + t.is(align.center('x ', 10), ' x ', 'center align whitespace mix') + t.is(align.right('x ', 10), ' x', 'right align whitespace mix') + + t.end() +}) diff --git a/node_modules/fsevents/node_modules/wrappy/LICENSE b/node_modules/fsevents/node_modules/wrappy/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fsevents/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/fsevents/node_modules/wrappy/README.md b/node_modules/fsevents/node_modules/wrappy/README.md new file mode 100644 index 0000000..98eab25 --- /dev/null +++ b/node_modules/fsevents/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! +assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/node_modules/fsevents/node_modules/wrappy/package.json b/node_modules/fsevents/node_modules/wrappy/package.json new file mode 100644 index 0000000..d170a46 --- /dev/null +++ b/node_modules/fsevents/node_modules/wrappy/package.json @@ -0,0 +1,89 @@ +{ + "_args": [ + [ + "wrappy@1", + "/Users/eshanker/Code/fsevents/node_modules/inflight" + ] + ], + "_from": "wrappy@>=1.0.0 <2.0.0", + "_id": "wrappy@1.0.2", + "_inCache": true, + "_installable": true, + "_location": "/wrappy", + "_nodeVersion": "5.10.1", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/wrappy-1.0.2.tgz_1463527848281_0.037129373755306005" + }, + "_npmUser": { + "email": "kat@sykosomatic.org", + "name": "zkat" + }, + "_npmVersion": "3.9.1", + "_phantomChildren": {}, + "_requested": { + "name": "wrappy", + "raw": "wrappy@1", + "rawSpec": "1", + "scope": null, + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "_requiredBy": [ + "/inflight", + "/once" + ], + "_resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "_shasum": "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", + "_shrinkwrap": null, + "_spec": "wrappy@1", + "_where": "/Users/eshanker/Code/fsevents/node_modules/inflight", + "author": { + "email": "i@izs.me", + "name": "Isaac Z. 
Schlueter", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "dependencies": {}, + "description": "Callback wrapping utility", + "devDependencies": { + "tap": "^2.3.1" + }, + "directories": { + "test": "test" + }, + "dist": { + "shasum": "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f", + "tarball": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" + }, + "files": [ + "wrappy.js" + ], + "gitHead": "71d91b6dc5bdeac37e218c2cf03f9ab55b60d214", + "homepage": "https://github.com/npm/wrappy", + "license": "ISC", + "main": "wrappy.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + }, + { + "name": "zkat", + "email": "kat@sykosomatic.org" + } + ], + "name": "wrappy", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/wrappy.git" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "version": "1.0.2" +} diff --git a/node_modules/fsevents/node_modules/wrappy/wrappy.js b/node_modules/fsevents/node_modules/wrappy/wrappy.js new file mode 100644 index 0000000..bb7e7d6 --- /dev/null +++ b/node_modules/fsevents/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/node_modules/fsevents/node_modules/xtend/.jshintrc b/node_modules/fsevents/node_modules/xtend/.jshintrc new file mode 100644 index 0000000..77887b5 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/node_modules/fsevents/node_modules/xtend/.npmignore b/node_modules/fsevents/node_modules/xtend/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/fsevents/node_modules/xtend/LICENCE b/node_modules/fsevents/node_modules/xtend/LICENCE new file mode 100644 index 0000000..1a14b43 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/LICENCE @@ -0,0 +1,19 @@ +Copyright (c) 2012-2014 Raynos. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/fsevents/node_modules/xtend/Makefile b/node_modules/fsevents/node_modules/xtend/Makefile new file mode 100644 index 0000000..d583fcf --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/Makefile @@ -0,0 +1,4 @@ +browser: + node ./support/compile + +.PHONY: browser \ No newline at end of file diff --git a/node_modules/fsevents/node_modules/xtend/README.md b/node_modules/fsevents/node_modules/xtend/README.md new file mode 100644 index 0000000..093cb29 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. 
Does not mutate arguments +var combination = extend({ + a: "a", + b: 'c' +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licenced + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/fsevents/node_modules/xtend/immutable.js b/node_modules/fsevents/node_modules/xtend/immutable.js new file mode 100644 index 0000000..94889c9 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/fsevents/node_modules/xtend/mutable.js b/node_modules/fsevents/node_modules/xtend/mutable.js new file mode 100644 index 0000000..72debed --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/fsevents/node_modules/xtend/package.json b/node_modules/fsevents/node_modules/xtend/package.json new file mode 100644 index 0000000..993e214 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/package.json @@ -0,0 +1,109 @@ +{ + "_args": [ + [ + "xtend@^4.0.0", + "/Users/eshanker/Code/fsevents/node_modules/is-my-json-valid" + ] + ], + "_from": "xtend@>=4.0.0 <5.0.0", + "_id": "xtend@4.0.1", + "_inCache": true, + "_installable": true, + "_location": "/xtend", + "_nodeVersion": "0.10.32", + "_npmUser": { + "email": "raynos2@gmail.com", + "name": "raynos" + }, + "_npmVersion": "2.14.1", + "_phantomChildren": {}, + "_requested": { + "name": "xtend", + "raw": "xtend@^4.0.0", + "rawSpec": "^4.0.0", + "scope": null, + "spec": ">=4.0.0 <5.0.0", + "type": "range" + }, + "_requiredBy": [ + "/is-my-json-valid" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "_shasum": "a5c6d532be656e23db820efb943a1f04998d63af", + "_shrinkwrap": null, + "_spec": "xtend@^4.0.0", + "_where": "/Users/eshanker/Code/fsevents/node_modules/is-my-json-valid", + "author": { + "email": "raynos2@gmail.com", + "name": "Raynos" + }, + "bugs": { + "email": "raynos2@gmail.com", + "url": "https://github.com/Raynos/xtend/issues" + }, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "directories": {}, + "dist": { + "shasum": "a5c6d532be656e23db820efb943a1f04998d63af", + "tarball": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "engines": { + "node": ">=0.4" + }, + "gitHead": "23dc302a89756da89c1897bc732a752317e35390", + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "array", + "extend", + "merge", + "object", + "options", + "opts" + ], + "license": "MIT", + "main": "immutable", + "maintainers": [ + { + "name": "raynos", + "email": "raynos2@gmail.com" + } + ], + "name": "xtend", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": 
"git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "browsers": [ + "chrome/22..latest", + "chrome/canary", + "firefox/16..latest", + "firefox/nightly", + "ie/7..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "opera/12..latest", + "opera/next", + "safari/5.1..latest" + ], + "files": "test.js" + }, + "version": "4.0.1" +} diff --git a/node_modules/fsevents/node_modules/xtend/test.js b/node_modules/fsevents/node_modules/xtend/test.js new file mode 100644 index 0000000..093a2b0 --- /dev/null +++ b/node_modules/fsevents/node_modules/xtend/test.js @@ -0,0 +1,83 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) diff --git a/node_modules/fsevents/package.json b/node_modules/fsevents/package.json new file mode 100644 index 0000000..c8fe4e9 --- /dev/null +++ b/node_modules/fsevents/package.json @@ -0,0 +1,48 @@ +{ + "name": "fsevents", + "version": "1.0.14", + "description": "Native Access to Mac OS-X FSEvents", + "main": "fsevents.js", + "dependencies": { + "nan": "^2.3.0", + "node-pre-gyp": "^0.6.29" + }, + "os": [ + "darwin" + ], + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "install": "node-pre-gyp install --fallback-to-build", + "prepublish": "if [ $(npm -v | head -c 1) -lt 3 ]; then exit 1; fi && npm dedupe", + "test": "tap ./test" + }, + "binary": { + "module_name": "fse", + "module_path": "./lib/binding/{configuration}/{node_abi}-{platform}-{arch}/", + "remote_path": "./v{version}/", + "package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz", + "host": "https://fsevents-binaries.s3-us-west-2.amazonaws.com" + }, + "repository": { + "type": "git", + "url": "https://github.com/strongloop/fsevents.git" + }, + "keywords": [ + "fsevents", + "mac" + ], + "author": "Philipp Dunkel ", + "license": "MIT", + "bugs": { + "url": "https://github.com/strongloop/fsevents/issues" 
+ }, + "bundledDependencies": [ + "node-pre-gyp" + ], + "homepage": "https://github.com/strongloop/fsevents", + "devDependencies": { + "tap": "~0.4.8" + } +} diff --git a/node_modules/fsevents/src/async.cc b/node_modules/fsevents/src/async.cc new file mode 100644 index 0000000..d512994 --- /dev/null +++ b/node_modules/fsevents/src/async.cc @@ -0,0 +1,43 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. +*/ + + +void async_propagate(uv_async_t *async) { + if (!async->data) return; + FSEvents *fse = (FSEvents *)async->data; + CFIndex idx, cnt; + fse_event *event; + char pathbuf[1024]; + const char *pathptr = NULL; + fse->lock(); + cnt = fse->events.size(); + for (idx=0; idxevents.at(idx); + if (event == NULL) continue; + pathptr = CFStringGetCStringPtr(event->path, kCFStringEncodingUTF8); + if (!pathptr) CFStringGetCString(event->path, pathbuf, 1024, kCFStringEncodingUTF8); + fse->emitEvent(pathptr ? pathptr : pathbuf, event->flags, event->id); + delete event; + } + if (cnt>0) fse->events.clear(); + fse->unlock(); +} + +void FSEvents::asyncStart() { + if (async.data == this) return; + async.data = this; + uv_async_init(uv_default_loop(), &async, (uv_async_cb) async_propagate); +} + +void FSEvents::asyncTrigger() { + if (async.data != this) return; + uv_async_send(&async); +} + +void FSEvents::asyncStop() { + if (async.data != this) return; + async.data = NULL; + uv_close((uv_handle_t *) &async, NULL); +} diff --git a/node_modules/fsevents/src/constants.cc b/node_modules/fsevents/src/constants.cc new file mode 100644 index 0000000..fbd61ee --- /dev/null +++ b/node_modules/fsevents/src/constants.cc @@ -0,0 +1,110 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. +*/ + +// constants from https://developer.apple.com/library/mac/documentation/Darwin/Reference/FSEvents_Ref/index.html#//apple_ref/doc/constant_group/FSEventStreamEventFlags +#ifndef kFSEventStreamEventFlagNone +#define kFSEventStreamEventFlagNone 0x00000000 +#endif + +#ifndef kFSEventStreamEventFlagMustScanSubDirs +#define kFSEventStreamEventFlagMustScanSubDirs 0x00000001 +#endif + +#ifndef kFSEventStreamEventFlagUserDropped +#define kFSEventStreamEventFlagUserDropped 0x00000002 +#endif + +#ifndef kFSEventStreamEventFlagKernelDropped +#define kFSEventStreamEventFlagKernelDropped 0x00000004 +#endif + +#ifndef kFSEventStreamEventFlagEventIdsWrapped +#define kFSEventStreamEventFlagEventIdsWrapped 0x00000008 +#endif + +#ifndef kFSEventStreamEventFlagHistoryDone +#define kFSEventStreamEventFlagHistoryDone 0x00000010 +#endif + +#ifndef kFSEventStreamEventFlagRootChanged +#define kFSEventStreamEventFlagRootChanged 0x00000020 +#endif + +#ifndef kFSEventStreamEventFlagMount +#define kFSEventStreamEventFlagMount 0x00000040 +#endif + +#ifndef kFSEventStreamEventFlagUnmount +#define kFSEventStreamEventFlagUnmount 0x00000080 +#endif + +#ifndef kFSEventStreamEventFlagItemCreated +#define kFSEventStreamEventFlagItemCreated 0x00000100 +#endif + +#ifndef kFSEventStreamEventFlagItemRemoved +#define kFSEventStreamEventFlagItemRemoved 0x00000200 +#endif + +#ifndef kFSEventStreamEventFlagItemInodeMetaMod +#define kFSEventStreamEventFlagItemInodeMetaMod 0x00000400 +#endif + +#ifndef kFSEventStreamEventFlagItemRenamed +#define kFSEventStreamEventFlagItemRenamed 0x00000800 +#endif + +#ifndef kFSEventStreamEventFlagItemModified +#define kFSEventStreamEventFlagItemModified 0x00001000 +#endif + +#ifndef kFSEventStreamEventFlagItemFinderInfoMod +#define kFSEventStreamEventFlagItemFinderInfoMod 0x00002000 +#endif + +#ifndef 
kFSEventStreamEventFlagItemChangeOwner +#define kFSEventStreamEventFlagItemChangeOwner 0x00004000 +#endif + +#ifndef kFSEventStreamEventFlagItemXattrMod +#define kFSEventStreamEventFlagItemXattrMod 0x00008000 +#endif + +#ifndef kFSEventStreamEventFlagItemIsFile +#define kFSEventStreamEventFlagItemIsFile 0x00010000 +#endif + +#ifndef kFSEventStreamEventFlagItemIsDir +#define kFSEventStreamEventFlagItemIsDir 0x00020000 +#endif + +#ifndef kFSEventStreamEventFlagItemIsSymlink +#define kFSEventStreamEventFlagItemIsSymlink 0x00040000 +#endif + +static v8::Local<v8::Object> Constants() { + v8::Local<v8::Object> object = Nan::New<v8::Object>(); + object->Set(Nan::New("kFSEventStreamEventFlagNone").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagNone)); + object->Set(Nan::New("kFSEventStreamEventFlagMustScanSubDirs").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagMustScanSubDirs)); + object->Set(Nan::New("kFSEventStreamEventFlagUserDropped").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagUserDropped)); + object->Set(Nan::New("kFSEventStreamEventFlagKernelDropped").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagKernelDropped)); + object->Set(Nan::New("kFSEventStreamEventFlagEventIdsWrapped").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagEventIdsWrapped)); + object->Set(Nan::New("kFSEventStreamEventFlagHistoryDone").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagHistoryDone)); + object->Set(Nan::New("kFSEventStreamEventFlagRootChanged").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagRootChanged)); + object->Set(Nan::New("kFSEventStreamEventFlagMount").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagMount)); + object->Set(Nan::New("kFSEventStreamEventFlagUnmount").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagUnmount)); + object->Set(Nan::New("kFSEventStreamEventFlagItemCreated").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemCreated)); + object->Set(Nan::New("kFSEventStreamEventFlagItemRemoved").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemRemoved)); + object->Set(Nan::New("kFSEventStreamEventFlagItemInodeMetaMod").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemInodeMetaMod)); + object->Set(Nan::New("kFSEventStreamEventFlagItemRenamed").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemRenamed)); + object->Set(Nan::New("kFSEventStreamEventFlagItemModified").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemModified)); + object->Set(Nan::New("kFSEventStreamEventFlagItemFinderInfoMod").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemFinderInfoMod)); + object->Set(Nan::New("kFSEventStreamEventFlagItemChangeOwner").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemChangeOwner)); + object->Set(Nan::New("kFSEventStreamEventFlagItemXattrMod").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemXattrMod)); + object->Set(Nan::New("kFSEventStreamEventFlagItemIsFile").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemIsFile)); + object->Set(Nan::New("kFSEventStreamEventFlagItemIsDir").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemIsDir)); + object->Set(Nan::New("kFSEventStreamEventFlagItemIsSymlink").ToLocalChecked(), Nan::New(kFSEventStreamEventFlagItemIsSymlink)); + return object; +} diff --git a/node_modules/fsevents/src/locking.cc b/node_modules/fsevents/src/locking.cc new file mode 100644 index 0000000..2712080 --- /dev/null +++ b/node_modules/fsevents/src/locking.cc @@ -0,0 +1,27 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. 
+*/ + +void FSEvents::lockingStart() { + if (lockStarted) return; + lockStarted = true; + pthread_mutex_init(&lockmutex, NULL); +} + +void FSEvents::lock() { + if (!lockStarted) return; + pthread_mutex_lock(&lockmutex); +} + +void FSEvents::unlock() { + if (!lockStarted) return; + pthread_mutex_unlock(&lockmutex); +} + +void FSEvents::lockingStop() { + if (!lockStarted) return; + lockStarted = false; + + pthread_mutex_destroy(&lockmutex); +} diff --git a/node_modules/fsevents/src/methods.cc b/node_modules/fsevents/src/methods.cc new file mode 100644 index 0000000..976c959 --- /dev/null +++ b/node_modules/fsevents/src/methods.cc @@ -0,0 +1,42 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. +*/ + +void FSEvents::emitEvent(const char *path, UInt32 flags, UInt64 id) { + if (!handler) return; + Nan::HandleScope handle_scope; + v8::Local<v8::Value> argv[] = { + Nan::New(path).ToLocalChecked(), + Nan::New(flags), + Nan::New(id) + }; + handler->Call(3, argv); +} + +NAN_METHOD(FSEvents::New) { + Nan::Utf8String *path = new Nan::Utf8String(info[0]); + Nan::Callback *callback = new Nan::Callback(info[1].As<v8::Function>()); + + FSEvents *fse = new FSEvents(**path, callback); + fse->Wrap(info.This()); + + info.GetReturnValue().Set(info.This()); +} + +NAN_METHOD(FSEvents::Stop) { + FSEvents* fse = node::ObjectWrap::Unwrap<FSEvents>(info.This()); + + fse->threadStop(); + fse->asyncStop(); + + info.GetReturnValue().Set(info.This()); +} + +NAN_METHOD(FSEvents::Start) { + FSEvents* fse = node::ObjectWrap::Unwrap<FSEvents>(info.This()); + fse->asyncStart(); + fse->threadStart(); + + info.GetReturnValue().Set(info.This()); +} diff --git a/node_modules/fsevents/src/storage.cc b/node_modules/fsevents/src/storage.cc new file mode 100644 index 0000000..9dc5588 --- /dev/null +++ b/node_modules/fsevents/src/storage.cc @@ -0,0 +1,27 @@ +/* + ** © 2014 by Philipp Dunkel + ** Licensed under MIT License. + */ + +struct fse_event { + UInt64 id; + UInt32 flags; + CFStringRef path; + + fse_event(CFStringRef eventPath, UInt32 eventFlag, UInt64 eventId) { + this->path = eventPath; + this->flags = eventFlag; + this->id = eventId; + if (this->path != NULL) + CFRetain(this->path); + } + + ~fse_event() { + if (this->path != NULL) + CFRelease(this->path); + } + +private: + fse_event(const fse_event&); + void operator=(const fse_event&); +}; diff --git a/node_modules/fsevents/src/thread.cc b/node_modules/fsevents/src/thread.cc new file mode 100644 index 0000000..4813fdc --- /dev/null +++ b/node_modules/fsevents/src/thread.cc @@ -0,0 +1,72 @@ +/* +** © 2014 by Philipp Dunkel +** Licensed under MIT License. 
+*/ + +// constants from https://developer.apple.com/library/mac/documentation/Darwin/Reference/FSEvents_Ref/index.html#//apple_ref/doc/constant_group/FSEventStreamCreateFlags +#ifndef kFSEventStreamCreateFlagNone +#define kFSEventStreamCreateFlagNone 0x00000000 +#endif + +#ifndef kFSEventStreamCreateFlagUseCFTypes +#define kFSEventStreamCreateFlagUseCFTypes 0x00000001 +#endif + +#ifndef kFSEventStreamCreateFlagNoDefer +#define kFSEventStreamCreateFlagNoDefer 0x00000002 +#endif + +#ifndef kFSEventStreamCreateFlagWatchRoot +#define kFSEventStreamCreateFlagWatchRoot 0x00000004 +#endif + +#ifndef kFSEventStreamCreateFlagIgnoreSelf +#define kFSEventStreamCreateFlagIgnoreSelf 0x00000008 +#endif + +#ifndef kFSEventStreamCreateFlagFileEvents +#define kFSEventStreamCreateFlagFileEvents 0x00000010 +#endif + +void FSEvents::threadStart() { + if (threadloop) return; + pthread_create(&thread, NULL, &FSEvents::threadRun, this); +} + +void HandleStreamEvents(ConstFSEventStreamRef stream, void *ctx, size_t numEvents, void *eventPaths, const FSEventStreamEventFlags eventFlags[], const FSEventStreamEventId eventIds[]) { + FSEvents * fse = (FSEvents *)ctx; + size_t idx; + fse->lock(); + for (idx=0; idx < numEvents; idx++) { + fse_event *event = new fse_event( + (CFStringRef)CFArrayGetValueAtIndex((CFArrayRef)eventPaths, idx), + eventFlags[idx], + eventIds[idx] + ); + fse->events.push_back(event); + } + fse->asyncTrigger(); + fse->unlock(); +} + +void *FSEvents::threadRun(void *ctx) { + FSEvents *fse = (FSEvents*)ctx; + FSEventStreamContext context = { 0, ctx, NULL, NULL, NULL }; + fse->threadloop = CFRunLoopGetCurrent(); + FSEventStreamRef stream = FSEventStreamCreate(NULL, &HandleStreamEvents, &context, fse->paths, kFSEventStreamEventIdSinceNow, (CFAbsoluteTime) 0.1, kFSEventStreamCreateFlagNone | kFSEventStreamCreateFlagWatchRoot | kFSEventStreamCreateFlagFileEvents | kFSEventStreamCreateFlagUseCFTypes); + FSEventStreamScheduleWithRunLoop(stream, fse->threadloop, kCFRunLoopDefaultMode); + FSEventStreamStart(stream); + CFRunLoopRun(); + FSEventStreamStop(stream); + FSEventStreamUnscheduleFromRunLoop(stream, fse->threadloop, kCFRunLoopDefaultMode); + FSEventStreamInvalidate(stream); + FSEventStreamRelease(stream); + fse->threadloop = NULL; + return NULL; +} + +void FSEvents::threadStop() { + if (!threadloop) return; + CFRunLoopStop(threadloop); + pthread_join(thread, NULL); +} diff --git a/node_modules/fstream-ignore/LICENSE b/node_modules/fstream-ignore/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fstream-ignore/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/fstream-ignore/README.md b/node_modules/fstream-ignore/README.md new file mode 100644 index 0000000..31170fe --- /dev/null +++ b/node_modules/fstream-ignore/README.md @@ -0,0 +1,22 @@ +# fstream-ignore + +A fstream DirReader that filters out files that match globs in `.ignore` +files throughout the tree, like how git ignores files based on a +`.gitignore` file. + +Here's an example: + +```javascript +var Ignore = require("fstream-ignore") +Ignore({ path: __dirname + , ignoreFiles: [".ignore", ".gitignore"] + }) + .on("child", function (c) { + console.error(c.path.substr(c.root.path.length + 1)) + }) + .pipe(tar.Pack()) + .pipe(fs.createWriteStream("foo.tar")) +``` + +This will tar up the files in __dirname into `foo.tar`, ignoring +anything matched by the globs in any .ignore or .gitignore file. diff --git a/node_modules/fstream-ignore/ignore.js b/node_modules/fstream-ignore/ignore.js new file mode 100644 index 0000000..212fc7b --- /dev/null +++ b/node_modules/fstream-ignore/ignore.js @@ -0,0 +1,275 @@ +// Essentially, this is a fstream.DirReader class, but with a +// bit of special logic to read the specified sort of ignore files, +// and a filter that prevents it from picking up anything excluded +// by those files. + +var Minimatch = require("minimatch").Minimatch +, fstream = require("fstream") +, DirReader = fstream.DirReader +, inherits = require("inherits") +, path = require("path") +, fs = require("fs") + +module.exports = IgnoreReader + +inherits(IgnoreReader, DirReader) + +function IgnoreReader (props) { + if (!(this instanceof IgnoreReader)) { + return new IgnoreReader(props) + } + + // must be a Directory type + if (typeof props === "string") { + props = { path: path.resolve(props) } + } + + props.type = "Directory" + props.Directory = true + + if (!props.ignoreFiles) props.ignoreFiles = [".ignore"] + this.ignoreFiles = props.ignoreFiles + + this.ignoreRules = null + + // ensure that .ignore files always show up at the top of the list + // that way, they can be read before proceeding to handle other + // entries in that same folder + if (props.sort) { + this._sort = props.sort === "alpha" ? alphasort : props.sort + props.sort = null + } + + this.on("entries", function () { + // if there are any ignore files in the list, then + // pause and add them. + // then, filter the list based on our ignoreRules + + var hasIg = this.entries.some(this.isIgnoreFile, this) + + if (!hasIg) return this.filterEntries() + + this.addIgnoreFiles() + }) + + // we filter entries before we know what they are. + // however, directories have to be re-tested against + // rules with a "/" appended, because "a/b/" will only + // match if "a/b" is a dir, and not otherwise. 
+ this.on("_entryStat", function (entry, props) { + var t = entry.basename + if (!this.applyIgnores(entry.basename, + entry.type === "Directory", + entry)) { + entry.abort() + } + }.bind(this)) + + DirReader.call(this, props) +} + + +IgnoreReader.prototype.addIgnoreFiles = function () { + if (this._paused) { + this.once("resume", this.addIgnoreFiles) + return + } + if (this._ignoreFilesAdded) return + this._ignoreFilesAdded = true + + var newIg = this.entries.filter(this.isIgnoreFile, this) + , count = newIg.length + , errState = null + + if (!count) return + + this.pause() + + var then = function (er) { + if (errState) return + if (er) return this.emit("error", errState = er) + if (-- count === 0) { + this.filterEntries() + this.resume() + } else { + this.addIgnoreFile(newIg[newIg.length - count], then) + } + }.bind(this) + + this.addIgnoreFile(newIg[0], then) +} + + +IgnoreReader.prototype.isIgnoreFile = function (e) { + return e !== "." && + e !== ".." && + -1 !== this.ignoreFiles.indexOf(e) +} + + +IgnoreReader.prototype.getChildProps = function (stat) { + var props = DirReader.prototype.getChildProps.call(this, stat) + props.ignoreFiles = this.ignoreFiles + + // Directories have to be read as IgnoreReaders + // otherwise fstream.Reader will create a DirReader instead. + if (stat.isDirectory()) { + props.type = this.constructor + } + return props +} + + +IgnoreReader.prototype.addIgnoreFile = function (e, cb) { + // read the file, and then call addIgnoreRules + // if there's an error, then tell the cb about it. + + var ig = path.resolve(this.path, e) + fs.readFile(ig, function (er, data) { + if (er) return cb(er) + + this.emit("ignoreFile", e, data) + var rules = this.readRules(data, e) + this.addIgnoreRules(rules, e) + cb() + }.bind(this)) +} + + +IgnoreReader.prototype.readRules = function (buf, e) { + return buf.toString().split(/\r?\n/) +} + + +// Override this to do fancier things, like read the +// "files" array from a package.json file or something. +IgnoreReader.prototype.addIgnoreRules = function (set, e) { + // filter out anything obvious + set = set.filter(function (s) { + s = s.trim() + return s && !s.match(/^#/) + }) + + // no rules to add! + if (!set.length) return + + // now get a minimatch object for each one of these. + // Note that we need to allow dot files by default, and + // not switch the meaning of their exclusion + var mmopt = { matchBase: true, dot: true, flipNegate: true } + , mm = set.map(function (s) { + var m = new Minimatch(s, mmopt) + m.ignoreFile = e + return m + }) + + if (!this.ignoreRules) this.ignoreRules = [] + this.ignoreRules.push.apply(this.ignoreRules, mm) +} + + +IgnoreReader.prototype.filterEntries = function () { + // this exclusion is at the point where we know the list of + // entries in the dir, but don't know what they are. since + // some of them *might* be directories, we have to run the + // match in dir-mode as well, so that we'll pick up partials + // of files that will be included later. Anything included + // at this point will be checked again later once we know + // what it is. + this.entries = this.entries.filter(function (entry) { + // at this point, we don't know if it's a dir or not. 
+ return this.applyIgnores(entry) || this.applyIgnores(entry, true) + }, this) +} + + +IgnoreReader.prototype.applyIgnores = function (entry, partial, obj) { + var included = true + + // this = /a/b/c + // entry = d + // parent /a/b sees c/d + if (this.parent && this.parent.applyIgnores) { + var pt = this.basename + "/" + entry + included = this.parent.applyIgnores(pt, partial) + } + + // Negated Rules + // Since we're *ignoring* things here, negating means that a file + // is re-included, if it would have been excluded by a previous + // rule. So, negated rules are only relevant if the file + // has been excluded. + // + // Similarly, if a file has been excluded, then there's no point + // trying it against rules that have already been applied + // + // We're using the "flipnegate" flag here, which tells minimatch + // to set the "negate" for our information, but still report + // whether the core pattern was a hit or a miss. + + if (!this.ignoreRules) { + return included + } + + this.ignoreRules.forEach(function (rule) { + // negation means inclusion + if (rule.negate && included || + !rule.negate && !included) { + // unnecessary + return + } + + // first, match against /foo/bar + var match = rule.match("/" + entry) + + if (!match) { + // try with the leading / trimmed off the test + // eg: foo/bar instead of /foo/bar + match = rule.match(entry) + } + + // if the entry is a directory, then it will match + // with a trailing slash. eg: /foo/bar/ or foo/bar/ + if (!match && partial) { + match = rule.match("/" + entry + "/") || + rule.match(entry + "/") + } + + // When including a file with a negated rule, it's + // relevant if a directory partially matches, since + // it may then match a file within it. + // Eg, if you ignore /a, but !/a/b/c + if (!match && rule.negate && partial) { + match = rule.match("/" + entry, true) || + rule.match(entry, true) + } + + if (match) { + included = rule.negate + } + }, this) + + return included +} + + +IgnoreReader.prototype.sort = function (a, b) { + var aig = this.ignoreFiles.indexOf(a) !== -1 + , big = this.ignoreFiles.indexOf(b) !== -1 + + if (aig && !big) return -1 + if (big && !aig) return 1 + return this._sort(a, b) +} + +IgnoreReader.prototype._sort = function (a, b) { + return 0 +} + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} diff --git a/node_modules/fstream-ignore/package.json b/node_modules/fstream-ignore/package.json new file mode 100644 index 0000000..dd4b707 --- /dev/null +++ b/node_modules/fstream-ignore/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "fstream-ignore", + "description": "A thing for ignoring files based on globs", + "version": "1.0.5", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/fstream-ignore.git" + }, + "main": "ignore.js", + "scripts": { + "test": "tap test/*.js --cov" + }, + "dependencies": { + "fstream": "^1.0.0", + "inherits": "2", + "minimatch": "^3.0.0" + }, + "devDependencies": { + "mkdirp": "", + "rimraf": "", + "tap": "^5.7.1" + }, + "license": "ISC", + "files": [ + "ignore.js" + ] +} diff --git a/node_modules/fstream/.npmignore b/node_modules/fstream/.npmignore new file mode 100644 index 0000000..494272a --- /dev/null +++ b/node_modules/fstream/.npmignore @@ -0,0 +1,5 @@ +.*.swp +node_modules/ +examples/deep-copy/ +examples/path/ +examples/filter-copy/ diff --git a/node_modules/fstream/.travis.yml b/node_modules/fstream/.travis.yml new file mode 100644 index 0000000..9f5972a --- /dev/null +++ b/node_modules/fstream/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +node_js: + - "6" + - "4" + - "0.10" + - "0.12" +before_install: + - "npm config set spin false" + - "npm install -g npm/npm" diff --git a/node_modules/fstream/LICENSE b/node_modules/fstream/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fstream/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fstream/README.md b/node_modules/fstream/README.md new file mode 100644 index 0000000..9d8cb77 --- /dev/null +++ b/node_modules/fstream/README.md @@ -0,0 +1,76 @@ +Like FS streams, but with stat on them, and supporting directories and +symbolic links, as well as normal files. Also, you can use this to set +the stats on a file, even if you don't change its contents, or to create +a symlink, etc. + +So, for example, you can "write" a directory, and it'll call `mkdir`. You +can specify a uid and gid, and it'll call `chown`. You can specify a +`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink +and provide a `linkpath` and it'll call `symlink`. + +Note that it won't automatically resolve symbolic links. So, if you +call `fstream.Reader('/some/symlink')` then you'll get an object +that stats and then ends immediately (since it has no data). To follow +symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow: +true })`. + +There are various checks to make sure that the bytes emitted are the +same as the intended size, if the size is set. + +## Examples + +```javascript +fstream + .Writer({ path: "path/to/file" + , mode: 0755 + , size: 6 + }) + .write("hello\n") + .end() +``` + +This will create the directories if they're missing, and then write +`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have +been written when it's done. 
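
The examples above cover the Writer side; as an illustrative aside (a sketch only, not part of the vendored files), the Reader side works the same way. `fstream.Reader` stats the path first — so the `stat` event fires before any data — and, for a plain file, then behaves like an `fs.ReadStream`, as the `reader.js`/`proxy-reader.js` sources later in this diff show:

```javascript
// Sketch: read a single file with fstream and collect its contents.
var fstream = require('fstream')

var chunks = []
fstream
  .Reader({ path: 'path/to/file' })
  .on('stat', function (props) {
    // fires once the path has been stat'ed, before any data is emitted
    console.error('reading %d bytes', props.size)
  })
  .on('data', function (c) { chunks.push(c) })
  .on('end', function () { console.log(Buffer.concat(chunks).toString()) })
```
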
+ +```javascript +fstream + .Writer({ path: "path/to/file" + , mode: 0755 + , size: 6 + , flags: "a" + }) + .write("hello\n") + .end() +``` + +You can pass flags in, if you want to append to a file. + +```javascript +fstream + .Writer({ path: "path/to/symlink" + , linkpath: "./file" + , SymbolicLink: true + , mode: "0755" // octal strings supported + }) + .end() +``` + +If isSymbolicLink is a function, it'll be called, and if it returns +true, then it'll treat it as a symlink. If it's not a function, then +any truish value will make a symlink, or you can set `type: +'SymbolicLink'`, which does the same thing. + +Note that the linkpath is relative to the symbolic link location, not +the parent dir or cwd. + +```javascript +fstream + .Reader("path/to/dir") + .pipe(fstream.Writer("path/to/other/dir")) +``` + +This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other +dir exists and isn't a directory, then it'll emit an error. It'll also +set the uid, gid, mode, etc. to be identical. In this way, it's more +like `rsync -a` than simply a copy. diff --git a/node_modules/fstream/examples/filter-pipe.js b/node_modules/fstream/examples/filter-pipe.js new file mode 100644 index 0000000..83dadef --- /dev/null +++ b/node_modules/fstream/examples/filter-pipe.js @@ -0,0 +1,134 @@ +var fstream = require('../fstream.js') +var path = require('path') + +var r = fstream.Reader({ + path: path.dirname(__dirname), + filter: function () { + return !this.basename.match(/^\./) && + !this.basename.match(/^node_modules$/) && + !this.basename.match(/^deep-copy$/) && + !this.basename.match(/^filter-copy$/) + } +}) + +// this writer will only write directories +var w = fstream.Writer({ + path: path.resolve(__dirname, 'filter-copy'), + type: 'Directory', + filter: function () { + return this.type === 'Directory' + } +}) + +var indent = '' + +r.on('entry', appears) +r.on('ready', function () { + console.error('ready to begin!', r.path) +}) + +function appears (entry) { + console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename) + if (foggy) { + console.error('FOGGY!') + var p = entry + do { + console.error(p.depth, p.path, p._paused) + p = p.parent + } while (p) + + throw new Error('\u001b[mshould not have entries while foggy') + } + indent += '\t' + entry.on('data', missile(entry)) + entry.on('end', runaway(entry)) + entry.on('entry', appears) +} + +var foggy +function missile (entry) { + function liftFog (who) { + if (!foggy) return + if (who) { + console.error('%s breaks the spell!', who && who.path) + } else { + console.error('the spell expires!') + } + console.error('\u001b[mthe fog lifts!\n') + clearTimeout(foggy) + foggy = null + if (entry._paused) entry.resume() + } + + if (entry.type === 'Directory') { + var ended = false + entry.once('end', function () { ended = true }) + return function (c) { + // throw in some pathological pause()/resume() behavior + // just for extra fun. + process.nextTick(function () { + if (!foggy && !ended) { // && Math.random() < 0.3) { + console.error(indent + '%s casts a spell', entry.basename) + console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') + entry.pause() + entry.once('resume', liftFog) + foggy = setTimeout(liftFog, 1000) + } + }) + } + } + + return function (c) { + var e = Math.random() < 0.5 + console.error(indent + '%s %s for %d damage!', + entry.basename, + e ? 
'is struck' : 'fires a chunk', + c.length) + } +} + +function runaway (entry) { + return function () { + var e = Math.random() < 0.5 + console.error(indent + '%s %s', + entry.basename, + e ? 'turns to flee' : 'is vanquished!') + indent = indent.slice(0, -1) + } +} + +w.on('entry', attacks) +// w.on('ready', function () { attacks(w) }) +function attacks (entry) { + console.error(indent + '%s %s!', entry.basename, + entry.type === 'Directory' ? 'calls for backup' : 'attacks') + entry.on('entry', attacks) +} + +var ended = false +var i = 1 +r.on('end', function () { + if (foggy) clearTimeout(foggy) + console.error("\u001b[mIT'S OVER!!") + console.error('A WINNAR IS YOU!') + + console.log('ok ' + (i++) + ' A WINNAR IS YOU') + ended = true + // now go through and verify that everything in there is a dir. + var p = path.resolve(__dirname, 'filter-copy') + var checker = fstream.Reader({ path: p }) + checker.checker = true + checker.on('child', function (e) { + var ok = e.type === 'Directory' + console.log((ok ? '' : 'not ') + 'ok ' + (i++) + + ' should be a dir: ' + + e.path.substr(checker.path.length + 1)) + }) +}) + +process.on('exit', function () { + console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended') + console.log('1..' + i) +}) + +r.pipe(w) diff --git a/node_modules/fstream/examples/pipe.js b/node_modules/fstream/examples/pipe.js new file mode 100644 index 0000000..3de42ef --- /dev/null +++ b/node_modules/fstream/examples/pipe.js @@ -0,0 +1,118 @@ +var fstream = require('../fstream.js') +var path = require('path') + +var r = fstream.Reader({ + path: path.dirname(__dirname), + filter: function () { + return !this.basename.match(/^\./) && + !this.basename.match(/^node_modules$/) && + !this.basename.match(/^deep-copy$/) + } +}) + +var w = fstream.Writer({ + path: path.resolve(__dirname, 'deep-copy'), + type: 'Directory' +}) + +var indent = '' + +r.on('entry', appears) +r.on('ready', function () { + console.error('ready to begin!', r.path) +}) + +function appears (entry) { + console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry) + if (foggy) { + console.error('FOGGY!') + var p = entry + do { + console.error(p.depth, p.path, p._paused) + p = p.parent + } while (p) + + throw new Error('\u001b[mshould not have entries while foggy') + } + indent += '\t' + entry.on('data', missile(entry)) + entry.on('end', runaway(entry)) + entry.on('entry', appears) +} + +var foggy +function missile (entry) { + function liftFog (who) { + if (!foggy) return + if (who) { + console.error('%s breaks the spell!', who && who.path) + } else { + console.error('the spell expires!') + } + console.error('\u001b[mthe fog lifts!\n') + clearTimeout(foggy) + foggy = null + if (entry._paused) entry.resume() + } + + if (entry.type === 'Directory') { + var ended = false + entry.once('end', function () { ended = true }) + return function (c) { + // throw in some pathological pause()/resume() behavior + // just for extra fun. + process.nextTick(function () { + if (!foggy && !ended) { // && Math.random() < 0.3) { + console.error(indent + '%s casts a spell', entry.basename) + console.error('\na slowing fog comes over the battlefield...\n\u001b[32m') + entry.pause() + entry.once('resume', liftFog) + foggy = setTimeout(liftFog, 10) + } + }) + } + } + + return function (c) { + var e = Math.random() < 0.5 + console.error(indent + '%s %s for %d damage!', + entry.basename, + e ? 
'is struck' : 'fires a chunk', + c.length) + } +} + +function runaway (entry) { + return function () { + var e = Math.random() < 0.5 + console.error(indent + '%s %s', + entry.basename, + e ? 'turns to flee' : 'is vanquished!') + indent = indent.slice(0, -1) + } +} + +w.on('entry', attacks) +// w.on('ready', function () { attacks(w) }) +function attacks (entry) { + console.error(indent + '%s %s!', entry.basename, + entry.type === 'Directory' ? 'calls for backup' : 'attacks') + entry.on('entry', attacks) +} + +var ended = false +r.on('end', function () { + if (foggy) clearTimeout(foggy) + console.error("\u001b[mIT'S OVER!!") + console.error('A WINNAR IS YOU!') + + console.log('ok 1 A WINNAR IS YOU') + ended = true +}) + +process.on('exit', function () { + console.log((ended ? '' : 'not ') + 'ok 2 ended') + console.log('1..2') +}) + +r.pipe(w) diff --git a/node_modules/fstream/examples/reader.js b/node_modules/fstream/examples/reader.js new file mode 100644 index 0000000..19affbe --- /dev/null +++ b/node_modules/fstream/examples/reader.js @@ -0,0 +1,68 @@ +var fstream = require('../fstream.js') +var tap = require('tap') +var fs = require('fs') +var path = require('path') +var dir = path.dirname(__dirname) + +tap.test('reader test', function (t) { + var children = -1 + var gotReady = false + var ended = false + + var r = fstream.Reader({ + path: dir, + filter: function () { + // return this.parent === r + return this.parent === r || this === r + } + }) + + r.on('ready', function () { + gotReady = true + children = fs.readdirSync(dir).length + console.error('Setting expected children to ' + children) + t.equal(r.type, 'Directory', 'should be a directory') + }) + + r.on('entry', function (entry) { + children-- + if (!gotReady) { + t.fail('children before ready!') + } + t.equal(entry.dirname, r.path, 'basename is parent dir') + }) + + r.on('error', function (er) { + t.fail(er) + t.end() + process.exit(1) + }) + + r.on('end', function () { + t.equal(children, 0, 'should have seen all children') + ended = true + }) + + var closed = false + r.on('close', function () { + t.ok(ended, 'saw end before close') + t.notOk(closed, 'close should only happen once') + closed = true + t.end() + }) +}) + +tap.test('reader error test', function (t) { + // assumes non-root on a *nix system + var r = fstream.Reader({ path: '/etc/shadow' }) + + r.once('error', function (er) { + t.ok(true) + t.end() + }) + + r.on('end', function () { + t.fail('reader ended without error') + t.end() + }) +}) diff --git a/node_modules/fstream/examples/symlink-write.js b/node_modules/fstream/examples/symlink-write.js new file mode 100644 index 0000000..19e81ee --- /dev/null +++ b/node_modules/fstream/examples/symlink-write.js @@ -0,0 +1,27 @@ +var fstream = require('../fstream.js') +var notOpen = false +process.chdir(__dirname) + +fstream + .Writer({ + path: 'path/to/symlink', + linkpath: './file', + isSymbolicLink: true, + mode: '0755' // octal strings supported + }) + .on('close', function () { + notOpen = true + var fs = require('fs') + var s = fs.lstatSync('path/to/symlink') + var isSym = s.isSymbolicLink() + console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink') + var t = fs.readlinkSync('path/to/symlink') + var isTarget = t === './file' + console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file') + }) + .end() + +process.on('exit', function () { + console.log((notOpen ? 
'' : 'not ') + 'ok 3 should be closed') + console.log('1..3') +}) diff --git a/node_modules/fstream/fstream.js b/node_modules/fstream/fstream.js new file mode 100644 index 0000000..c0eb3be --- /dev/null +++ b/node_modules/fstream/fstream.js @@ -0,0 +1,35 @@ +exports.Abstract = require('./lib/abstract.js') +exports.Reader = require('./lib/reader.js') +exports.Writer = require('./lib/writer.js') + +exports.File = { + Reader: require('./lib/file-reader.js'), + Writer: require('./lib/file-writer.js') +} + +exports.Dir = { + Reader: require('./lib/dir-reader.js'), + Writer: require('./lib/dir-writer.js') +} + +exports.Link = { + Reader: require('./lib/link-reader.js'), + Writer: require('./lib/link-writer.js') +} + +exports.Proxy = { + Reader: require('./lib/proxy-reader.js'), + Writer: require('./lib/proxy-writer.js') +} + +exports.Reader.Dir = exports.DirReader = exports.Dir.Reader +exports.Reader.File = exports.FileReader = exports.File.Reader +exports.Reader.Link = exports.LinkReader = exports.Link.Reader +exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader + +exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer +exports.Writer.File = exports.FileWriter = exports.File.Writer +exports.Writer.Link = exports.LinkWriter = exports.Link.Writer +exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer + +exports.collect = require('./lib/collect.js') diff --git a/node_modules/fstream/lib/abstract.js b/node_modules/fstream/lib/abstract.js new file mode 100644 index 0000000..97c120e --- /dev/null +++ b/node_modules/fstream/lib/abstract.js @@ -0,0 +1,85 @@ +// the parent class for all fstreams. + +module.exports = Abstract + +var Stream = require('stream').Stream +var inherits = require('inherits') + +function Abstract () { + Stream.call(this) +} + +inherits(Abstract, Stream) + +Abstract.prototype.on = function (ev, fn) { + if (ev === 'ready' && this.ready) { + process.nextTick(fn.bind(this)) + } else { + Stream.prototype.on.call(this, ev, fn) + } + return this +} + +Abstract.prototype.abort = function () { + this._aborted = true + this.emit('abort') +} + +Abstract.prototype.destroy = function () {} + +Abstract.prototype.warn = function (msg, code) { + var self = this + var er = decorate(msg, code, self) + if (!self.listeners('warn')) { + console.error('%s %s\n' + + 'path = %s\n' + + 'syscall = %s\n' + + 'fstream_type = %s\n' + + 'fstream_path = %s\n' + + 'fstream_unc_path = %s\n' + + 'fstream_class = %s\n' + + 'fstream_stack =\n%s\n', + code || 'UNKNOWN', + er.stack, + er.path, + er.syscall, + er.fstream_type, + er.fstream_path, + er.fstream_unc_path, + er.fstream_class, + er.fstream_stack.join('\n')) + } else { + self.emit('warn', er) + } +} + +Abstract.prototype.info = function (msg, code) { + this.emit('info', msg, code) +} + +Abstract.prototype.error = function (msg, code, th) { + var er = decorate(msg, code, this) + if (th) throw er + else this.emit('error', er) +} + +function decorate (er, code, self) { + if (!(er instanceof Error)) er = new Error(er) + er.code = er.code || code + er.path = er.path || self.path + er.fstream_type = er.fstream_type || self.type + er.fstream_path = er.fstream_path || self.path + if (self._path !== self.path) { + er.fstream_unc_path = er.fstream_unc_path || self._path + } + if (self.linkpath) { + er.fstream_linkpath = er.fstream_linkpath || self.linkpath + } + er.fstream_class = er.fstream_class || self.constructor.name + er.fstream_stack = er.fstream_stack || + new Error().stack.split(/\n/).slice(3).map(function (s) { + return 
s.replace(/^ {4}at /, '') + }) + + return er +} diff --git a/node_modules/fstream/lib/collect.js b/node_modules/fstream/lib/collect.js new file mode 100644 index 0000000..e5d4f35 --- /dev/null +++ b/node_modules/fstream/lib/collect.js @@ -0,0 +1,70 @@ +module.exports = collect + +function collect (stream) { + if (stream._collected) return + + if (stream._paused) return stream.on('resume', collect.bind(null, stream)) + + stream._collected = true + stream.pause() + + stream.on('data', save) + stream.on('end', save) + var buf = [] + function save (b) { + if (typeof b === 'string') b = new Buffer(b) + if (Buffer.isBuffer(b) && !b.length) return + buf.push(b) + } + + stream.on('entry', saveEntry) + var entryBuffer = [] + function saveEntry (e) { + collect(e) + entryBuffer.push(e) + } + + stream.on('proxy', proxyPause) + function proxyPause (p) { + p.pause() + } + + // replace the pipe method with a new version that will + // unlock the buffered stuff. if you just call .pipe() + // without a destination, then it'll re-play the events. + stream.pipe = (function (orig) { + return function (dest) { + // console.error(' === open the pipes', dest && dest.path) + + // let the entries flow through one at a time. + // Once they're all done, then we can resume completely. + var e = 0 + ;(function unblockEntry () { + var entry = entryBuffer[e++] + // console.error(" ==== unblock entry", entry && entry.path) + if (!entry) return resume() + entry.on('end', unblockEntry) + if (dest) dest.add(entry) + else stream.emit('entry', entry) + })() + + function resume () { + stream.removeListener('entry', saveEntry) + stream.removeListener('data', save) + stream.removeListener('end', save) + + stream.pipe = orig + if (dest) stream.pipe(dest) + + buf.forEach(function (b) { + if (b) stream.emit('data', b) + else stream.emit('end') + }) + + stream.resume() + } + + return dest + } + })(stream.pipe) +} diff --git a/node_modules/fstream/lib/dir-reader.js b/node_modules/fstream/lib/dir-reader.js new file mode 100644 index 0000000..820cdc8 --- /dev/null +++ b/node_modules/fstream/lib/dir-reader.js @@ -0,0 +1,252 @@ +// A thing that emits "entry" events with Reader objects +// Pausing it causes it to stop emitting entry events, and also +// pauses the current entry if there is one. + +module.exports = DirReader + +var fs = require('graceful-fs') +var inherits = require('inherits') +var path = require('path') +var Reader = require('./reader.js') +var assert = require('assert').ok + +inherits(DirReader, Reader) + +function DirReader (props) { + var self = this + if (!(self instanceof DirReader)) { + throw new Error('DirReader must be called as constructor.') + } + + // should already be established as a Directory type + if (props.type !== 'Directory' || !props.Directory) { + throw new Error('Non-directory type ' + props.type) + } + + self.entries = null + self._index = -1 + self._paused = false + self._length = -1 + + if (props.sort) { + this.sort = props.sort + } + + Reader.call(this, props) +} + +DirReader.prototype._getEntries = function () { + var self = this + + // race condition. might pause() before calling _getEntries, + // and then resume, and try to get them a second time. 
+ if (self._gotEntries) return + self._gotEntries = true + + fs.readdir(self._path, function (er, entries) { + if (er) return self.error(er) + + self.entries = entries + + self.emit('entries', entries) + if (self._paused) self.once('resume', processEntries) + else processEntries() + + function processEntries () { + self._length = self.entries.length + if (typeof self.sort === 'function') { + self.entries = self.entries.sort(self.sort.bind(self)) + } + self._read() + } + }) +} + +// start walking the dir, and emit an "entry" event for each one. +DirReader.prototype._read = function () { + var self = this + + if (!self.entries) return self._getEntries() + + if (self._paused || self._currentEntry || self._aborted) { + // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted) + return + } + + self._index++ + if (self._index >= self.entries.length) { + if (!self._ended) { + self._ended = true + self.emit('end') + self.emit('close') + } + return + } + + // ok, handle this one, then. + + // save creating a proxy, by stat'ing the thing now. + var p = path.resolve(self._path, self.entries[self._index]) + assert(p !== self._path) + assert(self.entries[self._index]) + + // set this to prevent trying to _read() again in the stat time. + self._currentEntry = p + fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) { + if (er) return self.error(er) + + var who = self._proxy || self + + stat.path = p + stat.basename = path.basename(p) + stat.dirname = path.dirname(p) + var childProps = self.getChildProps.call(who, stat) + childProps.path = p + childProps.basename = path.basename(p) + childProps.dirname = path.dirname(p) + + var entry = Reader(childProps, stat) + + // console.error("DR Entry", p, stat.size) + + self._currentEntry = entry + + // "entry" events are for direct entries in a specific dir. + // "child" events are for any and all children at all levels. + // This nomenclature is not completely final. + + entry.on('pause', function (who) { + if (!self._paused && !entry._disowned) { + self.pause(who) + } + }) + + entry.on('resume', function (who) { + if (self._paused && !entry._disowned) { + self.resume(who) + } + }) + + entry.on('stat', function (props) { + self.emit('_entryStat', entry, props) + if (entry._aborted) return + if (entry._paused) { + entry.once('resume', function () { + self.emit('entryStat', entry, props) + }) + } else self.emit('entryStat', entry, props) + }) + + entry.on('ready', function EMITCHILD () { + // console.error("DR emit child", entry._path) + if (self._paused) { + // console.error(" DR emit child - try again later") + // pause the child, and emit the "entry" event once we drain. + // console.error("DR pausing child entry") + entry.pause(self) + return self.once('resume', EMITCHILD) + } + + // skip over sockets. they can't be piped around properly, + // so there's really no sense even acknowledging them. + // if someone really wants to see them, they can listen to + // the "socket" events. + if (entry.type === 'Socket') { + self.emit('socket', entry) + } else { + self.emitEntry(entry) + } + }) + + var ended = false + entry.on('close', onend) + entry.on('disown', onend) + function onend () { + if (ended) return + ended = true + self.emit('childEnd', entry) + self.emit('entryEnd', entry) + self._currentEntry = null + if (!self._paused) { + self._read() + } + } + + // XXX Remove this. Works in node as of 0.6.2 or so. + // Long filenames should not break stuff. 
+ entry.on('error', function (er) { + if (entry._swallowErrors) { + self.warn(er) + entry.emit('end') + entry.emit('close') + } else { + self.emit('error', er) + } + }) + + // proxy up some events. + ;[ + 'child', + 'childEnd', + 'warn' + ].forEach(function (ev) { + entry.on(ev, self.emit.bind(self, ev)) + }) + }) +} + +DirReader.prototype.disown = function (entry) { + entry.emit('beforeDisown') + entry._disowned = true + entry.parent = entry.root = null + if (entry === this._currentEntry) { + this._currentEntry = null + } + entry.emit('disown') +} + +DirReader.prototype.getChildProps = function () { + return { + depth: this.depth + 1, + root: this.root || this, + parent: this, + follow: this.follow, + filter: this.filter, + sort: this.props.sort, + hardlinks: this.props.hardlinks + } +} + +DirReader.prototype.pause = function (who) { + var self = this + if (self._paused) return + who = who || self + self._paused = true + if (self._currentEntry && self._currentEntry.pause) { + self._currentEntry.pause(who) + } + self.emit('pause', who) +} + +DirReader.prototype.resume = function (who) { + var self = this + if (!self._paused) return + who = who || self + + self._paused = false + // console.error('DR Emit Resume', self._path) + self.emit('resume', who) + if (self._paused) { + // console.error('DR Re-paused', self._path) + return + } + + if (self._currentEntry) { + if (self._currentEntry.resume) self._currentEntry.resume(who) + } else self._read() +} + +DirReader.prototype.emitEntry = function (entry) { + this.emit('entry', entry) + this.emit('child', entry) +} diff --git a/node_modules/fstream/lib/dir-writer.js b/node_modules/fstream/lib/dir-writer.js new file mode 100644 index 0000000..ec50dca --- /dev/null +++ b/node_modules/fstream/lib/dir-writer.js @@ -0,0 +1,174 @@ +// It is expected that, when .add() returns false, the consumer +// of the DirWriter will pause until a "drain" event occurs. Note +// that this is *almost always going to be the case*, unless the +// thing being written is some sort of unsupported type, and thus +// skipped over. + +module.exports = DirWriter + +var Writer = require('./writer.js') +var inherits = require('inherits') +var mkdir = require('mkdirp') +var path = require('path') +var collect = require('./collect.js') + +inherits(DirWriter, Writer) + +function DirWriter (props) { + var self = this + if (!(self instanceof DirWriter)) { + self.error('DirWriter must be called as constructor.', null, true) + } + + // should already be established as a Directory type + if (props.type !== 'Directory' || !props.Directory) { + self.error('Non-directory type ' + props.type + ' ' + + JSON.stringify(props), null, true) + } + + Writer.call(this, props) +} + +DirWriter.prototype._create = function () { + var self = this + mkdir(self._path, Writer.dirmode, function (er) { + if (er) return self.error(er) + // ready to start getting entries! + self.ready = true + self.emit('ready') + self._process() + }) +} + +// a DirWriter has an add(entry) method, but its .write() doesn't +// do anything. Why a no-op rather than a throw? Because this +// leaves open the door for writing directory metadata for +// gnu/solaris style dumpdirs. 
+DirWriter.prototype.write = function () { + return true +} + +DirWriter.prototype.end = function () { + this._ended = true + this._process() +} + +DirWriter.prototype.add = function (entry) { + var self = this + + // console.error('\tadd', entry._path, '->', self._path) + collect(entry) + if (!self.ready || self._currentEntry) { + self._buffer.push(entry) + return false + } + + // create a new writer, and pipe the incoming entry into it. + if (self._ended) { + return self.error('add after end') + } + + self._buffer.push(entry) + self._process() + + return this._buffer.length === 0 +} + +DirWriter.prototype._process = function () { + var self = this + + // console.error('DW Process p=%j', self._processing, self.basename) + + if (self._processing) return + + var entry = self._buffer.shift() + if (!entry) { + // console.error("DW Drain") + self.emit('drain') + if (self._ended) self._finish() + return + } + + self._processing = true + // console.error("DW Entry", entry._path) + + self.emit('entry', entry) + + // ok, add this entry + // + // don't allow recursive copying + var p = entry + var pp + do { + pp = p._path || p.path + if (pp === self.root._path || pp === self._path || + (pp && pp.indexOf(self._path) === 0)) { + // console.error('DW Exit (recursive)', entry.basename, self._path) + self._processing = false + if (entry._collected) entry.pipe() + return self._process() + } + p = p.parent + } while (p) + + // console.error("DW not recursive") + + // chop off the entry's root dir, replace with ours + var props = { + parent: self, + root: self.root || self, + type: entry.type, + depth: self.depth + 1 + } + + pp = entry._path || entry.path || entry.props.path + if (entry.parent) { + pp = pp.substr(entry.parent._path.length + 1) + } + // get rid of any ../../ shenanigans + props.path = path.join(self.path, path.join('/', pp)) + + // if i have a filter, the child should inherit it. + props.filter = self.filter + + // all the rest of the stuff, copy over from the source. + Object.keys(entry.props).forEach(function (k) { + if (!props.hasOwnProperty(k)) { + props[k] = entry.props[k] + } + }) + + // not sure at this point what kind of writer this is. + var child = self._currentChild = new Writer(props) + child.on('ready', function () { + // console.error("DW Child Ready", child.type, child._path) + // console.error(" resuming", entry._path) + entry.pipe(child) + entry.resume() + }) + + // XXX Make this work in node. + // Long filenames should not break stuff. + child.on('error', function (er) { + if (child._swallowErrors) { + self.warn(er) + child.emit('end') + child.emit('close') + } else { + self.emit('error', er) + } + }) + + // we fire _end internally *after* end, so that we don't move on + // until any "end" listeners have had their chance to do stuff. 
+ child.on('close', onend) + var ended = false + function onend () { + if (ended) return + ended = true + // console.error("* DW Child end", child.basename) + self._currentChild = null + self._processing = false + self._process() + } +} diff --git a/node_modules/fstream/lib/file-reader.js b/node_modules/fstream/lib/file-reader.js new file mode 100644 index 0000000..baa01f4 --- /dev/null +++ b/node_modules/fstream/lib/file-reader.js @@ -0,0 +1,150 @@ +// Basically just a wrapper around an fs.ReadStream + +module.exports = FileReader + +var fs = require('graceful-fs') +var inherits = require('inherits') +var Reader = require('./reader.js') +var EOF = {EOF: true} +var CLOSE = {CLOSE: true} + +inherits(FileReader, Reader) + +function FileReader (props) { + // console.error(" FR create", props.path, props.size, new Error().stack) + var self = this + if (!(self instanceof FileReader)) { + throw new Error('FileReader must be called as constructor.') + } + + // should already be established as a File type + // XXX Todo: preserve hardlinks by tracking dev+inode+nlink, + // with a HardLinkReader class. + if (!((props.type === 'Link' && props.Link) || + (props.type === 'File' && props.File))) { + throw new Error('Non-file type ' + props.type) + } + + self._buffer = [] + self._bytesEmitted = 0 + Reader.call(self, props) +} + +FileReader.prototype._getStream = function () { + var self = this + var stream = self._stream = fs.createReadStream(self._path, self.props) + + if (self.props.blksize) { + stream.bufferSize = self.props.blksize + } + + stream.on('open', self.emit.bind(self, 'open')) + + stream.on('data', function (c) { + // console.error('\t\t%d %s', c.length, self.basename) + self._bytesEmitted += c.length + // no point saving empty chunks + if (!c.length) { + return + } else if (self._paused || self._buffer.length) { + self._buffer.push(c) + self._read() + } else self.emit('data', c) + }) + + stream.on('end', function () { + if (self._paused || self._buffer.length) { + // console.error('FR Buffering End', self._path) + self._buffer.push(EOF) + self._read() + } else { + self.emit('end') + } + + if (self._bytesEmitted !== self.props.size) { + self.error("Didn't get expected byte count\n" + + 'expect: ' + self.props.size + '\n' + + 'actual: ' + self._bytesEmitted) + } + }) + + stream.on('close', function () { + if (self._paused || self._buffer.length) { + // console.error('FR Buffering Close', self._path) + self._buffer.push(CLOSE) + self._read() + } else { + // console.error('FR close 1', self._path) + self.emit('close') + } + }) + + stream.on('error', function (e) { + self.emit('error', e) + }) + + self._read() +} + +FileReader.prototype._read = function () { + var self = this + // console.error('FR _read', self._path) + if (self._paused) { + // console.error('FR _read paused', self._path) + return + } + + if (!self._stream) { + // console.error('FR _getStream calling', self._path) + return self._getStream() + } + + // clear out the buffer, if there is one. 
+ if (self._buffer.length) { + // console.error('FR _read has buffer', self._buffer.length, self._path) + var buf = self._buffer + for (var i = 0, l = buf.length; i < l; i++) { + var c = buf[i] + if (c === EOF) { + // console.error('FR Read emitting buffered end', self._path) + self.emit('end') + } else if (c === CLOSE) { + // console.error('FR Read emitting buffered close', self._path) + self.emit('close') + } else { + // console.error('FR Read emitting buffered data', self._path) + self.emit('data', c) + } + + if (self._paused) { + // console.error('FR Read Re-pausing at '+i, self._path) + self._buffer = buf.slice(i) + return + } + } + self._buffer.length = 0 + } +// console.error("FR _read done") +// that's about all there is to it. +} + +FileReader.prototype.pause = function (who) { + var self = this + // console.error('FR Pause', self._path) + if (self._paused) return + who = who || self + self._paused = true + if (self._stream) self._stream.pause() + self.emit('pause', who) +} + +FileReader.prototype.resume = function (who) { + var self = this + // console.error('FR Resume', self._path) + if (!self._paused) return + who = who || self + self.emit('resume', who) + self._paused = false + if (self._stream) self._stream.resume() + self._read() +} diff --git a/node_modules/fstream/lib/file-writer.js b/node_modules/fstream/lib/file-writer.js new file mode 100644 index 0000000..4c803d8 --- /dev/null +++ b/node_modules/fstream/lib/file-writer.js @@ -0,0 +1,107 @@ +module.exports = FileWriter + +var fs = require('graceful-fs') +var Writer = require('./writer.js') +var inherits = require('inherits') +var EOF = {} + +inherits(FileWriter, Writer) + +function FileWriter (props) { + var self = this + if (!(self instanceof FileWriter)) { + throw new Error('FileWriter must be called as constructor.') + } + + // should already be established as a File type + if (props.type !== 'File' || !props.File) { + throw new Error('Non-file type ' + props.type) + } + + self._buffer = [] + self._bytesWritten = 0 + + Writer.call(this, props) +} + +FileWriter.prototype._create = function () { + var self = this + if (self._stream) return + + var so = {} + if (self.props.flags) so.flags = self.props.flags + so.mode = Writer.filemode + if (self._old && self._old.blksize) so.bufferSize = self._old.blksize + + self._stream = fs.createWriteStream(self._path, so) + + self._stream.on('open', function () { + // console.error("FW open", self._buffer, self._path) + self.ready = true + self._buffer.forEach(function (c) { + if (c === EOF) self._stream.end() + else self._stream.write(c) + }) + self.emit('ready') + // give this a kick just in case it needs it. + self.emit('drain') + }) + + self._stream.on('error', function (er) { self.emit('error', er) }) + + self._stream.on('drain', function () { self.emit('drain') }) + + self._stream.on('close', function () { + // console.error('\n\nFW Stream Close', self._path, self.size) + self._finish() + }) +} + +FileWriter.prototype.write = function (c) { + var self = this + + self._bytesWritten += c.length + + if (!self.ready) { + if (!Buffer.isBuffer(c) && typeof c !== 'string') { + throw new Error('invalid write data') + } + self._buffer.push(c) + return false + } + + var ret = self._stream.write(c) + // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length) + + // allow 2 buffered writes, because otherwise there's just too + // much stop and go bs. 
+ if (ret === false && self._stream._queue) { + return self._stream._queue.length <= 2 + } else { + return ret + } +} + +FileWriter.prototype.end = function (c) { + var self = this + + if (c) self.write(c) + + if (!self.ready) { + self._buffer.push(EOF) + return false + } + + return self._stream.end() +} + +FileWriter.prototype._finish = function () { + var self = this + if (typeof self.size === 'number' && self._bytesWritten !== self.size) { + self.error( + 'Did not get expected byte count.\n' + + 'expect: ' + self.size + '\n' + + 'actual: ' + self._bytesWritten) + } + Writer.prototype._finish.call(self) +} diff --git a/node_modules/fstream/lib/get-type.js b/node_modules/fstream/lib/get-type.js new file mode 100644 index 0000000..19f6a65 --- /dev/null +++ b/node_modules/fstream/lib/get-type.js @@ -0,0 +1,33 @@ +module.exports = getType + +function getType (st) { + var types = [ + 'Directory', + 'File', + 'SymbolicLink', + 'Link', // special for hardlinks from tarballs + 'BlockDevice', + 'CharacterDevice', + 'FIFO', + 'Socket' + ] + var type + + if (st.type && types.indexOf(st.type) !== -1) { + st[st.type] = true + return st.type + } + + for (var i = 0, l = types.length; i < l; i++) { + type = types[i] + var is = st[type] || st['is' + type] + if (typeof is === 'function') is = is.call(st) + if (is) { + st[type] = true + st.type = type + return type + } + } + + return null +} diff --git a/node_modules/fstream/lib/link-reader.js b/node_modules/fstream/lib/link-reader.js new file mode 100644 index 0000000..fb4cc67 --- /dev/null +++ b/node_modules/fstream/lib/link-reader.js @@ -0,0 +1,53 @@ +// Basically just a wrapper around an fs.readlink +// +// XXX: Enhance this to support the Link type, by keeping +// a lookup table of {:}, so that hardlinks +// can be preserved in tarballs. + +module.exports = LinkReader + +var fs = require('graceful-fs') +var inherits = require('inherits') +var Reader = require('./reader.js') + +inherits(LinkReader, Reader) + +function LinkReader (props) { + var self = this + if (!(self instanceof LinkReader)) { + throw new Error('LinkReader must be called as constructor.') + } + + if (!((props.type === 'Link' && props.Link) || + (props.type === 'SymbolicLink' && props.SymbolicLink))) { + throw new Error('Non-link type ' + props.type) + } + + Reader.call(self, props) +} + +// When piping a LinkReader into a LinkWriter, we have to +// already have the linkpath property set, so that has to +// happen *before* the "ready" event, which means we need to +// override the _stat method. 
+LinkReader.prototype._stat = function (currentStat) { + var self = this + fs.readlink(self._path, function (er, linkpath) { + if (er) return self.error(er) + self.linkpath = self.props.linkpath = linkpath + self.emit('linkpath', linkpath) + Reader.prototype._stat.call(self, currentStat) + }) +} + +LinkReader.prototype._read = function () { + var self = this + if (self._paused) return + // basically just a no-op, since we got all the info we need + // from the _stat method + if (!self._ended) { + self.emit('end') + self.emit('close') + self._ended = true + } +} diff --git a/node_modules/fstream/lib/link-writer.js b/node_modules/fstream/lib/link-writer.js new file mode 100644 index 0000000..af54284 --- /dev/null +++ b/node_modules/fstream/lib/link-writer.js @@ -0,0 +1,95 @@ +module.exports = LinkWriter + +var fs = require('graceful-fs') +var Writer = require('./writer.js') +var inherits = require('inherits') +var path = require('path') +var rimraf = require('rimraf') + +inherits(LinkWriter, Writer) + +function LinkWriter (props) { + var self = this + if (!(self instanceof LinkWriter)) { + throw new Error('LinkWriter must be called as constructor.') + } + + // should already be established as a Link type + if (!((props.type === 'Link' && props.Link) || + (props.type === 'SymbolicLink' && props.SymbolicLink))) { + throw new Error('Non-link type ' + props.type) + } + + if (props.linkpath === '') props.linkpath = '.' + if (!props.linkpath) { + self.error('Need linkpath property to create ' + props.type) + } + + Writer.call(this, props) +} + +LinkWriter.prototype._create = function () { + // console.error(" LW _create") + var self = this + var hard = self.type === 'Link' || process.platform === 'win32' + var link = hard ? 'link' : 'symlink' + var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath + + // can only change the link path by clobbering + // For hard links, let's just assume that's always the case, since + // there's no good way to read them if we don't already know. + if (hard) return clobber(self, lp, link) + + fs.readlink(self._path, function (er, p) { + // only skip creation if it's exactly the same link + if (p && p === lp) return finish(self) + clobber(self, lp, link) + }) +} + +function clobber (self, lp, link) { + rimraf(self._path, function (er) { + if (er) return self.error(er) + create(self, lp, link) + }) +} + +function create (self, lp, link) { + fs[link](lp, self._path, function (er) { + // if this is a hard link, and we're in the process of writing out a + // directory, it's very possible that the thing we're linking to + // doesn't exist yet (especially if it was intended as a symlink), + // so swallow ENOENT errors here and just soldier in. + // Additionally, an EPERM or EACCES can happen on win32 if it's trying + // to make a link to a directory. Again, just skip it. + // A better solution would be to have fs.symlink be supported on + // windows in some nice fashion. 
+ if (er) { + if ((er.code === 'ENOENT' || + er.code === 'EACCES' || + er.code === 'EPERM') && process.platform === 'win32') { + self.ready = true + self.emit('ready') + self.emit('end') + self.emit('close') + self.end = self._finish = function () {} + } else return self.error(er) + } + finish(self) + }) +} + +function finish (self) { + self.ready = true + self.emit('ready') + if (self._ended && !self._finished) self._finish() +} + +LinkWriter.prototype.end = function () { + // console.error("LW finish in end") + this._ended = true + if (this.ready) { + this._finished = true + this._finish() + } +} diff --git a/node_modules/fstream/lib/proxy-reader.js b/node_modules/fstream/lib/proxy-reader.js new file mode 100644 index 0000000..4f431c9 --- /dev/null +++ b/node_modules/fstream/lib/proxy-reader.js @@ -0,0 +1,95 @@ +// A reader for when we don't yet know what kind of thing +// the thing is. + +module.exports = ProxyReader + +var Reader = require('./reader.js') +var getType = require('./get-type.js') +var inherits = require('inherits') +var fs = require('graceful-fs') + +inherits(ProxyReader, Reader) + +function ProxyReader (props) { + var self = this + if (!(self instanceof ProxyReader)) { + throw new Error('ProxyReader must be called as constructor.') + } + + self.props = props + self._buffer = [] + self.ready = false + + Reader.call(self, props) +} + +ProxyReader.prototype._stat = function () { + var self = this + var props = self.props + // stat the thing to see what the proxy should be. + var stat = props.follow ? 'stat' : 'lstat' + + fs[stat](props.path, function (er, current) { + var type + if (er || !current) { + type = 'File' + } else { + type = getType(current) + } + + props[type] = true + props.type = self.type = type + + self._old = current + self._addProxy(Reader(props, current)) + }) +} + +ProxyReader.prototype._addProxy = function (proxy) { + var self = this + if (self._proxyTarget) { + return self.error('proxy already set') + } + + self._proxyTarget = proxy + proxy._proxy = self + + ;[ + 'error', + 'data', + 'end', + 'close', + 'linkpath', + 'entry', + 'entryEnd', + 'child', + 'childEnd', + 'warn', + 'stat' + ].forEach(function (ev) { + // console.error('~~ proxy event', ev, self.path) + proxy.on(ev, self.emit.bind(self, ev)) + }) + + self.emit('proxy', proxy) + + proxy.on('ready', function () { + // console.error("~~ proxy is ready!", self.path) + self.ready = true + self.emit('ready') + }) + + var calls = self._buffer + self._buffer.length = 0 + calls.forEach(function (c) { + proxy[c[0]].apply(proxy, c[1]) + }) +} + +ProxyReader.prototype.pause = function () { + return this._proxyTarget ? this._proxyTarget.pause() : false +} + +ProxyReader.prototype.resume = function () { + return this._proxyTarget ? this._proxyTarget.resume() : false +} diff --git a/node_modules/fstream/lib/proxy-writer.js b/node_modules/fstream/lib/proxy-writer.js new file mode 100644 index 0000000..a654462 --- /dev/null +++ b/node_modules/fstream/lib/proxy-writer.js @@ -0,0 +1,111 @@ +// A writer for when we don't know what kind of thing +// the thing is. That is, it's not explicitly set, +// so we're going to make it whatever the thing already +// is, or "File" +// +// Until then, collect all events. 
+ +module.exports = ProxyWriter + +var Writer = require('./writer.js') +var getType = require('./get-type.js') +var inherits = require('inherits') +var collect = require('./collect.js') +var fs = require('fs') + +inherits(ProxyWriter, Writer) + +function ProxyWriter (props) { + var self = this + if (!(self instanceof ProxyWriter)) { + throw new Error('ProxyWriter must be called as constructor.') + } + + self.props = props + self._needDrain = false + + Writer.call(self, props) +} + +ProxyWriter.prototype._stat = function () { + var self = this + var props = self.props + // stat the thing to see what the proxy should be. + var stat = props.follow ? 'stat' : 'lstat' + + fs[stat](props.path, function (er, current) { + var type + if (er || !current) { + type = 'File' + } else { + type = getType(current) + } + + props[type] = true + props.type = self.type = type + + self._old = current + self._addProxy(Writer(props, current)) + }) +} + +ProxyWriter.prototype._addProxy = function (proxy) { + // console.error("~~ set proxy", this.path) + var self = this + if (self._proxy) { + return self.error('proxy already set') + } + + self._proxy = proxy + ;[ + 'ready', + 'error', + 'close', + 'pipe', + 'drain', + 'warn' + ].forEach(function (ev) { + proxy.on(ev, self.emit.bind(self, ev)) + }) + + self.emit('proxy', proxy) + + var calls = self._buffer + calls.forEach(function (c) { + // console.error("~~ ~~ proxy buffered call", c[0], c[1]) + proxy[c[0]].apply(proxy, c[1]) + }) + self._buffer.length = 0 + if (self._needsDrain) self.emit('drain') +} + +ProxyWriter.prototype.add = function (entry) { + // console.error("~~ proxy add") + collect(entry) + + if (!this._proxy) { + this._buffer.push(['add', [entry]]) + this._needDrain = true + return false + } + return this._proxy.add(entry) +} + +ProxyWriter.prototype.write = function (c) { + // console.error('~~ proxy write') + if (!this._proxy) { + this._buffer.push(['write', [c]]) + this._needDrain = true + return false + } + return this._proxy.write(c) +} + +ProxyWriter.prototype.end = function (c) { + // console.error('~~ proxy end') + if (!this._proxy) { + this._buffer.push(['end', [c]]) + return false + } + return this._proxy.end(c) +} diff --git a/node_modules/fstream/lib/reader.js b/node_modules/fstream/lib/reader.js new file mode 100644 index 0000000..876021f --- /dev/null +++ b/node_modules/fstream/lib/reader.js @@ -0,0 +1,255 @@ +module.exports = Reader + +var fs = require('graceful-fs') +var Stream = require('stream').Stream +var inherits = require('inherits') +var path = require('path') +var getType = require('./get-type.js') +var hardLinks = Reader.hardLinks = {} +var Abstract = require('./abstract.js') + +// Must do this *before* loading the child classes +inherits(Reader, Abstract) + +var LinkReader = require('./link-reader.js') + +function Reader (props, currentStat) { + var self = this + if (!(self instanceof Reader)) return new Reader(props, currentStat) + + if (typeof props === 'string') { + props = { path: props } + } + + if (!props.path) { + self.error('Must provide a path', null, true) + } + + // polymorphism. + // call fstream.Reader(dir) to get a DirReader object, etc. + // Note that, unlike in the Writer case, ProxyReader is going + // to be the *normal* state of affairs, since we rarely know + // the type of a file prior to reading it. 
+ + var type + var ClassType + + if (props.type && typeof props.type === 'function') { + type = props.type + ClassType = type + } else { + type = getType(props) + ClassType = Reader + } + + if (currentStat && !type) { + type = getType(currentStat) + props[type] = true + props.type = type + } + + switch (type) { + case 'Directory': + ClassType = require('./dir-reader.js') + break + + case 'Link': + // XXX hard links are just files. + // However, it would be good to keep track of files' dev+inode + // and nlink values, and create a HardLinkReader that emits + // a linkpath value of the original copy, so that the tar + // writer can preserve them. + // ClassType = HardLinkReader + // break + + case 'File': + ClassType = require('./file-reader.js') + break + + case 'SymbolicLink': + ClassType = LinkReader + break + + case 'Socket': + ClassType = require('./socket-reader.js') + break + + case null: + ClassType = require('./proxy-reader.js') + break + } + + if (!(self instanceof ClassType)) { + return new ClassType(props) + } + + Abstract.call(self) + + self.readable = true + self.writable = false + + self.type = type + self.props = props + self.depth = props.depth = props.depth || 0 + self.parent = props.parent || null + self.root = props.root || (props.parent && props.parent.root) || self + + self._path = self.path = path.resolve(props.path) + if (process.platform === 'win32') { + self.path = self._path = self.path.replace(/\?/g, '_') + if (self._path.length >= 260) { + // how DOES one create files on the moon? + // if the path has spaces in it, then UNC will fail. + self._swallowErrors = true + // if (self._path.indexOf(" ") === -1) { + self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') + // } + } + } + self.basename = props.basename = path.basename(self.path) + self.dirname = props.dirname = path.dirname(self.path) + + // these have served their purpose, and are now just noisy clutter + props.parent = props.root = null + + // console.error("\n\n\n%s setting size to", props.path, props.size) + self.size = props.size + self.filter = typeof props.filter === 'function' ? props.filter : null + if (props.sort === 'alpha') props.sort = alphasort + + // start the ball rolling. + // this will stat the thing, and then call self._read() + // to start reading whatever it is. + // console.error("calling stat", props.path, currentStat) + self._stat(currentStat) +} + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} + +Reader.prototype._stat = function (currentStat) { + var self = this + var props = self.props + var stat = props.follow ? 'stat' : 'lstat' + // console.error("Reader._stat", self._path, currentStat) + if (currentStat) process.nextTick(statCb.bind(null, null, currentStat)) + else fs[stat](self._path, statCb) + + function statCb (er, props_) { + // console.error("Reader._stat, statCb", self._path, props_, props_.nlink) + if (er) return self.error(er) + + Object.keys(props_).forEach(function (k) { + props[k] = props_[k] + }) + + // if it's not the expected size, then abort here. + if (undefined !== self.size && props.size !== self.size) { + return self.error('incorrect size') + } + self.size = props.size + + var type = getType(props) + var handleHardlinks = props.hardlinks !== false + + // special little thing for handling hardlinks. 
+ if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) { + var k = props.dev + ':' + props.ino + // console.error("Reader has nlink", self._path, k) + if (hardLinks[k] === self._path || !hardLinks[k]) { + hardLinks[k] = self._path + } else { + // switch into hardlink mode. + type = self.type = self.props.type = 'Link' + self.Link = self.props.Link = true + self.linkpath = self.props.linkpath = hardLinks[k] + // console.error("Hardlink detected, switching mode", self._path, self.linkpath) + // Setting __proto__ would arguably be the "correct" + // approach here, but that just seems too wrong. + self._stat = self._read = LinkReader.prototype._read + } + } + + if (self.type && self.type !== type) { + self.error('Unexpected type: ' + type) + } + + // if the filter doesn't pass, then just skip over this one. + // still have to emit end so that dir-walking can move on. + if (self.filter) { + var who = self._proxy || self + // special handling for ProxyReaders + if (!self.filter.call(who, who, props)) { + if (!self._disowned) { + self.abort() + self.emit('end') + self.emit('close') + } + return + } + } + + // last chance to abort or disown before the flow starts! + var events = ['_stat', 'stat', 'ready'] + var e = 0 + ;(function go () { + if (self._aborted) { + self.emit('end') + self.emit('close') + return + } + + if (self._paused && self.type !== 'Directory') { + self.once('resume', go) + return + } + + var ev = events[e++] + if (!ev) { + return self._read() + } + self.emit(ev, props) + go() + })() + } +} + +Reader.prototype.pipe = function (dest) { + var self = this + if (typeof dest.add === 'function') { + // piping to a multi-compatible, and we've got directory entries. + self.on('entry', function (entry) { + var ret = dest.add(entry) + if (ret === false) { + self.pause() + } + }) + } + + // console.error("R Pipe apply Stream Pipe") + return Stream.prototype.pipe.apply(this, arguments) +} + +Reader.prototype.pause = function (who) { + this._paused = true + who = who || this + this.emit('pause', who) + if (this._stream) this._stream.pause(who) +} + +Reader.prototype.resume = function (who) { + this._paused = false + who = who || this + this.emit('resume', who) + if (this._stream) this._stream.resume(who) + this._read() +} + +Reader.prototype._read = function () { + this.error('Cannot read unknown type: ' + this.type) +} diff --git a/node_modules/fstream/lib/socket-reader.js b/node_modules/fstream/lib/socket-reader.js new file mode 100644 index 0000000..e0456ba --- /dev/null +++ b/node_modules/fstream/lib/socket-reader.js @@ -0,0 +1,36 @@ +// Just get the stats, and then don't do anything. +// You can't really "read" from a socket. You "connect" to it. +// Mostly, this is here so that reading a dir with a socket in it +// doesn't blow up. 
+ +module.exports = SocketReader + +var inherits = require('inherits') +var Reader = require('./reader.js') + +inherits(SocketReader, Reader) + +function SocketReader (props) { + var self = this + if (!(self instanceof SocketReader)) { + throw new Error('SocketReader must be called as constructor.') + } + + if (!(props.type === 'Socket' && props.Socket)) { + throw new Error('Non-socket type ' + props.type) + } + + Reader.call(self, props) +} + +SocketReader.prototype._read = function () { + var self = this + if (self._paused) return + // basically just a no-op, since we got all the info we have + // from the _stat method + if (!self._ended) { + self.emit('end') + self.emit('close') + self._ended = true + } +} diff --git a/node_modules/fstream/lib/writer.js b/node_modules/fstream/lib/writer.js new file mode 100644 index 0000000..ca3396b --- /dev/null +++ b/node_modules/fstream/lib/writer.js @@ -0,0 +1,390 @@ +module.exports = Writer + +var fs = require('graceful-fs') +var inherits = require('inherits') +var rimraf = require('rimraf') +var mkdir = require('mkdirp') +var path = require('path') +var umask = process.platform === 'win32' ? 0 : process.umask() +var getType = require('./get-type.js') +var Abstract = require('./abstract.js') + +// Must do this *before* loading the child classes +inherits(Writer, Abstract) + +Writer.dirmode = parseInt('0777', 8) & (~umask) +Writer.filemode = parseInt('0666', 8) & (~umask) + +var DirWriter = require('./dir-writer.js') +var LinkWriter = require('./link-writer.js') +var FileWriter = require('./file-writer.js') +var ProxyWriter = require('./proxy-writer.js') + +// props is the desired state. current is optionally the current stat, +// provided here so that subclasses can avoid statting the target +// more than necessary. +function Writer (props, current) { + var self = this + + if (typeof props === 'string') { + props = { path: props } + } + + if (!props.path) self.error('Must provide a path', null, true) + + // polymorphism. + // call fstream.Writer(dir) to get a DirWriter object, etc. + var type = getType(props) + var ClassType = Writer + + switch (type) { + case 'Directory': + ClassType = DirWriter + break + case 'File': + ClassType = FileWriter + break + case 'Link': + case 'SymbolicLink': + ClassType = LinkWriter + break + case null: + default: + // Don't know yet what type to create, so we wrap in a proxy. + ClassType = ProxyWriter + break + } + + if (!(self instanceof ClassType)) return new ClassType(props) + + // now get down to business. + + Abstract.call(self) + + // props is what we want to set. + // set some convenience properties as well. + self.type = props.type + self.props = props + self.depth = props.depth || 0 + self.clobber = props.clobber === false ? 
props.clobber : true + self.parent = props.parent || null + self.root = props.root || (props.parent && props.parent.root) || self + + self._path = self.path = path.resolve(props.path) + if (process.platform === 'win32') { + self.path = self._path = self.path.replace(/\?/g, '_') + if (self._path.length >= 260) { + self._swallowErrors = true + self._path = '\\\\?\\' + self.path.replace(/\//g, '\\') + } + } + self.basename = path.basename(props.path) + self.dirname = path.dirname(props.path) + self.linkpath = props.linkpath || null + + props.parent = props.root = null + + // console.error("\n\n\n%s setting size to", props.path, props.size) + self.size = props.size + + if (typeof props.mode === 'string') { + props.mode = parseInt(props.mode, 8) + } + + self.readable = false + self.writable = true + + // buffer until ready, or while handling another entry + self._buffer = [] + self.ready = false + + self.filter = typeof props.filter === 'function' ? props.filter : null + + // start the ball rolling. + // this checks what's there already, and then calls + // self._create() to call the impl-specific creation stuff. + self._stat(current) +} + +// Calling this means that it's something we can't create. +// Just assert that it's already there, otherwise raise a warning. +Writer.prototype._create = function () { + var self = this + fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) { + if (er) { + return self.warn('Cannot create ' + self._path + '\n' + + 'Unsupported type: ' + self.type, 'ENOTSUP') + } + self._finish() + }) +} + +Writer.prototype._stat = function (current) { + var self = this + var props = self.props + var stat = props.follow ? 'stat' : 'lstat' + var who = self._proxy || self + + if (current) statCb(null, current) + else fs[stat](self._path, statCb) + + function statCb (er, current) { + if (self.filter && !self.filter.call(who, who, current)) { + self._aborted = true + self.emit('end') + self.emit('close') + return + } + + // if it's not there, great. We'll just create it. + // if it is there, then we'll need to change whatever differs + if (er || !current) { + return create(self) + } + + self._old = current + var currentType = getType(current) + + // if it's a type change, then we need to clobber or error. + // if it's not a type change, then let the impl take care of it. + if (currentType !== self.type) { + return rimraf(self._path, function (er) { + if (er) return self.error(er) + self._old = null + create(self) + }) + } + + // otherwise, just handle in the app-specific way + // this creates a fs.WriteStream, or mkdir's, or whatever + create(self) + } +} + +function create (self) { + // console.error("W create", self._path, Writer.dirmode) + + // XXX Need to clobber non-dirs that are in the way, + // unless { clobber: false } in the props. + mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) { + // console.error("W created", path.dirname(self._path), er) + if (er) return self.error(er) + + // later on, we have to set the mode and owner for these + self._madeDir = made + return self._create() + }) +} + +function endChmod (self, want, current, path, cb) { + var wantMode = want.mode + var chmod = want.follow || self.type !== 'SymbolicLink' + ? 
'chmod' : 'lchmod' + + if (!fs[chmod]) return cb() + if (typeof wantMode !== 'number') return cb() + + var curMode = current.mode & parseInt('0777', 8) + wantMode = wantMode & parseInt('0777', 8) + if (wantMode === curMode) return cb() + + fs[chmod](path, wantMode, cb) +} + +function endChown (self, want, current, path, cb) { + // Don't even try it unless root. Too easy to EPERM. + if (process.platform === 'win32') return cb() + if (!process.getuid || process.getuid() !== 0) return cb() + if (typeof want.uid !== 'number' && + typeof want.gid !== 'number') return cb() + + if (current.uid === want.uid && + current.gid === want.gid) return cb() + + var chown = (self.props.follow || self.type !== 'SymbolicLink') + ? 'chown' : 'lchown' + if (!fs[chown]) return cb() + + if (typeof want.uid !== 'number') want.uid = current.uid + if (typeof want.gid !== 'number') want.gid = current.gid + + fs[chown](path, want.uid, want.gid, cb) +} + +function endUtimes (self, want, current, path, cb) { + if (!fs.utimes || process.platform === 'win32') return cb() + + var utimes = (want.follow || self.type !== 'SymbolicLink') + ? 'utimes' : 'lutimes' + + if (utimes === 'lutimes' && !fs[utimes]) { + utimes = 'utimes' + } + + if (!fs[utimes]) return cb() + + var curA = current.atime + var curM = current.mtime + var meA = want.atime + var meM = want.mtime + + if (meA === undefined) meA = curA + if (meM === undefined) meM = curM + + if (!isDate(meA)) meA = new Date(meA) + if (!isDate(meM)) meA = new Date(meM) + + if (meA.getTime() === curA.getTime() && + meM.getTime() === curM.getTime()) return cb() + + fs[utimes](path, meA, meM, cb) +} + +// XXX This function is beastly. Break it up! +Writer.prototype._finish = function () { + var self = this + + if (self._finishing) return + self._finishing = true + + // console.error(" W Finish", self._path, self.size) + + // set up all the things. + // At this point, we're already done writing whatever we've gotta write, + // adding files to the dir, etc. + var todo = 0 + var errState = null + var done = false + + if (self._old) { + // the times will almost *certainly* have changed. + // adds the utimes syscall, but remove another stat. + self._old.atime = new Date(0) + self._old.mtime = new Date(0) + // console.error(" W Finish Stale Stat", self._path, self.size) + setProps(self._old) + } else { + var stat = self.props.follow ? 'stat' : 'lstat' + // console.error(" W Finish Stating", self._path, self.size) + fs[stat](self._path, function (er, current) { + // console.error(" W Finish Stated", self._path, self.size, current) + if (er) { + // if we're in the process of writing out a + // directory, it's very possible that the thing we're linking to + // doesn't exist yet (especially if it was intended as a symlink), + // so swallow ENOENT errors here and just soldier on. 
+ if (er.code === 'ENOENT' && + (self.type === 'Link' || self.type === 'SymbolicLink') && + process.platform === 'win32') { + self.ready = true + self.emit('ready') + self.emit('end') + self.emit('close') + self.end = self._finish = function () {} + return + } else return self.error(er) + } + setProps(self._old = current) + }) + } + + return + + function setProps (current) { + todo += 3 + endChmod(self, self.props, current, self._path, next('chmod')) + endChown(self, self.props, current, self._path, next('chown')) + endUtimes(self, self.props, current, self._path, next('utimes')) + } + + function next (what) { + return function (er) { + // console.error(" W Finish", what, todo) + if (errState) return + if (er) { + er.fstream_finish_call = what + return self.error(errState = er) + } + if (--todo > 0) return + if (done) return + done = true + + // we may still need to set the mode/etc. on some parent dirs + // that were created previously. delay end/close until then. + if (!self._madeDir) return end() + else endMadeDir(self, self._path, end) + + function end (er) { + if (er) { + er.fstream_finish_call = 'setupMadeDir' + return self.error(er) + } + // all the props have been set, so we're completely done. + self.emit('end') + self.emit('close') + } + } + } +} + +function endMadeDir (self, p, cb) { + var made = self._madeDir + // everything *between* made and path.dirname(self._path) + // needs to be set up. Note that this may just be one dir. + var d = path.dirname(p) + + endMadeDir_(self, d, function (er) { + if (er) return cb(er) + if (d === made) { + return cb() + } + endMadeDir(self, d, cb) + }) +} + +function endMadeDir_ (self, p, cb) { + var dirProps = {} + Object.keys(self.props).forEach(function (k) { + dirProps[k] = self.props[k] + + // only make non-readable dirs if explicitly requested. + if (k === 'mode' && self.type !== 'Directory') { + dirProps[k] = dirProps[k] | parseInt('0111', 8) + } + }) + + var todo = 3 + var errState = null + fs.stat(p, function (er, current) { + if (er) return cb(errState = er) + endChmod(self, dirProps, current, p, next) + endChown(self, dirProps, current, p, next) + endUtimes(self, dirProps, current, p, next) + }) + + function next (er) { + if (errState) return + if (er) return cb(errState = er) + if (--todo === 0) return cb() + } +} + +Writer.prototype.pipe = function () { + this.error("Can't pipe from writable stream") +} + +Writer.prototype.add = function () { + this.error("Can't add to non-Directory type") +} + +Writer.prototype.write = function () { + return true +} + +function objectToString (d) { + return Object.prototype.toString.call(d) +} + +function isDate (d) { + return typeof d === 'object' && objectToString(d) === '[object Date]' +} diff --git a/node_modules/fstream/node_modules/.bin/mkdirp b/node_modules/fstream/node_modules/.bin/mkdirp new file mode 120000 index 0000000..91a5f62 --- /dev/null +++ b/node_modules/fstream/node_modules/.bin/mkdirp @@ -0,0 +1 @@ +../../../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/fstream/node_modules/.bin/rimraf b/node_modules/fstream/node_modules/.bin/rimraf new file mode 120000 index 0000000..632d6da --- /dev/null +++ b/node_modules/fstream/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../../../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/fstream/package.json b/node_modules/fstream/package.json new file mode 100644 index 0000000..8d0376e --- /dev/null +++ b/node_modules/fstream/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "fstream", + "description": "Advanced file system stream things", + "version": "1.0.10", + "repository": { + "type": "git", + "url": "https://github.com/npm/fstream.git" + }, + "main": "fstream.js", + "engines": { + "node": ">=0.6" + }, + "dependencies": { + "graceful-fs": "^4.1.2", + "inherits": "~2.0.0", + "mkdirp": ">=0.5 0", + "rimraf": "2" + }, + "devDependencies": { + "standard": "^4.0.0", + "tap": "^1.2.0" + }, + "scripts": { + "test": "standard && tap examples/*.js" + }, + "license": "ISC" +} diff --git a/node_modules/gauge/CHANGELOG.md b/node_modules/gauge/CHANGELOG.md new file mode 100644 index 0000000..efd08fc --- /dev/null +++ b/node_modules/gauge/CHANGELOG.md @@ -0,0 +1,131 @@ +### v2.6.0 + +* Bug fix: Don't run the code associated with `enable`/`disable` if the gauge + is already enabled or disabled respectively. This prevents leaking event + listeners, amongst other weirdness. +* New feature: Template items can have default values that will be used if no + value was otherwise passed in. + +### v2.5.3 + +* Default to `enabled` only if we have a tty. Users can always override + this by passing in the `enabled` option explicitly or by calling calling + `gauge.enable()`. + +### v2.5.2 + +* Externalized `./console-strings.js` into `console-control-strings`. + +### v2.5.1 + +* Update to `signal-exit@3.0.0`, which fixes a compatibility bug with the + node profiler. +* [#39](https://github.com/iarna/gauge/pull/39) Fix tests on 0.10 and add + a missing devDependency. ([@helloyou2012](https://github.com/helloyou2012)) + +### v2.5.0 + +* Add way to programmatically fetch a list of theme names in a themeset + (`Themeset.getThemeNames`). + +### v2.4.0 + +* Add support for setting themesets on existing gauge objects. +* Add post-IO callback to `gauge.hide()` as it is somtetimes necessary when + your terminal is interleaving output from multiple filehandles (ie, stdout + & stderr). + +### v2.3.1 + +* Fix a refactor bug in setTheme where it wasn't accepting the various types + of args it should. + +### v2.3.0 + +#### FEATURES + +* Add setTemplate & setTheme back in. +* Add support for named themes, you can now ask for things like 'colorASCII' + and 'brailleSpinner'. Of course, you can still pass in theme objects. + Additionally you can now pass in an object with `hasUnicode`, `hasColor` and + `platform` keys in order to override our guesses as to those values when + selecting a default theme from the themeset. +* Make the output stream optional (it defaults to `process.stderr` now). +* Add `setWriteTo(stream[, tty])` to change the output stream and, + optionally, tty. + +#### BUG FIXES & REFACTORING + +* Abort the display phase early if we're supposed to be hidden and we are. +* Stop printing a bunch of spaces at the end of lines, since we're already + using an erase-to-end-of-line code anyway. +* The unicode themes were missing the subsection separator. + +### v2.2.1 + +* Fix image in readme + +### v2.2.0 + +* All new themes API– reference themes by name and pass in custom themes and + themesets (themesets get platform support autodetection done on them to + select the best theme). Theme mixins let you add features to all existing + themes. +* Much, much improved test coverage. + +### v2.1.0 + +* Got rid of ░ in the default platform, noUnicode, hasColor theme. Thanks + to @yongtw123 for pointing out this had snuck in. +* Fiddled with the demo output to make it easier to see the spinner spin. Also + added prints before each platforms test output. 
+* I forgot to include `signal-exit` in our deps. <.< Thank you @KenanY for + finding this. Then I was lazy and made a new commit instead of using his + PR. Again, thank you for your patience @KenenY. +* Drastically speed up travis testing. +* Add a small javascript demo (demo.js) for showing off the various themes + (and testing them on diff platforms). +* Change: The subsection separator from ⁄ and / (different chars) to >. +* Fix crasher: A show or pulse without a label would cause the template renderer + to complain about a missing value. +* New feature: Add the ability to disable the clean-up-on-exit behavior. + Not something I expect to be widely desirable, but important if you have + multiple distinct gauge instances in your app. +* Use our own color support detection. + The `has-color` module proved too magic for my needs, making assumptions + as to which stream we write to and reading command line arguments. + +### v2.0.0 + +This is a major rewrite of the internals. Externally there are fewer +changes: + +* On node>0.8 gauge object now prints updates at a fixed rate. This means + that when you call `show` it may wate up to `updateInterval` ms before it + actually prints an update. You override this behavior with the + `fixedFramerate` option. +* The gauge object now keeps the cursor hidden as long as it's enabled and + shown. +* The constructor's arguments have changed, now it takes a mandatory output + stream and an optional options object. The stream no longer needs to be + an `ansi`ified stream, although it can be if you want (but we won't make + use of its special features). +* Previously the gauge was disabled by default if `process.stdout` wasn't a + tty. Now it always defaults to enabled. If you want the previous + behavior set the `enabled` option to `process.stdout.isTTY`. +* The constructor's options have changed– see the docs for details. +* Themes are entirely different. If you were using a custom theme, or + referring to one directly (eg via `Gauge.unicode` or `Gauge.ascii`) then + you'll need to change your code. You can get the equivalent of the latter + with: + ``` + var themes = require('gauge/themes') + var unicodeTheme = themes(true, true) // returns the color unicode theme for your platform + ``` + The default themes no longer use any ambiguous width characters, so even + if you choose to display those as wide your progress bar should still + display correctly. +* Templates are entirely different and if you were using a custom one, you + should consult the documentation to learn how to recreate it. If you were + using the default, be aware that it has changed and the result looks quite + a bit different. diff --git a/node_modules/gauge/CHANGELOG.md~ b/node_modules/gauge/CHANGELOG.md~ new file mode 100644 index 0000000..a4c8756 --- /dev/null +++ b/node_modules/gauge/CHANGELOG.md~ @@ -0,0 +1,123 @@ +### v2.5.3 + +* Default to `enabled` only if we have a tty. Users can always override + this by passing in the `enabled` option explicitly or by calling calling + `gauge.enable()`. + +### v2.5.2 + +* Externalized `./console-strings.js` into `console-control-strings`. + +### v2.5.1 + +* Update to `signal-exit@3.0.0`, which fixes a compatibility bug with the + node profiler. +* [#39](https://github.com/iarna/gauge/pull/39) Fix tests on 0.10 and add + a missing devDependency. ([@helloyou2012](https://github.com/helloyou2012)) + +### v2.5.0 + +* Add way to programmatically fetch a list of theme names in a themeset + (`Themeset.getThemeNames`). 
+ +### v2.4.0 + +* Add support for setting themesets on existing gauge objects. +* Add post-IO callback to `gauge.hide()` as it is somtetimes necessary when + your terminal is interleaving output from multiple filehandles (ie, stdout + & stderr). + +### v2.3.1 + +* Fix a refactor bug in setTheme where it wasn't accepting the various types + of args it should. + +### v2.3.0 + +#### FEATURES + +* Add setTemplate & setTheme back in. +* Add support for named themes, you can now ask for things like 'colorASCII' + and 'brailleSpinner'. Of course, you can still pass in theme objects. + Additionally you can now pass in an object with `hasUnicode`, `hasColor` and + `platform` keys in order to override our guesses as to those values when + selecting a default theme from the themeset. +* Make the output stream optional (it defaults to `process.stderr` now). +* Add `setWriteTo(stream[, tty])` to change the output stream and, + optionally, tty. + +#### BUG FIXES & REFACTORING + +* Abort the display phase early if we're supposed to be hidden and we are. +* Stop printing a bunch of spaces at the end of lines, since we're already + using an erase-to-end-of-line code anyway. +* The unicode themes were missing the subsection separator. + +### v2.2.1 + +* Fix image in readme + +### v2.2.0 + +* All new themes API– reference themes by name and pass in custom themes and + themesets (themesets get platform support autodetection done on them to + select the best theme). Theme mixins let you add features to all existing + themes. +* Much, much improved test coverage. + +### v2.1.0 + +* Got rid of ░ in the default platform, noUnicode, hasColor theme. Thanks + to @yongtw123 for pointing out this had snuck in. +* Fiddled with the demo output to make it easier to see the spinner spin. Also + added prints before each platforms test output. +* I forgot to include `signal-exit` in our deps. <.< Thank you @KenanY for + finding this. Then I was lazy and made a new commit instead of using his + PR. Again, thank you for your patience @KenenY. +* Drastically speed up travis testing. +* Add a small javascript demo (demo.js) for showing off the various themes + (and testing them on diff platforms). +* Change: The subsection separator from ⁄ and / (different chars) to >. +* Fix crasher: A show or pulse without a label would cause the template renderer + to complain about a missing value. +* New feature: Add the ability to disable the clean-up-on-exit behavior. + Not something I expect to be widely desirable, but important if you have + multiple distinct gauge instances in your app. +* Use our own color support detection. + The `has-color` module proved too magic for my needs, making assumptions + as to which stream we write to and reading command line arguments. + +### v2.0.0 + +This is a major rewrite of the internals. Externally there are fewer +changes: + +* On node>0.8 gauge object now prints updates at a fixed rate. This means + that when you call `show` it may wate up to `updateInterval` ms before it + actually prints an update. You override this behavior with the + `fixedFramerate` option. +* The gauge object now keeps the cursor hidden as long as it's enabled and + shown. +* The constructor's arguments have changed, now it takes a mandatory output + stream and an optional options object. The stream no longer needs to be + an `ansi`ified stream, although it can be if you want (but we won't make + use of its special features). +* Previously the gauge was disabled by default if `process.stdout` wasn't a + tty. 
Now it always defaults to enabled. If you want the previous + behavior set the `enabled` option to `process.stdout.isTTY`. +* The constructor's options have changed– see the docs for details. +* Themes are entirely different. If you were using a custom theme, or + referring to one directly (eg via `Gauge.unicode` or `Gauge.ascii`) then + you'll need to change your code. You can get the equivalent of the latter + with: + ``` + var themes = require('gauge/themes') + var unicodeTheme = themes(true, true) // returns the color unicode theme for your platform + ``` + The default themes no longer use any ambiguous width characters, so even + if you choose to display those as wide your progress bar should still + display correctly. +* Templates are entirely different and if you were using a custom one, you + should consult the documentation to learn how to recreate it. If you were + using the default, be aware that it has changed and the result looks quite + a bit different. diff --git a/node_modules/gauge/LICENSE b/node_modules/gauge/LICENSE new file mode 100644 index 0000000..e756052 --- /dev/null +++ b/node_modules/gauge/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/gauge/README.md b/node_modules/gauge/README.md new file mode 100644 index 0000000..bf87d18 --- /dev/null +++ b/node_modules/gauge/README.md @@ -0,0 +1,395 @@ +gauge +===== + +A nearly stateless terminal based horizontal gauge / progress bar. + +```javascript +var Gauge = require("gauge") + +var gauge = new Gauge() + +gauge.show("test", 0.20) + +gauge.pulse("this") + +gauge.hide() +``` + +![](gauge-demo.gif) + + +### CHANGES FROM 1.x + +Gauge 2.x is breaking release, please see the [changelog] for details on +what's changed if you were previously a user of this module. + +[changelog]: CHANGELOG.md + +### THE GAUGE CLASS + +This is the typical interface to the module– it provides a pretty +fire-and-forget interface to displaying your status information. + +``` +var Gauge = require("gauge") + +var gauge = new Gauge([stream], [options]) +``` + +* **stream** – *(optional, default STDERR)* A stream that progress bar + updates are to be written to. Gauge honors backpressure and will pause + most writing if it is indicated. +* **options** – *(optional)* An option object. + +Constructs a new gauge. Gauges are drawn on a single line, and are not drawn +if **stream** isn't a tty and a tty isn't explicitly provided. + +If **stream** is a terminal or if you pass in **tty** to **options** then we +will detect terminal resizes and redraw to fit. We do this by watching for +`resize` events on the tty. (To work around a bug in verisons of Node prior +to 2.5.0, we watch for them on stdout if the tty is stderr.) 
Resizes to +larger window sizes will be clean, but shrinking the window will always +result in some cruft. + +**IMPORTANT:** If you prevously were passing in a non-tty stream but you still +want output (for example, a stream wrapped by the `ansi` module) then you +need to pass in the **tty** option below, as `gauge` needs access to +the underlying tty in order to do things like terminal resizes and terminal +width detection. + +The **options** object can have the following properties, all of which are +optional: + +* **updateInterval**: How often gauge updates should be drawn, in miliseconds. +* **fixedFramerate**: Defaults to false on node 0.8, true on everything + else. When this is true a timer is created to trigger once every + `updateInterval` ms, when false, updates are printed as soon as they come + in but updates more often than `updateInterval` are ignored. The reason + 0.8 doesn't have this set to true is that it can't `unref` its timer and + so it would stop your program from exiting– if you want to use this + feature with 0.8 just make sure you call `gauge.disable()` before you + expect your program to exit. +* **themes**: A themeset to use when selecting the theme to use. Defaults + to `gauge/themes`, see the [themes] documentation for details. +* **theme**: Select a theme for use, it can be a: + * Theme object, in which case the **themes** is not used. + * The name of a theme, which will be looked up in the current *themes* + object. + * A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if wlll be used to override our guesses when making + a default theme selection. + + If no theme is selected then a default is picked using a combination of our + best guesses at your OS, color support and unicode support. +* **template**: Describes what you want your gauge to look like. The + default is what npm uses. Detailed [documentation] is later in this + document. +* **hideCursor**: Defaults to true. If true, then the cursor will be hidden + while the gauge is displayed. +* **tty**: The tty that you're ultimately writing to. Defaults to the same + as **stream**. This is used for detecting the width of the terminal and + resizes. The width used is `tty.columns - 1`. If no tty is available then + a width of `79` is assumed. +* **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true + the gauge starts enabled. If disabled then all update commands are + ignored and no gauge will be printed until you call `.enable()`. +* **Plumbing**: The class to use to actually generate the gauge for + printing. This defaults to `require('gauge/plumbing')` and ordinarly you + shouldn't need to override this. +* **cleanupOnExit**: Defaults to true. Ordinarily we register an exit + handler to make sure your cursor is turned back on and the progress bar + erased when your process exits, even if you Ctrl-C out or otherwise exit + unexpectedly. You can disable this and it won't register the exit handler. + +[has-unicode]: https://www.npmjs.com/package/has-unicode +[themes]: #themes +[documentation]: #templates + +#### `gauge.show(section | status, [completed])` + +The first argument is either the section, the name of the current thing +contributing to progress, or an object with keys like **section**, +**subsection** & **completed** (or any others you have types for in a custom +template). If you don't want to update or set any of these you can pass +`null` and it will be ignored. + +The second argument is the percent completed as a value between 0 and 1. 
+Without it, completion is just not updated. You'll also note that completion +can be passed in as part of a status object as the first argument. If both +it and the completed argument are passed in, the completed argument wins. + +#### `gauge.hide([cb])` + +Removes the gauge from the terminal. Optionally, callback `cb` after IO has +had an opportunity to happen (currently this just means after `setImmediate` +has called back.) + +It turns out this is important when you're pausing the progress bar on one +filehandle and printing to another– otherwise (with a big enough print) node +can end up printing the "end progress bar" bits to the progress bar filehandle +while other stuff is printing to another filehandle. These getting interleaved +can cause corruption in some terminals. + +#### `gauge.pulse([subsection])` + +* **subsection** – *(optional)* The specific thing that triggered this pulse + +Spins the spinner in the gauge to show output. If **subsection** is +included then it will be combined with the last name passed to `gauge.show`. + +#### `gauge.disable()` + +Hides the gauge and ignores further calls to `show` or `pulse`. + +#### `gauge.enable()` + +Shows the gauge and resumes updating when `show` or `pulse` is called. + +#### `gauge.setThemeset(themes)` + +Change the themeset to select a theme from. The same as the `themes` option +used in the constructor. The theme will be reselected from this themeset. + +#### `gauge.setTheme(theme)` + +Change the active theme, will be displayed with the next show or pulse. This can be: + +* Theme object, in which case the **themes** is not used. +* The name of a theme, which will be looked up in the current *themes* + object. +* A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if wlll be used to override our guesses when making + a default theme selection. + +If no theme is selected then a default is picked using a combination of our +best guesses at your OS, color support and unicode support. + +#### `gauge.setTemplate(template)` + +Change the active template, will be displayed with the next show or pulse + +### Tracking Completion + +If you have more than one thing going on that you want to track completion +of, you may find the related [are-we-there-yet] helpful. It's `change` +event can be wired up to the `show` method to get a more traditional +progress bar interface. + +[are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet + +### THEMES + +``` +var themes = require('gauge/themes') + +// fetch the default color unicode theme for this platform +var ourTheme = themes({hasUnicode: true, hasColor: true}) + +// fetch the default non-color unicode theme for osx +var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'}) + +// create a new theme based on the color ascii theme for this platform +// that brackets the progress bar with arrows +var ourTheme = themes.newTheme(theme(hasUnicode: false, hasColor: true}), { + preProgressbar: '→', + postProgressbar: '←' +}) +``` + +The object returned by `gauge/themes` is an instance of the `ThemeSet` class. + +``` +var ThemeSet = require('gauge/theme-set') +var themes = new ThemeSet() +// or +var themes = require('gauge/themes') +var mythemes = themes.newThemeset() // creates a new themeset based on the default themes +``` + +#### themes(opts) +#### themes.getDefault(opts) + +Theme objects are a function that fetches the default theme based on +platform, unicode and color support. 
+ +Options is an object with the following properties: + +* **hasUnicode** - If true, fetch a unicode theme, if no unicode theme is + available then a non-unicode theme will be used. +* **hasColor** - If true, fetch a color theme, if no color theme is + available a non-color theme will be used. +* **platform** (optional) - Defaults to `process.platform`. If no + platform match is available then `fallback` is used instead. + +If no compatible theme can be found then an error will be thrown with a +`code` of `EMISSINGTHEME`. + +#### themes.addTheme(themeName, themeObj) +#### themes.addTheme(themeName, [parentTheme], newTheme) + +Adds a named theme to the themeset. You can pass in either a theme object, +as returned by `themes.newTheme` or the arguments you'd pass to +`themes.newTheme`. + +#### themes.getThemeNames() + +Return a list of all of the names of the themes in this themeset. Suitable +for use in `themes.getTheme(…)`. + +#### themes.getTheme(name) + +Returns the theme object from this theme set named `name`. + +If `name` does not exist in this themeset an error will be thrown with +a `code` of `EMISSINGTHEME`. + +#### themes.setDefault([opts], themeName) + +`opts` is an object with the following properties. + +* **platform** - Defaults to `'fallback'`. If your theme is platform + specific, specify that here with the platform from `process.platform`, eg, + `win32`, `darwin`, etc. +* **hasUnicode** - Defaults to `false`. If your theme uses unicode you + should set this to true. +* **hasColor** - Defaults to `false`. If your theme uses color you should + set this to true. + +`themeName` is the name of the theme (as given to `addTheme`) to use for +this set of `opts`. + +#### themes.newTheme([parentTheme,] newTheme) + +Create a new theme object based on `parentTheme`. If no `parentTheme` is +provided then a minimal parentTheme that defines functions for rendering the +activity indicator (spinner) and progress bar will be defined. (This +fallback parent is defined in `gauge/base-theme`.) + +newTheme should be a bare object– we'll start by discussing the properties +defined by the default themes: + +* **preProgressbar** - displayed prior to the progress bar, if the progress + bar is displayed. +* **postProgressbar** - displayed after the progress bar, if the progress bar + is displayed. +* **progressBarTheme** - The subtheme passed through to the progress bar + renderer, it's an object with `complete` and `remaining` properties + that are the strings you want repeated for those sections of the progress + bar. +* **activityIndicatorTheme** - The theme for the activity indicator (spinner), + this can either be a string, in which each character is a different step, or + an array of strings. +* **preSubsection** - Displayed as a separator between the `section` and + `subsection` when the latter is printed. + +More generally, themes can have any value that would be a valid value when rendering +templates. The properties in the theme are used when their name matches a type in +the template. Their values can be: + +* **strings & numbers** - They'll be included as is +* **function (values, theme, width)** - Should return what you want in your output. + *values* is an object with values provided via `gauge.show`, + *theme* is the theme specific to this item (see below) or this theme object, + and *width* is the number of characters wide your result should be. + +There are a couple of special prefixes: + +* **pre** - Is shown prior to the property, if its displayed. 
+* **post** - Is shown after the property, if its displayed. + +And one special suffix: + +* **Theme** - Its value is passed to a function-type item as the theme. + +#### themes.addToAllThemes(theme) + +This *mixes-in* `theme` into all themes currently defined. It also adds it +to the default parent theme for this themeset, so future themes added to +this themeset will get the values from `theme` by default. + +#### themes.newThemeset() + +Copy the current themeset into a new one. This allows you to easily inherit +one themeset from another. + +### TEMPLATES + +A template is an array of objects and strings that, after being evaluated, +will be turned into the gauge line. The default template is: + +```javascript +[ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1, default: ''}, + {type: 'subsection', kerning: 1, default: ''} +] +``` + +The various template elements can either be **plain strings**, in which case they will +be be included verbatum in the output, or objects with the following properties: + +* *type* can be any of the following plus any keys you pass into `gauge.show` plus + any keys you have on a custom theme. + * `section` – What big thing you're working on now. + * `subsection` – What component of that thing is currently working. + * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme` + from your active theme. + * `progressbar` – A progress bar representing your current `completed` + using the `progressbarTheme` from your active theme. +* *kerning* – Number of spaces that must be between this item and other + items, if this item is displayed at all. +* *maxLength* – The maximum length for this element. If its value is longer it + will be truncated. +* *minLength* – The minimum length for this element. If its value is shorter it + will be padded according to the *align* value. +* *align* – (Default: left) Possible values "left", "right" and "center". Works + as you'd expect from word processors. +* *length* – Provides a single value for both *minLength* and *maxLength*. If both + *length* and *minLength or *maxLength* are specifed then the latter take precedence. +* *value* – A literal value to use for this template item. +* *default* – A default value to use for this template item if a value + wasn't otherwise passed in. + +### PLUMBING + +This is the super simple, assume nothing, do no magic internals used by gauge to +implement its ordinary interface. + +``` +var Plumbing = require('gauge/plumbing') +var gauge = new Plumbing(theme, template, width) +``` + +* **theme**: The theme to use. +* **template**: The template to use. +* **width**: How wide your gauge should be + +#### `gauge.setTheme(theme)` + +Change the active theme. + +#### `gauge.setTemplate(template)` + +Change the active template. + +#### `gauge.setWidth(width)` + +Change the width to render at. + +#### `gauge.hide()` + +Return the string necessary to hide the progress bar + +#### `gauge.hideCursor()` + +Return a string to hide the cursor. + +#### `gauge.showCursor()` + +Return a string to show the cursor. + +#### `gauge.show(status)` + +Using `status` for values, render the provided template with the theme and return +a string that is suitable for printing to update the gauge. 
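
Putting the pieces above together, here is a minimal usage sketch based only on the constructor options, `show`, `pulse` and `hide` calls documented in this README; the output stream, section names and completion values are invented for illustration:

```javascript
var Gauge = require("gauge")

// Custom template: progress bar, spinner, then section/subsection labels
// that fall back to empty strings when no value is passed in.
var gauge = new Gauge(process.stderr, {
  updateInterval: 100,
  theme: {hasUnicode: false, hasColor: false}, // force the plain ASCII, no-color theme
  template: [
    {type: 'progressbar', length: 20},
    {type: 'activityIndicator', kerning: 1, length: 1},
    {type: 'section', kerning: 1, default: ''},
    {type: 'subsection', kerning: 1, default: ''}
  ]
})

gauge.show({section: 'install', subsection: 'fstream'}, 0.25)
gauge.pulse('resolving deps')   // spins the activity indicator
gauge.show(null, 0.75)          // update completion only, keep the labels
gauge.hide(function () {
  console.log('done')           // safe to print to the terminal again
})
```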
diff --git a/node_modules/gauge/README.md~ b/node_modules/gauge/README.md~ new file mode 100644 index 0000000..171e337 --- /dev/null +++ b/node_modules/gauge/README.md~ @@ -0,0 +1,393 @@ +gauge +===== + +A nearly stateless terminal based horizontal gauge / progress bar. + +```javascript +var Gauge = require("gauge") + +var gauge = new Gauge() + +gauge.show("test", 0.20) + +gauge.pulse("this") + +gauge.hide() +``` + +![](gauge-demo.gif) + + +### CHANGES FROM 1.x + +Gauge 2.x is breaking release, please see the [changelog] for details on +what's changed if you were previously a user of this module. + +[changelog]: CHANGELOG.md + +### THE GAUGE CLASS + +This is the typical interface to the module– it provides a pretty +fire-and-forget interface to displaying your status information. + +``` +var Gauge = require("gauge") + +var gauge = new Gauge([stream], [options]) +``` + +* **stream** – *(optional, default STDERR)* A stream that progress bar + updates are to be written to. Gauge honors backpressure and will pause + most writing if it is indicated. +* **options** – *(optional)* An option object. + +Constructs a new gauge. Gauges are drawn on a single line, and are not drawn +if **stream** isn't a tty and a tty isn't explicitly provided. + +If **stream** is a terminal or if you pass in **tty** to **options** then we +will detect terminal resizes and redraw to fit. We do this by watching for +`resize` events on the tty. (To work around a bug in verisons of Node prior +to 2.5.0, we watch for them on stdout if the tty is stderr.) Resizes to +larger window sizes will be clean, but shrinking the window will always +result in some cruft. + +**IMPORTANT:** If you prevously were passing in a non-tty stream but you still +want output (for example, a stream wrapped by the `ansi` module) then you +need to pass in the **tty** option below, as `gauge` needs access to +the underlying tty in order to do things like terminal resizes and terminal +width detection. + +The **options** object can have the following properties, all of which are +optional: + +* **updateInterval**: How often gauge updates should be drawn, in miliseconds. +* **fixedFramerate**: Defaults to false on node 0.8, true on everything + else. When this is true a timer is created to trigger once every + `updateInterval` ms, when false, updates are printed as soon as they come + in but updates more often than `updateInterval` are ignored. The reason + 0.8 doesn't have this set to true is that it can't `unref` its timer and + so it would stop your program from exiting– if you want to use this + feature with 0.8 just make sure you call `gauge.disable()` before you + expect your program to exit. +* **themes**: A themeset to use when selecting the theme to use. Defaults + to `gauge/themes`, see the [themes] documentation for details. +* **theme**: Select a theme for use, it can be a: + * Theme object, in which case the **themes** is not used. + * The name of a theme, which will be looked up in the current *themes* + object. + * A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if wlll be used to override our guesses when making + a default theme selection. + + If no theme is selected then a default is picked using a combination of our + best guesses at your OS, color support and unicode support. +* **template**: Describes what you want your gauge to look like. The + default is what npm uses. Detailed [documentation] is later in this + document. +* **hideCursor**: Defaults to true. 
If true, then the cursor will be hidden + while the gauge is displayed. +* **tty**: The tty that you're ultimately writing to. Defaults to the same + as **stream**. This is used for detecting the width of the terminal and + resizes. The width used is `tty.columns - 1`. If no tty is available then + a width of `79` is assumed. +* **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true + the gauge starts enabled. If disabled then all update commands are + ignored and no gauge will be printed until you call `.enable()`. +* **Plumbing**: The class to use to actually generate the gauge for + printing. This defaults to `require('gauge/plumbing')` and ordinarly you + shouldn't need to override this. +* **cleanupOnExit**: Defaults to true. Ordinarily we register an exit + handler to make sure your cursor is turned back on and the progress bar + erased when your process exits, even if you Ctrl-C out or otherwise exit + unexpectedly. You can disable this and it won't register the exit handler. + +[has-unicode]: https://www.npmjs.com/package/has-unicode +[themes]: #themes +[documentation]: #templates + +#### `gauge.show(section | status, [completed])` + +The first argument is either the section, the name of the current thing +contributing to progress, or an object with keys like **section**, +**subsection** & **completed** (or any others you have types for in a custom +template). If you don't want to update or set any of these you can pass +`null` and it will be ignored. + +The second argument is the percent completed as a value between 0 and 1. +Without it, completion is just not updated. You'll also note that completion +can be passed in as part of a status object as the first argument. If both +it and the completed argument are passed in, the completed argument wins. + +#### `gauge.hide([cb])` + +Removes the gauge from the terminal. Optionally, callback `cb` after IO has +had an opportunity to happen (currently this just means after `setImmediate` +has called back.) + +It turns out this is important when you're pausing the progress bar on one +filehandle and printing to another– otherwise (with a big enough print) node +can end up printing the "end progress bar" bits to the progress bar filehandle +while other stuff is printing to another filehandle. These getting interleaved +can cause corruption in some terminals. + +#### `gauge.pulse([subsection])` + +* **subsection** – *(optional)* The specific thing that triggered this pulse + +Spins the spinner in the gauge to show output. If **subsection** is +included then it will be combined with the last name passed to `gauge.show`. + +#### `gauge.disable()` + +Hides the gauge and ignores further calls to `show` or `pulse`. + +#### `gauge.enable()` + +Shows the gauge and resumes updating when `show` or `pulse` is called. + +#### `gauge.setThemeset(themes)` + +Change the themeset to select a theme from. The same as the `themes` option +used in the constructor. The theme will be reselected from this themeset. + +#### `gauge.setTheme(theme)` + +Change the active theme, will be displayed with the next show or pulse. This can be: + +* Theme object, in which case the **themes** is not used. +* The name of a theme, which will be looked up in the current *themes* + object. +* A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if wlll be used to override our guesses when making + a default theme selection. 
+ +If no theme is selected then a default is picked using a combination of our +best guesses at your OS, color support and unicode support. + +#### `gauge.setTemplate(template)` + +Change the active template, will be displayed with the next show or pulse + +### Tracking Completion + +If you have more than one thing going on that you want to track completion +of, you may find the related [are-we-there-yet] helpful. It's `change` +event can be wired up to the `show` method to get a more traditional +progress bar interface. + +[are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet + +### THEMES + +``` +var themes = require('gauge/themes') + +// fetch the default color unicode theme for this platform +var ourTheme = themes({hasUnicode: true, hasColor: true}) + +// fetch the default non-color unicode theme for osx +var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'}) + +// create a new theme based on the color ascii theme for this platform +// that brackets the progress bar with arrows +var ourTheme = themes.newTheme(theme(hasUnicode: false, hasColor: true}), { + preProgressbar: '→', + postProgressbar: '←' +}) +``` + +The object returned by `gauge/themes` is an instance of the `ThemeSet` class. + +``` +var ThemeSet = require('gauge/theme-set') +var themes = new ThemeSet() +// or +var themes = require('gauge/themes') +var mythemes = themes.newThemeset() // creates a new themeset based on the default themes +``` + +#### themes(opts) +#### themes.getDefault(opts) + +Theme objects are a function that fetches the default theme based on +platform, unicode and color support. + +Options is an object with the following properties: + +* **hasUnicode** - If true, fetch a unicode theme, if no unicode theme is + available then a non-unicode theme will be used. +* **hasColor** - If true, fetch a color theme, if no color theme is + available a non-color theme will be used. +* **platform** (optional) - Defaults to `process.platform`. If no + platform match is available then `fallback` is used instead. + +If no compatible theme can be found then an error will be thrown with a +`code` of `EMISSINGTHEME`. + +#### themes.addTheme(themeName, themeObj) +#### themes.addTheme(themeName, [parentTheme], newTheme) + +Adds a named theme to the themeset. You can pass in either a theme object, +as returned by `themes.newTheme` or the arguments you'd pass to +`themes.newTheme`. + +#### themes.getThemeNames() + +Return a list of all of the names of the themes in this themeset. Suitable +for use in `themes.getTheme(…)`. + +#### themes.getTheme(name) + +Returns the theme object from this theme set named `name`. + +If `name` does not exist in this themeset an error will be thrown with +a `code` of `EMISSINGTHEME`. + +#### themes.setDefault([opts], themeName) + +`opts` is an object with the following properties. + +* **platform** - Defaults to `'fallback'`. If your theme is platform + specific, specify that here with the platform from `process.platform`, eg, + `win32`, `darwin`, etc. +* **hasUnicode** - Defaults to `false`. If your theme uses unicode you + should set this to true. +* **hasColor** - Defaults to `false`. If your theme uses color you should + set this to true. + +`themeName` is the name of the theme (as given to `addTheme`) to use for +this set of `opts`. + +#### themes.newTheme([parentTheme,] newTheme) + +Create a new theme object based on `parentTheme`. 
If no `parentTheme` is +provided then a minimal parentTheme that defines functions for rendering the +activity indicator (spinner) and progress bar will be defined. (This +fallback parent is defined in `gauge/base-theme`.) + +newTheme should be a bare object– we'll start by discussing the properties +defined by the default themes: + +* **preProgressbar** - displayed prior to the progress bar, if the progress + bar is displayed. +* **postProgressbar** - displayed after the progress bar, if the progress bar + is displayed. +* **progressBarTheme** - The subtheme passed through to the progress bar + renderer, it's an object with `complete` and `remaining` properties + that are the strings you want repeated for those sections of the progress + bar. +* **activityIndicatorTheme** - The theme for the activity indicator (spinner), + this can either be a string, in which each character is a different step, or + an array of strings. +* **preSubsection** - Displayed as a separator between the `section` and + `subsection` when the latter is printed. + +More generally, themes can have any value that would be a valid value when rendering +templates. The properties in the theme are used when their name matches a type in +the template. Their values can be: + +* **strings & numbers** - They'll be included as is +* **function (values, theme, width)** - Should return what you want in your output. + *values* is an object with values provided via `gauge.show`, + *theme* is the theme specific to this item (see below) or this theme object, + and *width* is the number of characters wide your result should be. + +There are a couple of special prefixes: + +* **pre** - Is shown prior to the property, if its displayed. +* **post** - Is shown after the property, if its displayed. + +And one special suffix: + +* **Theme** - Its value is passed to a function-type item as the theme. + +#### themes.addToAllThemes(theme) + +This *mixes-in* `theme` into all themes currently defined. It also adds it +to the default parent theme for this themeset, so future themes added to +this themeset will get the values from `theme` by default. + +#### themes.newThemeset() + +Copy the current themeset into a new one. This allows you to easily inherit +one themeset from another. + +### TEMPLATES + +A template is an array of objects and strings that, after being evaluated, +will be turned into the gauge line. The default template is: + +```javascript +[ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1}, + {type: 'subsection', kerning: 1} +] +``` + +The various template elements can either be **plain strings**, in which case they will +be be included verbatum in the output, or objects with the following properties: + +* *type* can be any of the following plus any keys you pass into `gauge.show` plus + any keys you have on a custom theme. + * `section` – What big thing you're working on now. + * `subsection` – What component of that thing is currently working. + * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme` + from your active theme. + * `progressbar` – A progress bar representing your current `completed` + using the `progressbarTheme` from your active theme. +* *kerning* – Number of spaces that must be between this item and other + items, if this item is displayed at all. +* *maxLength* – The maximum length for this element. If its value is longer it + will be truncated. +* *minLength* – The minimum length for this element. 
+  If its value is shorter it will be padded according to the *align* value.
+* *align* – (Default: left) Possible values "left", "right" and "center". Works
+  as you'd expect from word processors.
+* *length* – Provides a single value for both *minLength* and *maxLength*. If both
+  *length* and *minLength* or *maxLength* are specified then the latter take precedence.
+* *value* – A literal value to use for this template item.
+
+### PLUMBING
+
+This is the super simple, assume-nothing, do-no-magic set of internals used by gauge
+to implement its ordinary interface.
+
+```
+var Plumbing = require('gauge/plumbing')
+var gauge = new Plumbing(theme, template, width)
+```
+
+* **theme**: The theme to use.
+* **template**: The template to use.
+* **width**: How wide your gauge should be.
+
+#### `gauge.setTheme(theme)`
+
+Change the active theme.
+
+#### `gauge.setTemplate(template)`
+
+Change the active template.
+
+#### `gauge.setWidth(width)`
+
+Change the width to render at.
+
+#### `gauge.hide()`
+
+Return the string necessary to hide the progress bar.
+
+#### `gauge.hideCursor()`
+
+Return a string to hide the cursor.
+
+#### `gauge.showCursor()`
+
+Return a string to show the cursor.
+
+#### `gauge.show(status)`
+
+Using `status` for values, render the provided template with the theme and return
+a string that is suitable for printing to update the gauge.
diff --git a/node_modules/gauge/base-theme.js b/node_modules/gauge/base-theme.js
new file mode 100644
index 0000000..0b67638
--- /dev/null
+++ b/node_modules/gauge/base-theme.js
@@ -0,0 +1,14 @@
+'use strict'
+var spin = require('./spin.js')
+var progressBar = require('./progress-bar.js')
+
+module.exports = {
+  activityIndicator: function (values, theme, width) {
+    if (values.spun == null) return
+    return spin(theme, values.spun)
+  },
+  progressbar: function (values, theme, width) {
+    if (values.completed == null) return
+    return progressBar(theme, width, values.completed)
+  }
+}
diff --git a/node_modules/gauge/error.js b/node_modules/gauge/error.js
new file mode 100644
index 0000000..d9914ba
--- /dev/null
+++ b/node_modules/gauge/error.js
@@ -0,0 +1,24 @@
+'use strict'
+var util = require('util')
+
+var User = exports.User = function User (msg) {
+  var err = new Error(msg)
+  Error.captureStackTrace(err, User)
+  err.code = 'EGAUGE'
+  return err
+}
+
+exports.MissingTemplateValue = function MissingTemplateValue (item, values) {
+  var err = new User(util.format('Missing template value "%s"', item.type))
+  Error.captureStackTrace(err, MissingTemplateValue)
+  err.template = item
+  err.values = values
+  return err
+}
+
+exports.Internal = function Internal (msg) {
+  var err = new Error(msg)
+  Error.captureStackTrace(err, Internal)
+  err.code = 'EGAUGEINTERNAL'
+  return err
+}
diff --git a/node_modules/gauge/has-color.js b/node_modules/gauge/has-color.js
new file mode 100644
index 0000000..e283a25
--- /dev/null
+++ b/node_modules/gauge/has-color.js
@@ -0,0 +1,12 @@
+'use strict'
+
+module.exports = isWin32() || isColorTerm()
+
+function isWin32 () {
+  return process.platform === 'win32'
+}
+
+function isColorTerm () {
+  var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i
+  return !!process.env.COLORTERM || termHasColor.test(process.env.TERM)
+}
diff --git a/node_modules/gauge/index.js b/node_modules/gauge/index.js
new file mode 100644
index 0000000..7eefb95
--- /dev/null
+++ b/node_modules/gauge/index.js
@@ -0,0 +1,226 @@
+'use strict'
+var Plumbing = require('./plumbing.js')
+var hasUnicode = require('has-unicode')
+var hasColor = 
require('./has-color.js') +var onExit = require('signal-exit') +var defaultThemes = require('./themes') +var setInterval = require('./set-interval.js') +var process = require('./process.js') +var setImmediate = require('./set-immediate') + +module.exports = Gauge + +function callWith (obj, method) { + return function () { + return method.call(obj) + } +} + +function Gauge (arg1, arg2) { + var options, writeTo + if (arg1 && arg1.write) { + writeTo = arg1 + options = arg2 || {} + } else if (arg2 && arg2.write) { + writeTo = arg2 + options = arg1 || {} + } else { + writeTo = process.stderr + options = arg1 || arg2 || {} + } + + this._status = { + spun: 0, + section: '', + subsection: '' + } + this._paused = false // are we paused for back pressure? + this._disabled = true // are all progress bar updates disabled? + this._showing = false // do we WANT the progress bar on screen + this._onScreen = false // IS the progress bar on screen + this._needsRedraw = false // should we print something at next tick? + this._hideCursor = options.hideCursor == null ? true : options.hideCursor + this._fixedFramerate = options.fixedFramerate == null + ? !(/^v0\.8\./.test(process.version)) + : options.fixedFramerate + this._lastUpdateAt = null + this._updateInterval = options.updateInterval == null ? 50 : options.updateInterval + + this._themes = options.themes || defaultThemes + this._theme = options.theme + var theme = this._computeTheme(options.theme) + var template = options.template || [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1, default: ''}, + {type: 'subsection', kerning: 1, default: ''} + ] + this.setWriteTo(writeTo, options.tty) + var PlumbingClass = options.Plumbing || Plumbing + this._gauge = new PlumbingClass(theme, template, this.getWidth()) + + this._$$doRedraw = callWith(this, this._doRedraw) + this._$$handleSizeChange = callWith(this, this._handleSizeChange) + + if (options.cleanupOnExit == null || options.cleanupOnExit) { + onExit(callWith(this, this.disable)) + } + + if (options.enabled || (options.enabled == null && this._tty && this._tty.isTTY)) { + this.enable() + } else { + this.disable() + } +} +Gauge.prototype = {} + +Gauge.prototype.setTemplate = function (template) { + this._gauge.setTemplate(template) + if (this._showing) this._requestRedraw() +} + +Gauge.prototype._computeTheme = function (theme) { + if (!theme) theme = {} + if (theme && (Object.keys(theme).length === 0 || theme.hasUnicode != null || theme.hasColor != null)) { + var useUnicode = theme.hasUnicode == null ? hasUnicode() : theme.hasUnicode + var useColor = theme.hasColor == null ? 
hasColor : theme.hasColor + theme = this._themes.getDefault({hasUnicode: useUnicode, hasColor: useColor, platform: theme.platform}) + } else if (typeof theme === 'string') { + theme = this._themes.getTheme(theme) + } + return theme +} + +Gauge.prototype.setThemeset = function (themes) { + this._themes = themes + this.setTheme(this._theme) +} + +Gauge.prototype.setTheme = function (theme) { + this._gauge.setTheme(this._computeTheme(theme)) + if (this._showing) this._requestRedraw() + this._theme = theme +} + +Gauge.prototype._requestRedraw = function () { + this._needsRedraw = true + if (!this._fixedFramerate) this._doRedraw() +} + +Gauge.prototype.getWidth = function () { + return ((this._tty && this._tty.columns) || 80) - 1 +} + +Gauge.prototype.setWriteTo = function (writeTo, tty) { + var enabled = !this._disabled + if (enabled) this.disable() + this._writeTo = writeTo + this._tty = tty || + (writeTo === process.stderr && process.stdout.isTTY && process.stdout) || + (writeTo.isTTY && writeTo) || + this._tty + if (this._gauge) this._gauge.setWidth(this.getWidth()) + if (enabled) this.enable() +} + +Gauge.prototype.enable = function () { + if (!this._disabled) return + this._disabled = false + if (this._tty) this._enableEvents() + if (this._showing) this.show() +} + +Gauge.prototype.disable = function () { + if (this._disabled) return + if (this._showing) { + this._lastUpdateAt = null + this._showing = false + this._doRedraw() + this._showing = true + } + this._disabled = true + if (this._tty) this._disableEvents() +} + +Gauge.prototype._enableEvents = function () { + this._tty.on('resize', this._$$handleSizeChange) + if (this._fixedFramerate) { + this.redrawTracker = setInterval(this._$$doRedraw, this._updateInterval) + if (this.redrawTracker.unref) this.redrawTracker.unref() + } +} + +Gauge.prototype._disableEvents = function () { + this._tty.removeListener('resize', this._$$handleSizeChange) + if (this._fixedFramerate) clearInterval(this.redrawTracker) +} + +Gauge.prototype.hide = function (cb) { + if (this._disabled) return cb && process.nextTick(cb) + if (!this._showing) return cb && process.nextTick(cb) + this._showing = false + this._doRedraw() + cb && setImmediate(cb) +} + +Gauge.prototype.show = function (section, completed) { + if (this._disabled) return + this._showing = true + if (typeof section === 'string') { + this._status.section = section + } else if (typeof section === 'object') { + var sectionKeys = Object.keys(section) + for (var ii = 0; ii < sectionKeys.length; ++ii) { + var key = sectionKeys[ii] + this._status[key] = section[key] + } + } + if (completed != null) this._status.completed = completed + this._requestRedraw() +} + +Gauge.prototype.pulse = function (subsection) { + if (this._disabled) return + if (!this._showing) return + this._status.subsection = subsection || '' + this._status.spun ++ + this._requestRedraw() +} + +Gauge.prototype._handleSizeChange = function () { + this._gauge.setWidth(this._tty.columns - 1) + this._requestRedraw() +} + +Gauge.prototype._doRedraw = function () { + if (this._disabled || this._paused) return + if (!this._fixedFramerate) { + var now = Date.now() + if (this._lastUpdateAt && now - this._lastUpdateAt < this._updateInterval) return + this._lastUpdateAt = now + } + if (!this._showing && this._onScreen) { + this._onScreen = false + var result = this._gauge.hide() + if (this._hideCursor) { + result += this._gauge.showCursor() + } + return this._writeTo.write(result) + } + if (!this._showing && !this._onScreen) return + if 
(this._showing && !this._onScreen) { + this._onScreen = true + this._needsRedraw = true + if (this._hideCursor) { + this._writeTo.write(this._gauge.hideCursor()) + } + } + if (!this._needsRedraw) return + if (!this._writeTo.write(this._gauge.show(this._status))) { + this._paused = true + this._writeTo.on('drain', callWith(this, function () { + this._paused = false + this._doRedraw() + })) + } +} diff --git a/node_modules/gauge/package.json b/node_modules/gauge/package.json new file mode 100644 index 0000000..a5d729c --- /dev/null +++ b/node_modules/gauge/package.json @@ -0,0 +1,63 @@ +{ + "name": "gauge", + "version": "2.6.0", + "description": "A terminal based horizontal guage", + "main": "index.js", + "scripts": { + "test": "standard && tap test/*.js --coverage" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/gauge" + }, + "keywords": [ + "progressbar", + "progress", + "gauge" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/gauge/issues" + }, + "homepage": "https://github.com/iarna/gauge", + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-color": "^0.1.7", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + }, + "devDependencies": { + "readable-stream": "^2.0.6", + "require-inject": "^1.4.0", + "standard": "^7.1.2", + "tap": "^5.7.2", + "through2": "^2.0.0" + }, + "files": [ + "base-theme.js", + "CHANGELOG.md", + "error.js", + "has-color.js", + "index.js", + "LICENSE", + "package.json", + "plumbing.js", + "process.js", + "progress-bar.js", + "README.md", + "render-template.js", + "set-immediate.js", + "set-interval.js", + "spin.js", + "template-item.js", + "theme-set.js", + "themes.js", + "wide-truncate.js" + ] +} diff --git a/node_modules/gauge/plumbing.js b/node_modules/gauge/plumbing.js new file mode 100644 index 0000000..589a9c9 --- /dev/null +++ b/node_modules/gauge/plumbing.js @@ -0,0 +1,47 @@ +'use strict' +var consoleControl = require('console-control-strings') +var renderTemplate = require('./render-template.js') +var validate = require('aproba') + +var Plumbing = module.exports = function (theme, template, width) { + if (!width) width = 80 + validate('OAN', [theme, template, width]) + this.showing = false + this.theme = theme + this.width = width + this.template = template +} +Plumbing.prototype = {} + +Plumbing.prototype.setTheme = function (theme) { + validate('O', [theme]) + this.theme = theme +} + +Plumbing.prototype.setTemplate = function (template) { + validate('A', [template]) + this.template = template +} + +Plumbing.prototype.setWidth = function (width) { + validate('N', [width]) + this.width = width +} + +Plumbing.prototype.hide = function () { + return consoleControl.gotoSOL() + consoleControl.eraseLine() +} + +Plumbing.prototype.hideCursor = consoleControl.hideCursor + +Plumbing.prototype.showCursor = consoleControl.showCursor + +Plumbing.prototype.show = function (status) { + var values = Object.create(this.theme) + for (var key in status) { + values[key] = status[key] + } + + return renderTemplate(this.width, this.template, values).trim() + + consoleControl.eraseLine() + consoleControl.gotoSOL() +} diff --git a/node_modules/gauge/process.js b/node_modules/gauge/process.js new file mode 100644 index 0000000..05e8569 --- /dev/null +++ b/node_modules/gauge/process.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during 
testing +module.exports = process diff --git a/node_modules/gauge/progress-bar.js b/node_modules/gauge/progress-bar.js new file mode 100644 index 0000000..7f8dd68 --- /dev/null +++ b/node_modules/gauge/progress-bar.js @@ -0,0 +1,35 @@ +'use strict' +var validate = require('aproba') +var renderTemplate = require('./render-template.js') +var wideTruncate = require('./wide-truncate') +var stringWidth = require('string-width') + +module.exports = function (theme, width, completed) { + validate('ONN', [theme, width, completed]) + if (completed < 0) completed = 0 + if (completed > 1) completed = 1 + if (width <= 0) return '' + var sofar = Math.round(width * completed) + var rest = width - sofar + var template = [ + {type: 'complete', value: repeat(theme.complete, sofar), length: sofar}, + {type: 'remaining', value: repeat(theme.remaining, rest), length: rest} + ] + return renderTemplate(width, template, theme) +} + +// lodash's way of repeating +function repeat (string, width) { + var result = '' + var n = width + do { + if (n % 2) { + result += string + } + n = Math.floor(n / 2) + /*eslint no-self-assign: 0*/ + string += string + } while (n && stringWidth(result) < width) + + return wideTruncate(result, width) +} diff --git a/node_modules/gauge/render-template.js b/node_modules/gauge/render-template.js new file mode 100644 index 0000000..3261bfb --- /dev/null +++ b/node_modules/gauge/render-template.js @@ -0,0 +1,181 @@ +'use strict' +var align = require('wide-align') +var validate = require('aproba') +var objectAssign = require('object-assign') +var wideTruncate = require('./wide-truncate') +var error = require('./error') +var TemplateItem = require('./template-item') + +function renderValueWithValues (values) { + return function (item) { + return renderValue(item, values) + } +} + +var renderTemplate = module.exports = function (width, template, values) { + var items = prepareItems(width, template, values) + var rendered = items.map(renderValueWithValues(values)).join('') + return align.left(wideTruncate(rendered, width), width) +} + +function preType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'pre' + cappedTypeName +} + +function postType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'post' + cappedTypeName +} + +function hasPreOrPost (item, values) { + if (!item.type) return + return values[preType(item)] || values[postType(item)] +} + +function generatePreAndPost (baseItem, parentValues) { + var item = objectAssign({}, baseItem) + var values = Object.create(parentValues) + var template = [] + var pre = preType(item) + var post = postType(item) + if (values[pre]) { + template.push({value: values[pre]}) + values[pre] = null + } + item.minLength = null + item.length = null + item.maxLength = null + template.push(item) + values[item.type] = values[item.type] + if (values[post]) { + template.push({value: values[post]}) + values[post] = null + } + return function ($1, $2, length) { + return renderTemplate(length, template, values) + } +} + +function prepareItems (width, template, values) { + function cloneAndObjectify (item, index, arr) { + var cloned = new TemplateItem(item, width) + var type = cloned.type + if (cloned.value == null) { + if (!(type in values)) { + if (cloned.default == null) { + throw new error.MissingTemplateValue(cloned, values) + } else { + cloned.value = cloned.default + } + } else { + cloned.value = values[type] + } + } + if (cloned.value == null || cloned.value === '') return null + 
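// record each item's position in the template: the kerning pass below
+    // needs to know an item's neighbours and whether it is first or last
+    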
cloned.index = index + cloned.first = index === 0 + cloned.last = index === arr.length - 1 + if (hasPreOrPost(cloned, values)) cloned.value = generatePreAndPost(cloned, values) + return cloned + } + + var output = template.map(cloneAndObjectify).filter(function (item) { return item != null }) + + var outputLength = 0 + var remainingSpace = width + var variableCount = output.length + + function consumeSpace (length) { + if (length > remainingSpace) length = remainingSpace + outputLength += length + remainingSpace -= length + } + + function finishSizing (item, length) { + if (item.finished) throw new error.Internal('Tried to finish template item that was already finished') + if (length === Infinity) throw new error.Internal('Length of template item cannot be infinity') + if (length != null) item.length = length + item.minLength = null + item.maxLength = null + --variableCount + item.finished = true + if (item.length == null) item.length = item.getBaseLength() + if (item.length == null) throw new error.Internal('Finished template items must have a length') + consumeSpace(item.getLength()) + } + + output.forEach(function (item) { + if (!item.kerning) return + var prevPadRight = item.first ? 0 : output[item.index - 1].padRight + if (!item.first && prevPadRight < item.kerning) item.padLeft = item.kerning - prevPadRight + if (!item.last) item.padRight = item.kerning + }) + + // Finish any that have a fixed (literal or intuited) length + output.forEach(function (item) { + if (item.getBaseLength() == null) return + finishSizing(item) + }) + + var resized = 0 + var resizing + var hunkSize + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.maxLength) return + if (item.getMaxLength() < hunkSize) { + finishSizing(item, item.maxLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining maxLength') + + resized = 0 + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.minLength) return + if (item.getMinLength() >= hunkSize) { + finishSizing(item, item.minLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining minLength') + + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + finishSizing(item, hunkSize) + }) + + return output +} + +function renderFunction (item, values, length) { + validate('OON', arguments) + if (item.type) { + return item.value(values, values[item.type + 'Theme'] || {}, length) + } else { + return item.value(values, {}, length) + } +} + +function renderValue (item, values) { + var length = item.getBaseLength() + var value = typeof item.value === 'function' ? renderFunction(item, values, length) : item.value + if (value == null || value === '') return '' + var alignWith = align[item.align] || align.left + var leftPadding = item.padLeft ? align.left('', item.padLeft) : '' + var rightPadding = item.padRight ? 
align.right('', item.padRight) : '' + var truncated = wideTruncate(String(value), length) + var aligned = alignWith(truncated, length) + return leftPadding + aligned + rightPadding +} diff --git a/node_modules/gauge/set-immediate.js b/node_modules/gauge/set-immediate.js new file mode 100644 index 0000000..6650a48 --- /dev/null +++ b/node_modules/gauge/set-immediate.js @@ -0,0 +1,7 @@ +'use strict' +var process = require('./process') +try { + module.exports = setImmediate +} catch (ex) { + module.exports = process.nextTick +} diff --git a/node_modules/gauge/set-interval.js b/node_modules/gauge/set-interval.js new file mode 100644 index 0000000..5761987 --- /dev/null +++ b/node_modules/gauge/set-interval.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = setInterval diff --git a/node_modules/gauge/spin.js b/node_modules/gauge/spin.js new file mode 100644 index 0000000..34142ee --- /dev/null +++ b/node_modules/gauge/spin.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function spin (spinstr, spun) { + return spinstr[spun % spinstr.length] +} diff --git a/node_modules/gauge/template-item.js b/node_modules/gauge/template-item.js new file mode 100644 index 0000000..e46f447 --- /dev/null +++ b/node_modules/gauge/template-item.js @@ -0,0 +1,73 @@ +'use strict' +var stringWidth = require('string-width') + +module.exports = TemplateItem + +function isPercent (num) { + if (typeof num !== 'string') return false + return num.slice(-1) === '%' +} + +function percent (num) { + return Number(num.slice(0, -1)) / 100 +} + +function TemplateItem (values, outputLength) { + this.overallOutputLength = outputLength + this.finished = false + this.type = null + this.value = null + this.length = null + this.maxLength = null + this.minLength = null + this.kerning = null + this.align = 'left' + this.padLeft = 0 + this.padRight = 0 + this.index = null + this.first = null + this.last = null + if (typeof values === 'string') { + this.value = values + } else { + for (var prop in values) this[prop] = values[prop] + } + // Realize percents + if (isPercent(this.length)) { + this.length = Math.round(this.overallOutputLength * percent(this.length)) + } + if (isPercent(this.minLength)) { + this.minLength = Math.round(this.overallOutputLength * percent(this.minLength)) + } + if (isPercent(this.maxLength)) { + this.maxLength = Math.round(this.overallOutputLength * percent(this.maxLength)) + } + return this +} + +TemplateItem.prototype = {} + +TemplateItem.prototype.getBaseLength = function () { + var length = this.length + if (length == null && typeof this.value === 'string' && this.maxLength == null && this.minLength == null) { + length = stringWidth(this.value) + } + return length +} + +TemplateItem.prototype.getLength = function () { + var length = this.getBaseLength() + if (length == null) return null + return length + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMaxLength = function () { + if (this.maxLength == null) return null + return this.maxLength + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMinLength = function () { + if (this.minLength == null) return null + return this.minLength + this.padLeft + this.padRight +} + diff --git a/node_modules/gauge/theme-set.js b/node_modules/gauge/theme-set.js new file mode 100644 index 0000000..68971d5 --- /dev/null +++ b/node_modules/gauge/theme-set.js @@ -0,0 +1,115 @@ +'use strict' +var objectAssign = require('object-assign') + +module.exports = function () { + return 
ThemeSetProto.newThemeSet() +} + +var ThemeSetProto = {} + +ThemeSetProto.baseTheme = require('./base-theme.js') + +ThemeSetProto.newTheme = function (parent, theme) { + if (!theme) { + theme = parent + parent = this.baseTheme + } + return objectAssign({}, parent, theme) +} + +ThemeSetProto.getThemeNames = function () { + return Object.keys(this.themes) +} + +ThemeSetProto.addTheme = function (name, parent, theme) { + this.themes[name] = this.newTheme(parent, theme) +} + +ThemeSetProto.addToAllThemes = function (theme) { + var themes = this.themes + Object.keys(themes).forEach(function (name) { + objectAssign(themes[name], theme) + }) + objectAssign(this.baseTheme, theme) +} + +ThemeSetProto.getTheme = function (name) { + if (!this.themes[name]) throw this.newMissingThemeError(name) + return this.themes[name] +} + +ThemeSetProto.setDefault = function (opts, name) { + if (name == null) { + name = opts + opts = {} + } + var platform = opts.platform == null ? 'fallback' : opts.platform + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!this.defaults[platform]) this.defaults[platform] = {true: {}, false: {}} + this.defaults[platform][hasUnicode][hasColor] = name +} + +ThemeSetProto.getDefault = function (opts) { + if (!opts) opts = {} + var platformName = opts.platform || process.platform + var platform = this.defaults[platformName] || this.defaults.fallback + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!platform) throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + if (!platform[hasUnicode][hasColor]) { + if (hasUnicode && hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (hasUnicode && hasColor && platform[!hasUnicode][!hasColor]) { + hasUnicode = false + hasColor = false + } else if (hasUnicode && !hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (!hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (platform === this.defaults.fallback) { + throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + } + } + if (platform[hasUnicode][hasColor]) { + return this.getTheme(platform[hasUnicode][hasColor]) + } else { + return this.getDefault(objectAssign({}, opts, {platform: 'fallback'})) + } +} + +ThemeSetProto.newMissingThemeError = function newMissingThemeError (name) { + var err = new Error('Could not find a gauge theme named "' + name + '"') + Error.captureStackTrace.call(err, newMissingThemeError) + err.theme = name + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newMissingDefaultThemeError = function newMissingDefaultThemeError (platformName, hasUnicode, hasColor) { + var err = new Error( + 'Could not find a gauge theme for your platform/unicode/color use combo:\n' + + ' platform = ' + platformName + '\n' + + ' hasUnicode = ' + hasUnicode + '\n' + + ' hasColor = ' + hasColor) + Error.captureStackTrace.call(err, newMissingDefaultThemeError) + err.platform = platformName + err.hasUnicode = hasUnicode + err.hasColor = hasColor + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newThemeSet = function () { + var themeset = function (opts) { + return themeset.getDefault(opts) + } + return objectAssign(themeset, ThemeSetProto, { + themes: objectAssign({}, this.themes), + baseTheme: objectAssign({}, this.baseTheme), + defaults: JSON.parse(JSON.stringify(this.defaults || {})) + }) +} + diff 
--git a/node_modules/gauge/themes.js b/node_modules/gauge/themes.js new file mode 100644 index 0000000..eb5a4f5 --- /dev/null +++ b/node_modules/gauge/themes.js @@ -0,0 +1,54 @@ +'use strict' +var consoleControl = require('console-control-strings') +var ThemeSet = require('./theme-set.js') + +var themes = module.exports = new ThemeSet() + +themes.addTheme('ASCII', { + preProgressbar: '[', + postProgressbar: ']', + progressbarTheme: { + complete: '#', + remaining: '.' + }, + activityIndicatorTheme: '-\\|/', + preSubsection: '>' +}) + +themes.addTheme('colorASCII', themes.getTheme('ASCII'), { + progressbarTheme: { + preComplete: consoleControl.color('inverse'), + complete: ' ', + postComplete: consoleControl.color('stopInverse'), + preRemaining: consoleControl.color('brightBlack'), + remaining: '.', + postRemaining: consoleControl.color('reset') + } +}) + +themes.addTheme('brailleSpinner', { + preProgressbar: '⸨', + postProgressbar: '⸩', + progressbarTheme: { + complete: '░', + remaining: '⠂' + }, + activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', + preSubsection: '>' +}) + +themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { + progressbarTheme: { + preComplete: consoleControl.color('inverse'), + complete: ' ', + postComplete: consoleControl.color('stopInverse'), + preRemaining: consoleControl.color('brightBlack'), + remaining: '░', + postRemaining: consoleControl.color('reset') + } +}) + +themes.setDefault({}, 'ASCII') +themes.setDefault({hasColor: true}, 'colorASCII') +themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner') +themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') diff --git a/node_modules/gauge/wide-truncate.js b/node_modules/gauge/wide-truncate.js new file mode 100644 index 0000000..c531bc4 --- /dev/null +++ b/node_modules/gauge/wide-truncate.js @@ -0,0 +1,25 @@ +'use strict' +var stringWidth = require('string-width') +var stripAnsi = require('strip-ansi') + +module.exports = wideTruncate + +function wideTruncate (str, target) { + if (stringWidth(str) === 0) return str + if (target <= 0) return '' + if (stringWidth(str) <= target) return str + + // We compute the number of bytes of ansi sequences here and add + // that to our initial truncation to ensure that we don't slice one + // that we want to keep in half. + var noAnsi = stripAnsi(str) + var ansiSize = str.length + noAnsi.length + var truncated = str.slice(0, target + ansiSize) + + // we have to shrink the result to account for our ansi sequence buffer + // (if an ansi sequence was truncated) and double width characters. 
+ while (stringWidth(truncated) > target) { + truncated = truncated.slice(0, -1) + } + return truncated +} diff --git a/node_modules/generate-function/.npmignore b/node_modules/generate-function/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/generate-function/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/generate-function/.travis.yml b/node_modules/generate-function/.travis.yml new file mode 100644 index 0000000..6e5919d --- /dev/null +++ b/node_modules/generate-function/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" diff --git a/node_modules/generate-function/README.md b/node_modules/generate-function/README.md new file mode 100644 index 0000000..693bff8 --- /dev/null +++ b/node_modules/generate-function/README.md @@ -0,0 +1,72 @@ +# generate-function + +Module that helps you write generated functions in Node + +``` +npm install generate-function +``` + +[![build status](http://img.shields.io/travis/mafintosh/generate-function.svg?style=flat)](http://travis-ci.org/mafintosh/generate-function) + +## Disclamer + +Writing code that generates code is hard. +You should only use this if you really, really, really need this for performance reasons (like schema validators / parsers etc). + +## Usage + +``` js +var genfun = require('generate-function') + +var addNumber = function(val) { + var fn = genfun() + ('function add(n) {') + ('return n + %d', val) // supports format strings to insert values + ('}') + + return fn.toFunction() // will compile the function +} + +var add2 = addNumber(2) + +console.log('1+2=', add2(1)) +console.log(add2.toString()) // prints the generated function +``` + +If you need to close over variables in your generated function pass them to `toFunction(scope)` + +``` js +var multiply = function(a, b) { + return a * b +} + +var addAndMultiplyNumber = function(val) { + var fn = genfun() + ('function(n) {') + ('if (typeof n !== "number") {') // ending a line with { will indent the source + ('throw new Error("argument should be a number")') + ('}') + ('var result = multiply(%d, n+%d)', val, val) + ('return result') + ('}') + + // use fn.toString() if you want to see the generated source + + return fn.toFunction({ + multiply: multiply + }) +} + +var addAndMultiply2 = addAndMultiplyNumber(2) + +console.log('(3 + 2) * 2 =', addAndMultiply2(3)) +``` + +## Related + +See [generate-object-property](https://github.com/mafintosh/generate-object-property) if you need to safely generate code that +can be used to reference an object property + +## License + +MIT \ No newline at end of file diff --git a/node_modules/generate-function/example.js b/node_modules/generate-function/example.js new file mode 100644 index 0000000..8d1fee1 --- /dev/null +++ b/node_modules/generate-function/example.js @@ -0,0 +1,27 @@ +var genfun = require('./') + +var multiply = function(a, b) { + return a * b +} + +var addAndMultiplyNumber = function(val) { + var fn = genfun() + ('function(n) {') + ('if (typeof n !== "number") {') // ending a line with { will indent the source + ('throw new Error("argument should be a number")') + ('}') + ('var result = multiply(%d, n+%d)', val, val) + ('return result') + ('}') + + // use fn.toString() if you want to see the generated source + + return fn.toFunction({ + multiply: multiply + }) +} + +var addAndMultiply2 = addAndMultiplyNumber(2) + +console.log(addAndMultiply2.toString()) +console.log('(3 + 2) * 2 =', addAndMultiply2(3)) \ No newline at end of file diff --git 
a/node_modules/generate-function/index.js b/node_modules/generate-function/index.js new file mode 100644 index 0000000..37e064b --- /dev/null +++ b/node_modules/generate-function/index.js @@ -0,0 +1,61 @@ +var util = require('util') + +var INDENT_START = /[\{\[]/ +var INDENT_END = /[\}\]]/ + +module.exports = function() { + var lines = [] + var indent = 0 + + var push = function(str) { + var spaces = '' + while (spaces.length < indent*2) spaces += ' ' + lines.push(spaces+str) + } + + var line = function(fmt) { + if (!fmt) return line + + if (INDENT_END.test(fmt.trim()[0]) && INDENT_START.test(fmt[fmt.length-1])) { + indent-- + push(util.format.apply(util, arguments)) + indent++ + return line + } + if (INDENT_START.test(fmt[fmt.length-1])) { + push(util.format.apply(util, arguments)) + indent++ + return line + } + if (INDENT_END.test(fmt.trim()[0])) { + indent-- + push(util.format.apply(util, arguments)) + return line + } + + push(util.format.apply(util, arguments)) + return line + } + + line.toString = function() { + return lines.join('\n') + } + + line.toFunction = function(scope) { + var src = 'return ('+line.toString()+')' + + var keys = Object.keys(scope || {}).map(function(key) { + return key + }) + + var vals = keys.map(function(key) { + return scope[key] + }) + + return Function.apply(null, keys.concat(src)).apply(null, vals) + } + + if (arguments.length) line.apply(null, arguments) + + return line +} diff --git a/node_modules/generate-function/package.json b/node_modules/generate-function/package.json new file mode 100644 index 0000000..6c4b38d --- /dev/null +++ b/node_modules/generate-function/package.json @@ -0,0 +1,29 @@ +{ + "name": "generate-function", + "version": "2.0.0", + "description": "Module that helps you write generated functions in Node", + "main": "index.js", + "scripts": { + "test": "tape test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/generate-function" + }, + "keywords": [ + "generate", + "code", + "generation", + "function", + "performance" + ], + "author": "Mathias Buus", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/generate-function/issues" + }, + "homepage": "https://github.com/mafintosh/generate-function", + "devDependencies": { + "tape": "^2.13.4" + } +} diff --git a/node_modules/generate-function/test.js b/node_modules/generate-function/test.js new file mode 100644 index 0000000..2768893 --- /dev/null +++ b/node_modules/generate-function/test.js @@ -0,0 +1,33 @@ +var tape = require('tape') +var genfun = require('./') + +tape('generate add function', function(t) { + var fn = genfun() + ('function add(n) {') + ('return n + %d', 42) + ('}') + + t.same(fn.toString(), 'function add(n) {\n return n + 42\n}', 'code is indented') + t.same(fn.toFunction()(10), 52, 'function works') + t.end() +}) + +tape('generate function + closed variables', function(t) { + var fn = genfun() + ('function add(n) {') + ('return n + %d + number', 42) + ('}') + + var notGood = fn.toFunction() + var good = fn.toFunction({number:10}) + + try { + notGood(10) + t.ok(false, 'function should not work') + } catch (err) { + t.same(err.message, 'number is not defined', 'throws reference error') + } + + t.same(good(11), 63, 'function with closed var works') + t.end() +}) \ No newline at end of file diff --git a/node_modules/generate-object-property/.npmignore b/node_modules/generate-object-property/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/generate-object-property/.npmignore @@ -0,0 
+1 @@ +node_modules diff --git a/node_modules/generate-object-property/.travis.yml b/node_modules/generate-object-property/.travis.yml new file mode 100644 index 0000000..6e5919d --- /dev/null +++ b/node_modules/generate-object-property/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" diff --git a/node_modules/generate-object-property/LICENSE b/node_modules/generate-object-property/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/generate-object-property/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/generate-object-property/README.md b/node_modules/generate-object-property/README.md new file mode 100644 index 0000000..0ee0461 --- /dev/null +++ b/node_modules/generate-object-property/README.md @@ -0,0 +1,19 @@ +# generate-object-property + +Generate safe JS code that can used to reference a object property + + npm install generate-object-property + +[![build status](http://img.shields.io/travis/mafintosh/generate-object-property.svg?style=flat)](http://travis-ci.org/mafintosh/generate-object-property) + +## Usage + +``` js +var gen = require('generate-object-property'); +console.log(gen('a','b')); // prints a.b +console.log(gen('a', 'foo-bar')); // prints a["foo-bar"] +``` + +## License + +MIT \ No newline at end of file diff --git a/node_modules/generate-object-property/index.js b/node_modules/generate-object-property/index.js new file mode 100644 index 0000000..5dc9f77 --- /dev/null +++ b/node_modules/generate-object-property/index.js @@ -0,0 +1,12 @@ +var isProperty = require('is-property') + +var gen = function(obj, prop) { + return isProperty(prop) ? obj+'.'+prop : obj+'['+JSON.stringify(prop)+']' +} + +gen.valid = isProperty +gen.property = function (prop) { + return isProperty(prop) ? 
prop : JSON.stringify(prop) +} + +module.exports = gen diff --git a/node_modules/generate-object-property/package.json b/node_modules/generate-object-property/package.json new file mode 100644 index 0000000..e6f7b11 --- /dev/null +++ b/node_modules/generate-object-property/package.json @@ -0,0 +1,25 @@ +{ + "name": "generate-object-property", + "version": "1.2.0", + "description": "Generate safe JS code that can used to reference a object property", + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/generate-object-property" + }, + "devDependencies": { + "tape": "^2.13.0" + }, + "scripts": { + "test": "tape test.js" + }, + "dependencies": { + "is-property": "^1.0.0" + }, + "bugs": { + "url": "https://github.com/mafintosh/generate-object-property/issues" + }, + "homepage": "https://github.com/mafintosh/generate-object-property", + "main": "index.js", + "author": "Mathias Buus (@mafintosh)", + "license": "MIT" +} diff --git a/node_modules/generate-object-property/test.js b/node_modules/generate-object-property/test.js new file mode 100644 index 0000000..6c299c6 --- /dev/null +++ b/node_modules/generate-object-property/test.js @@ -0,0 +1,12 @@ +var tape = require('tape') +var gen = require('./') + +tape('valid', function(t) { + t.same(gen('a', 'b'), 'a.b') + t.end() +}) + +tape('invalid', function(t) { + t.same(gen('a', '-b'), 'a["-b"]') + t.end() +}) \ No newline at end of file diff --git a/node_modules/getpass/.npmignore b/node_modules/getpass/.npmignore new file mode 100644 index 0000000..a4261fc --- /dev/null +++ b/node_modules/getpass/.npmignore @@ -0,0 +1,8 @@ +.gitmodules +deps +docs +Makefile +node_modules +test +tools +coverage diff --git a/node_modules/getpass/.travis.yml b/node_modules/getpass/.travis.yml new file mode 100644 index 0000000..d8b5833 --- /dev/null +++ b/node_modules/getpass/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +node_js: + - "5.10" + - "4.4" + - "4.1" + - "0.12" + - "0.10" +before_install: + - "make check" diff --git a/node_modules/getpass/LICENSE b/node_modules/getpass/LICENSE new file mode 100644 index 0000000..f6d947d --- /dev/null +++ b/node_modules/getpass/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/getpass/README.md b/node_modules/getpass/README.md new file mode 100644 index 0000000..6e4a50f --- /dev/null +++ b/node_modules/getpass/README.md @@ -0,0 +1,32 @@ +## getpass + +Get a password from the terminal. Sounds simple? 
Sounds like the `readline` +module should be able to do it? NOPE. + +## Install and use it + +```bash +npm install --save getpass +``` + +```javascript +const mod_getpass = require('getpass'); +``` + +## API + +### `mod_getpass.getPass([options, ]callback)` + +Gets a password from the terminal. If available, this uses `/dev/tty` to avoid +interfering with any data being piped in or out of stdio. + +This function prints a prompt (by default `Password:`) and then accepts input +without echoing. + +Parameters: + + * `options`, an Object, with properties: + * `prompt`, an optional String + * `callback`, a `Func(error, password)`, with arguments: + * `error`, either `null` (no error) or an `Error` instance + * `password`, a String diff --git a/node_modules/getpass/lib/index.js b/node_modules/getpass/lib/index.js new file mode 100644 index 0000000..55a7718 --- /dev/null +++ b/node_modules/getpass/lib/index.js @@ -0,0 +1,123 @@ +/* + * Copyright 2016, Joyent, Inc. All rights reserved. + * Author: Alex Wilson + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+*/ + +module.exports = { + getPass: getPass +}; + +const mod_tty = require('tty'); +const mod_fs = require('fs'); +const mod_assert = require('assert-plus'); + +var BACKSPACE = String.fromCharCode(127); +var CTRLC = '\u0003'; +var CTRLD = '\u0004'; + +function getPass(opts, cb) { + if (typeof (opts) === 'function' && cb === undefined) { + cb = opts; + opts = {}; + } + mod_assert.object(opts, 'options'); + mod_assert.func(cb, 'callback'); + + mod_assert.optionalString(opts.prompt, 'options.prompt'); + if (opts.prompt === undefined) + opts.prompt = 'Password'; + + openTTY(function (err, rfd, wfd, rtty, wtty) { + if (err) { + cb(err); + return; + } + + wtty.write(opts.prompt + ':'); + rtty.resume(); + rtty.setRawMode(true); + rtty.resume(); + rtty.setEncoding('utf8'); + + var pw = ''; + rtty.on('data', onData); + + function onData(data) { + var str = data.toString('utf8'); + for (var i = 0; i < str.length; ++i) { + var ch = str[i]; + switch (ch) { + case '\r': + case '\n': + case CTRLD: + cleanup(); + cb(null, pw); + return; + case CTRLC: + cleanup(); + cb(new Error('Aborted')); + return; + case BACKSPACE: + pw = pw.slice(0, pw.length - 1); + break; + default: + pw += ch; + break; + } + } + } + + function cleanup() { + wtty.write('\r\n'); + rtty.setRawMode(false); + rtty.pause(); + rtty.removeListener('data', onData); + if (wfd !== undefined && wfd !== rfd) { + wtty.end(); + mod_fs.closeSync(wfd); + } + if (rfd !== undefined) { + rtty.end(); + mod_fs.closeSync(rfd); + } + } + }); +} + +function openTTY(cb) { + mod_fs.open('/dev/tty', 'r+', function (err, rttyfd) { + if ((err && (err.code === 'ENOENT' || err.code === 'EACCES')) || + (process.version.match(/^v0[.][0-8][.]/))) { + cb(null, undefined, undefined, process.stdin, + process.stdout); + return; + } + var rtty = new mod_tty.ReadStream(rttyfd); + mod_fs.open('/dev/tty', 'w+', function (err3, wttyfd) { + var wtty = new mod_tty.WriteStream(wttyfd); + if (err3) { + cb(err3); + return; + } + cb(null, rttyfd, wttyfd, rtty, wtty); + }); + }); +} diff --git a/node_modules/getpass/node_modules/assert-plus/AUTHORS b/node_modules/getpass/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/getpass/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/getpass/node_modules/assert-plus/CHANGES.md b/node_modules/getpass/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..57d92bf --- /dev/null +++ b/node_modules/getpass/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,14 @@ +# assert-plus Changelog + +## 1.0.0 + +- *BREAKING* assert.number (and derivatives) now accept Infinity as valid input +- Add assert.finite check. Previous assert.number callers should use this if + they expect Infinity inputs to throw. 
+ +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/getpass/node_modules/assert-plus/README.md b/node_modules/getpass/node_modules/assert-plus/README.md new file mode 100644 index 0000000..ec200d1 --- /dev/null +++ b/node_modules/getpass/node_modules/assert-plus/README.md @@ -0,0 +1,162 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. 
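+
+As a sketch of the paragraph above (using a hypothetical `checkPort` helper that
+is not part of assert-plus), the checks throw normally but become no-ops when
+`NODE_NDEBUG=1` is set:
+
+```javascript
+var assert = require('assert-plus');
+
+// hypothetical helper that validates its arguments with assert-plus
+function checkPort(port, cb) {
+    assert.number(port, 'port'); // AssertionError: port (number) is required
+    assert.func(cb, 'cb');
+    cb(null, port);
+}
+
+// Throws when assertions are enabled; with NODE_NDEBUG=1 the checks are
+// no-ops and the bad argument passes straight through to the callback.
+checkPort('8080', function (err, port) {
+    console.log('listening on', port);
+});
+```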
+ +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.finite +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regexp +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfFinite +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfRegexp +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalFinite +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalRegexp +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfFinite +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfRegexp +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . diff --git a/node_modules/getpass/node_modules/assert-plus/assert.js b/node_modules/getpass/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..26f944e --- /dev/null +++ b/node_modules/getpass/node_modules/assert-plus/assert.js @@ -0,0 +1,211 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? 
typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? +} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 
'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/getpass/node_modules/assert-plus/package.json b/node_modules/getpass/node_modules/assert-plus/package.json new file mode 100644 index 0000000..40d6a5c --- /dev/null +++ b/node_modules/getpass/node_modules/assert-plus/package.json @@ -0,0 +1,23 @@ +{ + "author": "Mark Cavage ", + "name": "assert-plus", + "description": "Extra assertions on top of node's assert module", + "version": "1.0.0", + "license": "MIT", + "main": "./assert.js", + "devDependencies": { + "tape": "4.2.2", + "faucet": "0.0.1" + }, + "optionalDependencies": {}, + "scripts": { + "test": "./node_modules/.bin/tape tests/*.js | ./node_modules/.bin/faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/mcavage/node-assert-plus.git" + }, + "engines": { + "node": ">=0.8" + } +} diff --git a/node_modules/getpass/package.json b/node_modules/getpass/package.json new file mode 100644 index 0000000..72bee69 --- /dev/null +++ b/node_modules/getpass/package.json @@ -0,0 +1,23 @@ +{ + "name": "getpass", + "version": "0.1.6", + "description": "getpass for node.js", + "main": "lib/index.js", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "devDependencies": { + "json": "^9.0.3", + "pty.js": "^0.3.0", + "tape": "^4.4.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/arekinath/node-getpass.git" + }, + "scripts": { + "test": "tape test/*.test.js" + }, + "author": "Alex Wilson ", + "license": "MIT" +} diff --git a/node_modules/glob-base/LICENSE b/node_modules/glob-base/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/glob-base/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/glob-base/README.md b/node_modules/glob-base/README.md new file mode 100644 index 0000000..1da2e82 --- /dev/null +++ b/node_modules/glob-base/README.md @@ -0,0 +1,158 @@ +# glob-base [![NPM version](https://badge.fury.io/js/glob-base.svg)](http://badge.fury.io/js/glob-base) [![Build Status](https://travis-ci.org/jonschlinkert/glob-base.svg)](https://travis-ci.org/jonschlinkert/glob-base) + +> Returns an object with the (non-glob) base path and the actual pattern. 
+ +Use [glob-parent](https://github.com/es128/glob-parent) if you just want the base path. + +## Install with [npm](npmjs.org) + +```bash +npm i glob-base --save +``` + +## Related projects +* [glob-parent](https://github.com/es128/glob-parent): Strips glob magic from a string to provide the parent path +* [micromatch](https://github.com/jonschlinkert/micromatch): Glob matching for javascript/node.js. A faster alternative to minimatch (10-45x faster on avg), with all the features you're used to using in your Grunt and gulp tasks. +* [parse-glob](https://github.com/jonschlinkert/parse-glob): Parse a glob pattern into an object of tokens. +* [is-glob](https://github.com/jonschlinkert/is-glob): Returns `true` if the given string looks like a glob pattern. +* [braces](https://github.com/jonschlinkert/braces): Fastest brace expansion for node.js, with the most complete support for the Bash 4.3 braces specification. +* [fill-range](https://github.com/jonschlinkert/fill-range): Fill in a range of numbers or letters, optionally passing an increment or multiplier to use. +* [expand-range](https://github.com/jonschlinkert/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. See the benchmarks. Used by micromatch. + +## Usage + +```js +var globBase = require('glob-base'); + +globBase('a/b/.git/'); +//=> { base: 'a/b/.git/', isGlob: false, glob: '' }) + +globBase('a/b/**/e'); +//=> { base: 'a/b', isGlob: true, glob: '**/e' } + +globBase('a/b/*.{foo,bar}'); +//=> { base: 'a/b', isGlob: true, glob: '*.{foo,bar}' } + +globBase('a/b/.git/**'); +//=> { base: 'a/b/.git', isGlob: true, glob: '**' } + +globBase('a/b/c/*.md'); +//=> { base: 'a/b/c', isGlob: true, glob: '*.md' } + +globBase('a/b/c/.*.md'); +//=> { base: 'a/b/c', isGlob: true, glob: '.*.md' } + +globBase('a/b/{c,d}'); +//=> { base: 'a/b', isGlob: true, glob: '{c,d}' } + +globBase('!*.min.js'); +//=> { base: '.', isGlob: true, glob: '!*.min.js' } + +globBase('!foo'); +//=> { base: '.', isGlob: true, glob: '!foo' } + +globBase('!foo/(a|b).min.js'); +//=> { base: '.', isGlob: true, glob: '!foo/(a|b).min.js' } + +globBase(''); +//=> { base: '.', isGlob: false, glob: '' } + +globBase('**/*.md'); +//=> { base: '.', isGlob: true, glob: '**/*.md' } + +globBase('**/*.min.js'); +//=> { base: '.', isGlob: true, glob: '**/*.min.js' } + +globBase('**/.*'); +//=> { base: '.', isGlob: true, glob: '**/.*' } + +globBase('**/d'); +//=> { base: '.', isGlob: true, glob: '**/d' } + +globBase('*.*'); +//=> { base: '.', isGlob: true, glob: '*.*' } + +globBase('*.min.js'); +//=> { base: '.', isGlob: true, glob: '*.min.js' } + +globBase('*/*'); +//=> { base: '.', isGlob: true, glob: '*/*' } + +globBase('*b'); +//=> { base: '.', isGlob: true, glob: '*b' } + +globBase('.'); +//=> { base: '.', isGlob: false, glob: '.' 
} + +globBase('.*'); +//=> { base: '.', isGlob: true, glob: '.*' } + +globBase('./*'); +//=> { base: '.', isGlob: true, glob: '*' } + +globBase('/a'); +//=> { base: '/', isGlob: false, glob: 'a' } + +globBase('@(a|b)/e.f.g/'); +//=> { base: '.', isGlob: true, glob: '@(a|b)/e.f.g/' } + +globBase('[a-c]b*'); +//=> { base: '.', isGlob: true, glob: '[a-c]b*' } + +globBase('a'); +//=> { base: '.', isGlob: false, glob: 'a' } + +globBase('a.min.js'); +//=> { base: '.', isGlob: false, glob: 'a.min.js' } + +globBase('a/'); +//=> { base: 'a/', isGlob: false, glob: '' } + +globBase('a/**/j/**/z/*.md'); +//=> { base: 'a', isGlob: true, glob: '**/j/**/z/*.md' } + +globBase('a/*/c/*.md'); +//=> { base: 'a', isGlob: true, glob: '*/c/*.md' } + +globBase('a/?/c.md'); +//=> { base: 'a', isGlob: true, glob: '?/c.md' } + +globBase('a/??/c.js'); +//=> { base: 'a', isGlob: true, glob: '??/c.js' } + +globBase('a?b'); +//=> { base: '.', isGlob: true, glob: 'a?b' } + +globBase('bb'); +//=> { base: '.', isGlob: false, glob: 'bb' } + +globBase('c.md'); +//=> { base: '.', isGlob: false, glob: 'c.md' } +``` + +## Running tests +Install dev dependencies. + +```bash +npm i -d && npm test +``` + + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/glob-base/issues) + + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 08, 2015._ diff --git a/node_modules/glob-base/index.js b/node_modules/glob-base/index.js new file mode 100644 index 0000000..564b4a8 --- /dev/null +++ b/node_modules/glob-base/index.js @@ -0,0 +1,51 @@ +/*! + * glob-base + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var path = require('path'); +var parent = require('glob-parent'); +var isGlob = require('is-glob'); + +module.exports = function globBase(pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('glob-base expects a string.'); + } + + var res = {}; + res.base = parent(pattern); + res.isGlob = isGlob(pattern); + + if (res.base !== '.') { + res.glob = pattern.substr(res.base.length); + if (res.glob.charAt(0) === '/') { + res.glob = res.glob.substr(1); + } + } else { + res.glob = pattern; + } + + if (!res.isGlob) { + res.base = dirname(pattern); + res.glob = res.base !== '.' + ? 
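      // non-glob input: strip the dirname off the front; when dirname is '.',
      // keep the whole pattern as the glob part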
pattern.substr(res.base.length) + : pattern; + } + + if (res.glob.substr(0, 2) === './') { + res.glob = res.glob.substr(2); + } + if (res.glob.charAt(0) === '/') { + res.glob = res.glob.substr(1); + } + return res; +}; + +function dirname(glob) { + if (glob.slice(-1) === '/') return glob; + return path.dirname(glob); +} diff --git a/node_modules/glob-base/package.json b/node_modules/glob-base/package.json new file mode 100644 index 0000000..8cb2dbf --- /dev/null +++ b/node_modules/glob-base/package.json @@ -0,0 +1,52 @@ +{ + "name": "glob-base", + "description": "Returns an object with the (non-glob) base path and the actual pattern.", + "version": "0.3.0", + "homepage": "https://github.com/jonschlinkert/glob-base", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/glob-base.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/glob-base/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/glob-base/blob/master/LICENSE" + }, + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "glob-parent": "^2.0.0", + "is-glob": "^2.0.0" + }, + "devDependencies": { + "mocha": "*", + "should": "^5.1.0" + }, + "keywords": [ + "base", + "directory", + "dirname", + "expression", + "glob", + "parent", + "path", + "pattern", + "regex", + "regular", + "root" + ] +} diff --git a/node_modules/glob-parent/.npmignore b/node_modules/glob-parent/.npmignore new file mode 100644 index 0000000..33e391f --- /dev/null +++ b/node_modules/glob-parent/.npmignore @@ -0,0 +1,4 @@ +node_modules +.DS_Store +npm-debug.log +coverage diff --git a/node_modules/glob-parent/.travis.yml b/node_modules/glob-parent/.travis.yml new file mode 100644 index 0000000..18fc42f --- /dev/null +++ b/node_modules/glob-parent/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "4" + - "iojs-v3" + - "iojs-v2" + - "iojs-v1" + - "0.12" + - "0.10" diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..734076d --- /dev/null +++ b/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md new file mode 100644 index 0000000..ff5310d --- /dev/null +++ b/node_modules/glob-parent/README.md @@ -0,0 +1,43 @@ +glob-parent [![Build Status](https://travis-ci.org/es128/glob-parent.svg)](https://travis-ci.org/es128/glob-parent) [![Coverage Status](https://img.shields.io/coveralls/es128/glob-parent.svg)](https://coveralls.io/r/es128/glob-parent?branch=master) +====== +Javascript module to extract the non-magic parent path from a glob string. + +[![NPM](https://nodei.co/npm/glob-parent.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/glob-parent/) +[![NPM](https://nodei.co/npm-dl/glob-parent.png?height=3&months=9)](https://nodei.co/npm-dl/glob-parent/) + +Usage +----- +```sh +npm install glob-parent --save +``` + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) + +``` + +Change Log +---------- +[See release notes page on GitHub](https://github.com/es128/glob-parent/releases) + +License +------- +[ISC](https://raw.github.com/es128/glob-parent/master/LICENSE) diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js new file mode 100644 index 0000000..61615f1 --- /dev/null +++ b/node_modules/glob-parent/index.js @@ -0,0 +1,10 @@ +'use strict'; + +var path = require('path'); +var isglob = require('is-glob'); + +module.exports = function globParent(str) { + str += 'a'; // preserves full path in case of trailing path separator + do {str = path.dirname(str)} while (isglob(str)); + return str; +}; diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json new file mode 100644 index 0000000..5499007 --- /dev/null +++ b/node_modules/glob-parent/package.json @@ -0,0 +1,35 @@ +{ + "name": "glob-parent", + "version": "2.0.0", + "description": "Strips glob magic from a string to provide the parent path", + "main": "index.js", + "scripts": { + "test": "istanbul cover _mocha && cat ./coverage/lcov.info | coveralls" + }, + "repository": { + "type": "git", + "url": "https://github.com/es128/glob-parent" + }, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "directory", + "base" + ], + "author": "Elan Shanker", + "license": "ISC", + "bugs": { + "url": "https://github.com/es128/glob-parent/issues" + }, + "homepage": "https://github.com/es128/glob-parent", + "dependencies": { + "is-glob": "^2.0.0" + }, + "devDependencies": { + "coveralls": "^2.11.2", + "istanbul": "^0.3.5", + "mocha": "^2.1.0" + } +} diff --git a/node_modules/glob-parent/test.js b/node_modules/glob-parent/test.js new file mode 100644 index 0000000..01156d2 --- /dev/null +++ b/node_modules/glob-parent/test.js @@ -0,0 +1,28 @@ +'use strict'; + +var gp = require('./'); +var assert = require('assert'); + +describe('glob-parent', function() { + it('should strip glob magic to return parent path', 
function() { + assert.equal(gp('path/to/*.js'), 'path/to'); + assert.equal(gp('/root/path/to/*.js'), '/root/path/to'); + assert.equal(gp('/*.js'), '/'); + assert.equal(gp('*.js'), '.'); + assert.equal(gp('**/*.js'), '.'); + assert.equal(gp('path/{to,from}'), 'path'); + assert.equal(gp('path/!(to|from)'), 'path'); + assert.equal(gp('path/?(to|from)'), 'path'); + assert.equal(gp('path/+(to|from)'), 'path'); + assert.equal(gp('path/*(to|from)'), 'path'); + assert.equal(gp('path/@(to|from)'), 'path'); + assert.equal(gp('path/**/*'), 'path'); + assert.equal(gp('path/**/subdir/foo.*'), 'path'); + }); + + it('should return parent dirname from non-glob paths', function() { + assert.equal(gp('path/foo/bar.js'), 'path/foo'); + assert.equal(gp('path/foo/'), 'path/foo'); + assert.equal(gp('path/foo'), 'path'); + }); +}); diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md new file mode 100644 index 0000000..baa1d1b --- /dev/null +++ b/node_modules/glob/README.md @@ -0,0 +1,368 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![](oh-my-glob.gif) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. 
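As a rough sketch (the pattern and callback here are only placeholders), that expansion happens before any matching:

```javascript
var glob = require("glob")

// expanded to "a/b/c/*.js" and "abcd/*.js" before the filesystem is walked
glob("a{/b/c,bcd}/*.js", function (er, files) {
  // files contains the merged (de-duplicated) matches of both patterns
})
```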
+ +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. 
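A minimal sketch of consuming those events as they arrive (the pattern and options are placeholders; only events documented under "Events" below are used):

```javascript
var Glob = require("glob").Glob

var mg = new Glob("**/*.js", { nodir: true })
mg.on("match", function (file) {
  // emitted each time a match is found, as soon as it is found
})
mg.on("end", function (files) {
  // emitted once the walk finishes, with all matches (sorted unless nosort)
})
```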
+ +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. 
Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. 
+ +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. 
+ +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` diff --git a/node_modules/glob/changelog.md b/node_modules/glob/changelog.md new file mode 100644 index 0000000..4163677 --- /dev/null +++ b/node_modules/glob/changelog.md @@ -0,0 +1,67 @@ +## 7.0 + +- Raise error if `options.cwd` is specified, and not a directory + +## 6.0 + +- Remove comment and negation pattern support +- Ignore patterns are always in `dot:true` mode + +## 5.0 + +- Deprecate comment and negation patterns +- Fix regression in `mark` and `nodir` options from making all cache + keys absolute path. +- Abort if `fs.readdir` returns an error that's unexpected +- Don't emit `match` events for ignored items +- Treat ENOTSUP like ENOTDIR in readdir + +## 4.5 + +- Add `options.follow` to always follow directory symlinks in globstar +- Add `options.realpath` to call `fs.realpath` on all results +- Always cache based on absolute path + +## 4.4 + +- Add `options.ignore` +- Fix handling of broken symlinks + +## 4.3 + +- Bump minimatch to 2.x +- Pass all tests on Windows + +## 4.2 + +- Add `glob.hasMagic` function +- Add `options.nodir` flag + +## 4.1 + +- Refactor sync and async implementations for performance +- Throw if callback provided to sync glob function +- Treat symbolic links in globstar results the same as Bash 4.3 + +## 4.0 + +- Use `^` for dependency versions (bumped major because this breaks + older npm versions) +- Ensure callbacks are only ever called once +- switch to ISC license + +## 3.x + +- Rewrite in JavaScript +- Add support for setting root, cwd, and windows support +- Cache many fs calls +- Add globstar support +- emit match events + +## 2.x + +- Use `glob.h` and `fnmatch.h` from NetBSD + +## 1.x + +- `glob.h` static binding. diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js new file mode 100644 index 0000000..66651bb --- /dev/null +++ b/node_modules/glob/common.js @@ -0,0 +1,240 @@ +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) +} + +function alphasort (a, b) { + return a.localeCompare(b) +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
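  // e.g. with { matchBase: true }, a slash-less pattern like "*.js" is
  // rewritten to "**/*.js" below so it can match at any depth (and
  // therefore requires globstar)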
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(self.nocase ? 
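      // nocase: compare lower-cased names; otherwise plain localeCompare order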
alphasorti : alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js new file mode 100644 index 0000000..bfdd7a1 --- /dev/null +++ b/node_modules/glob/glob.js @@ -0,0 +1,792 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
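// Example: for the pattern "a/*/b" the minimatch set is ["a", <regex for *>, "b"],
// so PREFIX is "a"; readdir("a") supplies the candidate ENTRIES, each entry is
// tested against the "*" part, and every match is re-processed with the
// remaining ["b"] tail attached.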
+ +module.exports = glob + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
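  // (Concretely, each this.matches[i] ends up as a set-like object keyed by
  // filename, e.g. { 'lib/a.js': true }.)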
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + var n = this.minimatch.set.length + this._processing = 0 + this.matches = new Array(n) + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
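  // e.g. with prefix "a", remain [<regex for *>, "c"] and matched entries
  // ["x", "y"], this recurses on ["a/x", "c"] and ["a/y", "c"]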
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
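      // readdir failed because the path is not a directory, so remember it as
      // a plain FILE; if that path is the cwd itself, the whole glob is
      // aborted with an 'invalid cwd' error below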
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
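  // _processSimple is only reached when the pattern has no magic left at all:
  // stat the literal path and emit it as a match if it exists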
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 0000000..e403b4f --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,43 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.1.1", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.2", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^7.1.2", + "tick": "0.0.6" + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap test/*.js --cov", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC" +} diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js new file mode 100644 index 0000000..c952134 --- /dev/null +++ b/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/globals/globals.json b/node_modules/globals/globals.json new file mode 100644 index 0000000..9a25ed0 --- /dev/null +++ b/node_modules/globals/globals.json @@ -0,0 +1,1286 @@ +{ + "builtin": { + "Array": false, + "ArrayBuffer": false, + "Boolean": false, + "constructor": false, + "DataView": false, + "Date": false, + "decodeURI": false, + "decodeURIComponent": false, + "encodeURI": false, + "encodeURIComponent": false, + "Error": false, + "escape": false, + "eval": false, + "EvalError": false, + "Float32Array": false, + "Float64Array": false, + "Function": false, + "hasOwnProperty": false, + "Infinity": false, + "Int16Array": false, + "Int32Array": false, + "Int8Array": false, + "isFinite": false, + "isNaN": false, + "isPrototypeOf": false, + "JSON": false, + "Map": false, + "Math": false, + "NaN": false, + "Number": false, + "Object": false, + "parseFloat": false, + "parseInt": false, + "Promise": false, + "propertyIsEnumerable": false, + "Proxy": false, + "RangeError": false, + "ReferenceError": false, + "Reflect": false, + "RegExp": false, + "Set": false, + "String": false, + "Symbol": false, + "SyntaxError": false, + "System": false, + "toLocaleString": false, + "toString": false, + "TypeError": false, + "Uint16Array": false, + "Uint32Array": false, + "Uint8Array": false, + "Uint8ClampedArray": false, + "undefined": false, + "unescape": false, + "URIError": false, + "valueOf": false, + "WeakMap": false, + "WeakSet": false + }, + "es5": { + "Array": false, + "Boolean": false, + "constructor": false, + "Date": false, + "decodeURI": false, + "decodeURIComponent": false, + "encodeURI": false, + "encodeURIComponent": false, + "Error": false, + "escape": false, + "eval": false, + "EvalError": false, + "Function": false, + "hasOwnProperty": false, + "Infinity": false, + "isFinite": false, + "isNaN": false, + "isPrototypeOf": false, + "JSON": false, + "Math": false, + "NaN": false, + "Number": false, + "Object": false, + "parseFloat": false, + "parseInt": false, + "propertyIsEnumerable": false, + "RangeError": false, + "ReferenceError": false, + "RegExp": false, + "String": false, + "SyntaxError": false, + "toLocaleString": false, + "toString": false, + "TypeError": false, + "undefined": false, + "unescape": false, + "URIError": false, + "valueOf": false + }, + "es6": { + "Array": false, + "ArrayBuffer": false, + "Boolean": false, + "constructor": false, + "DataView": false, + "Date": false, + "decodeURI": false, + "decodeURIComponent": false, + "encodeURI": false, + "encodeURIComponent": false, + "Error": false, + "escape": false, + "eval": false, + "EvalError": false, + "Float32Array": false, + "Float64Array": false, + "Function": false, + "hasOwnProperty": false, + "Infinity": false, + "Int16Array": false, + "Int32Array": false, + "Int8Array": false, + "isFinite": false, + "isNaN": false, + "isPrototypeOf": false, + "JSON": false, + "Map": false, + "Math": false, + "NaN": false, + "Number": false, + "Object": false, + "parseFloat": false, + "parseInt": false, + "Promise": false, + "propertyIsEnumerable": false, + "Proxy": false, + "RangeError": false, + "ReferenceError": false, + "Reflect": false, + "RegExp": false, + "Set": false, + "String": false, + "Symbol": false, + "SyntaxError": 
false, + "System": false, + "toLocaleString": false, + "toString": false, + "TypeError": false, + "Uint16Array": false, + "Uint32Array": false, + "Uint8Array": false, + "Uint8ClampedArray": false, + "undefined": false, + "unescape": false, + "URIError": false, + "valueOf": false, + "WeakMap": false, + "WeakSet": false + }, + "browser": { + "addEventListener": false, + "alert": false, + "AnalyserNode": false, + "Animation": false, + "AnimationEffectReadOnly": false, + "AnimationEffectTiming": false, + "AnimationEffectTimingReadOnly": false, + "AnimationEvent": false, + "AnimationPlaybackEvent": false, + "AnimationTimeline": false, + "applicationCache": false, + "ApplicationCache": false, + "ApplicationCacheErrorEvent": false, + "atob": false, + "Attr": false, + "Audio": false, + "AudioBuffer": false, + "AudioBufferSourceNode": false, + "AudioContext": false, + "AudioDestinationNode": false, + "AudioListener": false, + "AudioNode": false, + "AudioParam": false, + "AudioProcessingEvent": false, + "AutocompleteErrorEvent": false, + "BarProp": false, + "BatteryManager": false, + "BeforeUnloadEvent": false, + "BiquadFilterNode": false, + "Blob": false, + "blur": false, + "btoa": false, + "Cache": false, + "caches": false, + "CacheStorage": false, + "cancelAnimationFrame": false, + "CanvasGradient": false, + "CanvasPattern": false, + "CanvasRenderingContext2D": false, + "CDATASection": false, + "ChannelMergerNode": false, + "ChannelSplitterNode": false, + "CharacterData": false, + "clearInterval": false, + "clearTimeout": false, + "clientInformation": false, + "ClientRect": false, + "ClientRectList": false, + "ClipboardEvent": false, + "close": false, + "closed": false, + "CloseEvent": false, + "Comment": false, + "CompositionEvent": false, + "confirm": false, + "console": false, + "ConvolverNode": false, + "Credential": false, + "CredentialsContainer": false, + "crypto": false, + "Crypto": false, + "CryptoKey": false, + "CSS": false, + "CSSAnimation": false, + "CSSFontFaceRule": false, + "CSSImportRule": false, + "CSSKeyframeRule": false, + "CSSKeyframesRule": false, + "CSSMediaRule": false, + "CSSPageRule": false, + "CSSRule": false, + "CSSRuleList": false, + "CSSStyleDeclaration": false, + "CSSStyleRule": false, + "CSSStyleSheet": false, + "CSSSupportsRule": false, + "CSSTransition": false, + "CSSUnknownRule": false, + "CSSViewportRule": false, + "CustomEvent": false, + "DataTransfer": false, + "DataTransferItem": false, + "DataTransferItemList": false, + "Debug": false, + "defaultStatus": false, + "defaultstatus": false, + "DelayNode": false, + "DeviceMotionEvent": false, + "DeviceOrientationEvent": false, + "devicePixelRatio": false, + "dispatchEvent": false, + "document": false, + "Document": false, + "DocumentFragment": false, + "DocumentTimeline": false, + "DocumentType": false, + "DOMError": false, + "DOMException": false, + "DOMImplementation": false, + "DOMParser": false, + "DOMSettableTokenList": false, + "DOMStringList": false, + "DOMStringMap": false, + "DOMTokenList": false, + "DragEvent": false, + "DynamicsCompressorNode": false, + "Element": false, + "ElementTimeControl": false, + "ErrorEvent": false, + "event": false, + "Event": false, + "EventSource": false, + "EventTarget": false, + "external": false, + "FederatedCredential": false, + "fetch": false, + "File": false, + "FileError": false, + "FileList": false, + "FileReader": false, + "find": false, + "focus": false, + "FocusEvent": false, + "FontFace": false, + "FormData": false, + "frameElement": false, + "frames": false, + 
"GainNode": false, + "Gamepad": false, + "GamepadButton": false, + "GamepadEvent": false, + "getComputedStyle": false, + "getSelection": false, + "HashChangeEvent": false, + "Headers": false, + "history": false, + "History": false, + "HTMLAllCollection": false, + "HTMLAnchorElement": false, + "HTMLAppletElement": false, + "HTMLAreaElement": false, + "HTMLAudioElement": false, + "HTMLBaseElement": false, + "HTMLBlockquoteElement": false, + "HTMLBodyElement": false, + "HTMLBRElement": false, + "HTMLButtonElement": false, + "HTMLCanvasElement": false, + "HTMLCollection": false, + "HTMLContentElement": false, + "HTMLDataListElement": false, + "HTMLDetailsElement": false, + "HTMLDialogElement": false, + "HTMLDirectoryElement": false, + "HTMLDivElement": false, + "HTMLDListElement": false, + "HTMLDocument": false, + "HTMLElement": false, + "HTMLEmbedElement": false, + "HTMLFieldSetElement": false, + "HTMLFontElement": false, + "HTMLFormControlsCollection": false, + "HTMLFormElement": false, + "HTMLFrameElement": false, + "HTMLFrameSetElement": false, + "HTMLHeadElement": false, + "HTMLHeadingElement": false, + "HTMLHRElement": false, + "HTMLHtmlElement": false, + "HTMLIFrameElement": false, + "HTMLImageElement": false, + "HTMLInputElement": false, + "HTMLIsIndexElement": false, + "HTMLKeygenElement": false, + "HTMLLabelElement": false, + "HTMLLayerElement": false, + "HTMLLegendElement": false, + "HTMLLIElement": false, + "HTMLLinkElement": false, + "HTMLMapElement": false, + "HTMLMarqueeElement": false, + "HTMLMediaElement": false, + "HTMLMenuElement": false, + "HTMLMetaElement": false, + "HTMLMeterElement": false, + "HTMLModElement": false, + "HTMLObjectElement": false, + "HTMLOListElement": false, + "HTMLOptGroupElement": false, + "HTMLOptionElement": false, + "HTMLOptionsCollection": false, + "HTMLOutputElement": false, + "HTMLParagraphElement": false, + "HTMLParamElement": false, + "HTMLPictureElement": false, + "HTMLPreElement": false, + "HTMLProgressElement": false, + "HTMLQuoteElement": false, + "HTMLScriptElement": false, + "HTMLSelectElement": false, + "HTMLShadowElement": false, + "HTMLSourceElement": false, + "HTMLSpanElement": false, + "HTMLStyleElement": false, + "HTMLTableCaptionElement": false, + "HTMLTableCellElement": false, + "HTMLTableColElement": false, + "HTMLTableElement": false, + "HTMLTableRowElement": false, + "HTMLTableSectionElement": false, + "HTMLTemplateElement": false, + "HTMLTextAreaElement": false, + "HTMLTitleElement": false, + "HTMLTrackElement": false, + "HTMLUListElement": false, + "HTMLUnknownElement": false, + "HTMLVideoElement": false, + "IDBCursor": false, + "IDBCursorWithValue": false, + "IDBDatabase": false, + "IDBEnvironment": false, + "IDBFactory": false, + "IDBIndex": false, + "IDBKeyRange": false, + "IDBObjectStore": false, + "IDBOpenDBRequest": false, + "IDBRequest": false, + "IDBTransaction": false, + "IDBVersionChangeEvent": false, + "Image": false, + "ImageBitmap": false, + "ImageData": false, + "indexedDB": false, + "innerHeight": false, + "innerWidth": false, + "InputEvent": false, + "InputMethodContext": false, + "IntersectionObserver": false, + "IntersectionObserverEntry": false, + "Intl": false, + "KeyboardEvent": false, + "KeyframeEffect": false, + "KeyframeEffectReadOnly": false, + "length": false, + "localStorage": false, + "location": false, + "Location": false, + "locationbar": false, + "matchMedia": false, + "MediaElementAudioSourceNode": false, + "MediaEncryptedEvent": false, + "MediaError": false, + "MediaKeyError": false, + 
"MediaKeyEvent": false, + "MediaKeyMessageEvent": false, + "MediaKeys": false, + "MediaKeySession": false, + "MediaKeyStatusMap": false, + "MediaKeySystemAccess": false, + "MediaList": false, + "MediaQueryList": false, + "MediaQueryListEvent": false, + "MediaSource": false, + "MediaStream": false, + "MediaStreamAudioDestinationNode": false, + "MediaStreamAudioSourceNode": false, + "MediaStreamEvent": false, + "MediaStreamTrack": false, + "menubar": false, + "MessageChannel": false, + "MessageEvent": false, + "MessagePort": false, + "MIDIAccess": false, + "MIDIConnectionEvent": false, + "MIDIInput": false, + "MIDIInputMap": false, + "MIDIMessageEvent": false, + "MIDIOutput": false, + "MIDIOutputMap": false, + "MIDIPort": false, + "MimeType": false, + "MimeTypeArray": false, + "MouseEvent": false, + "moveBy": false, + "moveTo": false, + "MutationEvent": false, + "MutationObserver": false, + "MutationRecord": false, + "name": false, + "NamedNodeMap": false, + "navigator": false, + "Navigator": false, + "Node": false, + "NodeFilter": false, + "NodeIterator": false, + "NodeList": false, + "Notification": false, + "OfflineAudioCompletionEvent": false, + "OfflineAudioContext": false, + "offscreenBuffering": false, + "onbeforeunload": true, + "onblur": true, + "onerror": true, + "onfocus": true, + "onload": true, + "onresize": true, + "onunload": true, + "open": false, + "openDatabase": false, + "opener": false, + "opera": false, + "Option": false, + "OscillatorNode": false, + "outerHeight": false, + "outerWidth": false, + "PageTransitionEvent": false, + "pageXOffset": false, + "pageYOffset": false, + "parent": false, + "PasswordCredential": false, + "Path2D": false, + "performance": false, + "Performance": false, + "PerformanceEntry": false, + "PerformanceMark": false, + "PerformanceMeasure": false, + "PerformanceNavigation": false, + "PerformanceResourceTiming": false, + "PerformanceTiming": false, + "PeriodicWave": false, + "Permissions": false, + "PermissionStatus": false, + "personalbar": false, + "Plugin": false, + "PluginArray": false, + "PopStateEvent": false, + "postMessage": false, + "print": false, + "ProcessingInstruction": false, + "ProgressEvent": false, + "prompt": false, + "PushManager": false, + "PushSubscription": false, + "RadioNodeList": false, + "Range": false, + "ReadableByteStream": false, + "ReadableStream": false, + "removeEventListener": false, + "Request": false, + "requestAnimationFrame": false, + "requestIdleCallback": false, + "resizeBy": false, + "resizeTo": false, + "Response": false, + "RTCIceCandidate": false, + "RTCSessionDescription": false, + "RTCPeerConnection": false, + "screen": false, + "Screen": false, + "screenLeft": false, + "ScreenOrientation": false, + "screenTop": false, + "screenX": false, + "screenY": false, + "ScriptProcessorNode": false, + "scroll": false, + "scrollbars": false, + "scrollBy": false, + "scrollTo": false, + "scrollX": false, + "scrollY": false, + "SecurityPolicyViolationEvent": false, + "Selection": false, + "self": false, + "ServiceWorker": false, + "ServiceWorkerContainer": false, + "ServiceWorkerRegistration": false, + "sessionStorage": false, + "setInterval": false, + "setTimeout": false, + "ShadowRoot": false, + "SharedKeyframeList": false, + "SharedWorker": false, + "showModalDialog": false, + "SiteBoundCredential": false, + "speechSynthesis": false, + "SpeechSynthesisEvent": false, + "SpeechSynthesisUtterance": false, + "status": false, + "statusbar": false, + "stop": false, + "Storage": false, + "StorageEvent": false, + 
"styleMedia": false, + "StyleSheet": false, + "StyleSheetList": false, + "SubtleCrypto": false, + "SVGAElement": false, + "SVGAltGlyphDefElement": false, + "SVGAltGlyphElement": false, + "SVGAltGlyphItemElement": false, + "SVGAngle": false, + "SVGAnimateColorElement": false, + "SVGAnimatedAngle": false, + "SVGAnimatedBoolean": false, + "SVGAnimatedEnumeration": false, + "SVGAnimatedInteger": false, + "SVGAnimatedLength": false, + "SVGAnimatedLengthList": false, + "SVGAnimatedNumber": false, + "SVGAnimatedNumberList": false, + "SVGAnimatedPathData": false, + "SVGAnimatedPoints": false, + "SVGAnimatedPreserveAspectRatio": false, + "SVGAnimatedRect": false, + "SVGAnimatedString": false, + "SVGAnimatedTransformList": false, + "SVGAnimateElement": false, + "SVGAnimateMotionElement": false, + "SVGAnimateTransformElement": false, + "SVGAnimationElement": false, + "SVGCircleElement": false, + "SVGClipPathElement": false, + "SVGColor": false, + "SVGColorProfileElement": false, + "SVGColorProfileRule": false, + "SVGComponentTransferFunctionElement": false, + "SVGCSSRule": false, + "SVGCursorElement": false, + "SVGDefsElement": false, + "SVGDescElement": false, + "SVGDiscardElement": false, + "SVGDocument": false, + "SVGElement": false, + "SVGElementInstance": false, + "SVGElementInstanceList": false, + "SVGEllipseElement": false, + "SVGEvent": false, + "SVGExternalResourcesRequired": false, + "SVGFEBlendElement": false, + "SVGFEColorMatrixElement": false, + "SVGFEComponentTransferElement": false, + "SVGFECompositeElement": false, + "SVGFEConvolveMatrixElement": false, + "SVGFEDiffuseLightingElement": false, + "SVGFEDisplacementMapElement": false, + "SVGFEDistantLightElement": false, + "SVGFEDropShadowElement": false, + "SVGFEFloodElement": false, + "SVGFEFuncAElement": false, + "SVGFEFuncBElement": false, + "SVGFEFuncGElement": false, + "SVGFEFuncRElement": false, + "SVGFEGaussianBlurElement": false, + "SVGFEImageElement": false, + "SVGFEMergeElement": false, + "SVGFEMergeNodeElement": false, + "SVGFEMorphologyElement": false, + "SVGFEOffsetElement": false, + "SVGFEPointLightElement": false, + "SVGFESpecularLightingElement": false, + "SVGFESpotLightElement": false, + "SVGFETileElement": false, + "SVGFETurbulenceElement": false, + "SVGFilterElement": false, + "SVGFilterPrimitiveStandardAttributes": false, + "SVGFitToViewBox": false, + "SVGFontElement": false, + "SVGFontFaceElement": false, + "SVGFontFaceFormatElement": false, + "SVGFontFaceNameElement": false, + "SVGFontFaceSrcElement": false, + "SVGFontFaceUriElement": false, + "SVGForeignObjectElement": false, + "SVGGElement": false, + "SVGGeometryElement": false, + "SVGGlyphElement": false, + "SVGGlyphRefElement": false, + "SVGGradientElement": false, + "SVGGraphicsElement": false, + "SVGHKernElement": false, + "SVGICCColor": false, + "SVGImageElement": false, + "SVGLangSpace": false, + "SVGLength": false, + "SVGLengthList": false, + "SVGLinearGradientElement": false, + "SVGLineElement": false, + "SVGLocatable": false, + "SVGMarkerElement": false, + "SVGMaskElement": false, + "SVGMatrix": false, + "SVGMetadataElement": false, + "SVGMissingGlyphElement": false, + "SVGMPathElement": false, + "SVGNumber": false, + "SVGNumberList": false, + "SVGPaint": false, + "SVGPathElement": false, + "SVGPathSeg": false, + "SVGPathSegArcAbs": false, + "SVGPathSegArcRel": false, + "SVGPathSegClosePath": false, + "SVGPathSegCurvetoCubicAbs": false, + "SVGPathSegCurvetoCubicRel": false, + "SVGPathSegCurvetoCubicSmoothAbs": false, + "SVGPathSegCurvetoCubicSmoothRel": 
false, + "SVGPathSegCurvetoQuadraticAbs": false, + "SVGPathSegCurvetoQuadraticRel": false, + "SVGPathSegCurvetoQuadraticSmoothAbs": false, + "SVGPathSegCurvetoQuadraticSmoothRel": false, + "SVGPathSegLinetoAbs": false, + "SVGPathSegLinetoHorizontalAbs": false, + "SVGPathSegLinetoHorizontalRel": false, + "SVGPathSegLinetoRel": false, + "SVGPathSegLinetoVerticalAbs": false, + "SVGPathSegLinetoVerticalRel": false, + "SVGPathSegList": false, + "SVGPathSegMovetoAbs": false, + "SVGPathSegMovetoRel": false, + "SVGPatternElement": false, + "SVGPoint": false, + "SVGPointList": false, + "SVGPolygonElement": false, + "SVGPolylineElement": false, + "SVGPreserveAspectRatio": false, + "SVGRadialGradientElement": false, + "SVGRect": false, + "SVGRectElement": false, + "SVGRenderingIntent": false, + "SVGScriptElement": false, + "SVGSetElement": false, + "SVGStopElement": false, + "SVGStringList": false, + "SVGStylable": false, + "SVGStyleElement": false, + "SVGSVGElement": false, + "SVGSwitchElement": false, + "SVGSymbolElement": false, + "SVGTests": false, + "SVGTextContentElement": false, + "SVGTextElement": false, + "SVGTextPathElement": false, + "SVGTextPositioningElement": false, + "SVGTitleElement": false, + "SVGTransform": false, + "SVGTransformable": false, + "SVGTransformList": false, + "SVGTRefElement": false, + "SVGTSpanElement": false, + "SVGUnitTypes": false, + "SVGURIReference": false, + "SVGUseElement": false, + "SVGViewElement": false, + "SVGViewSpec": false, + "SVGVKernElement": false, + "SVGZoomAndPan": false, + "SVGZoomEvent": false, + "Text": false, + "TextDecoder": false, + "TextEncoder": false, + "TextEvent": false, + "TextMetrics": false, + "TextTrack": false, + "TextTrackCue": false, + "TextTrackCueList": false, + "TextTrackList": false, + "TimeEvent": false, + "TimeRanges": false, + "toolbar": false, + "top": false, + "Touch": false, + "TouchEvent": false, + "TouchList": false, + "TrackEvent": false, + "TransitionEvent": false, + "TreeWalker": false, + "UIEvent": false, + "URL": false, + "URLSearchParams": false, + "ValidityState": false, + "VTTCue": false, + "WaveShaperNode": false, + "WebGLActiveInfo": false, + "WebGLBuffer": false, + "WebGLContextEvent": false, + "WebGLFramebuffer": false, + "WebGLProgram": false, + "WebGLRenderbuffer": false, + "WebGLRenderingContext": false, + "WebGLShader": false, + "WebGLShaderPrecisionFormat": false, + "WebGLTexture": false, + "WebGLUniformLocation": false, + "WebSocket": false, + "WheelEvent": false, + "window": false, + "Window": false, + "Worker": false, + "XDomainRequest": false, + "XMLDocument": false, + "XMLHttpRequest": false, + "XMLHttpRequestEventTarget": false, + "XMLHttpRequestProgressEvent": false, + "XMLHttpRequestUpload": false, + "XMLSerializer": false, + "XPathEvaluator": false, + "XPathException": false, + "XPathExpression": false, + "XPathNamespace": false, + "XPathNSResolver": false, + "XPathResult": false, + "XSLTProcessor": false + }, + "worker": { + "applicationCache": false, + "atob": false, + "Blob": false, + "BroadcastChannel": false, + "btoa": false, + "Cache": false, + "caches": false, + "clearInterval": false, + "clearTimeout": false, + "close": true, + "console": false, + "fetch": false, + "FileReaderSync": false, + "FormData": false, + "Headers": false, + "IDBCursor": false, + "IDBCursorWithValue": false, + "IDBDatabase": false, + "IDBFactory": false, + "IDBIndex": false, + "IDBKeyRange": false, + "IDBObjectStore": false, + "IDBOpenDBRequest": false, + "IDBRequest": false, + "IDBTransaction": false, + 
"IDBVersionChangeEvent": false, + "ImageData": false, + "importScripts": true, + "indexedDB": false, + "location": false, + "MessageChannel": false, + "MessagePort": false, + "name": false, + "navigator": false, + "Notification": false, + "onclose": true, + "onconnect": true, + "onerror": true, + "onlanguagechange": true, + "onmessage": true, + "onoffline": true, + "ononline": true, + "onrejectionhandled": true, + "onunhandledrejection": true, + "performance": false, + "Performance": false, + "PerformanceEntry": false, + "PerformanceMark": false, + "PerformanceMeasure": false, + "PerformanceNavigation": false, + "PerformanceResourceTiming": false, + "PerformanceTiming": false, + "postMessage": true, + "Promise": false, + "Request": false, + "Response": false, + "self": true, + "ServiceWorkerRegistration": false, + "setInterval": false, + "setTimeout": false, + "TextDecoder": false, + "TextEncoder": false, + "URL": false, + "URLSearchParams": false, + "WebSocket": false, + "Worker": false, + "XMLHttpRequest": false + }, + "node": { + "__dirname": false, + "__filename": false, + "arguments": false, + "Buffer": false, + "clearImmediate": false, + "clearInterval": false, + "clearTimeout": false, + "console": false, + "exports": true, + "GLOBAL": false, + "global": false, + "Intl": false, + "module": false, + "process": false, + "require": false, + "root": false, + "setImmediate": false, + "setInterval": false, + "setTimeout": false + }, + "commonjs": { + "exports": true, + "module": false, + "require": false, + "global": false + }, + "amd": { + "define": false, + "require": false + }, + "mocha": { + "after": false, + "afterEach": false, + "before": false, + "beforeEach": false, + "context": false, + "describe": false, + "it": false, + "mocha": false, + "run": false, + "setup": false, + "specify": false, + "suite": false, + "suiteSetup": false, + "suiteTeardown": false, + "teardown": false, + "test": false, + "xcontext": false, + "xdescribe": false, + "xit": false, + "xspecify": false + }, + "jasmine": { + "afterAll": false, + "afterEach": false, + "beforeAll": false, + "beforeEach": false, + "describe": false, + "expect": false, + "fail": false, + "fdescribe": false, + "fit": false, + "it": false, + "jasmine": false, + "pending": false, + "runs": false, + "spyOn": false, + "waits": false, + "waitsFor": false, + "xdescribe": false, + "xit": false + }, + "jest": { + "afterAll": false, + "afterEach": false, + "beforeAll": false, + "beforeEach": false, + "check": false, + "describe": false, + "expect": false, + "gen": false, + "it": false, + "fit": false, + "jest": false, + "pit": false, + "require": false, + "test": false, + "xdescribe": false, + "xit": false, + "xtest": false + }, + "qunit": { + "asyncTest": false, + "deepEqual": false, + "equal": false, + "expect": false, + "module": false, + "notDeepEqual": false, + "notEqual": false, + "notOk": false, + "notPropEqual": false, + "notStrictEqual": false, + "ok": false, + "propEqual": false, + "QUnit": false, + "raises": false, + "start": false, + "stop": false, + "strictEqual": false, + "test": false, + "throws": false + }, + "phantomjs": { + "console": true, + "exports": true, + "phantom": true, + "require": true, + "WebPage": true + }, + "couch": { + "emit": false, + "exports": false, + "getRow": false, + "log": false, + "module": false, + "provides": false, + "require": false, + "respond": false, + "send": false, + "start": false, + "sum": false + }, + "rhino": { + "defineClass": false, + "deserialize": false, + "gc": false, + "help": false, 
+ "importClass": false, + "importPackage": false, + "java": false, + "load": false, + "loadClass": false, + "Packages": false, + "print": false, + "quit": false, + "readFile": false, + "readUrl": false, + "runCommand": false, + "seal": false, + "serialize": false, + "spawn": false, + "sync": false, + "toint32": false, + "version": false + }, + "nashorn": { + "__DIR__": false, + "__FILE__": false, + "__LINE__": false, + "com": false, + "edu": false, + "exit": false, + "Java": false, + "java": false, + "javafx": false, + "JavaImporter": false, + "javax": false, + "JSAdapter": false, + "load": false, + "loadWithNewGlobal": false, + "org": false, + "Packages": false, + "print": false, + "quit": false + }, + "wsh": { + "ActiveXObject": true, + "Enumerator": true, + "GetObject": true, + "ScriptEngine": true, + "ScriptEngineBuildVersion": true, + "ScriptEngineMajorVersion": true, + "ScriptEngineMinorVersion": true, + "VBArray": true, + "WScript": true, + "WSH": true, + "XDomainRequest": true + }, + "jquery": { + "$": false, + "jQuery": false + }, + "yui": { + "Y": false, + "YUI": false, + "YUI_config": false + }, + "shelljs": { + "cat": false, + "cd": false, + "chmod": false, + "config": false, + "cp": false, + "dirs": false, + "echo": false, + "env": false, + "error": false, + "exec": false, + "exit": false, + "find": false, + "grep": false, + "ls": false, + "ln": false, + "mkdir": false, + "mv": false, + "popd": false, + "pushd": false, + "pwd": false, + "rm": false, + "sed": false, + "set": false, + "target": false, + "tempdir": false, + "test": false, + "touch": false, + "which": false + }, + "prototypejs": { + "$": false, + "$$": false, + "$A": false, + "$break": false, + "$continue": false, + "$F": false, + "$H": false, + "$R": false, + "$w": false, + "Abstract": false, + "Ajax": false, + "Autocompleter": false, + "Builder": false, + "Class": false, + "Control": false, + "Draggable": false, + "Draggables": false, + "Droppables": false, + "Effect": false, + "Element": false, + "Enumerable": false, + "Event": false, + "Field": false, + "Form": false, + "Hash": false, + "Insertion": false, + "ObjectRange": false, + "PeriodicalExecuter": false, + "Position": false, + "Prototype": false, + "Scriptaculous": false, + "Selector": false, + "Sortable": false, + "SortableObserver": false, + "Sound": false, + "Template": false, + "Toggle": false, + "Try": false + }, + "meteor": { + "$": false, + "_": false, + "Accounts": false, + "AccountsClient": false, + "AccountsServer": false, + "AccountsCommon": false, + "App": false, + "Assets": false, + "Blaze": false, + "check": false, + "Cordova": false, + "DDP": false, + "DDPServer": false, + "DDPRateLimiter": false, + "Deps": false, + "EJSON": false, + "Email": false, + "HTTP": false, + "Log": false, + "Match": false, + "Meteor": false, + "Mongo": false, + "MongoInternals": false, + "Npm": false, + "Package": false, + "Plugin": false, + "process": false, + "Random": false, + "ReactiveDict": false, + "ReactiveVar": false, + "Router": false, + "ServiceConfiguration": false, + "Session": false, + "share": false, + "Spacebars": false, + "Template": false, + "Tinytest": false, + "Tracker": false, + "UI": false, + "Utils": false, + "WebApp": false, + "WebAppInternals": false + }, + "mongo": { + "_isWindows": false, + "_rand": false, + "BulkWriteResult": false, + "cat": false, + "cd": false, + "connect": false, + "db": false, + "getHostName": false, + "getMemInfo": false, + "hostname": false, + "ISODate": false, + "listFiles": false, + "load": false, + "ls": false, 
+ "md5sumFile": false, + "mkdir": false, + "Mongo": false, + "NumberInt": false, + "NumberLong": false, + "ObjectId": false, + "PlanCache": false, + "print": false, + "printjson": false, + "pwd": false, + "quit": false, + "removeFile": false, + "rs": false, + "sh": false, + "UUID": false, + "version": false, + "WriteResult": false + }, + "applescript": { + "$": false, + "Application": false, + "Automation": false, + "console": false, + "delay": false, + "Library": false, + "ObjC": false, + "ObjectSpecifier": false, + "Path": false, + "Progress": false, + "Ref": false + }, + "serviceworker": { + "caches": false, + "Cache": false, + "CacheStorage": false, + "Client": false, + "clients": false, + "Clients": false, + "ExtendableEvent": false, + "ExtendableMessageEvent": false, + "FetchEvent": false, + "importScripts": false, + "registration": false, + "self": false, + "ServiceWorker": false, + "ServiceWorkerContainer": false, + "ServiceWorkerGlobalScope": false, + "ServiceWorkerMessageEvent": false, + "ServiceWorkerRegistration": false, + "skipWaiting": false, + "WindowClient": false + }, + "atomtest": { + "advanceClock": false, + "fakeClearInterval": false, + "fakeClearTimeout": false, + "fakeSetInterval": false, + "fakeSetTimeout": false, + "resetTimeouts": false, + "waitsForPromise": false + }, + "embertest": { + "andThen": false, + "click": false, + "currentPath": false, + "currentRouteName": false, + "currentURL": false, + "fillIn": false, + "find": false, + "findWithAssert": false, + "keyEvent": false, + "pauseTest": false, + "triggerEvent": false, + "visit": false + }, + "protractor": { + "$": false, + "$$": false, + "browser": false, + "By": false, + "by": false, + "DartObject": false, + "element": false, + "protractor": false + }, + "shared-node-browser": { + "clearInterval": false, + "clearTimeout": false, + "console": false, + "setInterval": false, + "setTimeout": false + }, + "webextensions": { + "browser": false, + "chrome": false, + "opr": false + }, + "greasemonkey": { + "GM_addStyle": false, + "GM_deleteValue": false, + "GM_getResourceText": false, + "GM_getResourceURL": false, + "GM_getValue": false, + "GM_info": false, + "GM_listValues": false, + "GM_log": false, + "GM_openInTab": false, + "GM_registerMenuCommand": false, + "GM_setClipboard": false, + "GM_setValue": false, + "GM_xmlhttpRequest": false, + "unsafeWindow": false + } +} diff --git a/node_modules/globals/index.js b/node_modules/globals/index.js new file mode 100644 index 0000000..a02ef24 --- /dev/null +++ b/node_modules/globals/index.js @@ -0,0 +1 @@ +module.exports = require('./globals.json'); diff --git a/node_modules/globals/license b/node_modules/globals/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/globals/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/globals/package.json b/node_modules/globals/package.json new file mode 100644 index 0000000..fd86623 --- /dev/null +++ b/node_modules/globals/package.json @@ -0,0 +1,35 @@ +{ + "name": "globals", + "version": "9.12.0", + "description": "Global identifiers from different JavaScript environments", + "license": "MIT", + "repository": "sindresorhus/globals", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "files": [ + "index.js", + "globals.json" + ], + "keywords": [ + "globals", + "global", + "identifiers", + "variables", + "vars", + "jshint", + "eslint", + "environments" + ], + "devDependencies": { + "mocha": "*" + } +} diff --git a/node_modules/globals/readme.md b/node_modules/globals/readme.md new file mode 100644 index 0000000..5314bbb --- /dev/null +++ b/node_modules/globals/readme.md @@ -0,0 +1,41 @@ +# globals [![Build Status](https://travis-ci.org/sindresorhus/globals.svg?branch=master)](https://travis-ci.org/sindresorhus/globals) + +> Global identifiers from different JavaScript environments + +Extracted from [JSHint](https://github.com/jshint/jshint/blob/3a8efa979dbb157bfb5c10b5826603a55a33b9ad/src/vars.js) and [ESLint](https://github.com/eslint/eslint/blob/b648406218f8a2d7302b98f5565e23199f44eb31/conf/environments.json) and merged. + +It's just a [JSON file](globals.json), so use it in whatever environment you like. + +**This module [no longer accepts](https://github.com/sindresorhus/globals/issues/82) new environments. If you need it for ESLint, just [create a plugin](http://eslint.org/docs/developer-guide/working-with-plugins#environments-in-plugins).** + + +## Install + +``` +$ npm install --save globals +``` + + +## Usage + +```js +var globals = require('globals'); + +console.log(globals.browser); +/* +{ + addEventListener: false, + applicationCache: false, + ArrayBuffer: false, + atob: false, + ... +} +*/ +``` + +Each global is given a value of `true` or `false`. A value of `true` indicates that the variable may be overwritten. A value of `false` indicates that the variable should be considered read-only. This information is used by static analysis tools to flag incorrect behavior. We assume all variables should be `false` unless we hear otherwise. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/graceful-fs/LICENSE b/node_modules/graceful-fs/LICENSE new file mode 100644 index 0000000..9d2c803 --- /dev/null +++ b/node_modules/graceful-fs/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter, Ben Noordhuis, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/graceful-fs/README.md b/node_modules/graceful-fs/README.md new file mode 100644 index 0000000..5273a50 --- /dev/null +++ b/node_modules/graceful-fs/README.md @@ -0,0 +1,133 @@ +# graceful-fs + +graceful-fs functions as a drop-in replacement for the fs module, +making various improvements. + +The improvements are meant to normalize behavior across different +platforms and environments, and to make filesystem access more +resilient to errors. + +## Improvements over [fs module](https://nodejs.org/api/fs.html) + +* Queues up `open` and `readdir` calls, and retries them once + something closes if there is an EMFILE error from too many file + descriptors. +* fixes `lchmod` for Node versions prior to 0.6.2. +* implements `fs.lutimes` if possible. Otherwise it becomes a noop. +* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or + `lchown` if the user isn't root. +* makes `lchmod` and `lchown` become noops, if not available. +* retries reading a file if `read` results in EAGAIN error. + +On Windows, it retries renaming a file for up to one second if `EACCESS` +or `EPERM` error occurs, likely because antivirus software has locked +the directory. + +## USAGE + +```javascript +// use just like fs +var fs = require('graceful-fs') + +// now go and do stuff with it... +fs.readFileSync('some-file-or-whatever') +``` + +## Global Patching + +If you want to patch the global fs module (or any other fs-like +module) you can do this: + +```javascript +// Make sure to read the caveat below. +var realFs = require('fs') +var gracefulFs = require('graceful-fs') +gracefulFs.gracefulify(realFs) +``` + +This should only ever be done at the top-level application layer, in +order to delay on EMFILE errors from any fs-using dependencies. You +should **not** do this in a library, because it can cause unexpected +delays in other parts of the program. + +## Changes + +This module is fairly stable at this point, and used by a lot of +things. That being said, because it implements a subtle behavior +change in a core part of the node API, even modest changes can be +extremely breaking, and the versioning is thus biased towards +bumping the major when in doubt. + +The main change between major versions has been switching between +providing a fully-patched `fs` module vs monkey-patching the node core +builtin, and the approach by which a non-monkey-patched `fs` was +created. + +The goal is to trade `EMFILE` errors for slower fs operations. So, if +you try to open a zillion files, rather than crashing, `open` +operations will be queued up and wait for something else to `close`. + +There are advantages to each approach. Monkey-patching the fs means +that no `EMFILE` errors can possibly occur anywhere in your +application, because everything is using the same core `fs` module, +which is patched. However, it can also obviously cause undesirable +side-effects, especially if the module is loaded multiple times. 
+ +Implementing a separate-but-identical patched `fs` module is more +surgical (and doesn't run the risk of patching multiple times), but +also imposes the challenge of keeping in sync with the core module. + +The current approach loads the `fs` module, and then creates a +lookalike object that has all the same methods, except a few that are +patched. It is safe to use in all versions of Node from 0.8 through +7.0. + +### v4 + +* Do not monkey-patch the fs module. This module may now be used as a + drop-in dep, and users can opt into monkey-patching the fs builtin + if their app requires it. + +### v3 + +* Monkey-patch fs, because the eval approach no longer works on recent + node. +* fixed possible type-error throw if rename fails on windows +* verify that we *never* get EMFILE errors +* Ignore ENOSYS from chmod/chown +* clarify that graceful-fs must be used as a drop-in + +### v2.1.0 + +* Use eval rather than monkey-patching fs. +* readdir: Always sort the results +* win32: requeue a file if error has an OK status + +### v2.0 + +* A return to monkey patching +* wrap process.cwd + +### v1.1 + +* wrap readFile +* Wrap fs.writeFile. +* readdir protection +* Don't clobber the fs builtin +* Handle fs.read EAGAIN errors by trying again +* Expose the curOpen counter +* No-op lchown/lchmod if not implemented +* fs.rename patch only for win32 +* Patch fs.rename to handle AV software on Windows +* Close #4 Chown should not fail on einval or eperm if non-root +* Fix isaacs/fstream#1 Only wrap fs one time +* Fix #3 Start at 1024 max files, then back off on EMFILE +* lutimes that doens't blow up on Linux +* A full on-rewrite using a queue instead of just swallowing the EMFILE error +* Wrap Read/Write streams as well + +### 1.0 + +* Update engines for node 0.6 +* Be lstat-graceful on Windows +* first diff --git a/node_modules/graceful-fs/fs.js b/node_modules/graceful-fs/fs.js new file mode 100644 index 0000000..8ad4a38 --- /dev/null +++ b/node_modules/graceful-fs/fs.js @@ -0,0 +1,21 @@ +'use strict' + +var fs = require('fs') + +module.exports = clone(fs) + +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj + + if (obj instanceof Object) + var copy = { __proto__: obj.__proto__ } + else + var copy = Object.create(null) + + Object.getOwnPropertyNames(obj).forEach(function (key) { + Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) + + return copy +} diff --git a/node_modules/graceful-fs/graceful-fs.js b/node_modules/graceful-fs/graceful-fs.js new file mode 100644 index 0000000..33b30d2 --- /dev/null +++ b/node_modules/graceful-fs/graceful-fs.js @@ -0,0 +1,262 @@ +var fs = require('fs') +var polyfills = require('./polyfills.js') +var legacy = require('./legacy-streams.js') +var queue = [] + +var util = require('util') + +function noop () {} + +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(queue) + require('assert').equal(queue.length, 0) + }) +} + +module.exports = patch(require('./fs.js')) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH) { + module.exports = patch(fs) +} + +// Always patch fs.close/closeSync, because we want to +// retry() whenever a close happens *anywhere* in the program. 
+// This is essential when multiple graceful-fs instances are +// in play at the same time. +module.exports.close = +fs.close = (function (fs$close) { return function (fd, cb) { + return fs$close.call(fs, fd, function (err) { + if (!err) + retry() + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) +}})(fs.close) + +module.exports.closeSync = +fs.closeSync = (function (fs$closeSync) { return function (fd) { + // Note that graceful-fs also retries when fs.closeSync() fails. + // Looks like a bug to me, although it's probably a harmless one. + var rval = fs$closeSync.apply(fs, arguments) + retry() + return rval +}})(fs.closeSync) + +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch + fs.FileReadStream = ReadStream; // Legacy name. + fs.FileWriteStream = WriteStream; // Legacy name. + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + var fs$readdir = fs.readdir + fs.readdir = readdir + function readdir (path, options, cb) { + var args = [path] + if (typeof options !== 'function') { + args.push(options) + } else { + cb = options + } + args.push(go$readdir$cb) + + return go$readdir(args) + + function go$readdir$cb (err, files) { + if (files && files.sort) + files.sort() + + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readdir, [args]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + } + } + + function go$readdir (args) { + return fs$readdir.apply(fs, args) + } + + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } + + var fs$ReadStream = fs.ReadStream + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + + var 
fs$WriteStream = fs.WriteStream + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + + fs.ReadStream = ReadStream + fs.WriteStream = WriteStream + + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } + + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() + + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } + + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } + + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) + } + + function createReadStream (path, options) { + return new ReadStream(path, options) + } + + function createWriteStream (path, options) { + return new WriteStream(path, options) + } + + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null + + return go$open(path, flags, mode, cb) + + function go$open (path, flags, mode, cb) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + return fs +} + +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + queue.push(elem) +} + +function retry () { + var elem = queue.shift() + if (elem) { + debug('RETRY', elem[0].name, elem[1]) + elem[0].apply(null, elem[1]) + } +} diff --git a/node_modules/graceful-fs/legacy-streams.js b/node_modules/graceful-fs/legacy-streams.js new file mode 100644 index 0000000..d617b50 --- /dev/null +++ b/node_modules/graceful-fs/legacy-streams.js @@ -0,0 +1,118 @@ +var Stream = require('stream').Stream + +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } + + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); + + Stream.call(this); + + var self = this; + + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; + + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.encoding) this.setEncoding(this.encoding); + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } + + if (this.start > this.end) { + throw new Error('start must be <= end'); + } + + this.pos = this.start; + } + + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } + + 
fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } + + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } + + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); + + Stream.call(this); + + this.path = path; + this.fd = null; + this.writable = true; + + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } + + this.pos = this.start; + } + + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} diff --git a/node_modules/graceful-fs/package.json b/node_modules/graceful-fs/package.json new file mode 100644 index 0000000..d8e11a5 --- /dev/null +++ b/node_modules/graceful-fs/package.json @@ -0,0 +1,47 @@ +{ + "name": "graceful-fs", + "description": "A drop-in replacement for fs, making various improvements.", + "version": "4.1.9", + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-graceful-fs" + }, + "main": "graceful-fs.js", + "engines": { + "node": ">=0.4.0" + }, + "directories": { + "test": "test" + }, + "scripts": { + "test": "node test.js | tap -" + }, + "keywords": [ + "fs", + "module", + "reading", + "retry", + "retries", + "queue", + "error", + "errors", + "handling", + "EMFILE", + "EAGAIN", + "EINVAL", + "EPERM", + "EACCESS" + ], + "license": "ISC", + "devDependencies": { + "mkdirp": "^0.5.0", + "rimraf": "^2.2.8", + "tap": "^5.4.2" + }, + "files": [ + "fs.js", + "graceful-fs.js", + "legacy-streams.js", + "polyfills.js" + ] +} diff --git a/node_modules/graceful-fs/polyfills.js b/node_modules/graceful-fs/polyfills.js new file mode 100644 index 0000000..cf474df --- /dev/null +++ b/node_modules/graceful-fs/polyfills.js @@ -0,0 +1,310 @@ +var fs = require('./fs.js') +var constants = require('constants') + +var origCwd = process.cwd +var cwd = null +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +var chdir = process.chdir +process.chdir = function(d) { + cwd = null + chdir.call(process, d) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. 
+ + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (!fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (!fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 1 second. + if (process.platform === "win32") { + fs.rename = (function (fs$rename) { return function (from, to, cb) { + var start = Date.now() + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM") + && Date.now() - start < 1000) { + return fs$rename(from, to, CB) + } + if (cb) cb(er) + }) + }})(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = (function (fs$read) { return function (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + }})(fs.read) + + fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) +} + +function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } +} + +function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK")) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + + } else { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } +} + +function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } +} + +function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } +} + + +function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } +} + +function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } +} + + +function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, cb) { + return orig.call(fs, target, function (er, stats) { + if (!stats) return cb.apply(this, arguments) + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + if (cb) cb.apply(this, arguments) + }) + } +} + +function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target) { + var stats = orig.call(fs, target) + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + return stats; + } +} + +// ENOSYS means that the fs doesn't support the op. Just ignore +// that, because it doesn't matter. +// +// if there's no getuid, or if getuid() is something other +// than 0, and the error is EINVAL or EPERM, then just ignore +// it. +// +// This specific case is a silent failure in cp, install, tar, +// and most other unix tools that manage permissions. +// +// When running as root, or if other types of errors are +// encountered, then it's strict. 
+function chownErOk (er) { + if (!er) + return true + + if (er.code === "ENOSYS") + return true + + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false +} diff --git a/node_modules/graceful-readlink/.npmignore b/node_modules/graceful-readlink/.npmignore new file mode 100644 index 0000000..3ac7d16 --- /dev/null +++ b/node_modules/graceful-readlink/.npmignore @@ -0,0 +1,3 @@ +.idea/ +.DS_Store +node_modules/ diff --git a/node_modules/graceful-readlink/.travis.yml b/node_modules/graceful-readlink/.travis.yml new file mode 100644 index 0000000..baf9be7 --- /dev/null +++ b/node_modules/graceful-readlink/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - "0.10" + - "0.12" + - "io.js" diff --git a/node_modules/graceful-readlink/LICENSE b/node_modules/graceful-readlink/LICENSE new file mode 100644 index 0000000..d1f842f --- /dev/null +++ b/node_modules/graceful-readlink/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Zhiye Li + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/node_modules/graceful-readlink/README.md b/node_modules/graceful-readlink/README.md new file mode 100644 index 0000000..fc63b50 --- /dev/null +++ b/node_modules/graceful-readlink/README.md @@ -0,0 +1,17 @@ +# graceful-readlink +[![NPM Version](http://img.shields.io/npm/v/graceful-readlink.svg?style=flat)](https://www.npmjs.org/package/graceful-readlink) +[![NPM Downloads](https://img.shields.io/npm/dm/graceful-readlink.svg?style=flat)](https://www.npmjs.org/package/graceful-readlink) + + +## Usage + +```js +var readlinkSync = require('graceful-readlink').readlinkSync; +console.log(readlinkSync(f)); +// output +// the file pointed to when `f` is a symbolic link +// the `f` itself when `f` is not a symbolic link +``` +## Licence + +MIT License diff --git a/node_modules/graceful-readlink/index.js b/node_modules/graceful-readlink/index.js new file mode 100644 index 0000000..7e9fc70 --- /dev/null +++ b/node_modules/graceful-readlink/index.js @@ -0,0 +1,12 @@ +var fs = require('fs') + , lstat = fs.lstatSync; + +exports.readlinkSync = function (p) { + if (lstat(p).isSymbolicLink()) { + return fs.readlinkSync(p); + } else { + return p; + } +}; + + diff --git a/node_modules/graceful-readlink/package.json b/node_modules/graceful-readlink/package.json new file mode 100644 index 0000000..ac86c1f --- /dev/null +++ b/node_modules/graceful-readlink/package.json @@ -0,0 +1,18 @@ +{ + "name": "graceful-readlink", + "version": "1.0.1", + "description": "graceful fs.readlink", + "main": "index.js", + "repository": "git://github.com/zhiyelee/graceful-readlink.git", + "homepage": "https://github.com/zhiyelee/graceful-readlink", + "bugs": "https://github.com/zhiyelee/graceful-readlink/issues", + "keywords": [ + "fs.readlink", + "readlink" + ], + "author": "zhiyelee", + "license": "MIT", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + } +} diff --git a/node_modules/har-validator/LICENSE b/node_modules/har-validator/LICENSE new file mode 100644 index 0000000..ca55c91 --- /dev/null +++ b/node_modules/har-validator/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Ahmad Nassri + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/har-validator/README.md b/node_modules/har-validator/README.md new file mode 100644 index 0000000..91526e6 --- /dev/null +++ b/node_modules/har-validator/README.md @@ -0,0 +1,309 @@ +# HAR Validator [![version][npm-version]][npm-url] [![License][npm-license]][license-url] + +Extremely fast HTTP Archive ([HAR](http://www.softwareishard.com/blog/har-12-spec/)) validator using JSON Schema. 
+ +[![Build Status][travis-image]][travis-url] +[![Downloads][npm-downloads]][npm-url] +[![Code Climate][codeclimate-quality]][codeclimate-url] +[![Coverage Status][codeclimate-coverage]][codeclimate-url] +[![Dependencies][david-image]][david-url] + +## Install + +```shell +# to use in cli +npm install --global har-validator + +# to use as a module +npm install --save har-validator +``` + +## Usage + +``` + + Usage: har-validator [options] + + Options: + + -h, --help output usage information + -V, --version output the version number + -s, --schema [name] validate schema name (log, request, response, etc ...) + +``` + +###### Example + +```shell +har-validator har.json + +har-validator --schema request request.json +``` + +## API + +**Note**: as of [`v2.0.0`](https://github.com/ahmadnassri/har-validator/releases/tag/v2.0.0) this module defaults to Promise based API. *For backward comptability with `v1.x` an [async/callback API](#callback-api) is provided* + +### Validate(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a full [HAR](http://www.softwareishard.com/blog/har-12-spec/) object + +```js +validate(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.log(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [log](http://www.softwareishard.com/blog/har-12-spec/#log) object + +```js +validate.log(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.cache(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [cache](http://www.softwareishard.com/blog/har-12-spec/#cache) object + +```js +validate.cache(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.cacheEntry(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a ["beforeRequest" or "afterRequest"](http://www.softwareishard.com/blog/har-12-spec/#cache) objects + +```js +validate.cacheEntry(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.content(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [content](http://www.softwareishard.com/blog/har-12-spec/#content) object + +```js +validate.content(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.cookie(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [cookie](http://www.softwareishard.com/blog/har-12-spec/#cookies) object + +```js +validate.cookie(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.creator(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [creator](http://www.softwareishard.com/blog/har-12-spec/#creator) object + +```js +validate.creator(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.entry(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + an [entry](http://www.softwareishard.com/blog/har-12-spec/#entries) object + +```js +validate.entry(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.log(data) + +alias of [`Validate(data)`](#validate-data-callback-) + +### Validate.page(data) + +> Returns a promise that resolves to the valid object. 
+ +- **data**: `Object` *(Required)* + a [page](http://www.softwareishard.com/blog/har-12-spec/#pages) object + +```js +validate.page(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.pageTimings(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [pageTimings](http://www.softwareishard.com/blog/har-12-spec/#pageTimings) object + +```js +validate.pageTimings(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.postData(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [postData](http://www.softwareishard.com/blog/har-12-spec/#postData) object + +```js +validate.postData(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.record(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [record](http://www.softwareishard.com/blog/har-12-spec/#headers) object + +```js +validate.record(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.request(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [request](http://www.softwareishard.com/blog/har-12-spec/#request) object + +```js +validate.request(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.response(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [response](http://www.softwareishard.com/blog/har-12-spec/#response) object + +```js +validate.cacheEntry(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +### Validate.timings(data) + +> Returns a promise that resolves to the valid object. + +- **data**: `Object` *(Required)* + a [timings](http://www.softwareishard.com/blog/har-12-spec/#timings) object + +```js +validate.timings(data) + .then(data => console.log('horray!')) + .catch(console.error) +``` + +---- + +## Callback API + +### Validate(data [, callback]) + +> Returns `true` or `false`. + +```js +var HAR = require('./har.json'); +var validate = require('har-validator/lib/async'); + +validate(HAR, function (e, valid) { + if (e) console.log(e.errors) + + if (valid) console.log('horray!'); +}); + +``` +The async API provides exactly the same methods as the [Promise API](#promise-api) + +---- + +## Support + +Donations are welcome to help support the continuous development of this project. 
+ +[![Gratipay][gratipay-image]][gratipay-url] +[![PayPal][paypal-image]][paypal-url] +[![Flattr][flattr-image]][flattr-url] +[![Bitcoin][bitcoin-image]][bitcoin-url] + +## License + +[ISC License](LICENSE) © [Ahmad Nassri](https://www.ahmadnassri.com/) + +[license-url]: https://github.com/ahmadnassri/har-validator/blob/master/LICENSE + +[travis-url]: https://travis-ci.org/ahmadnassri/har-validator +[travis-image]: https://img.shields.io/travis/ahmadnassri/har-validator.svg?style=flat-square + +[npm-url]: https://www.npmjs.com/package/har-validator +[npm-license]: https://img.shields.io/npm/l/har-validator.svg?style=flat-square +[npm-version]: https://img.shields.io/npm/v/har-validator.svg?style=flat-square +[npm-downloads]: https://img.shields.io/npm/dm/har-validator.svg?style=flat-square + +[codeclimate-url]: https://codeclimate.com/github/ahmadnassri/har-validator +[codeclimate-quality]: https://img.shields.io/codeclimate/github/ahmadnassri/har-validator.svg?style=flat-square +[codeclimate-coverage]: https://img.shields.io/codeclimate/coverage/github/ahmadnassri/har-validator.svg?style=flat-square + +[david-url]: https://david-dm.org/ahmadnassri/har-validator +[david-image]: https://img.shields.io/david/ahmadnassri/har-validator.svg?style=flat-square + +[gratipay-url]: https://www.gratipay.com/ahmadnassri/ +[gratipay-image]: https://img.shields.io/gratipay/ahmadnassri.svg?style=flat-square + +[paypal-url]: https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=UJ2B2BTK9VLRS&on0=project&os0=har-validator +[paypal-image]: http://img.shields.io/badge/paypal-donate-green.svg?style=flat-square + +[flattr-url]: https://flattr.com/submit/auto?user_id=ahmadnassri&url=https://github.com/ahmadnassri/har-validator&title=har-validator&language=&tags=github&category=software +[flattr-image]: http://img.shields.io/badge/flattr-donate-green.svg?style=flat-square + +[bitcoin-image]: http://img.shields.io/badge/bitcoin-1Nb46sZRVG3or7pNaDjthcGJpWhvoPpCxy-green.svg?style=flat-square +[bitcoin-url]: https://www.coinbase.com/checkouts/ae383ae6bb931a2fa5ad11cec115191e?name=har-validator diff --git a/node_modules/har-validator/bin/har-validator b/node_modules/har-validator/bin/har-validator new file mode 100755 index 0000000..fd7cc0d --- /dev/null +++ b/node_modules/har-validator/bin/har-validator @@ -0,0 +1,56 @@ +#!/usr/bin/env node + +'use strict' + +var chalk = require('chalk') +var cmd = require('commander') +var fs = require('fs') +var path = require('path') +var pkg = require('../package.json') +var Promise = require('pinkie-promise') +var validate = require('..') +var ValidationError = require('../lib/error') + +cmd + .version(pkg.version) + .usage('[options] ') + .option('-s, --schema [name]', 'validate schema name (log, request, response, etc ...)') + .parse(process.argv) + +if (!cmd.args.length) { + cmd.help() +} + +cmd.args.map(function (fileName) { + var file = chalk.yellow.italic(path.basename(fileName)) + + new Promise(function (resolve, reject) { + fs.readFile(fileName, function (err, data) { + return err === null ? resolve(data) : reject(err) + }) + }) + + .then(JSON.parse) + + .then(cmd.schema ? 
validate[cmd.schema] : validate) + + .then(function (data) { + console.log('%s [%s] is valid', chalk.green('✓'), file) + }) + + .catch(function (err) { + if (err instanceof SyntaxError) { + return console.error('%s [%s] failed to read JSON: %s', chalk.red('✖'), file, chalk.red(err.message)) + } + + if (err instanceof ValidationError) { + err.errors.forEach(function (details) { + console.error('%s [%s] failed validation: (%s: %s) %s', chalk.red('✖'), file, chalk.cyan.italic(details.field), chalk.magenta.italic(details.value), chalk.red(details.message)) + }) + + return + } + + console.error('%s [%s] an unknown error has occured: %s', chalk.red('✖'), file, chalk.red(err.message)) + }) +}) diff --git a/node_modules/har-validator/lib/async.js b/node_modules/har-validator/lib/async.js new file mode 100644 index 0000000..77b99a7 --- /dev/null +++ b/node_modules/har-validator/lib/async.js @@ -0,0 +1,14 @@ +'use strict' + +var runner = require('./runner') +var schemas = require('./schemas') + +module.exports = function (data, cb) { + return runner(schemas.har, data, cb) +} + +Object.keys(schemas).map(function (name) { + module.exports[name] = function (data, cb) { + return runner(schemas[name], data, cb) + } +}) diff --git a/node_modules/har-validator/lib/error.js b/node_modules/har-validator/lib/error.js new file mode 100644 index 0000000..fc08a87 --- /dev/null +++ b/node_modules/har-validator/lib/error.js @@ -0,0 +1,10 @@ +'use strict' + +function ValidationError (errors) { + this.name = 'ValidationError' + this.errors = errors +} + +ValidationError.prototype = Error.prototype + +module.exports = ValidationError diff --git a/node_modules/har-validator/lib/index.js b/node_modules/har-validator/lib/index.js new file mode 100644 index 0000000..e8351b8 --- /dev/null +++ b/node_modules/har-validator/lib/index.js @@ -0,0 +1,22 @@ +'use strict' + +var Promise = require('pinkie-promise') +var runner = require('./runner') +var schemas = require('./schemas') + +var promisify = function (schema) { + return function (data) { + return new Promise(function (resolve, reject) { + runner(schema, data, function (err, valid) { + return err === null ? resolve(data) : reject(err) + }) + }) + } +} + +module.exports = promisify(schemas.har) + +// utility methods for all parts of the schema +Object.keys(schemas).map(function (name) { + module.exports[name] = promisify(schemas[name]) +}) diff --git a/node_modules/har-validator/lib/runner.js b/node_modules/har-validator/lib/runner.js new file mode 100644 index 0000000..f0ed484 --- /dev/null +++ b/node_modules/har-validator/lib/runner.js @@ -0,0 +1,29 @@ +'use strict' + +var schemas = require('./schemas') +var ValidationError = require('./error') +var validator = require('is-my-json-valid') + +module.exports = function (schema, data, cb) { + // default value + var valid = false + + // validator config + var validate = validator(schema, { + greedy: true, + verbose: true, + schemas: schemas + }) + + // execute is-my-json-valid + if (data !== undefined) { + valid = validate(data) + } + + // callback? + if (typeof cb === 'function') { + return cb(validate.errors ? 
new ValidationError(validate.errors) : null, valid) + } + + return valid +} diff --git a/node_modules/har-validator/lib/schemas/cache.json b/node_modules/har-validator/lib/schemas/cache.json new file mode 100644 index 0000000..a3ab682 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/cache.json @@ -0,0 +1,13 @@ +{ + "properties": { + "beforeRequest": { + "$ref": "#cacheEntry" + }, + "afterRequest": { + "$ref": "#cacheEntry" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/cacheEntry.json b/node_modules/har-validator/lib/schemas/cacheEntry.json new file mode 100644 index 0000000..a397439 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/cacheEntry.json @@ -0,0 +1,31 @@ +{ + "oneOf": [{ + "type": "object", + "optional": true, + "required": [ + "lastAccess", + "eTag", + "hitCount" + ], + "properties": { + "expires": { + "type": "string" + }, + "lastAccess": { + "type": "string" + }, + "eTag": { + "type": "string" + }, + "hitCount": { + "type": "integer" + }, + "comment": { + "type": "string" + } + } + }, { + "type": null, + "additionalProperties": false + }] +} diff --git a/node_modules/har-validator/lib/schemas/content.json b/node_modules/har-validator/lib/schemas/content.json new file mode 100644 index 0000000..3710d79 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/content.json @@ -0,0 +1,27 @@ +{ + "type": "object", + "required": [ + "size", + "mimeType" + ], + "properties": { + "size": { + "type": "integer" + }, + "compression": { + "type": "integer" + }, + "mimeType": { + "type": "string" + }, + "text": { + "type": "string" + }, + "encoding": { + "type": "string" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/cookie.json b/node_modules/har-validator/lib/schemas/cookie.json new file mode 100644 index 0000000..5768181 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/cookie.json @@ -0,0 +1,34 @@ +{ + "type": "object", + "required": [ + "name", + "value" + ], + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + }, + "path": { + "type": "string" + }, + "domain": { + "type": "string" + }, + "expires": { + "type": ["string", "null"], + "format": "date-time" + }, + "httpOnly": { + "type": "boolean" + }, + "secure": { + "type": "boolean" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/creator.json b/node_modules/har-validator/lib/schemas/creator.json new file mode 100644 index 0000000..5058600 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/creator.json @@ -0,0 +1,18 @@ +{ + "type": "object", + "required": [ + "name", + "version" + ], + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/entry.json b/node_modules/har-validator/lib/schemas/entry.json new file mode 100644 index 0000000..8a9c022 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/entry.json @@ -0,0 +1,51 @@ +{ + "type": "object", + "optional": true, + "required": [ + "startedDateTime", + "time", + "request", + "response", + "cache", + "timings" + ], + "properties": { + "pageref": { + "type": "string" + }, + "startedDateTime": { + "type": "string", + "format": "date-time", + "pattern": "^(\\d{4})(-)?(\\d\\d)(-)?(\\d\\d)(T)?(\\d\\d)(:)?(\\d\\d)(:)?(\\d\\d)(\\.\\d+)?(Z|([+-])(\\d\\d)(:)?(\\d\\d))" + }, + "time": { + "type": "number", + "min": 0 + }, + 
"request": { + "$ref": "#request" + }, + "response": { + "$ref": "#response" + }, + "cache": { + "$ref": "#cache" + }, + "timings": { + "$ref": "#timings" + }, + "serverIPAddress": { + "type": "string", + "oneOf": [ + { "format": "ipv4" }, + { "format": "ipv6" } + ] + }, + "connection": { + "type": "string" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/har.json b/node_modules/har-validator/lib/schemas/har.json new file mode 100644 index 0000000..b542782 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/har.json @@ -0,0 +1,11 @@ +{ + "type": "object", + "required": [ + "log" + ], + "properties": { + "log": { + "$ref": "#log" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/index.js b/node_modules/har-validator/lib/schemas/index.js new file mode 100644 index 0000000..7b6db7d --- /dev/null +++ b/node_modules/har-validator/lib/schemas/index.js @@ -0,0 +1,49 @@ +'use strict' + +var schemas = { + cache: require('./cache.json'), + cacheEntry: require('./cacheEntry.json'), + content: require('./content.json'), + cookie: require('./cookie.json'), + creator: require('./creator.json'), + entry: require('./entry.json'), + har: require('./har.json'), + log: require('./log.json'), + page: require('./page.json'), + pageTimings: require('./pageTimings.json'), + postData: require('./postData.json'), + record: require('./record.json'), + request: require('./request.json'), + response: require('./response.json'), + timings: require('./timings.json') +} + +// is-my-json-valid does not provide meaningful error messages for external schemas +// this is a workaround +schemas.cache.properties.beforeRequest = schemas.cacheEntry +schemas.cache.properties.afterRequest = schemas.cacheEntry + +schemas.page.properties.pageTimings = schemas.pageTimings + +schemas.request.properties.cookies.items = schemas.cookie +schemas.request.properties.headers.items = schemas.record +schemas.request.properties.queryString.items = schemas.record +schemas.request.properties.postData = schemas.postData + +schemas.response.properties.cookies.items = schemas.cookie +schemas.response.properties.headers.items = schemas.record +schemas.response.properties.content = schemas.content + +schemas.entry.properties.request = schemas.request +schemas.entry.properties.response = schemas.response +schemas.entry.properties.cache = schemas.cache +schemas.entry.properties.timings = schemas.timings + +schemas.log.properties.creator = schemas.creator +schemas.log.properties.browser = schemas.creator +schemas.log.properties.pages.items = schemas.page +schemas.log.properties.entries.items = schemas.entry + +schemas.har.properties.log = schemas.log + +module.exports = schemas diff --git a/node_modules/har-validator/lib/schemas/log.json b/node_modules/har-validator/lib/schemas/log.json new file mode 100644 index 0000000..0c91d38 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/log.json @@ -0,0 +1,34 @@ +{ + "type": "object", + "required": [ + "version", + "creator", + "entries" + ], + "properties": { + "version": { + "type": "string" + }, + "creator": { + "$ref": "#creator" + }, + "browser": { + "$ref": "#creator" + }, + "pages": { + "type": "array", + "items": { + "$ref": "#page" + } + }, + "entries": { + "type": "array", + "items": { + "$ref": "#entry" + } + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/page.json b/node_modules/har-validator/lib/schemas/page.json new file mode 100644 index 0000000..ef64abe --- 
/dev/null +++ b/node_modules/har-validator/lib/schemas/page.json @@ -0,0 +1,30 @@ +{ + "type": "object", + "optional": true, + "required": [ + "startedDateTime", + "id", + "title", + "pageTimings" + ], + "properties": { + "startedDateTime": { + "type": "string", + "format": "date-time", + "pattern": "^(\\d{4})(-)?(\\d\\d)(-)?(\\d\\d)(T)?(\\d\\d)(:)?(\\d\\d)(:)?(\\d\\d)(\\.\\d+)?(Z|([+-])(\\d\\d)(:)?(\\d\\d))" + }, + "id": { + "type": "string", + "unique": true + }, + "title": { + "type": "string" + }, + "pageTimings": { + "$ref": "#pageTimings" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/pageTimings.json b/node_modules/har-validator/lib/schemas/pageTimings.json new file mode 100644 index 0000000..adc83cc --- /dev/null +++ b/node_modules/har-validator/lib/schemas/pageTimings.json @@ -0,0 +1,16 @@ +{ + "type": "object", + "properties": { + "onContentLoad": { + "type": "number", + "min": -1 + }, + "onLoad": { + "type": "number", + "min": -1 + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/postData.json b/node_modules/har-validator/lib/schemas/postData.json new file mode 100644 index 0000000..91958b6 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/postData.json @@ -0,0 +1,41 @@ +{ + "type": "object", + "optional": true, + "required": [ + "mimeType" + ], + "properties": { + "mimeType": { + "type": "string" + }, + "text": { + "type": "string" + }, + "params": { + "type": "array", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + }, + "fileName": { + "type": "string" + }, + "contentType": { + "type": "string" + }, + "comment": { + "type": "string" + } + } + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/record.json b/node_modules/har-validator/lib/schemas/record.json new file mode 100644 index 0000000..04acd51 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/record.json @@ -0,0 +1,18 @@ +{ + "type": "object", + "required": [ + "name", + "value" + ], + "properties": { + "name": { + "type": "string" + }, + "value": { + "type": "string" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/request.json b/node_modules/har-validator/lib/schemas/request.json new file mode 100644 index 0000000..639af06 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/request.json @@ -0,0 +1,55 @@ +{ + "type": "object", + "required": [ + "method", + "url", + "httpVersion", + "cookies", + "headers", + "queryString", + "headersSize", + "bodySize" + ], + "properties": { + "method": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + }, + "httpVersion": { + "type": "string" + }, + "cookies": { + "type": "array", + "items": { + "$ref": "#cookie" + } + }, + "headers": { + "type": "array", + "items": { + "$ref": "#record" + } + }, + "queryString": { + "type": "array", + "items": { + "$ref": "#record" + } + }, + "postData": { + "$ref": "#postData" + }, + "headersSize": { + "type": "integer" + }, + "bodySize": { + "type": "integer" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/response.json b/node_modules/har-validator/lib/schemas/response.json new file mode 100644 index 0000000..de99c55 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/response.json @@ -0,0 +1,52 @@ +{ + "type": "object", + "required": [ + "status", + "statusText", + 
"httpVersion", + "cookies", + "headers", + "content", + "redirectURL", + "headersSize", + "bodySize" + ], + "properties": { + "status": { + "type": "integer" + }, + "statusText": { + "type": "string" + }, + "httpVersion": { + "type": "string" + }, + "cookies": { + "type": "array", + "items": { + "$ref": "#cookie" + } + }, + "headers": { + "type": "array", + "items": { + "$ref": "#record" + } + }, + "content": { + "$ref": "#content" + }, + "redirectURL": { + "type": "string" + }, + "headersSize": { + "type": "integer" + }, + "bodySize": { + "type": "integer" + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/lib/schemas/timings.json b/node_modules/har-validator/lib/schemas/timings.json new file mode 100644 index 0000000..066ef71 --- /dev/null +++ b/node_modules/har-validator/lib/schemas/timings.json @@ -0,0 +1,40 @@ +{ + "required": [ + "send", + "wait", + "receive" + ], + "properties": { + "dns": { + "type": "number", + "min": -1 + }, + "connect": { + "type": "number", + "min": -1 + }, + "blocked": { + "type": "number", + "min": -1 + }, + "send": { + "type": "number", + "min": -1 + }, + "wait": { + "type": "number", + "min": -1 + }, + "receive": { + "type": "number", + "min": -1 + }, + "ssl": { + "type": "number", + "min": -1 + }, + "comment": { + "type": "string" + } + } +} diff --git a/node_modules/har-validator/package.json b/node_modules/har-validator/package.json new file mode 100644 index 0000000..aaa606c --- /dev/null +++ b/node_modules/har-validator/package.json @@ -0,0 +1,55 @@ +{ + "version": "2.0.6", + "name": "har-validator", + "description": "Extremely fast HTTP Archive (HAR) validator using JSON Schema", + "author": "Ahmad Nassri (https://www.ahmadnassri.com/)", + "homepage": "https://github.com/ahmadnassri/har-validator", + "repository": "ahmadnassri/har-validator", + "license": "ISC", + "main": "lib/index", + "bin": "bin/har-validator", + "keywords": [ + "har", + "http", + "archive", + "validate", + "validator" + ], + "engines": { + "node": ">=0.10" + }, + "files": [ + "bin", + "lib" + ], + "bugs": { + "url": "https://github.com/ahmadnassri/har-validator/issues" + }, + "scripts": { + "pretest": "standard && echint", + "test": "mocha", + "posttest": "npm run coverage", + "coverage": "istanbul cover --dir coverage _mocha -- -R dot", + "codeclimate": "codeclimate < coverage/lcov.info" + }, + "echint": { + "ignore": [ + "coverage/**" + ] + }, + "devDependencies": { + "codeclimate-test-reporter": "0.2.1", + "echint": "^1.5.1", + "istanbul": "^0.4.2", + "mocha": "^2.3.4", + "require-directory": "^2.1.1", + "should": "^8.1.1", + "standard": "^5.4.1" + }, + "dependencies": { + "chalk": "^1.1.1", + "commander": "^2.9.0", + "is-my-json-valid": "^2.12.4", + "pinkie-promise": "^2.0.0" + } +} diff --git a/node_modules/has-ansi/index.js b/node_modules/has-ansi/index.js new file mode 100644 index 0000000..98fae06 --- /dev/null +++ b/node_modules/has-ansi/index.js @@ -0,0 +1,4 @@ +'use strict'; +var ansiRegex = require('ansi-regex'); +var re = new RegExp(ansiRegex().source); // remove the `g` flag +module.exports = re.test.bind(re); diff --git a/node_modules/has-ansi/license b/node_modules/has-ansi/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/has-ansi/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the 
Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/has-ansi/package.json b/node_modules/has-ansi/package.json new file mode 100644 index 0000000..01e08d4 --- /dev/null +++ b/node_modules/has-ansi/package.json @@ -0,0 +1,55 @@ +{ + "name": "has-ansi", + "version": "2.0.0", + "description": "Check if a string has ANSI escape codes", + "license": "MIT", + "repository": "sindresorhus/has-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Appelman (jbnicolai.com)" + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern", + "has" + ], + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/has-ansi/readme.md b/node_modules/has-ansi/readme.md new file mode 100644 index 0000000..02bc7c2 --- /dev/null +++ b/node_modules/has-ansi/readme.md @@ -0,0 +1,36 @@ +# has-ansi [![Build Status](https://travis-ci.org/sindresorhus/has-ansi.svg?branch=master)](https://travis-ci.org/sindresorhus/has-ansi) + +> Check if a string has [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save has-ansi +``` + + +## Usage + +```js +var hasAnsi = require('has-ansi'); + +hasAnsi('\u001b[4mcake\u001b[0m'); +//=> true + +hasAnsi('cake'); +//=> false +``` + + +## Related + +- [has-ansi-cli](https://github.com/sindresorhus/has-ansi-cli) - CLI for this module +- [strip-ansi](https://github.com/sindresorhus/strip-ansi) - Strip ANSI escape codes +- [ansi-regex](https://github.com/sindresorhus/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/sindresorhus/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/has-color/index.js b/node_modules/has-color/index.js new file mode 100644 index 0000000..092d0ba --- /dev/null +++ b/node_modules/has-color/index.js @@ -0,0 +1,32 @@ +'use strict'; +module.exports = (function () { + if (process.argv.indexOf('--no-color') !== -1) { + return false; + } + + if (process.argv.indexOf('--color') !== -1) { + return true; + } + + if (process.stdout && !process.stdout.isTTY) { + return false; + } + + if (process.platform === 'win32') { + return true; + } + + if ('COLORTERM' in 
process.env) { + return true; + } + + if (process.env.TERM === 'dumb') { + return false; + } + + if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) { + return true; + } + + return false; +})(); diff --git a/node_modules/has-color/package.json b/node_modules/has-color/package.json new file mode 100644 index 0000000..c631c1c --- /dev/null +++ b/node_modules/has-color/package.json @@ -0,0 +1,43 @@ +{ + "name": "has-color", + "version": "0.1.7", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "sindresorhus/has-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "files": [ + "index.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "capability", + "detect" + ], + "devDependencies": { + "mocha": "*" + } +} diff --git a/node_modules/has-color/readme.md b/node_modules/has-color/readme.md new file mode 100644 index 0000000..37bbd89 --- /dev/null +++ b/node_modules/has-color/readme.md @@ -0,0 +1,30 @@ +# has-color [![Build Status](https://travis-ci.org/sindresorhus/has-color.svg?branch=master)](https://travis-ci.org/sindresorhus/has-color) + +> Detect whether a terminal supports color. + +Used in the terminal color module [chalk](https://github.com/sindresorhus/chalk). + + +## Install + +```bash +$ npm install --save has-color +``` + + +## Usage + +```js +var hasColor = require('has-color'); + +if (hasColor) { + console.log('Terminal supports color.'); +} +``` + +It obeys the `--color` and `--no-color` CLI flags. + + +## License + +[MIT](http://opensource.org/licenses/MIT) © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/has-flag/index.js b/node_modules/has-flag/index.js new file mode 100644 index 0000000..fdcb342 --- /dev/null +++ b/node_modules/has-flag/index.js @@ -0,0 +1,10 @@ +'use strict'; +module.exports = function (flag, argv) { + argv = argv || process.argv; + + var terminatorPos = argv.indexOf('--'); + var prefix = /^--/.test(flag) ? '' : '--'; + var pos = argv.indexOf(prefix + flag); + + return pos !== -1 && (terminatorPos !== -1 ? pos < terminatorPos : true); +}; diff --git a/node_modules/has-flag/license b/node_modules/has-flag/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/has-flag/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/has-flag/package.json b/node_modules/has-flag/package.json new file mode 100644 index 0000000..930dc7f --- /dev/null +++ b/node_modules/has-flag/package.json @@ -0,0 +1,48 @@ +{ + "name": "has-flag", + "version": "1.0.0", + "description": "Check if argv has a specific flag", + "license": "MIT", + "repository": "sindresorhus/has-flag", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Appelman (jbnicolai.com)", + "JD Ballard (github.com/qix-)" + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "has", + "check", + "detect", + "contains", + "find", + "flag", + "cli", + "command-line", + "argv", + "process", + "arg", + "args", + "argument", + "arguments", + "getopt", + "minimist", + "optimist" + ], + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/has-flag/readme.md b/node_modules/has-flag/readme.md new file mode 100644 index 0000000..ea5c817 --- /dev/null +++ b/node_modules/has-flag/readme.md @@ -0,0 +1,64 @@ +# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag) + +> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag + +Correctly stops looking after an `--` argument terminator. + + +## Install + +``` +$ npm install --save has-flag +``` + + +## Usage + +```js +// foo.js +var hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` + +``` +$ node foo.js --unicorn --foo=bar -- --rainbow +``` + + +## API + +### hasFlag(flag, [argv]) + +Returns a boolean whether the flag exists. + +#### flag + +Type: `string` + +CLI flag to look for. The `--` prefix is optional. + +#### argv + +Type: `array` +Default: `process.argv` + +CLI arguments. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/has-unicode/LICENSE b/node_modules/has-unicode/LICENSE new file mode 100644 index 0000000..d42e25e --- /dev/null +++ b/node_modules/has-unicode/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ diff --git a/node_modules/has-unicode/README.md b/node_modules/has-unicode/README.md new file mode 100644 index 0000000..5a03e59 --- /dev/null +++ b/node_modules/has-unicode/README.md @@ -0,0 +1,43 @@ +has-unicode +=========== + +Try to guess if your terminal supports unicode + +```javascript +var hasUnicode = require("has-unicode") + +if (hasUnicode()) { + // the terminal probably has unicode support +} +``` +```javascript +var hasUnicode = require("has-unicode").tryHarder +hasUnicode(function(unicodeSupported) { + if (unicodeSupported) { + // the terminal probably has unicode support + } +}) +``` + +## Detecting Unicode + +What we actually detect is UTF-8 support, as that's what Node itself supports. +If you have a UTF-16 locale then you won't be detected as unicode capable. + +### Windows + +Since at least Windows 7, `cmd` and `powershell` have been unicode capable, +but unfortunately even then it's not guaranteed. In many localizations it +still uses legacy code pages and there's no facility short of running +programs or linking C++ that will let us detect this. As such, we +report any Windows installation as NOT unicode capable, and recommend +that you encourage your users to override this via config. + +### Unix Like Operating Systems + +We look at the environment variables `LC_ALL`, `LC_CTYPE`, and `LANG` in +that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value. +For `LC_CTYPE` it looks to see if the value is `UTF-8`. This is sufficient +for most POSIX systems. While locale data can be put in `/etc/locale.conf` +as well, AFAIK it's always copied into the environment. + diff --git a/node_modules/has-unicode/index.js b/node_modules/has-unicode/index.js new file mode 100644 index 0000000..9b0fe44 --- /dev/null +++ b/node_modules/has-unicode/index.js @@ -0,0 +1,16 @@ +"use strict" +var os = require("os") + +var hasUnicode = module.exports = function () { + // Recent Win32 platforms (>XP) CAN support unicode in the console but + // don't have to, and in non-english locales often use traditional local + // code pages. There's no way, short of windows system calls or execing + // the chcp command line program to figure this out. As such, we default + // this to false and encourage your users to override it via config if + // appropriate. 
+ if (os.type() == "Windows_NT") { return false } + + var isUTF8 = /UTF-?8$/i + var ctype = process.env.LC_ALL || process.env.LC_CTYPE || process.env.LANG + return isUTF8.test(ctype) +} diff --git a/node_modules/has-unicode/package.json b/node_modules/has-unicode/package.json new file mode 100644 index 0000000..ebe9d76 --- /dev/null +++ b/node_modules/has-unicode/package.json @@ -0,0 +1,30 @@ +{ + "name": "has-unicode", + "version": "2.0.1", + "description": "Try to guess if your terminal supports unicode", + "main": "index.js", + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/has-unicode" + }, + "keywords": [ + "unicode", + "terminal" + ], + "files": [ + "index.js" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/has-unicode/issues" + }, + "homepage": "https://github.com/iarna/has-unicode", + "devDependencies": { + "require-inject": "^1.3.0", + "tap": "^2.3.1" + } +} diff --git a/node_modules/hawk/.npmignore b/node_modules/hawk/.npmignore new file mode 100644 index 0000000..96ed091 --- /dev/null +++ b/node_modules/hawk/.npmignore @@ -0,0 +1,20 @@ +.idea +*.iml +npm-debug.log +dump.rdb +node_modules +components +build +results.tap +results.xml +npm-shrinkwrap.json +config.json +.DS_Store +*/.DS_Store +*/*/.DS_Store +._* +*/._* +*/*/._* +coverage.* +lib-cov + diff --git a/node_modules/hawk/.travis.yml b/node_modules/hawk/.travis.yml new file mode 100755 index 0000000..40ca59e --- /dev/null +++ b/node_modules/hawk/.travis.yml @@ -0,0 +1,5 @@ +language: node_js + +node_js: + - 0.10 + diff --git a/node_modules/hawk/LICENSE b/node_modules/hawk/LICENSE new file mode 100755 index 0000000..7880936 --- /dev/null +++ b/node_modules/hawk/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012-2014, Eran Hammer and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + * * * + +The complete list of contributors can be found at: https://github.com/hueniverse/hawk/graphs/contributors diff --git a/node_modules/hawk/README.md b/node_modules/hawk/README.md new file mode 100755 index 0000000..b92cd7c --- /dev/null +++ b/node_modules/hawk/README.md @@ -0,0 +1,634 @@ +![hawk Logo](https://raw.github.com/hueniverse/hawk/master/images/hawk.png) + + **Hawk** is an HTTP authentication scheme using a message authentication code (MAC) algorithm to provide partial +HTTP request cryptographic verification. For more complex use cases such as access delegation, see [Oz](https://github.com/hueniverse/oz). + +Current version: **3.x** + +Note: 3.x and 2.x are the same exact protocol as 1.1. The version increments reflect changes in the node API. + +[![Build Status](https://secure.travis-ci.org/hueniverse/hawk.png)](http://travis-ci.org/hueniverse/hawk) + +# Table of Contents + +- [**Introduction**](#introduction) + - [Replay Protection](#replay-protection) + - [Usage Example](#usage-example) + - [Protocol Example](#protocol-example) + - [Payload Validation](#payload-validation) + - [Response Payload Validation](#response-payload-validation) + - [Browser Support and Considerations](#browser-support-and-considerations) +

+- [**Single URI Authorization**](#single-uri-authorization) + - [Usage Example](#bewit-usage-example) +

+- [**Security Considerations**](#security-considerations) + - [MAC Keys Transmission](#mac-keys-transmission) + - [Confidentiality of Requests](#confidentiality-of-requests) + - [Spoofing by Counterfeit Servers](#spoofing-by-counterfeit-servers) + - [Plaintext Storage of Credentials](#plaintext-storage-of-credentials) + - [Entropy of Keys](#entropy-of-keys) + - [Coverage Limitations](#coverage-limitations) + - [Future Time Manipulation](#future-time-manipulation) + - [Client Clock Poisoning](#client-clock-poisoning) + - [Bewit Limitations](#bewit-limitations) + - [Host Header Forgery](#host-header-forgery) +

+- [**Frequently Asked Questions**](#frequently-asked-questions) +

+- [**Implementations**](#implementations) +- [**Acknowledgements**](#acknowledgements) + +# Introduction + +**Hawk** is an HTTP authentication scheme providing mechanisms for making authenticated HTTP requests with +partial cryptographic verification of the request and response, covering the HTTP method, request URI, host, +and optionally the request payload. + +Similar to the HTTP [Digest access authentication schemes](http://www.ietf.org/rfc/rfc2617.txt), **Hawk** uses a set of +client credentials which include an identifier (e.g. username) and key (e.g. password). Likewise, just as with the Digest scheme, +the key is never included in authenticated requests. Instead, it is used to calculate a request MAC value which is +included in its place. + +However, **Hawk** has several differences from Digest. In particular, while both use a nonce to limit the possibility of +replay attacks, in **Hawk** the client generates the nonce and uses it in combination with a timestamp, leading to less +"chattiness" (interaction with the server). + +Also unlike Digest, this scheme is not intended to protect the key itself (the password in Digest) because +the client and server must both have access to the key material in the clear. + +The primary design goals of this scheme are to: +* simplify and improve HTTP authentication for services that are unwilling or unable to deploy TLS for all resources, +* secure credentials against leakage (e.g., when the client uses some form of dynamic configuration to determine where + to send an authenticated request), and +* avoid the exposure of credentials sent to a malicious server over an unauthenticated secure channel due to client + failure to validate the server's identity as part of its TLS handshake. + +In addition, **Hawk** supports a method for granting third-parties temporary access to individual resources using +a query parameter called _bewit_ (in falconry, a leather strap used to attach a tracking device to the leg of a hawk). + +The **Hawk** scheme requires the establishment of a shared symmetric key between the client and the server, +which is beyond the scope of this module. Typically, the shared credentials are established via an initial +TLS-protected phase or derived from some other shared confidential information available to both the client +and the server. + + +## Replay Protection + +Without replay protection, an attacker can use a compromised (but otherwise valid and authenticated) request more +than once, gaining access to a protected resource. To mitigate this, clients include both a nonce and a timestamp when +making requests. This gives the server enough information to prevent replay attacks. + +The nonce is generated by the client, and is a string unique across all requests with the same timestamp and +key identifier combination. + +The timestamp enables the server to restrict the validity period of the credentials where requests occuring afterwards +are rejected. It also removes the need for the server to retain an unbounded number of nonce values for future checks. +By default, **Hawk** uses a time window of 1 minute to allow for time skew between the client and server (which in +practice translates to a maximum of 2 minutes as the skew can be positive or negative). + +Using a timestamp requires the client's clock to be in sync with the server's clock. **Hawk** requires both the client +clock and the server clock to use NTP to ensure synchronization. However, given the limitations of some client types +(e.g. 
browsers) to deploy NTP, the server provides the client with its current time (in seconds precision) in response +to a bad timestamp. + +There is no expectation that the client will adjust its system clock to match the server (in fact, this would be a +potential attack vector). Instead, the client only uses the server's time to calculate an offset used only +for communications with that particular server. The protocol rewards clients with synchronized clocks by reducing +the number of round trips required to authenticate the first request. + + +## Usage Example + +Server code: + +```javascript +var Http = require('http'); +var Hawk = require('hawk'); + + +// Credentials lookup function + +var credentialsFunc = function (id, callback) { + + var credentials = { + key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn', + algorithm: 'sha256', + user: 'Steve' + }; + + return callback(null, credentials); +}; + +// Create HTTP server + +var handler = function (req, res) { + + // Authenticate incoming request + + Hawk.server.authenticate(req, credentialsFunc, {}, function (err, credentials, artifacts) { + + // Prepare response + + var payload = (!err ? 'Hello ' + credentials.user + ' ' + artifacts.ext : 'Shoosh!'); + var headers = { 'Content-Type': 'text/plain' }; + + // Generate Server-Authorization response header + + var header = Hawk.server.header(credentials, artifacts, { payload: payload, contentType: headers['Content-Type'] }); + headers['Server-Authorization'] = header; + + // Send the response back + + res.writeHead(!err ? 200 : 401, headers); + res.end(payload); + }); +}; + +// Start server + +Http.createServer(handler).listen(8000, 'example.com'); +``` + +Client code: + +```javascript +var Request = require('request'); +var Hawk = require('hawk'); + + +// Client credentials + +var credentials = { + id: 'dh37fgj492je', + key: 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn', + algorithm: 'sha256' +} + +// Request options + +var requestOptions = { + uri: 'http://example.com:8000/resource/1?b=1&a=2', + method: 'GET', + headers: {} +}; + +// Generate Authorization request header + +var header = Hawk.client.header('http://example.com:8000/resource/1?b=1&a=2', 'GET', { credentials: credentials, ext: 'some-app-data' }); +requestOptions.headers.Authorization = header.field; + +// Send authenticated request + +Request(requestOptions, function (error, response, body) { + + // Authenticate the server's response + + var isValid = Hawk.client.authenticate(response, credentials, header.artifacts, { payload: body }); + + // Output results + + console.log(response.statusCode + ': ' + body + (isValid ? ' (valid)' : ' (invalid)')); +}); +``` + +**Hawk** utilized the [**SNTP**](https://github.com/hueniverse/sntp) module for time sync management. By default, the local +machine time is used. To automatically retrieve and synchronice the clock within the application, use the SNTP 'start()' method. + +```javascript +Hawk.sntp.start(); +``` + + +## Protocol Example + +The client attempts to access a protected resource without authentication, sending the following HTTP request to +the resource server: + +``` +GET /resource/1?b=1&a=2 HTTP/1.1 +Host: example.com:8000 +``` + +The resource server returns an authentication challenge. + +``` +HTTP/1.1 401 Unauthorized +WWW-Authenticate: Hawk +``` + +The client has previously obtained a set of **Hawk** credentials for accessing resources on the "http://example.com/" +server. 
The **Hawk** credentials issued to the client include the following attributes: + +* Key identifier: dh37fgj492je +* Key: werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn +* Algorithm: sha256 + +The client generates the authentication header by calculating a timestamp (e.g. the number of seconds since January 1, +1970 00:00:00 GMT), generating a nonce, and constructing the normalized request string (each value followed by a newline +character): + +``` +hawk.1.header +1353832234 +j4h3g2 +GET +/resource/1?b=1&a=2 +example.com +8000 + +some-app-ext-data + +``` + +The request MAC is calculated using HMAC with the specified hash algorithm "sha256" and the key over the normalized request string. +The result is base64-encoded to produce the request MAC: + +``` +6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE= +``` + +The client includes the **Hawk** key identifier, timestamp, nonce, application specific data, and request MAC with the request using +the HTTP `Authorization` request header field: + +``` +GET /resource/1?b=1&a=2 HTTP/1.1 +Host: example.com:8000 +Authorization: Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", ext="some-app-ext-data", mac="6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE=" +``` + +The server validates the request by calculating the request MAC again based on the request received and verifies the validity +and scope of the **Hawk** credentials. If valid, the server responds with the requested resource. + + +### Payload Validation + +**Hawk** provides optional payload validation. When generating the authentication header, the client calculates a payload hash +using the specified hash algorithm. The hash is calculated over the concatenated value of (each followed by a newline character): +* `hawk.1.payload` +* the content-type in lowercase, without any parameters (e.g. `application/json`) +* the request payload prior to any content encoding (the exact representation requirements should be specified by the server for payloads other than simple single-part ascii to ensure interoperability) + +For example: + +* Payload: `Thank you for flying Hawk` +* Content Type: `text/plain` +* Hash (sha256): `Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=` + +Results in the following input to the payload hash function (newline terminated values): + +``` +hawk.1.payload +text/plain +Thank you for flying Hawk + +``` + +Which produces the following hash value: + +``` +Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY= +``` + +The client constructs the normalized request string (newline terminated values): + +``` +hawk.1.header +1353832234 +j4h3g2 +POST +/resource/1?a=1&b=2 +example.com +8000 +Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY= +some-app-ext-data + +``` + +Then calculates the request MAC and includes the **Hawk** key identifier, timestamp, nonce, payload hash, application specific data, +and request MAC, with the request using the HTTP `Authorization` request header field: + +``` +POST /resource/1?a=1&b=2 HTTP/1.1 +Host: example.com:8000 +Authorization: Hawk id="dh37fgj492je", ts="1353832234", nonce="j4h3g2", hash="Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=", ext="some-app-ext-data", mac="aSe1DERmZuRl3pI36/9BdZmnErTw3sNzOOAUlfeKjVw=" +``` + +It is up to the server if and when it validates the payload for any given request, based solely on it's security policy +and the nature of the data included. + +If the payload is available at the time of authentication, the server uses the hash value provided by the client to construct +the normalized string and validates the MAC. 
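The digests quoted in the protocol and payload-validation examples above can be reproduced with nothing more than Node's built-in `crypto` module. The sketch below is for illustration only (the hawk module performs these computations internally); the key, the normalized strings, and the expected outputs are copied from the examples above.

```js
var Crypto = require('crypto');

// 1. Request MAC: HMAC-SHA256 over the normalized request string,
//    keyed with the example credentials key.
var key = 'werxhqb98rpaxn39848xrunpaw3489ruxnpa98w4rxn';
var normalized = 'hawk.1.header\n' +
                 '1353832234\n' +
                 'j4h3g2\n' +
                 'GET\n' +
                 '/resource/1?b=1&a=2\n' +
                 'example.com\n' +
                 '8000\n' +
                 '\n' +                     // empty payload-hash slot
                 'some-app-ext-data\n';

var mac = Crypto.createHmac('sha256', key).update(normalized).digest('base64');
console.log(mac);   // 6R4rV5iE+NPoym+WwjeHzjAGXUtLNIxmo1vpMofpLAE=

// 2. Payload hash: SHA-256 over the newline-terminated payload string.
var payloadString = 'hawk.1.payload\n' +
                    'text/plain\n' +
                    'Thank you for flying Hawk\n';

var hash = Crypto.createHash('sha256').update(payloadString).digest('base64');
console.log(hash);  // Yi9LfIIFRtBEPt74PVmbTF/xVAwPn7ub15ePICfgnuY=
```

Both printed values should match the MAC and payload hash shown in the example blocks above.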
If the MAC is valid, the server calculates the payload hash and compares the value +with the provided payload hash in the header. In many cases, checking the MAC first is faster than calculating the payload hash. + +However, if the payload is not available at authentication time (e.g. too large to fit in memory, streamed elsewhere, or processed +at a different stage in the application), the server may choose to defer payload validation for later by retaining the hash value +provided by the client after validating the MAC. + +It is important to note that MAC validation does not mean the hash value provided by the client is valid, only that the value +included in the header was not modified. Without calculating the payload hash on the server and comparing it to the value provided +by the client, the payload may be modified by an attacker. + + +## Response Payload Validation + +**Hawk** provides partial response payload validation. The server includes the `Server-Authorization` response header which enables the +client to authenticate the response and ensure it is talking to the right server. **Hawk** defines the HTTP `Server-Authorization` header +as a response header using the exact same syntax as the `Authorization` request header field. + +The header is contructed using the same process as the client's request header. The server uses the same credentials and other +artifacts provided by the client to constructs the normalized request string. The `ext` and `hash` values are replaced with +new values based on the server response. The rest as identical to those used by the client. + +The result MAC digest is included with the optional `hash` and `ext` values: + +``` +Server-Authorization: Hawk mac="XIJRsMl/4oL+nn+vKoeVZPdCHXB4yJkNnBbTbHFZUYE=", hash="f9cDF/TDm7TkYRLnGwRMfeDzT6LixQVLvrIKhh0vgmM=", ext="response-specific" +``` + + +## Browser Support and Considerations + +A browser script is provided for including using a `'); + expect(encoded).to.equal('\\x3cscript\\x3ealert\\x281\\x29\\x3c\\x2fscript\\x3e'); + done(); + }); + + it('encodes \' characters', function (done) { + + var encoded = Hoek.escapeJavaScript('something(\'param\')'); + expect(encoded).to.equal('something\\x28\\x27param\\x27\\x29'); + done(); + }); + + it('encodes large unicode characters with the correct padding', function (done) { + + var encoded = Hoek.escapeJavaScript(String.fromCharCode(500) + String.fromCharCode(1000)); + expect(encoded).to.equal('\\u0500\\u1000'); + done(); + }); + + it('doesn\'t throw an exception when passed null', function (done) { + + var encoded = Hoek.escapeJavaScript(null); + expect(encoded).to.equal(''); + done(); + }); +}); + +describe('escapeHtml()', function () { + + it('encodes / characters', function (done) { + + var encoded = Hoek.escapeHtml(''); + expect(encoded).to.equal('<script>alert(1)</script>'); + done(); + }); + + it('encodes < and > as named characters', function (done) { + + var encoded = Hoek.escapeHtml(' + + diff --git a/node_modules/http-browserify/example/get/main.js b/node_modules/http-browserify/example/get/main.js new file mode 100644 index 0000000..3030d2d --- /dev/null +++ b/node_modules/http-browserify/example/get/main.js @@ -0,0 +1,14 @@ +var http = require('../../'); + +http.get({ path : '/beep' }, function (res) { + var div = document.getElementById('result'); + div.innerHTML += 'GET /beep
'; + + res.on('data', function (buf) { + div.innerHTML += buf; + }); + + res.on('end', function () { + div.innerHTML += '
__END__'; + }); +}); diff --git a/node_modules/http-browserify/example/get/server.js b/node_modules/http-browserify/example/get/server.js new file mode 100644 index 0000000..a048277 --- /dev/null +++ b/node_modules/http-browserify/example/get/server.js @@ -0,0 +1,12 @@ +var http = require('http'); +var ecstatic = require('ecstatic')(__dirname); +var server = http.createServer(function (req, res) { + if (req.url === '/beep') { + res.setHeader('content-type', 'text/plain'); + res.end('boop'); + } + else ecstatic(req, res); +}); + +console.log('Listening on :8082'); +server.listen(8082); diff --git a/node_modules/http-browserify/example/headers/index.html b/node_modules/http-browserify/example/headers/index.html new file mode 100644 index 0000000..7001c20 --- /dev/null +++ b/node_modules/http-browserify/example/headers/index.html @@ -0,0 +1,9 @@ + + + xhr + + +
+ + + diff --git a/node_modules/http-browserify/example/headers/main.js b/node_modules/http-browserify/example/headers/main.js new file mode 100644 index 0000000..247806c --- /dev/null +++ b/node_modules/http-browserify/example/headers/main.js @@ -0,0 +1,18 @@ +var http = require('../../'); + +var opts = { path : '/beep', method : 'GET' }; +var req = http.request(opts, function (res) { + var div = document.getElementById('result'); + + for (var key in res.headers) { + div.innerHTML += key + ': ' + res.getHeader(key) + '
'; + } + div.innerHTML += '
'; + + res.on('data', function (buf) { + div.innerHTML += buf; + }); +}); + +req.setHeader('bling', 'blong'); +req.end(); diff --git a/node_modules/http-browserify/example/headers/server.js b/node_modules/http-browserify/example/headers/server.js new file mode 100644 index 0000000..386e1fa --- /dev/null +++ b/node_modules/http-browserify/example/headers/server.js @@ -0,0 +1,15 @@ +var http = require('http'); +var ecstatic = require('ecstatic')(__dirname); +var server = http.createServer(function (req, res) { + if (req.url === '/beep') { + res.setHeader('content-type', 'text/plain'); + res.setHeader('foo', 'bar'); + res.setHeader('bling', req.headers.bling + '-blong'); + + res.end('boop'); + } + else ecstatic(req, res); +}); + +console.log('Listening on :8082'); +server.listen(8082); diff --git a/node_modules/http-browserify/example/post/index.html b/node_modules/http-browserify/example/post/index.html new file mode 100644 index 0000000..7001c20 --- /dev/null +++ b/node_modules/http-browserify/example/post/index.html @@ -0,0 +1,9 @@ + + + xhr + + +
+ + + diff --git a/node_modules/http-browserify/example/post/main.js b/node_modules/http-browserify/example/post/main.js new file mode 100644 index 0000000..dfb6746 --- /dev/null +++ b/node_modules/http-browserify/example/post/main.js @@ -0,0 +1,16 @@ +var http = require('../../'); + +var n = 100; +var opts = { path : '/plusone', method : 'post' }; + +var req = http.request(opts, function (res) { + var div = document.getElementById('result'); + div.innerHTML += n.toString() + ' + 1 = '; + + res.on('data', function (buf) { + div.innerHTML += buf; + }); +}); + +req.write(n); +req.end(); diff --git a/node_modules/http-browserify/example/post/server.js b/node_modules/http-browserify/example/post/server.js new file mode 100644 index 0000000..d16963f --- /dev/null +++ b/node_modules/http-browserify/example/post/server.js @@ -0,0 +1,19 @@ +var http = require('http'); +var ecstatic = require('ecstatic')(__dirname); +var server = http.createServer(function (req, res) { + if (req.method === 'POST' && req.url === '/plusone') { + res.setHeader('content-type', 'text/plain'); + + var s = ''; + req.on('data', function (buf) { s += buf.toString() }); + + req.on('end', function () { + var n = parseInt(s) + 1; + res.end(n.toString()); + }); + } + else ecstatic(req, res); +}); + +console.log('Listening on :8082'); +server.listen(8082); diff --git a/node_modules/http-browserify/example/streaming/index.html b/node_modules/http-browserify/example/streaming/index.html new file mode 100644 index 0000000..7001c20 --- /dev/null +++ b/node_modules/http-browserify/example/streaming/index.html @@ -0,0 +1,9 @@ + + + xhr + + +
+ + + diff --git a/node_modules/http-browserify/example/streaming/main.js b/node_modules/http-browserify/example/streaming/main.js new file mode 100644 index 0000000..40ee353 --- /dev/null +++ b/node_modules/http-browserify/example/streaming/main.js @@ -0,0 +1,16 @@ +var http = require('../../'); + +http.get({ path : '/doom' }, function (res) { + var div = document.getElementById('result'); + if (!div.style) div.style = {}; + div.style.color = 'rgb(80,80,80)'; + + res.on('data', function (buf) { + div.innerHTML += buf; + }); + + res.on('end', function () { + div.style.color = 'black'; + div.innerHTML += '!'; + }); +}); diff --git a/node_modules/http-browserify/example/streaming/server.js b/node_modules/http-browserify/example/streaming/server.js new file mode 100644 index 0000000..55c9ede --- /dev/null +++ b/node_modules/http-browserify/example/streaming/server.js @@ -0,0 +1,21 @@ +var http = require('http'); +var ecstatic = require('ecstatic')(__dirname); +var server = http.createServer(function (req, res) { + if (req.url === '/doom') { + res.setHeader('content-type', 'multipart/octet-stream'); + + res.write('d'); + var i = 0; + var iv = setInterval(function () { + res.write('o'); + if (i++ >= 10) { + clearInterval(iv); + res.end('m'); + } + }, 500); + } + else ecstatic(req, res); +}); + +console.log('Listening on :8082'); +server.listen(8082); diff --git a/node_modules/http-browserify/index.js b/node_modules/http-browserify/index.js new file mode 100644 index 0000000..fedd2a2 --- /dev/null +++ b/node_modules/http-browserify/index.js @@ -0,0 +1,145 @@ +var http = module.exports; +var EventEmitter = require('events').EventEmitter; +var Request = require('./lib/request'); +var url = require('url') + +http.request = function (params, cb) { + if (typeof params === 'string') { + params = url.parse(params) + } + if (!params) params = {}; + if (!params.host && !params.port) { + params.port = parseInt(window.location.port, 10); + } + if (!params.host && params.hostname) { + params.host = params.hostname; + } + + if (!params.protocol) { + if (params.scheme) { + params.protocol = params.scheme + ':'; + } else { + params.protocol = window.location.protocol; + } + } + + if (!params.host) { + params.host = window.location.hostname || window.location.host; + } + if (/:/.test(params.host)) { + if (!params.port) { + params.port = params.host.split(':')[1]; + } + params.host = params.host.split(':')[0]; + } + if (!params.port) params.port = params.protocol == 'https:' ? 
443 : 80; + + var req = new Request(new xhrHttp, params); + if (cb) req.on('response', cb); + return req; +}; + +http.get = function (params, cb) { + params.method = 'GET'; + var req = http.request(params, cb); + req.end(); + return req; +}; + +http.Agent = function () {}; +http.Agent.defaultMaxSockets = 4; + +var xhrHttp = (function () { + if (typeof window === 'undefined') { + throw new Error('no window object present'); + } + else if (window.XMLHttpRequest) { + return window.XMLHttpRequest; + } + else if (window.ActiveXObject) { + var axs = [ + 'Msxml2.XMLHTTP.6.0', + 'Msxml2.XMLHTTP.3.0', + 'Microsoft.XMLHTTP' + ]; + for (var i = 0; i < axs.length; i++) { + try { + var ax = new(window.ActiveXObject)(axs[i]); + return function () { + if (ax) { + var ax_ = ax; + ax = null; + return ax_; + } + else { + return new(window.ActiveXObject)(axs[i]); + } + }; + } + catch (e) {} + } + throw new Error('ajax not supported in this browser') + } + else { + throw new Error('ajax not supported in this browser'); + } +})(); + +http.STATUS_CODES = { + 100 : 'Continue', + 101 : 'Switching Protocols', + 102 : 'Processing', // RFC 2518, obsoleted by RFC 4918 + 200 : 'OK', + 201 : 'Created', + 202 : 'Accepted', + 203 : 'Non-Authoritative Information', + 204 : 'No Content', + 205 : 'Reset Content', + 206 : 'Partial Content', + 207 : 'Multi-Status', // RFC 4918 + 300 : 'Multiple Choices', + 301 : 'Moved Permanently', + 302 : 'Moved Temporarily', + 303 : 'See Other', + 304 : 'Not Modified', + 305 : 'Use Proxy', + 307 : 'Temporary Redirect', + 400 : 'Bad Request', + 401 : 'Unauthorized', + 402 : 'Payment Required', + 403 : 'Forbidden', + 404 : 'Not Found', + 405 : 'Method Not Allowed', + 406 : 'Not Acceptable', + 407 : 'Proxy Authentication Required', + 408 : 'Request Time-out', + 409 : 'Conflict', + 410 : 'Gone', + 411 : 'Length Required', + 412 : 'Precondition Failed', + 413 : 'Request Entity Too Large', + 414 : 'Request-URI Too Large', + 415 : 'Unsupported Media Type', + 416 : 'Requested Range Not Satisfiable', + 417 : 'Expectation Failed', + 418 : 'I\'m a teapot', // RFC 2324 + 422 : 'Unprocessable Entity', // RFC 4918 + 423 : 'Locked', // RFC 4918 + 424 : 'Failed Dependency', // RFC 4918 + 425 : 'Unordered Collection', // RFC 4918 + 426 : 'Upgrade Required', // RFC 2817 + 428 : 'Precondition Required', // RFC 6585 + 429 : 'Too Many Requests', // RFC 6585 + 431 : 'Request Header Fields Too Large',// RFC 6585 + 500 : 'Internal Server Error', + 501 : 'Not Implemented', + 502 : 'Bad Gateway', + 503 : 'Service Unavailable', + 504 : 'Gateway Time-out', + 505 : 'HTTP Version Not Supported', + 506 : 'Variant Also Negotiates', // RFC 2295 + 507 : 'Insufficient Storage', // RFC 4918 + 509 : 'Bandwidth Limit Exceeded', + 510 : 'Not Extended', // RFC 2774 + 511 : 'Network Authentication Required' // RFC 6585 +}; \ No newline at end of file diff --git a/node_modules/http-browserify/lib/request.js b/node_modules/http-browserify/lib/request.js new file mode 100644 index 0000000..7b8b733 --- /dev/null +++ b/node_modules/http-browserify/lib/request.js @@ -0,0 +1,209 @@ +var Stream = require('stream'); +var Response = require('./response'); +var Base64 = require('Base64'); +var inherits = require('inherits'); + +var Request = module.exports = function (xhr, params) { + var self = this; + self.writable = true; + self.xhr = xhr; + self.body = []; + + self.uri = (params.protocol || 'http:') + '//' + + params.host + + (params.port ? 
':' + params.port : '') + + (params.path || '/') + ; + + if (typeof params.withCredentials === 'undefined') { + params.withCredentials = true; + } + + try { xhr.withCredentials = params.withCredentials } + catch (e) {} + + if (params.responseType) try { xhr.responseType = params.responseType } + catch (e) {} + + xhr.open( + params.method || 'GET', + self.uri, + true + ); + + xhr.onerror = function(event) { + self.emit('error', new Error('Network error')); + }; + + self._headers = {}; + + if (params.headers) { + var keys = objectKeys(params.headers); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (!self.isSafeRequestHeader(key)) continue; + var value = params.headers[key]; + self.setHeader(key, value); + } + } + + if (params.auth) { + //basic auth + this.setHeader('Authorization', 'Basic ' + Base64.btoa(params.auth)); + } + + var res = new Response; + res.on('close', function () { + self.emit('close'); + }); + + res.on('ready', function () { + self.emit('response', res); + }); + + res.on('error', function (err) { + self.emit('error', err); + }); + + xhr.onreadystatechange = function () { + // Fix for IE9 bug + // SCRIPT575: Could not complete the operation due to error c00c023f + // It happens when a request is aborted, calling the success callback anyway with readyState === 4 + if (xhr.__aborted) return; + res.handle(xhr); + }; +}; + +inherits(Request, Stream); + +Request.prototype.setHeader = function (key, value) { + this._headers[key.toLowerCase()] = value +}; + +Request.prototype.getHeader = function (key) { + return this._headers[key.toLowerCase()] +}; + +Request.prototype.removeHeader = function (key) { + delete this._headers[key.toLowerCase()] +}; + +Request.prototype.write = function (s) { + this.body.push(s); +}; + +Request.prototype.destroy = function (s) { + this.xhr.__aborted = true; + this.xhr.abort(); + this.emit('close'); +}; + +Request.prototype.end = function (s) { + if (s !== undefined) this.body.push(s); + + var keys = objectKeys(this._headers); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + var value = this._headers[key]; + if (isArray(value)) { + for (var j = 0; j < value.length; j++) { + this.xhr.setRequestHeader(key, value[j]); + } + } + else this.xhr.setRequestHeader(key, value) + } + + if (this.body.length === 0) { + this.xhr.send(''); + } + else if (typeof this.body[0] === 'string') { + this.xhr.send(this.body.join('')); + } + else if (isArray(this.body[0])) { + var body = []; + for (var i = 0; i < this.body.length; i++) { + body.push.apply(body, this.body[i]); + } + this.xhr.send(body); + } + else if (/Array/.test(Object.prototype.toString.call(this.body[0]))) { + var len = 0; + for (var i = 0; i < this.body.length; i++) { + len += this.body[i].length; + } + var body = new(this.body[0].constructor)(len); + var k = 0; + + for (var i = 0; i < this.body.length; i++) { + var b = this.body[i]; + for (var j = 0; j < b.length; j++) { + body[k++] = b[j]; + } + } + this.xhr.send(body); + } + else if (isXHR2Compatible(this.body[0])) { + this.xhr.send(this.body[0]); + } + else { + var body = ''; + for (var i = 0; i < this.body.length; i++) { + body += this.body[i].toString(); + } + this.xhr.send(body); + } +}; + +// Taken from http://dxr.mozilla.org/mozilla/mozilla-central/content/base/src/nsXMLHttpRequest.cpp.html +Request.unsafeHeaders = [ + "accept-charset", + "accept-encoding", + "access-control-request-headers", + "access-control-request-method", + "connection", + "content-length", + "cookie", + "cookie2", + 
"content-transfer-encoding", + "date", + "expect", + "host", + "keep-alive", + "origin", + "referer", + "te", + "trailer", + "transfer-encoding", + "upgrade", + "user-agent", + "via" +]; + +Request.prototype.isSafeRequestHeader = function (headerName) { + if (!headerName) return false; + return indexOf(Request.unsafeHeaders, headerName.toLowerCase()) === -1; +}; + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +var indexOf = function (xs, x) { + if (xs.indexOf) return xs.indexOf(x); + for (var i = 0; i < xs.length; i++) { + if (xs[i] === x) return i; + } + return -1; +}; + +var isXHR2Compatible = function (obj) { + if (typeof Blob !== 'undefined' && obj instanceof Blob) return true; + if (typeof ArrayBuffer !== 'undefined' && obj instanceof ArrayBuffer) return true; + if (typeof FormData !== 'undefined' && obj instanceof FormData) return true; +}; diff --git a/node_modules/http-browserify/lib/response.js b/node_modules/http-browserify/lib/response.js new file mode 100644 index 0000000..f83d761 --- /dev/null +++ b/node_modules/http-browserify/lib/response.js @@ -0,0 +1,120 @@ +var Stream = require('stream'); +var util = require('util'); + +var Response = module.exports = function (res) { + this.offset = 0; + this.readable = true; +}; + +util.inherits(Response, Stream); + +var capable = { + streaming : true, + status2 : true +}; + +function parseHeaders (res) { + var lines = res.getAllResponseHeaders().split(/\r?\n/); + var headers = {}; + for (var i = 0; i < lines.length; i++) { + var line = lines[i]; + if (line === '') continue; + + var m = line.match(/^([^:]+):\s*(.*)/); + if (m) { + var key = m[1].toLowerCase(), value = m[2]; + + if (headers[key] !== undefined) { + + if (isArray(headers[key])) { + headers[key].push(value); + } + else { + headers[key] = [ headers[key], value ]; + } + } + else { + headers[key] = value; + } + } + else { + headers[line] = true; + } + } + return headers; +} + +Response.prototype.getResponse = function (xhr) { + var respType = String(xhr.responseType).toLowerCase(); + if (respType === 'blob') return xhr.responseBlob || xhr.response; + if (respType === 'arraybuffer') return xhr.response; + return xhr.responseText; +} + +Response.prototype.getHeader = function (key) { + return this.headers[key.toLowerCase()]; +}; + +Response.prototype.handle = function (res) { + if (res.readyState === 2 && capable.status2) { + try { + this.statusCode = res.status; + this.headers = parseHeaders(res); + } + catch (err) { + capable.status2 = false; + } + + if (capable.status2) { + this.emit('ready'); + } + } + else if (capable.streaming && res.readyState === 3) { + try { + if (!this.statusCode) { + this.statusCode = res.status; + this.headers = parseHeaders(res); + this.emit('ready'); + } + } + catch (err) {} + + try { + this._emitData(res); + } + catch (err) { + capable.streaming = false; + } + } + else if (res.readyState === 4) { + if (!this.statusCode) { + this.statusCode = res.status; + this.emit('ready'); + } + this._emitData(res); + + if (res.error) { + this.emit('error', this.getResponse(res)); + } + else this.emit('end'); + + this.emit('close'); + } +}; + +Response.prototype._emitData = function (res) { + var respBody = this.getResponse(res); + if (respBody.toString().match(/ArrayBuffer/)) { + this.emit('data', new Uint8Array(respBody, this.offset)); + this.offset = 
respBody.byteLength; + return; + } + if (respBody.length > this.offset) { + this.emit('data', respBody.slice(this.offset)); + this.offset = respBody.length; + } +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/http-browserify/package.json b/node_modules/http-browserify/package.json new file mode 100644 index 0000000..6b28314 --- /dev/null +++ b/node_modules/http-browserify/package.json @@ -0,0 +1,40 @@ +{ + "name": "http-browserify", + "version": "1.7.0", + "description": "http module compatability for browserify", + "main": "index.js", + "browserify": "index.js", + "directories": { + "lib": ".", + "example": "example", + "test": "test" + }, + "scripts": { + "test": "tape test/*.js" + }, + "dependencies": { + "Base64": "~0.2.0", + "inherits": "~2.0.1" + }, + "devDependencies": { + "ecstatic": "~0.1.6", + "tape": "~2.3.2" + }, + "repository": { + "type": "git", + "url": "http://github.com/substack/http-browserify.git" + }, + "keywords": [ + "http", + "browserify", + "compatible", + "meatless", + "browser" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT/X11" +} diff --git a/node_modules/http-browserify/readme.markdown b/node_modules/http-browserify/readme.markdown new file mode 100644 index 0000000..bd5c14b --- /dev/null +++ b/node_modules/http-browserify/readme.markdown @@ -0,0 +1,131 @@ +# http-browserify + +The +[http](http://nodejs.org/docs/v0.4.10/api/all.html#hTTP) module from node.js, +but for browsers. + +When you `require('http')` in +[browserify](http://github.com/substack/node-browserify), +this module will be loaded. + +# example + +``` js +var http = require('http'); + +http.get({ path : '/beep' }, function (res) { + var div = document.getElementById('result'); + div.innerHTML += 'GET /beep
'; + + res.on('data', function (buf) { + div.innerHTML += buf; + }); + + res.on('end', function () { + div.innerHTML += '
__END__'; + }); +}); +``` + +# http methods + +var http = require('http'); + +## var req = http.request(opts, cb) + +where `opts` are: + +* `opts.method='GET'` - http method verb +* `opts.path` - path string, example: `'/foo/bar?baz=555'` +* `opts.headers={}` - as an object mapping key names to string or Array values +* `opts.host=window.location.host` - http host +* `opts.port=window.location.port` - http port +* `opts.responseType` - response type to set on the underlying xhr object + +The callback will be called with the response object. + +## var req = http.get(options, cb) + +A shortcut for + +``` js +options.method = 'GET'; +var req = http.request(options, cb); +req.end(); +``` + +# request methods + +## req.setHeader(key, value) + +Set an http header. + +## req.getHeader(key) + +Get an http header. + +## req.removeHeader(key) + +Remove an http header. + +## req.write(data) + +Write some data to the request body. + +If only 1 piece of data is written, `data` can be a FormData, Blob, or +ArrayBuffer instance. Otherwise, `data` should be a string or a buffer. + +## req.end(data) + +Close and send the request body, optionally with additional `data` to append. + +# response methods + +## res.getHeader(key) + +Return an http header, if set. `key` is case-insensitive. + +# response attributes + +* res.statusCode, the numeric http response code +* res.headers, an object with all lowercase keys + +# compatibility + +This module has been tested and works with: + +* Internet Explorer 5.5, 6, 7, 8, 9 +* Firefox 3.5 +* Chrome 7.0 +* Opera 10.6 +* Safari 5.0 + +Multipart streaming responses are buffered in all versions of Internet Explorer +and are somewhat buffered in Opera. In all the other browsers you get a nice +unbuffered stream of `"data"` events when you send down a content-type of +`multipart/octet-stream` or similar. + +# protip + +You can do: + +````javascript +var bundle = browserify({ + require : { http : 'http-browserify' } +}); +```` + +in order to map "http-browserify" over `require('http')` in your browserified +source. 
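Putting the request and response methods documented above together, here is a short sketch of a POST round trip. It targets the `/plusone` path handled by `example/post/server.js` in this package; apart from that path, everything shown is just the API described above.

```js
var http = require('http');

var req = http.request({ path : '/plusone', method : 'POST' }, function (res) {
    var body = '';

    res.on('data', function (buf) { body += buf });

    res.on('end', function () {
        // statusCode, getHeader and lowercase header keys as documented above
        console.log(res.statusCode, res.getHeader('content-type'), body);
    });
});

req.setHeader('content-type', 'text/plain');
req.write('100');
req.end();
```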
+ +# install + +With [npm](https://npmjs.org) do: + +``` +npm install http-browserify +``` + +# license + +MIT diff --git a/node_modules/http-browserify/test/request_url.js b/node_modules/http-browserify/test/request_url.js new file mode 100644 index 0000000..1f58f2d --- /dev/null +++ b/node_modules/http-browserify/test/request_url.js @@ -0,0 +1,114 @@ +global.window = global; +global.location = { + host: 'localhost:8081', + port: 8081, + protocol: 'http:' +}; + +var noop = function() {}; +global.XMLHttpRequest = function() { + this.open = noop; + this.send = noop; +}; + +global.FormData = function () {}; +global.Blob = function () {}; +global.ArrayBuffer = function () {}; + +var test = require('tape').test; +var http = require('../index.js'); + + +test('Test simple url string', function(t) { + var url = { path: '/api/foo' }; + var request = http.get(url, noop); + + t.equal( request.uri, 'http://localhost:8081/api/foo', 'Url should be correct'); + t.end(); + +}); + + +test('Test full url object', function(t) { + var url = { + host: "localhost:8081", + hostname: "localhost", + href: "http://localhost:8081/api/foo?bar=baz", + method: "GET", + path: "/api/foo?bar=baz", + pathname: "/api/foo", + port: "8081", + protocol: "http:", + query: "bar=baz", + search: "?bar=baz", + slashes: true + }; + + var request = http.get(url, noop); + + t.equal( request.uri, 'http://localhost:8081/api/foo?bar=baz', 'Url should be correct'); + t.end(); + +}); + +test('Test alt protocol', function(t) { + var params = { + protocol: "foo:", + hostname: "localhost", + port: "3000", + path: "/bar" + }; + + var request = http.get(params, noop); + + t.equal( request.uri, 'foo://localhost:3000/bar', 'Url should be correct'); + t.end(); + +}); + +test('Test string as parameters', function(t) { + var url = '/api/foo'; + var request = http.get(url, noop); + + t.equal( request.uri, 'http://localhost:8081/api/foo', 'Url should be correct'); + t.end(); + +}); + +test('Test withCredentials param', function(t) { + var url = '/api/foo'; + + var request = http.request({ url: url, withCredentials: false }, noop); + t.equal( request.xhr.withCredentials, false, 'xhr.withCredentials should be false'); + + var request = http.request({ url: url, withCredentials: true }, noop); + t.equal( request.xhr.withCredentials, true, 'xhr.withCredentials should be true'); + + var request = http.request({ url: url }, noop); + t.equal( request.xhr.withCredentials, true, 'xhr.withCredentials should be true'); + + t.end(); +}); + +test('Test POST XHR2 types', function(t) { + t.plan(3); + var url = '/api/foo'; + + var request = http.request({ url: url, method: 'POST' }, noop); + request.xhr.send = function (data) { + t.ok(data instanceof global.ArrayBuffer, 'data should be instanceof ArrayBuffer'); + }; + request.end(new global.ArrayBuffer()); + + request = http.request({ url: url, method: 'POST' }, noop); + request.xhr.send = function (data) { + t.ok(data instanceof global.Blob, 'data should be instanceof Blob'); + }; + request.end(new global.Blob()); + + request = http.request({ url: url, method: 'POST' }, noop); + request.xhr.send = function (data) { + t.ok(data instanceof global.FormData, 'data should be instanceof FormData'); + }; + request.end(new global.FormData()); +}); diff --git a/node_modules/http-errors/HISTORY.md b/node_modules/http-errors/HISTORY.md new file mode 100644 index 0000000..0dfb7ac --- /dev/null +++ b/node_modules/http-errors/HISTORY.md @@ -0,0 +1,95 @@ +2016-05-18 / 1.5.0 +================== + + * Support new code `421 
Misdirected Request` + * Use `setprototypeof` module to replace `__proto__` setting + * deps: statuses@'>= 1.3.0 < 2' + - Add `421 Misdirected Request` + - perf: enable strict mode + * perf: enable strict mode + +2016-01-28 / 1.4.0 +================== + + * Add `HttpError` export, for `err instanceof createError.HttpError` + * deps: inherits@2.0.1 + * deps: statuses@'>= 1.2.1 < 2' + - Fix message for status 451 + - Remove incorrect nginx status code + +2015-02-02 / 1.3.1 +================== + + * Fix regression where status can be overwritten in `createError` `props` + +2015-02-01 / 1.3.0 +================== + + * Construct errors using defined constructors from `createError` + * Fix error names that are not identifiers + - `createError["I'mateapot"]` is now `createError.ImATeapot` + * Set a meaningful `name` property on constructed errors + +2014-12-09 / 1.2.8 +================== + + * Fix stack trace from exported function + * Remove `arguments.callee` usage + +2014-10-14 / 1.2.7 +================== + + * Remove duplicate line + +2014-10-02 / 1.2.6 +================== + + * Fix `expose` to be `true` for `ClientError` constructor + +2014-09-28 / 1.2.5 +================== + + * deps: statuses@1 + +2014-09-21 / 1.2.4 +================== + + * Fix dependency version to work with old `npm`s + +2014-09-21 / 1.2.3 +================== + + * deps: statuses@~1.1.0 + +2014-09-21 / 1.2.2 +================== + + * Fix publish error + +2014-09-21 / 1.2.1 +================== + + * Support Node.js 0.6 + * Use `inherits` instead of `util` + +2014-09-09 / 1.2.0 +================== + + * Fix the way inheriting functions + * Support `expose` being provided in properties argument + +2014-09-08 / 1.1.0 +================== + + * Default status to 500 + * Support provided `error` to extend + +2014-09-08 / 1.0.1 +================== + + * Fix accepting string message + +2014-09-08 / 1.0.0 +================== + + * Initial release diff --git a/node_modules/http-errors/LICENSE b/node_modules/http-errors/LICENSE new file mode 100644 index 0000000..82af4df --- /dev/null +++ b/node_modules/http-errors/LICENSE @@ -0,0 +1,23 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com +Copyright (c) 2016 Douglas Christopher Wilson doug@somethingdoug.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
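The changelog above mentions the named error constructors (1.3.0) and the exported `HttpError` base class (1.4.0); the README that follows documents them in full. As a quick usage sketch of those two features (the `id` property is a made-up custom field, not part of the library):

```js
var createError = require('http-errors');

// Generic factory: status code, message, and extra properties.
var err = createError(404, 'This video does not exist!', { id: 42 });

// Named constructor ("bumpy case" names such as NotFound), added in 1.3.0.
var notFound = new createError.NotFound();

console.log(err.status, err.statusCode, err.expose);     // 404 404 true
console.log(err.id);                                     // 42
console.log(notFound instanceof createError.HttpError);  // true (export added in 1.4.0)
```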
diff --git a/node_modules/http-errors/README.md b/node_modules/http-errors/README.md new file mode 100644 index 0000000..be80f9b --- /dev/null +++ b/node_modules/http-errors/README.md @@ -0,0 +1,112 @@ +# http-errors + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Create HTTP errors for Express, Koa, Connect, etc. with ease. + +## Example + +```js +var createError = require('http-errors'); + +app.use(function (req, res, next) { + if (!req.user) return next(createError(401, 'Please login to view this page.')); + next(); +}) +``` + +## API + +This is the current API, currently extracted from Koa and subject to change. + +All errors inherit from JavaScript `Error` and the exported `createError.HttpError`. + +### Error Properties + +- `expose` - can be used to signal if `message` should be sent to the client, defaulting to `false` when `status` >= 500 +- `message` +- `status` and `statusCode` - the status code of the error, defaulting to `500` + +### createError([status], [message], [properties]) + +```js +var err = createError(404, 'This video does not exist!'); +``` + +- `status: 500` - the status code as a number +- `message` - the message of the error, defaulting to node's text for that status code. +- `properties` - custom properties to attach to the object + +### new createError\[code || name\](\[msg]\)) + +```js +var err = new createError.NotFound(); +``` + +- `code` - the status code as a number +- `name` - the name of the error as a "bumpy case", i.e. `NotFound` or `InternalServerError`. + +#### List of all constructors + +|Status Code|Constructor Name | +|-----------|-----------------------------| +|400 |BadRequest | +|401 |Unauthorized | +|402 |PaymentRequired | +|403 |Forbidden | +|404 |NotFound | +|405 |MethodNotAllowed | +|406 |NotAcceptable | +|407 |ProxyAuthenticationRequired | +|408 |RequestTimeout | +|409 |Conflict | +|410 |Gone | +|411 |LengthRequired | +|412 |PreconditionFailed | +|413 |PayloadTooLarge | +|414 |URITooLong | +|415 |UnsupportedMediaType | +|416 |RangeNotSatisfiable | +|417 |ExpectationFailed | +|418 |ImATeapot | +|421 |MisdirectedRequest | +|422 |UnprocessableEntity | +|423 |Locked | +|424 |FailedDependency | +|425 |UnorderedCollection | +|426 |UpgradeRequired | +|428 |PreconditionRequired | +|429 |TooManyRequests | +|431 |RequestHeaderFieldsTooLarge | +|451 |UnavailableForLegalReasons | +|500 |InternalServerError | +|501 |NotImplemented | +|502 |BadGateway | +|503 |ServiceUnavailable | +|504 |GatewayTimeout | +|505 |HTTPVersionNotSupported | +|506 |VariantAlsoNegotiates | +|507 |InsufficientStorage | +|508 |LoopDetected | +|509 |BandwidthLimitExceeded | +|510 |NotExtended | +|511 |NetworkAuthenticationRequired| + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/http-errors.svg?style=flat +[npm-url]: https://npmjs.org/package/http-errors +[node-version-image]: https://img.shields.io/node/v/http-errors.svg?style=flat +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/http-errors.svg?style=flat +[travis-url]: https://travis-ci.org/jshttp/http-errors +[coveralls-image]: https://img.shields.io/coveralls/jshttp/http-errors.svg?style=flat +[coveralls-url]: https://coveralls.io/r/jshttp/http-errors +[downloads-image]: https://img.shields.io/npm/dm/http-errors.svg?style=flat 
+[downloads-url]: https://npmjs.org/package/http-errors diff --git a/node_modules/http-errors/index.js b/node_modules/http-errors/index.js new file mode 100644 index 0000000..6130db8 --- /dev/null +++ b/node_modules/http-errors/index.js @@ -0,0 +1,223 @@ +/*! + * http-errors + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var setPrototypeOf = require('setprototypeof') +var statuses = require('statuses') +var inherits = require('inherits') + +/** + * Module exports. + * @public + */ + +module.exports = createError +module.exports.HttpError = createHttpErrorConstructor() + +// Populate exports for all constructors +populateConstructorExports(module.exports, statuses.codes, module.exports.HttpError) + +/** + * Create a new HTTP Error. + * + * @returns {Error} + * @public + */ + +function createError () { + // so much arity going on ~_~ + var err + var msg + var status = 500 + var props = {} + for (var i = 0; i < arguments.length; i++) { + var arg = arguments[i] + if (arg instanceof Error) { + err = arg + status = err.status || err.statusCode || status + continue + } + switch (typeof arg) { + case 'string': + msg = arg + break + case 'number': + status = arg + break + case 'object': + props = arg + break + } + } + + if (typeof status !== 'number' || !statuses[status]) { + status = 500 + } + + // constructor + var HttpError = createError[status] + + if (!err) { + // create error + err = HttpError + ? new HttpError(msg) + : new Error(msg || statuses[status]) + Error.captureStackTrace(err, createError) + } + + if (!HttpError || !(err instanceof HttpError)) { + // add properties to generic error + err.expose = status < 500 + err.status = err.statusCode = status + } + + for (var key in props) { + if (key !== 'status' && key !== 'statusCode') { + err[key] = props[key] + } + } + + return err +} + +/** + * Create HTTP error abstract base class. + * @private + */ + +function createHttpErrorConstructor () { + function HttpError () { + throw new TypeError('cannot construct abstract class') + } + + inherits(HttpError, Error) + + return HttpError +} + +/** + * Create a constructor for a client error. + * @private + */ + +function createClientErrorConstructor (HttpError, name, code) { + var className = name.match(/Error$/) ? name : name + 'Error' + + function ClientError (message) { + // create the error object + var err = new Error(message != null ? message : statuses[code]) + + // capture a stack trace to the construction point + Error.captureStackTrace(err, ClientError) + + // adjust the [[Prototype]] + setPrototypeOf(err, ClientError.prototype) + + // redefine the error name + Object.defineProperty(err, 'name', { + enumerable: false, + configurable: true, + value: className, + writable: true + }) + + return err + } + + inherits(ClientError, HttpError) + + ClientError.prototype.status = code + ClientError.prototype.statusCode = code + ClientError.prototype.expose = true + + return ClientError +} + +/** + * Create a constructor for a server error. + * @private + */ + +function createServerErrorConstructor (HttpError, name, code) { + var className = name.match(/Error$/) ? name : name + 'Error' + + function ServerError (message) { + // create the error object + var err = new Error(message != null ? 
message : statuses[code]) + + // capture a stack trace to the construction point + Error.captureStackTrace(err, ServerError) + + // adjust the [[Prototype]] + setPrototypeOf(err, ServerError.prototype) + + // redefine the error name + Object.defineProperty(err, 'name', { + enumerable: false, + configurable: true, + value: className, + writable: true + }) + + return err + } + + inherits(ServerError, HttpError) + + ServerError.prototype.status = code + ServerError.prototype.statusCode = code + ServerError.prototype.expose = false + + return ServerError +} + +/** + * Populate the exports object with constructors for every error class. + * @private + */ + +function populateConstructorExports (exports, codes, HttpError) { + codes.forEach(function forEachCode (code) { + var CodeError + var name = toIdentifier(statuses[code]) + + switch (String(code).charAt(0)) { + case '4': + CodeError = createClientErrorConstructor(HttpError, name, code) + break + case '5': + CodeError = createServerErrorConstructor(HttpError, name, code) + break + } + + if (CodeError) { + // export the constructor + exports[code] = CodeError + exports[name] = CodeError + } + }) + + // backwards-compatibility + exports["I'mateapot"] = exports.ImATeapot +} + +/** + * Convert a string of words to a JavaScript identifier. + * @private + */ + +function toIdentifier (str) { + return str.split(' ').map(function (token) { + return token.slice(0, 1).toUpperCase() + token.slice(1) + }).join('').replace(/[^ _0-9a-z]/gi, '') +} diff --git a/node_modules/http-errors/node_modules/inherits/LICENSE b/node_modules/http-errors/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/http-errors/node_modules/inherits/README.md b/node_modules/http-errors/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. 
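A brief usage sketch of the pattern this readme describes; the `Dog`/`EventEmitter` names are illustrative and not part of the package, and usage is otherwise identical to `util.inherits` (compare the bundled `test.js` further below).

```js
var inherits = require('inherits');
var EventEmitter = require('events').EventEmitter;

function Dog() {
    EventEmitter.call(this);      // run the superclass constructor
}
inherits(Dog, EventEmitter);      // wires up the prototype chain and sets Dog.super_

Dog.prototype.bark = function () { this.emit('bark'); };

var dog = new Dog();
dog.on('bark', function () { console.log('woof'); });
dog.bark();                                  // "woof"
console.log(Dog.super_ === EventEmitter);    // true
console.log(dog instanceof EventEmitter);    // true
```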
+ +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/http-errors/node_modules/inherits/inherits.js b/node_modules/http-errors/node_modules/inherits/inherits.js new file mode 100644 index 0000000..29f5e24 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/inherits.js @@ -0,0 +1 @@ +module.exports = require('util').inherits diff --git a/node_modules/http-errors/node_modules/inherits/inherits_browser.js b/node_modules/http-errors/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..c1e78a7 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/inherits_browser.js @@ -0,0 +1,23 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} diff --git a/node_modules/http-errors/node_modules/inherits/package.json b/node_modules/http-errors/node_modules/inherits/package.json new file mode 100644 index 0000000..0e36965 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/package.json @@ -0,0 +1,22 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.1", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "node test" + } +} diff --git a/node_modules/http-errors/node_modules/inherits/test.js b/node_modules/http-errors/node_modules/inherits/test.js new file mode 100644 index 0000000..fc53012 --- /dev/null +++ b/node_modules/http-errors/node_modules/inherits/test.js @@ -0,0 +1,25 @@ +var inherits = require('./inherits.js') +var assert = require('assert') + +function test(c) { + assert(c.constructor === Child) + assert(c.constructor.super_ === Parent) + assert(Object.getPrototypeOf(c) === Child.prototype) + 
assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) + assert(c instanceof Child) + assert(c instanceof Parent) +} + +function Child() { + Parent.call(this) + test(this) +} + +function Parent() {} + +inherits(Child, Parent) + +var c = new Child +test(c) + +console.log('ok') diff --git a/node_modules/http-errors/package.json b/node_modules/http-errors/package.json new file mode 100644 index 0000000..bff321c --- /dev/null +++ b/node_modules/http-errors/package.json @@ -0,0 +1,44 @@ +{ + "name": "http-errors", + "description": "Create HTTP error objects", + "version": "1.5.0", + "author": "Jonathan Ong (http://jongleberry.com)", + "contributors": [ + "Alan Plum ", + "Douglas Christopher Wilson " + ], + "license": "MIT", + "repository": "jshttp/http-errors", + "dependencies": { + "inherits": "2.0.1", + "setprototypeof": "1.0.1", + "statuses": ">= 1.3.0 < 2" + }, + "devDependencies": { + "eslint": "2.10.2", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.1.0", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "1.21.5" + }, + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --reporter spec --bail", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot" + }, + "keywords": [ + "http", + "error" + ], + "files": [ + "index.js", + "HISTORY.md", + "LICENSE", + "README.md" + ] +} diff --git a/node_modules/http-signature/.dir-locals.el b/node_modules/http-signature/.dir-locals.el new file mode 100644 index 0000000..3bc9235 --- /dev/null +++ b/node_modules/http-signature/.dir-locals.el @@ -0,0 +1,6 @@ +((nil . ((indent-tabs-mode . nil) + (tab-width . 8) + (fill-column . 80))) + (js-mode . ((js-indent-level . 2) + (indent-tabs-mode . nil) + ))) \ No newline at end of file diff --git a/node_modules/http-signature/.npmignore b/node_modules/http-signature/.npmignore new file mode 100644 index 0000000..c143fb3 --- /dev/null +++ b/node_modules/http-signature/.npmignore @@ -0,0 +1,7 @@ +.gitmodules +deps +docs +Makefile +node_modules +test +tools \ No newline at end of file diff --git a/node_modules/http-signature/CHANGES.md b/node_modules/http-signature/CHANGES.md new file mode 100644 index 0000000..6f69444 --- /dev/null +++ b/node_modules/http-signature/CHANGES.md @@ -0,0 +1,46 @@ +# node-http-signature changelog + +## 1.1.1 + +- Version of dependency `assert-plus` updated: old version was missing + some license information +- Corrected examples in `http_signing.md`, added auto-tests to + automatically validate these examples + +## 1.1.0 + +- Bump version of `sshpk` dependency, remove peerDependency on it since + it now supports exchanging objects between multiple versions of itself + where possible + +## 1.0.2 + +- Bump min version of `jsprim` dependency, to include fixes for using + http-signature with `browserify` + +## 1.0.1 + +- Bump minimum version of `sshpk` dependency, to include fixes for + whitespace tolerance in key parsing. + +## 1.0.0 + +- First semver release. +- #36: Ensure verifySignature does not leak useful timing information +- #42: Bring the library up to the latest version of the spec (including the + request-target changes) +- Support for ECDSA keys and signatures. +- Now uses `sshpk` for key parsing, validation and conversion. 
+- Fixes for #21, #47, #39 and compatibility with node 0.8 + +## 0.11.0 + +- Split up HMAC and Signature verification to avoid vulnerabilities where a + key intended for use with one can be validated against the other method + instead. + +## 0.10.2 + +- Updated versions of most dependencies. +- Utility functions exported for PEM => SSH-RSA conversion. +- Improvements to tests and examples. diff --git a/node_modules/http-signature/LICENSE b/node_modules/http-signature/LICENSE new file mode 100644 index 0000000..f6d947d --- /dev/null +++ b/node_modules/http-signature/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/http-signature/README.md b/node_modules/http-signature/README.md new file mode 100644 index 0000000..de487d3 --- /dev/null +++ b/node_modules/http-signature/README.md @@ -0,0 +1,79 @@ +# node-http-signature + +node-http-signature is a node.js library that has client and server components +for Joyent's [HTTP Signature Scheme](http_signing.md). + +## Usage + +Note the example below signs a request with the same key/cert used to start an +HTTP server. This is almost certainly not what you actually want, but is just +used to illustrate the API calls; you will need to provide your own key +management in addition to this library. + +### Client + +```js +var fs = require('fs'); +var https = require('https'); +var httpSignature = require('http-signature'); + +var key = fs.readFileSync('./key.pem', 'ascii'); + +var options = { + host: 'localhost', + port: 8443, + path: '/', + method: 'GET', + headers: {} +}; + +// Adds a 'Date' header in, signs it, and adds the +// 'Authorization' header in. +var req = https.request(options, function(res) { + console.log(res.statusCode); +}); + + +httpSignature.sign(req, { + key: key, + keyId: './cert.pem' +}); + +req.end(); +``` + +### Server + +```js +var fs = require('fs'); +var https = require('https'); +var httpSignature = require('http-signature'); + +var options = { + key: fs.readFileSync('./key.pem'), + cert: fs.readFileSync('./cert.pem') +}; + +https.createServer(options, function (req, res) { + var rc = 200; + var parsed = httpSignature.parseRequest(req); + var pub = fs.readFileSync(parsed.keyId, 'ascii'); + if (!httpSignature.verifySignature(parsed, pub)) + rc = 401; + + res.writeHead(rc); + res.end(); +}).listen(8443); +``` + +## Installation + + npm install http-signature + +## License + +MIT. + +## Bugs + +See . 
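
The README above only demonstrates RSA keys. The same API also covers shared-secret (HMAC) signing; below is a minimal sketch, assuming a secret exchanged out of band and the `hmac-sha256` algorithm (the keyId, port, and secret value are illustrative, not part of the library):

```js
var http = require('http');
var httpSignature = require('http-signature');

var secret = 'shared-secret'; // assumed to be distributed out of band

// Client: sign with an HMAC secret instead of a private key.
// `algorithm` must be given explicitly when the key is an HMAC secret.
var req = http.request({ host: 'localhost', port: 8080, path: '/', headers: {} });
httpSignature.sign(req, {
  key: secret,
  keyId: 'hmac-key-1',
  algorithm: 'hmac-sha256'
});
req.end();

// Server: parse as usual, but verify with verifyHMAC rather than verifySignature.
http.createServer(function (req, res) {
  var parsed = httpSignature.parseRequest(req);
  res.writeHead(httpSignature.verifyHMAC(parsed, secret) ? 200 : 401);
  res.end();
}).listen(8080);
```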
diff --git a/node_modules/http-signature/http_signing.md b/node_modules/http-signature/http_signing.md new file mode 100644 index 0000000..4f24d28 --- /dev/null +++ b/node_modules/http-signature/http_signing.md @@ -0,0 +1,363 @@ +# Abstract + +This document describes a way to add origin authentication, message integrity, +and replay resistance to HTTP REST requests. It is intended to be used over +the HTTPS protocol. + +# Copyright Notice + +Copyright (c) 2011 Joyent, Inc. and the persons identified as document authors. +All rights reserved. + +Code Components extracted from this document must include MIT License text. + +# Introduction + +This protocol is intended to provide a standard way for clients to sign HTTP +requests. RFC2617 (HTTP Authentication) defines Basic and Digest authentication +mechanisms, and RFC5246 (TLS 1.2) defines client-auth, both of which are widely +employed on the Internet today. However, it is common place that the burdens of +PKI prevent web service operators from deploying that methodology, and so many +fall back to Basic authentication, which has poor security characteristics. + +Additionally, OAuth provides a fully-specified alternative for authorization +of web service requests, but is not (always) ideal for machine to machine +communication, as the key acquisition steps (generally) imply a fixed +infrastructure that may not make sense to a service provider (e.g., symmetric +keys). + +Several web service providers have invented their own schemes for signing +HTTP requests, but to date, none have been placed in the public domain as a +standard. This document serves that purpose. There are no techniques in this +proposal that are novel beyond previous art, however, this aims to be a simple +mechanism for signing these requests. + +# Signature Authentication Scheme + +The "signature" authentication scheme is based on the model that the client must +authenticate itself with a digital signature produced by either a private +asymmetric key (e.g., RSA) or a shared symmetric key (e.g., HMAC). The scheme +is parameterized enough such that it is not bound to any particular key type or +signing algorithm. However, it does explicitly assume that clients can send an +HTTP `Date` header. + +## Authorization Header + +The client is expected to send an Authorization header (as defined in RFC 2617) +with the following parameterization: + + credentials := "Signature" params + params := 1#(keyId | algorithm | [headers] | [ext] | signature) + digitalSignature := plain-string + + keyId := "keyId" "=" <"> plain-string <"> + algorithm := "algorithm" "=" <"> plain-string <"> + headers := "headers" "=" <"> 1#headers-value <"> + ext := "ext" "=" <"> plain-string <"> + signature := "signature" "=" <"> plain-string <"> + + headers-value := plain-string + plain-string = 1*( %x20-21 / %x23-5B / %x5D-7E ) + +### Signature Parameters + +#### keyId + +REQUIRED. The `keyId` field is an opaque string that the server can use to look +up the component they need to validate the signature. It could be an SSH key +fingerprint, an LDAP DN, etc. Management of keys and assignment of `keyId` is +out of scope for this document. + +#### algorithm + +REQUIRED. The `algorithm` parameter is used if the client and server agree on a +non-standard digital signature algorithm. The full list of supported signature +mechanisms is listed below. + +#### headers + +OPTIONAL. The `headers` parameter is used to specify the list of HTTP headers +used to sign the request. 
If specified, it should be a quoted list of HTTP +header names, separated by a single space character. By default, only one +HTTP header is signed, which is the `Date` header. Note that the list MUST be +specified in the order the values are concatenated together during signing. To +include the HTTP request line in the signature calculation, use the special +`request-line` value. While this is overloading the definition of `headers` in +HTTP linguism, the request-line is defined in RFC 2616, and as the outlier from +headers in useful signature calculation, it is deemed simpler to simply use +`request-line` than to add a separate parameter for it. + +#### extensions + +OPTIONAL. The `extensions` parameter is used to include additional information +which is covered by the request. The content and format of the string is out of +scope for this document, and expected to be specified by implementors. + +#### signature + +REQUIRED. The `signature` parameter is a `Base64` encoded digital signature +generated by the client. The client uses the `algorithm` and `headers` request +parameters to form a canonicalized `signing string`. This `signing string` is +then signed with the key associated with `keyId` and the algorithm +corresponding to `algorithm`. The `signature` parameter is then set to the +`Base64` encoding of the signature. + +### Signing String Composition + +In order to generate the string that is signed with a key, the client MUST take +the values of each HTTP header specified by `headers` in the order they appear. + +1. If the header name is not `request-line` then append the lowercased header + name followed with an ASCII colon `:` and an ASCII space ` `. +2. If the header name is `request-line` then append the HTTP request line, + otherwise append the header value. +3. If value is not the last value then append an ASCII newline `\n`. The string + MUST NOT include a trailing ASCII newline. + +# Example Requests + +All requests refer to the following request (body omitted): + + POST /foo HTTP/1.1 + Host: example.org + Date: Tue, 07 Jun 2014 20:51:35 GMT + Content-Type: application/json + Digest: SHA-256=X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE= + Content-Length: 18 + +The "rsa-key-1" keyId refers to a private key known to the client and a public +key known to the server. The "hmac-key-1" keyId refers to key known to the +client and server. 
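
Before walking through the parameterizations, here is how the composition rules above translate into code; a minimal sketch in which the helper name and argument shapes are illustrative and not part of the scheme:

```js
// Build the signing string from an ordered list of header names,
// following the composition rules in the section above.
function buildSigningString(headerNames, requestLine, headers) {
  return headerNames.map(function (name) {
    var lower = name.toLowerCase();
    return lower === 'request-line'
      ? requestLine                      // rule 2: append the request line itself
      : lower + ': ' + headers[lower];   // rules 1-2: lowercased name, colon, space, value
  }).join('\n');                         // rule 3: newline between values, no trailing newline
}

// Default parameterization for the example request above:
buildSigningString(['Date'], 'POST /foo HTTP/1.1', {
  date: 'Tue, 07 Jun 2014 20:51:35 GMT'
});
// => 'date: Tue, 07 Jun 2014 20:51:35 GMT'
```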
+ +## Default parameterization + +The authorization header and signature would be generated as: + + Authorization: Signature keyId="rsa-key-1",algorithm="rsa-sha256",signature="Base64(RSA-SHA256(signing string))" + +The client would compose the signing string as: + + date: Tue, 07 Jun 2014 20:51:35 GMT + +## Header List + +The authorization header and signature would be generated as: + + Authorization: Signature keyId="rsa-key-1",algorithm="rsa-sha256",headers="(request-target) date content-type digest",signature="Base64(RSA-SHA256(signing string))" + +The client would compose the signing string as (`+ "\n"` inserted for +readability): + + (request-target) post /foo + "\n" + date: Tue, 07 Jun 2011 20:51:35 GMT + "\n" + content-type: application/json + "\n" + digest: SHA-256=Base64(SHA256(Body)) + +## Algorithm + +The authorization header and signature would be generated as: + + Authorization: Signature keyId="hmac-key-1",algorithm="hmac-sha1",signature="Base64(HMAC-SHA1(signing string))" + +The client would compose the signing string as: + + date: Tue, 07 Jun 2011 20:51:35 GMT + +# Signing Algorithms + +Currently supported algorithm names are: + +* rsa-sha1 +* rsa-sha256 +* rsa-sha512 +* dsa-sha1 +* hmac-sha1 +* hmac-sha256 +* hmac-sha512 + +# Security Considerations + +## Default Parameters + +Note the default parameterization of the `Signature` scheme is only safe if all +requests are carried over a secure transport (i.e., TLS). Sending the default +scheme over a non-secure transport will leave the request vulnerable to +spoofing, tampering, replay/repudiation, and integrity violations (if using the +STRIDE threat-modeling methodology). + +## Insecure Transports + +If sending the request over plain HTTP, service providers SHOULD require clients +to sign ALL HTTP headers, and the `request-line`. Additionally, service +providers SHOULD require `Content-MD5` calculations to be performed to ensure +against any tampering from clients. + +## Nonces + +Nonces are out of scope for this document simply because many service providers +fail to implement them correctly, or do not adopt security specifications +because of the infrastructure complexity. Given the `header` parameterization, +a service provider is fully enabled to add nonce semantics into this scheme by +using something like an `x-request-nonce` header, and ensuring it is signed +with the `Date` header. + +## Clock Skew + +As the default scheme is to sign the `Date` header, service providers SHOULD +protect against logged replay attacks by enforcing a clock skew. The server +SHOULD be synchronized with NTP, and the recommendation in this specification +is to allow 300s of clock skew (in either direction). + +## Required Headers to Sign + +It is out of scope for this document to dictate what headers a service provider +will want to enforce, but service providers SHOULD at minimum include the +`Date` header. + +# References + +## Normative References + +* [RFC2616] Hypertext Transfer Protocol -- HTTP/1.1 +* [RFC2617] HTTP Authentication: Basic and Digest Access Authentication +* [RFC5246] The Transport Layer Security (TLS) Protocol Version 1.2 + +## Informative References + + Name: Mark Cavage (editor) + Company: Joyent, Inc. 
+ Email: mark.cavage@joyent.com + URI: http://www.joyent.com + +# Appendix A - Test Values + +The following test data uses the RSA (1024b) keys, which we will refer +to as `keyId=Test` in the following samples: + + -----BEGIN PUBLIC KEY----- + MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDCFENGw33yGihy92pDjZQhl0C3 + 6rPJj+CvfSC8+q28hxA161QFNUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6 + Z4UMR7EOcpfdUE9Hf3m/hs+FUR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJw + oYi+1hqp1fIekaxsyQIDAQAB + -----END PUBLIC KEY----- + + -----BEGIN RSA PRIVATE KEY----- + MIICXgIBAAKBgQDCFENGw33yGihy92pDjZQhl0C36rPJj+CvfSC8+q28hxA161QF + NUd13wuCTUcq0Qd2qsBe/2hFyc2DCJJg0h1L78+6Z4UMR7EOcpfdUE9Hf3m/hs+F + UR45uBJeDK1HSFHD8bHKD6kv8FPGfJTotc+2xjJwoYi+1hqp1fIekaxsyQIDAQAB + AoGBAJR8ZkCUvx5kzv+utdl7T5MnordT1TvoXXJGXK7ZZ+UuvMNUCdN2QPc4sBiA + QWvLw1cSKt5DsKZ8UETpYPy8pPYnnDEz2dDYiaew9+xEpubyeW2oH4Zx71wqBtOK + kqwrXa/pzdpiucRRjk6vE6YY7EBBs/g7uanVpGibOVAEsqH1AkEA7DkjVH28WDUg + f1nqvfn2Kj6CT7nIcE3jGJsZZ7zlZmBmHFDONMLUrXR/Zm3pR5m0tCmBqa5RK95u + 412jt1dPIwJBANJT3v8pnkth48bQo/fKel6uEYyboRtA5/uHuHkZ6FQF7OUkGogc + mSJluOdc5t6hI1VsLn0QZEjQZMEOWr+wKSMCQQCC4kXJEsHAve77oP6HtG/IiEn7 + kpyUXRNvFsDE0czpJJBvL/aRFUJxuRK91jhjC68sA7NsKMGg5OXb5I5Jj36xAkEA + gIT7aFOYBFwGgQAQkWNKLvySgKbAZRTeLBacpHMuQdl1DfdntvAyqpAZ0lY0RKmW + G6aFKaqQfOXKCyWoUiVknQJAXrlgySFci/2ueKlIE1QqIiLSZ8V8OlpFLRnb1pzI + 7U1yQXnTAEFYM560yJlzUpOb1V4cScGd365tiSMvxLOvTA== + -----END RSA PRIVATE KEY----- + +And all examples use this request: + + + + POST /foo?param=value&pet=dog HTTP/1.1 + Host: example.com + Date: Thu, 05 Jan 2014 21:31:40 GMT + Content-Type: application/json + Digest: SHA-256=X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE= + Content-Length: 18 + + {"hello": "world"} + + + +### Default + +The string to sign would be: + + + + + date: Thu, 05 Jan 2014 21:31:40 GMT + + + +The Authorization header would be: + + + + Authorization: Signature keyId="Test",algorithm="rsa-sha256",headers="date",signature="jKyvPcxB4JbmYY4mByyBY7cZfNl4OW9HpFQlG7N4YcJPteKTu4MWCLyk+gIr0wDgqtLWf9NLpMAMimdfsH7FSWGfbMFSrsVTHNTk0rK3usrfFnti1dxsM4jl0kYJCKTGI/UWkqiaxwNiKqGcdlEDrTcUhhsFsOIo8VhddmZTZ8w=" + + + +### All Headers + +Parameterized to include all headers, the string to sign would be (`+ "\n"` +inserted for readability): + + + + + (request-target): post /foo?param=value&pet=dog + host: example.com + date: Thu, 05 Jan 2014 21:31:40 GMT + content-type: application/json + digest: SHA-256=X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE= + content-length: 18 + + + +The Authorization header would be: + + + + Authorization: Signature keyId="Test",algorithm="rsa-sha256",headers="(request-target) host date content-type digest content-length",signature="Ef7MlxLXoBovhil3AlyjtBwAL9g4TN3tibLj7uuNB3CROat/9KaeQ4hW2NiJ+pZ6HQEOx9vYZAyi+7cmIkmJszJCut5kQLAwuX+Ms/mUFvpKlSo9StS2bMXDBNjOh4Auj774GFj4gwjS+3NhFeoqyr/MuN6HsEnkvn6zdgfE2i0=" + + + +## Generating and verifying signatures using `openssl` + +The `openssl` commandline tool can be used to generate or verify the signatures listed above. + +Compose the signing string as usual, and pipe it into the the `openssl dgst` command, then into `openssl enc -base64`, as follows: + + $ printf 'date: Thu, 05 Jan 2014 21:31:40 GMT' | \ + openssl dgst -binary -sign /path/to/private.pem -sha256 | \ + openssl enc -base64 + jKyvPcxB4JbmYY4mByyBY7cZfNl4OW9Hp... + $ + +The `-sha256` option is necessary to produce an `rsa-sha256` signature. You can select other hash algorithms such as `sha1` by changing this argument. 
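
For comparison, the same `rsa-sha256` signature can be produced in Node with the built-in `crypto` module; a sketch under the assumption that `/path/to/private.pem` holds the test private key from this appendix:

```js
var crypto = require('crypto');
var fs = require('fs');

var signingString = 'date: Thu, 05 Jan 2014 21:31:40 GMT';

var signer = crypto.createSign('RSA-SHA256');
signer.update(signingString);
var signature = signer.sign(fs.readFileSync('/path/to/private.pem', 'ascii'), 'base64');
// `signature` should match the base64 output of the `openssl dgst` pipeline above.
```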
+ +To verify a signature, first save the signature data, Base64-decoded, into a file, then use `openssl dgst` again with the `-verify` option: + + $ echo 'jKyvPcxB4JbmYY4mByy...' | openssl enc -A -d -base64 > signature + $ printf 'date: Thu, 05 Jan 2014 21:31:40 GMT' | \ + openssl dgst -sha256 -verify /path/to/public.pem -signature ./signature + Verified OK + $ + +## Generating and verifying signatures using `sshpk-sign` + +You can also generate and check signatures using the `sshpk-sign` tool which is +included with the `sshpk` package in `npm`. + +Compose the signing string as above, and pipe it into `sshpk-sign` as follows: + + $ printf 'date: Thu, 05 Jan 2014 21:31:40 GMT' | \ + sshpk-sign -i /path/to/private.pem + jKyvPcxB4JbmYY4mByyBY7cZfNl4OW9Hp... + $ + +This will produce an `rsa-sha256` signature by default, as you can see using +the `-v` option: + + sshpk-sign: using rsa-sha256 with a 1024 bit key + +You can also use `sshpk-verify` in a similar manner: + + $ printf 'date: Thu, 05 Jan 2014 21:31:40 GMT' | \ + sshpk-verify -i ./public.pem -s 'jKyvPcxB4JbmYY...' + OK + $ diff --git a/node_modules/http-signature/lib/index.js b/node_modules/http-signature/lib/index.js new file mode 100644 index 0000000..54d4603 --- /dev/null +++ b/node_modules/http-signature/lib/index.js @@ -0,0 +1,29 @@ +// Copyright 2015 Joyent, Inc. + +var parser = require('./parser'); +var signer = require('./signer'); +var verify = require('./verify'); +var utils = require('./utils'); + + + +///--- API + +module.exports = { + + parse: parser.parseRequest, + parseRequest: parser.parseRequest, + + sign: signer.signRequest, + signRequest: signer.signRequest, + createSigner: signer.createSigner, + isSigner: signer.isSigner, + + sshKeyToPEM: utils.sshKeyToPEM, + sshKeyFingerprint: utils.fingerprint, + pemToRsaSSHKey: utils.pemToRsaSSHKey, + + verify: verify.verifySignature, + verifySignature: verify.verifySignature, + verifyHMAC: verify.verifyHMAC +}; diff --git a/node_modules/http-signature/lib/parser.js b/node_modules/http-signature/lib/parser.js new file mode 100644 index 0000000..7c841b3 --- /dev/null +++ b/node_modules/http-signature/lib/parser.js @@ -0,0 +1,318 @@ +// Copyright 2012 Joyent, Inc. All rights reserved. 
+ +var assert = require('assert-plus'); +var util = require('util'); +var utils = require('./utils'); + + + +///--- Globals + +var HASH_ALGOS = utils.HASH_ALGOS; +var PK_ALGOS = utils.PK_ALGOS; +var HttpSignatureError = utils.HttpSignatureError; +var InvalidAlgorithmError = utils.InvalidAlgorithmError; +var validateAlgorithm = utils.validateAlgorithm; + +var State = { + New: 0, + Params: 1 +}; + +var ParamsState = { + Name: 0, + Quote: 1, + Value: 2, + Comma: 3 +}; + + +///--- Specific Errors + + +function ExpiredRequestError(message) { + HttpSignatureError.call(this, message, ExpiredRequestError); +} +util.inherits(ExpiredRequestError, HttpSignatureError); + + +function InvalidHeaderError(message) { + HttpSignatureError.call(this, message, InvalidHeaderError); +} +util.inherits(InvalidHeaderError, HttpSignatureError); + + +function InvalidParamsError(message) { + HttpSignatureError.call(this, message, InvalidParamsError); +} +util.inherits(InvalidParamsError, HttpSignatureError); + + +function MissingHeaderError(message) { + HttpSignatureError.call(this, message, MissingHeaderError); +} +util.inherits(MissingHeaderError, HttpSignatureError); + +function StrictParsingError(message) { + HttpSignatureError.call(this, message, StrictParsingError); +} +util.inherits(StrictParsingError, HttpSignatureError); + +///--- Exported API + +module.exports = { + + /** + * Parses the 'Authorization' header out of an http.ServerRequest object. + * + * Note that this API will fully validate the Authorization header, and throw + * on any error. It will not however check the signature, or the keyId format + * as those are specific to your environment. You can use the options object + * to pass in extra constraints. + * + * As a response object you can expect this: + * + * { + * "scheme": "Signature", + * "params": { + * "keyId": "foo", + * "algorithm": "rsa-sha256", + * "headers": [ + * "date" or "x-date", + * "digest" + * ], + * "signature": "base64" + * }, + * "signingString": "ready to be passed to crypto.verify()" + * } + * + * @param {Object} request an http.ServerRequest. + * @param {Object} options an optional options object with: + * - clockSkew: allowed clock skew in seconds (default 300). + * - headers: required header names (def: date or x-date) + * - algorithms: algorithms to support (default: all). + * - strict: should enforce latest spec parsing + * (default: false). + * @return {Object} parsed out object (see above). + * @throws {TypeError} on invalid input. + * @throws {InvalidHeaderError} on an invalid Authorization header error. + * @throws {InvalidParamsError} if the params in the scheme are invalid. + * @throws {MissingHeaderError} if the params indicate a header not present, + * either in the request headers from the params, + * or not in the params from a required header + * in options. + * @throws {StrictParsingError} if old attributes are used in strict parsing + * mode. + * @throws {ExpiredRequestError} if the value of date or x-date exceeds skew. + */ + parseRequest: function parseRequest(request, options) { + assert.object(request, 'request'); + assert.object(request.headers, 'request.headers'); + if (options === undefined) { + options = {}; + } + if (options.headers === undefined) { + options.headers = [request.headers['x-date'] ? 
'x-date' : 'date']; + } + assert.object(options, 'options'); + assert.arrayOfString(options.headers, 'options.headers'); + assert.optionalNumber(options.clockSkew, 'options.clockSkew'); + + if (!request.headers.authorization) + throw new MissingHeaderError('no authorization header present in ' + + 'the request'); + + options.clockSkew = options.clockSkew || 300; + + + var i = 0; + var state = State.New; + var substate = ParamsState.Name; + var tmpName = ''; + var tmpValue = ''; + + var parsed = { + scheme: '', + params: {}, + signingString: '', + + get algorithm() { + return this.params.algorithm.toUpperCase(); + }, + + get keyId() { + return this.params.keyId; + } + }; + + var authz = request.headers.authorization; + for (i = 0; i < authz.length; i++) { + var c = authz.charAt(i); + + switch (Number(state)) { + + case State.New: + if (c !== ' ') parsed.scheme += c; + else state = State.Params; + break; + + case State.Params: + switch (Number(substate)) { + + case ParamsState.Name: + var code = c.charCodeAt(0); + // restricted name of A-Z / a-z + if ((code >= 0x41 && code <= 0x5a) || // A-Z + (code >= 0x61 && code <= 0x7a)) { // a-z + tmpName += c; + } else if (c === '=') { + if (tmpName.length === 0) + throw new InvalidHeaderError('bad param format'); + substate = ParamsState.Quote; + } else { + throw new InvalidHeaderError('bad param format'); + } + break; + + case ParamsState.Quote: + if (c === '"') { + tmpValue = ''; + substate = ParamsState.Value; + } else { + throw new InvalidHeaderError('bad param format'); + } + break; + + case ParamsState.Value: + if (c === '"') { + parsed.params[tmpName] = tmpValue; + substate = ParamsState.Comma; + } else { + tmpValue += c; + } + break; + + case ParamsState.Comma: + if (c === ',') { + tmpName = ''; + substate = ParamsState.Name; + } else { + throw new InvalidHeaderError('bad param format'); + } + break; + + default: + throw new Error('Invalid substate'); + } + break; + + default: + throw new Error('Invalid substate'); + } + + } + + if (!parsed.params.headers || parsed.params.headers === '') { + if (request.headers['x-date']) { + parsed.params.headers = ['x-date']; + } else { + parsed.params.headers = ['date']; + } + } else { + parsed.params.headers = parsed.params.headers.split(' '); + } + + // Minimally validate the parsed object + if (!parsed.scheme || parsed.scheme !== 'Signature') + throw new InvalidHeaderError('scheme was not "Signature"'); + + if (!parsed.params.keyId) + throw new InvalidHeaderError('keyId was not specified'); + + if (!parsed.params.algorithm) + throw new InvalidHeaderError('algorithm was not specified'); + + if (!parsed.params.signature) + throw new InvalidHeaderError('signature was not specified'); + + // Check the algorithm against the official list + parsed.params.algorithm = parsed.params.algorithm.toLowerCase(); + try { + validateAlgorithm(parsed.params.algorithm); + } catch (e) { + if (e instanceof InvalidAlgorithmError) + throw (new InvalidParamsError(parsed.params.algorithm + ' is not ' + + 'supported')); + else + throw (e); + } + + // Build the signingString + for (i = 0; i < parsed.params.headers.length; i++) { + var h = parsed.params.headers[i].toLowerCase(); + parsed.params.headers[i] = h; + + if (h === 'request-line') { + if (!options.strict) { + /* + * We allow headers from the older spec drafts if strict parsing isn't + * specified in options. + */ + parsed.signingString += + request.method + ' ' + request.url + ' HTTP/' + request.httpVersion; + } else { + /* Strict parsing doesn't allow older draft headers. 
*/ + throw (new StrictParsingError('request-line is not a valid header ' + + 'with strict parsing enabled.')); + } + } else if (h === '(request-target)') { + parsed.signingString += + '(request-target): ' + request.method.toLowerCase() + ' ' + + request.url; + } else { + var value = request.headers[h]; + if (value === undefined) + throw new MissingHeaderError(h + ' was not in the request'); + parsed.signingString += h + ': ' + value; + } + + if ((i + 1) < parsed.params.headers.length) + parsed.signingString += '\n'; + } + + // Check against the constraints + var date; + if (request.headers.date || request.headers['x-date']) { + if (request.headers['x-date']) { + date = new Date(request.headers['x-date']); + } else { + date = new Date(request.headers.date); + } + var now = new Date(); + var skew = Math.abs(now.getTime() - date.getTime()); + + if (skew > options.clockSkew * 1000) { + throw new ExpiredRequestError('clock skew of ' + + (skew / 1000) + + 's was greater than ' + + options.clockSkew + 's'); + } + } + + options.headers.forEach(function (hdr) { + // Remember that we already checked any headers in the params + // were in the request, so if this passes we're good. + if (parsed.params.headers.indexOf(hdr) < 0) + throw new MissingHeaderError(hdr + ' was not a signed header'); + }); + + if (options.algorithms) { + if (options.algorithms.indexOf(parsed.params.algorithm) === -1) + throw new InvalidParamsError(parsed.params.algorithm + + ' is not a supported algorithm'); + } + + return parsed; + } + +}; diff --git a/node_modules/http-signature/lib/signer.js b/node_modules/http-signature/lib/signer.js new file mode 100644 index 0000000..ef9946f --- /dev/null +++ b/node_modules/http-signature/lib/signer.js @@ -0,0 +1,399 @@ +// Copyright 2012 Joyent, Inc. All rights reserved. + +var assert = require('assert-plus'); +var crypto = require('crypto'); +var http = require('http'); +var util = require('util'); +var sshpk = require('sshpk'); +var jsprim = require('jsprim'); +var utils = require('./utils'); + +var sprintf = require('util').format; + +var HASH_ALGOS = utils.HASH_ALGOS; +var PK_ALGOS = utils.PK_ALGOS; +var InvalidAlgorithmError = utils.InvalidAlgorithmError; +var HttpSignatureError = utils.HttpSignatureError; +var validateAlgorithm = utils.validateAlgorithm; + +///--- Globals + +var AUTHZ_FMT = + 'Signature keyId="%s",algorithm="%s",headers="%s",signature="%s"'; + +///--- Specific Errors + +function MissingHeaderError(message) { + HttpSignatureError.call(this, message, MissingHeaderError); +} +util.inherits(MissingHeaderError, HttpSignatureError); + +function StrictParsingError(message) { + HttpSignatureError.call(this, message, StrictParsingError); +} +util.inherits(StrictParsingError, HttpSignatureError); + +/* See createSigner() */ +function RequestSigner(options) { + assert.object(options, 'options'); + + var alg = []; + if (options.algorithm !== undefined) { + assert.string(options.algorithm, 'options.algorithm'); + alg = validateAlgorithm(options.algorithm); + } + this.rs_alg = alg; + + /* + * RequestSigners come in two varieties: ones with an rs_signFunc, and ones + * with an rs_signer. + * + * rs_signFunc-based RequestSigners have to build up their entire signing + * string within the rs_lines array and give it to rs_signFunc as a single + * concat'd blob. rs_signer-based RequestSigners can add a line at a time to + * their signing state by using rs_signer.update(), thus only needing to + * buffer the hash function state and one line at a time. 
+ */ + if (options.sign !== undefined) { + assert.func(options.sign, 'options.sign'); + this.rs_signFunc = options.sign; + + } else if (alg[0] === 'hmac' && options.key !== undefined) { + assert.string(options.keyId, 'options.keyId'); + this.rs_keyId = options.keyId; + + if (typeof (options.key) !== 'string' && !Buffer.isBuffer(options.key)) + throw (new TypeError('options.key for HMAC must be a string or Buffer')); + + /* + * Make an rs_signer for HMACs, not a rs_signFunc -- HMACs digest their + * data in chunks rather than requiring it all to be given in one go + * at the end, so they are more similar to signers than signFuncs. + */ + this.rs_signer = crypto.createHmac(alg[1].toUpperCase(), options.key); + this.rs_signer.sign = function () { + var digest = this.digest('base64'); + return ({ + hashAlgorithm: alg[1], + toString: function () { return (digest); } + }); + }; + + } else if (options.key !== undefined) { + var key = options.key; + if (typeof (key) === 'string' || Buffer.isBuffer(key)) + key = sshpk.parsePrivateKey(key); + + assert.ok(sshpk.PrivateKey.isPrivateKey(key, [1, 2]), + 'options.key must be a sshpk.PrivateKey'); + this.rs_key = key; + + assert.string(options.keyId, 'options.keyId'); + this.rs_keyId = options.keyId; + + if (!PK_ALGOS[key.type]) { + throw (new InvalidAlgorithmError(key.type.toUpperCase() + ' type ' + + 'keys are not supported')); + } + + if (alg[0] !== undefined && key.type !== alg[0]) { + throw (new InvalidAlgorithmError('options.key must be a ' + + alg[0].toUpperCase() + ' key, was given a ' + + key.type.toUpperCase() + ' key instead')); + } + + this.rs_signer = key.createSign(alg[1]); + + } else { + throw (new TypeError('options.sign (func) or options.key is required')); + } + + this.rs_headers = []; + this.rs_lines = []; +} + +/** + * Adds a header to be signed, with its value, into this signer. + * + * @param {String} header + * @param {String} value + * @return {String} value written + */ +RequestSigner.prototype.writeHeader = function (header, value) { + assert.string(header, 'header'); + header = header.toLowerCase(); + assert.string(value, 'value'); + + this.rs_headers.push(header); + + if (this.rs_signFunc) { + this.rs_lines.push(header + ': ' + value); + + } else { + var line = header + ': ' + value; + if (this.rs_headers.length > 0) + line = '\n' + line; + this.rs_signer.update(line); + } + + return (value); +}; + +/** + * Adds a default Date header, returning its value. + * + * @return {String} + */ +RequestSigner.prototype.writeDateHeader = function () { + return (this.writeHeader('date', jsprim.rfc1123(new Date()))); +}; + +/** + * Adds the request target line to be signed. + * + * @param {String} method, HTTP method (e.g. 'get', 'post', 'put') + * @param {String} path + */ +RequestSigner.prototype.writeTarget = function (method, path) { + assert.string(method, 'method'); + assert.string(path, 'path'); + method = method.toLowerCase(); + this.writeHeader('(request-target)', method + ' ' + path); +}; + +/** + * Calculate the value for the Authorization header on this request + * asynchronously. 
+ * + * @param {Func} callback (err, authz) + */ +RequestSigner.prototype.sign = function (cb) { + assert.func(cb, 'callback'); + + if (this.rs_headers.length < 1) + throw (new Error('At least one header must be signed')); + + var alg, authz; + if (this.rs_signFunc) { + var data = this.rs_lines.join('\n'); + var self = this; + this.rs_signFunc(data, function (err, sig) { + if (err) { + cb(err); + return; + } + try { + assert.object(sig, 'signature'); + assert.string(sig.keyId, 'signature.keyId'); + assert.string(sig.algorithm, 'signature.algorithm'); + assert.string(sig.signature, 'signature.signature'); + alg = validateAlgorithm(sig.algorithm); + + authz = sprintf(AUTHZ_FMT, + sig.keyId, + sig.algorithm, + self.rs_headers.join(' '), + sig.signature); + } catch (e) { + cb(e); + return; + } + cb(null, authz); + }); + + } else { + try { + var sigObj = this.rs_signer.sign(); + } catch (e) { + cb(e); + return; + } + alg = (this.rs_alg[0] || this.rs_key.type) + '-' + sigObj.hashAlgorithm; + var signature = sigObj.toString(); + authz = sprintf(AUTHZ_FMT, + this.rs_keyId, + alg, + this.rs_headers.join(' '), + signature); + cb(null, authz); + } +}; + +///--- Exported API + +module.exports = { + /** + * Identifies whether a given object is a request signer or not. + * + * @param {Object} object, the object to identify + * @returns {Boolean} + */ + isSigner: function (obj) { + if (typeof (obj) === 'object' && obj instanceof RequestSigner) + return (true); + return (false); + }, + + /** + * Creates a request signer, used to asynchronously build a signature + * for a request (does not have to be an http.ClientRequest). + * + * @param {Object} options, either: + * - {String} keyId + * - {String|Buffer} key + * - {String} algorithm (optional, required for HMAC) + * or: + * - {Func} sign (data, cb) + * @return {RequestSigner} + */ + createSigner: function createSigner(options) { + return (new RequestSigner(options)); + }, + + /** + * Adds an 'Authorization' header to an http.ClientRequest object. + * + * Note that this API will add a Date header if it's not already set. Any + * other headers in the options.headers array MUST be present, or this + * will throw. + * + * You shouldn't need to check the return type; it's just there if you want + * to be pedantic. + * + * The optional flag indicates whether parsing should use strict enforcement + * of the version draft-cavage-http-signatures-04 of the spec or beyond. + * The default is to be loose and support + * older versions for compatibility. + * + * @param {Object} request an instance of http.ClientRequest. + * @param {Object} options signing parameters object: + * - {String} keyId required. + * - {String} key required (either a PEM or HMAC key). + * - {Array} headers optional; defaults to ['date']. + * - {String} algorithm optional (unless key is HMAC); + * default is the same as the sshpk default + * signing algorithm for the type of key given + * - {String} httpVersion optional; defaults to '1.1'. + * - {Boolean} strict optional; defaults to 'false'. + * @return {Boolean} true if Authorization (and optionally Date) were added. + * @throws {TypeError} on bad parameter types (input). + * @throws {InvalidAlgorithmError} if algorithm was bad or incompatible with + * the given key. + * @throws {sshpk.KeyParseError} if key was bad. + * @throws {MissingHeaderError} if a header to be signed was specified but + * was not present. 
+ */ + signRequest: function signRequest(request, options) { + assert.object(request, 'request'); + assert.object(options, 'options'); + assert.optionalString(options.algorithm, 'options.algorithm'); + assert.string(options.keyId, 'options.keyId'); + assert.optionalArrayOfString(options.headers, 'options.headers'); + assert.optionalString(options.httpVersion, 'options.httpVersion'); + + if (!request.getHeader('Date')) + request.setHeader('Date', jsprim.rfc1123(new Date())); + if (!options.headers) + options.headers = ['date']; + if (!options.httpVersion) + options.httpVersion = '1.1'; + + var alg = []; + if (options.algorithm) { + options.algorithm = options.algorithm.toLowerCase(); + alg = validateAlgorithm(options.algorithm); + } + + var i; + var stringToSign = ''; + for (i = 0; i < options.headers.length; i++) { + if (typeof (options.headers[i]) !== 'string') + throw new TypeError('options.headers must be an array of Strings'); + + var h = options.headers[i].toLowerCase(); + + if (h === 'request-line') { + if (!options.strict) { + /** + * We allow headers from the older spec drafts if strict parsing isn't + * specified in options. + */ + stringToSign += + request.method + ' ' + request.path + ' HTTP/' + + options.httpVersion; + } else { + /* Strict parsing doesn't allow older draft headers. */ + throw (new StrictParsingError('request-line is not a valid header ' + + 'with strict parsing enabled.')); + } + } else if (h === '(request-target)') { + stringToSign += + '(request-target): ' + request.method.toLowerCase() + ' ' + + request.path; + } else { + var value = request.getHeader(h); + if (value === undefined || value === '') { + throw new MissingHeaderError(h + ' was not in the request'); + } + stringToSign += h + ': ' + value; + } + + if ((i + 1) < options.headers.length) + stringToSign += '\n'; + } + + /* This is just for unit tests. 
*/ + if (request.hasOwnProperty('_stringToSign')) { + request._stringToSign = stringToSign; + } + + var signature; + if (alg[0] === 'hmac') { + if (typeof (options.key) !== 'string' && !Buffer.isBuffer(options.key)) + throw (new TypeError('options.key must be a string or Buffer')); + + var hmac = crypto.createHmac(alg[1].toUpperCase(), options.key); + hmac.update(stringToSign); + signature = hmac.digest('base64'); + + } else { + var key = options.key; + if (typeof (key) === 'string' || Buffer.isBuffer(key)) + key = sshpk.parsePrivateKey(options.key); + + assert.ok(sshpk.PrivateKey.isPrivateKey(key, [1, 2]), + 'options.key must be a sshpk.PrivateKey'); + + if (!PK_ALGOS[key.type]) { + throw (new InvalidAlgorithmError(key.type.toUpperCase() + ' type ' + + 'keys are not supported')); + } + + if (alg[0] !== undefined && key.type !== alg[0]) { + throw (new InvalidAlgorithmError('options.key must be a ' + + alg[0].toUpperCase() + ' key, was given a ' + + key.type.toUpperCase() + ' key instead')); + } + + var signer = key.createSign(alg[1]); + signer.update(stringToSign); + var sigObj = signer.sign(); + if (!HASH_ALGOS[sigObj.hashAlgorithm]) { + throw (new InvalidAlgorithmError(sigObj.hashAlgorithm.toUpperCase() + + ' is not a supported hash algorithm')); + } + options.algorithm = key.type + '-' + sigObj.hashAlgorithm; + signature = sigObj.toString(); + assert.notStrictEqual(signature, '', 'empty signature produced'); + } + + request.setHeader('Authorization', sprintf(AUTHZ_FMT, + options.keyId, + options.algorithm, + options.headers.join(' '), + signature)); + + return true; + } + +}; diff --git a/node_modules/http-signature/lib/utils.js b/node_modules/http-signature/lib/utils.js new file mode 100644 index 0000000..bbf2aa8 --- /dev/null +++ b/node_modules/http-signature/lib/utils.js @@ -0,0 +1,112 @@ +// Copyright 2012 Joyent, Inc. All rights reserved. + +var assert = require('assert-plus'); +var sshpk = require('sshpk'); +var util = require('util'); + +var HASH_ALGOS = { + 'sha1': true, + 'sha256': true, + 'sha512': true +}; + +var PK_ALGOS = { + 'rsa': true, + 'dsa': true, + 'ecdsa': true +}; + +function HttpSignatureError(message, caller) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, caller || HttpSignatureError); + + this.message = message; + this.name = caller.name; +} +util.inherits(HttpSignatureError, Error); + +function InvalidAlgorithmError(message) { + HttpSignatureError.call(this, message, InvalidAlgorithmError); +} +util.inherits(InvalidAlgorithmError, HttpSignatureError); + +function validateAlgorithm(algorithm) { + var alg = algorithm.toLowerCase().split('-'); + + if (alg.length !== 2) { + throw (new InvalidAlgorithmError(alg[0].toUpperCase() + ' is not a ' + + 'valid algorithm')); + } + + if (alg[0] !== 'hmac' && !PK_ALGOS[alg[0]]) { + throw (new InvalidAlgorithmError(alg[0].toUpperCase() + ' type keys ' + + 'are not supported')); + } + + if (!HASH_ALGOS[alg[1]]) { + throw (new InvalidAlgorithmError(alg[1].toUpperCase() + ' is not a ' + + 'supported hash algorithm')); + } + + return (alg); +} + +///--- API + +module.exports = { + + HASH_ALGOS: HASH_ALGOS, + PK_ALGOS: PK_ALGOS, + + HttpSignatureError: HttpSignatureError, + InvalidAlgorithmError: InvalidAlgorithmError, + + validateAlgorithm: validateAlgorithm, + + /** + * Converts an OpenSSH public key (rsa only) to a PKCS#8 PEM file. + * + * The intent of this module is to interoperate with OpenSSL only, + * specifically the node crypto module's `verify` method. + * + * @param {String} key an OpenSSH public key. 
+ * @return {String} PEM encoded form of the RSA public key. + * @throws {TypeError} on bad input. + * @throws {Error} on invalid ssh key formatted data. + */ + sshKeyToPEM: function sshKeyToPEM(key) { + assert.string(key, 'ssh_key'); + + var k = sshpk.parseKey(key, 'ssh'); + return (k.toString('pem')); + }, + + + /** + * Generates an OpenSSH fingerprint from an ssh public key. + * + * @param {String} key an OpenSSH public key. + * @return {String} key fingerprint. + * @throws {TypeError} on bad input. + * @throws {Error} if what you passed doesn't look like an ssh public key. + */ + fingerprint: function fingerprint(key) { + assert.string(key, 'ssh_key'); + + var k = sshpk.parseKey(key, 'ssh'); + return (k.fingerprint('md5').toString('hex')); + }, + + /** + * Converts a PKGCS#8 PEM file to an OpenSSH public key (rsa) + * + * The reverse of the above function. + */ + pemToRsaSSHKey: function pemToRsaSSHKey(pem, comment) { + assert.equal('string', typeof (pem), 'typeof pem'); + + var k = sshpk.parseKey(pem, 'pem'); + k.comment = comment; + return (k.toString('ssh')); + } +}; diff --git a/node_modules/http-signature/lib/verify.js b/node_modules/http-signature/lib/verify.js new file mode 100644 index 0000000..b053fd6 --- /dev/null +++ b/node_modules/http-signature/lib/verify.js @@ -0,0 +1,88 @@ +// Copyright 2015 Joyent, Inc. + +var assert = require('assert-plus'); +var crypto = require('crypto'); +var sshpk = require('sshpk'); +var utils = require('./utils'); + +var HASH_ALGOS = utils.HASH_ALGOS; +var PK_ALGOS = utils.PK_ALGOS; +var InvalidAlgorithmError = utils.InvalidAlgorithmError; +var HttpSignatureError = utils.HttpSignatureError; +var validateAlgorithm = utils.validateAlgorithm; + +///--- Exported API + +module.exports = { + /** + * Verify RSA/DSA signature against public key. You are expected to pass in + * an object that was returned from `parse()`. + * + * @param {Object} parsedSignature the object you got from `parse`. + * @param {String} pubkey RSA/DSA private key PEM. + * @return {Boolean} true if valid, false otherwise. + * @throws {TypeError} if you pass in bad arguments. + * @throws {InvalidAlgorithmError} + */ + verifySignature: function verifySignature(parsedSignature, pubkey) { + assert.object(parsedSignature, 'parsedSignature'); + if (typeof (pubkey) === 'string' || Buffer.isBuffer(pubkey)) + pubkey = sshpk.parseKey(pubkey); + assert.ok(sshpk.Key.isKey(pubkey, [1, 1]), 'pubkey must be a sshpk.Key'); + + var alg = validateAlgorithm(parsedSignature.algorithm); + if (alg[0] === 'hmac' || alg[0] !== pubkey.type) + return (false); + + var v = pubkey.createVerify(alg[1]); + v.update(parsedSignature.signingString); + return (v.verify(parsedSignature.params.signature, 'base64')); + }, + + /** + * Verify HMAC against shared secret. You are expected to pass in an object + * that was returned from `parse()`. + * + * @param {Object} parsedSignature the object you got from `parse`. + * @param {String} secret HMAC shared secret. + * @return {Boolean} true if valid, false otherwise. + * @throws {TypeError} if you pass in bad arguments. 
+ * @throws {InvalidAlgorithmError} + */ + verifyHMAC: function verifyHMAC(parsedSignature, secret) { + assert.object(parsedSignature, 'parsedHMAC'); + assert.string(secret, 'secret'); + + var alg = validateAlgorithm(parsedSignature.algorithm); + if (alg[0] !== 'hmac') + return (false); + + var hashAlg = alg[1].toUpperCase(); + + var hmac = crypto.createHmac(hashAlg, secret); + hmac.update(parsedSignature.signingString); + + /* + * Now double-hash to avoid leaking timing information - there's + * no easy constant-time compare in JS, so we use this approach + * instead. See for more info: + * https://www.isecpartners.com/blog/2011/february/double-hmac- + * verification.aspx + */ + var h1 = crypto.createHmac(hashAlg, secret); + h1.update(hmac.digest()); + h1 = h1.digest(); + var h2 = crypto.createHmac(hashAlg, secret); + h2.update(new Buffer(parsedSignature.params.signature, 'base64')); + h2 = h2.digest(); + + /* Node 0.8 returns strings from .digest(). */ + if (typeof (h1) === 'string') + return (h1 === h2); + /* And node 0.10 lacks the .equals() method on Buffers. */ + if (Buffer.isBuffer(h1) && !h1.equals) + return (h1.toString('binary') === h2.toString('binary')); + + return (h1.equals(h2)); + } +}; diff --git a/node_modules/http-signature/node_modules/.bin/sshpk-conv b/node_modules/http-signature/node_modules/.bin/sshpk-conv new file mode 120000 index 0000000..90cd5a1 --- /dev/null +++ b/node_modules/http-signature/node_modules/.bin/sshpk-conv @@ -0,0 +1 @@ +../../../sshpk/bin/sshpk-conv \ No newline at end of file diff --git a/node_modules/http-signature/node_modules/.bin/sshpk-sign b/node_modules/http-signature/node_modules/.bin/sshpk-sign new file mode 120000 index 0000000..823f106 --- /dev/null +++ b/node_modules/http-signature/node_modules/.bin/sshpk-sign @@ -0,0 +1 @@ +../../../sshpk/bin/sshpk-sign \ No newline at end of file diff --git a/node_modules/http-signature/node_modules/.bin/sshpk-verify b/node_modules/http-signature/node_modules/.bin/sshpk-verify new file mode 120000 index 0000000..9993bf3 --- /dev/null +++ b/node_modules/http-signature/node_modules/.bin/sshpk-verify @@ -0,0 +1 @@ +../../../sshpk/bin/sshpk-verify \ No newline at end of file diff --git a/node_modules/http-signature/package.json b/node_modules/http-signature/package.json new file mode 100644 index 0000000..5f10cea --- /dev/null +++ b/node_modules/http-signature/package.json @@ -0,0 +1,39 @@ +{ + "name": "http-signature", + "description": "Reference implementation of Joyent's HTTP Signature scheme.", + "version": "1.1.1", + "license": "MIT", + "author": "Joyent, Inc", + "contributors": [ + "Mark Cavage ", + "David I. 
Lehn ", + "Patrick Mooney " + ], + "repository": { + "type": "git", + "url": "git://github.com/joyent/node-http-signature.git" + }, + "homepage": "https://github.com/joyent/node-http-signature/", + "bugs": "https://github.com/joyent/node-http-signature/issues", + "keywords": [ + "https", + "request" + ], + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + }, + "main": "lib/index.js", + "scripts": { + "test": "tap test/*.js" + }, + "dependencies": { + "assert-plus": "^0.2.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "devDependencies": { + "node-uuid": "^1.4.1", + "tap": "0.4.2" + } +} diff --git a/node_modules/https-browserify/LICENSE b/node_modules/https-browserify/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/https-browserify/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/https-browserify/index.js b/node_modules/https-browserify/index.js new file mode 100644 index 0000000..1de879a --- /dev/null +++ b/node_modules/https-browserify/index.js @@ -0,0 +1,13 @@ +var http = require('http'); + +var https = module.exports; + +for (var key in http) { + if (http.hasOwnProperty(key)) https[key] = http[key]; +}; + +https.request = function (params, cb) { + if (!params) params = {}; + params.scheme = 'https'; + return http.request.call(this, params, cb); +} diff --git a/node_modules/https-browserify/package.json b/node_modules/https-browserify/package.json new file mode 100644 index 0000000..d3dd090 --- /dev/null +++ b/node_modules/https-browserify/package.json @@ -0,0 +1,22 @@ +{ + "name": "https-browserify", + "version": "0.0.0", + "description": "https module compatability for browserify", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/substack/https-browserify.git" + }, + "homepage": "https://github.com/substack/https-browserify", + "keywords": [ + "https", + "browser", + "browserify" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/https-browserify/readme.markdown b/node_modules/https-browserify/readme.markdown new file mode 100644 index 0000000..8638614 --- /dev/null +++ b/node_modules/https-browserify/readme.markdown @@ -0,0 +1,22 @@ +# https-browserify + +https module compatability for browserify + +# example + +``` js +var https = require('https-browserify'); +var r = https.request('https://github.com'); +r.on('request', function (res) { + console.log(res); +}); +``` + +# methods + +The API is the same as the client portion of the +[node core https module](http://nodejs.org/docs/latest/api/https.html). + +# license + +MIT diff --git a/node_modules/iconv-lite/.npmignore b/node_modules/iconv-lite/.npmignore new file mode 100644 index 0000000..5cd2673 --- /dev/null +++ b/node_modules/iconv-lite/.npmignore @@ -0,0 +1,6 @@ +*~ +*sublime-* +generation +test +wiki +coverage diff --git a/node_modules/iconv-lite/.travis.yml b/node_modules/iconv-lite/.travis.yml new file mode 100644 index 0000000..f5343f1 --- /dev/null +++ b/node_modules/iconv-lite/.travis.yml @@ -0,0 +1,20 @@ + sudo: false + env: + - CXX=g++-4.8 + language: node_js + node_js: + - "0.8" + - "0.10" + - "0.11" + - "0.12" + - "iojs" + - "4.0" + addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-4.8 + - g++-4.8 + before_install: + - "test $TRAVIS_NODE_VERSION != '0.8' || npm install -g npm@1.2.8000" diff --git a/node_modules/iconv-lite/Changelog.md b/node_modules/iconv-lite/Changelog.md new file mode 100644 index 0000000..421b1e2 --- /dev/null +++ b/node_modules/iconv-lite/Changelog.md @@ -0,0 +1,93 @@ + +# 0.4.13 / 2015-10-01 + + * Fix silly mistake in deprecation notice. + + +# 0.4.12 / 2015-09-26 + + * Node v4 support: + * Added CESU-8 decoding (#106) + * Added deprecation notice for `extendNodeEncodings` + * Added Travis tests for Node v4 and io.js latest (#105 by @Mithgol) + + +# 0.4.11 / 2015-07-03 + + * Added CESU-8 encoding. + + +# 0.4.10 / 2015-05-26 + + * Changed UTF-16 endianness heuristic to take into account any ASCII chars, not + just spaces. This should minimize the importance of "default" endianness. + + +# 0.4.9 / 2015-05-24 + + * Streamlined BOM handling: strip BOM by default, add BOM when encoding if + addBOM: true. Added docs to Readme. + * UTF16 now uses UTF16-LE by default. 
+ * Fixed minor issue with big5 encoding. + * Added io.js testing on Travis; updated node-iconv version to test against. + Now we just skip testing SBCS encodings that node-iconv doesn't support. + * (internal refactoring) Updated codec interface to use classes. + * Use strict mode in all files. + + +# 0.4.8 / 2015-04-14 + + * added alias UNICODE-1-1-UTF-7 for UTF-7 encoding (#94) + + +# 0.4.7 / 2015-02-05 + + * stop official support of Node.js v0.8. Should still work, but no guarantees. + reason: Packages needed for testing are hard to get on Travis CI. + * work in environment where Object.prototype is monkey patched with enumerable + props (#89). + + +# 0.4.6 / 2015-01-12 + + * fix rare aliases of single-byte encodings (thanks @mscdex) + * double the timeout for dbcs tests to make them less flaky on travis + + +# 0.4.5 / 2014-11-20 + + * fix windows-31j and x-sjis encoding support (@nleush) + * minor fix: undefined variable reference when internal error happens + + +# 0.4.4 / 2014-07-16 + + * added encodings UTF-7 (RFC2152) and UTF-7-IMAP (RFC3501 Section 5.1.3) + * fixed streaming base64 encoding + + +# 0.4.3 / 2014-06-14 + + * added encodings UTF-16BE and UTF-16 with BOM + + +# 0.4.2 / 2014-06-12 + + * don't throw exception if `extendNodeEncodings()` is called more than once + + +# 0.4.1 / 2014-06-11 + + * codepage 808 added + + +# 0.4.0 / 2014-06-10 + + * code is rewritten from scratch + * all widespread encodings are supported + * streaming interface added + * browserify compatibility added + * (optional) extend core primitive encodings to make usage even simpler + * moved from vows to mocha as the testing framework + + diff --git a/node_modules/iconv-lite/LICENSE b/node_modules/iconv-lite/LICENSE new file mode 100644 index 0000000..d518d83 --- /dev/null +++ b/node_modules/iconv-lite/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011 Alexander Shtuchkin + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/iconv-lite/README.md b/node_modules/iconv-lite/README.md new file mode 100644 index 0000000..160b7cf --- /dev/null +++ b/node_modules/iconv-lite/README.md @@ -0,0 +1,157 @@ +## Pure JS character encoding conversion [![Build Status](https://travis-ci.org/ashtuchkin/iconv-lite.svg?branch=master)](https://travis-ci.org/ashtuchkin/iconv-lite) + + * Doesn't need native code compilation. Works on Windows and in sandboxed environments like [Cloud9](http://c9.io). 
+ * Used in popular projects like [Express.js (body_parser)](https://github.com/expressjs/body-parser), + [Grunt](http://gruntjs.com/), [Nodemailer](http://www.nodemailer.com/), [Yeoman](http://yeoman.io/) and others. + * Faster than [node-iconv](https://github.com/bnoordhuis/node-iconv) (see below for performance comparison). + * Intuitive encode/decode API + * Streaming support for Node v0.10+ + * [Deprecated] Can extend Node.js primitives (buffers, streams) to support all iconv-lite encodings. + * In-browser usage via [Browserify](https://github.com/substack/node-browserify) (~180k gzip compressed with Buffer shim included). + * License: MIT. + +[![NPM Stats](https://nodei.co/npm/iconv-lite.png?downloads=true&downloadRank=true)](https://npmjs.org/packages/iconv-lite/) + +## Usage +### Basic API +```javascript +var iconv = require('iconv-lite'); + +// Convert from an encoded buffer to js string. +str = iconv.decode(new Buffer([0x68, 0x65, 0x6c, 0x6c, 0x6f]), 'win1251'); + +// Convert from js string to an encoded buffer. +buf = iconv.encode("Sample input string", 'win1251'); + +// Check if encoding is supported +iconv.encodingExists("us-ascii") +``` + +### Streaming API (Node v0.10+) +```javascript + +// Decode stream (from binary stream to js strings) +http.createServer(function(req, res) { + var converterStream = iconv.decodeStream('win1251'); + req.pipe(converterStream); + + converterStream.on('data', function(str) { + console.log(str); // Do something with decoded strings, chunk-by-chunk. + }); +}); + +// Convert encoding streaming example +fs.createReadStream('file-in-win1251.txt') + .pipe(iconv.decodeStream('win1251')) + .pipe(iconv.encodeStream('ucs2')) + .pipe(fs.createWriteStream('file-in-ucs2.txt')); + +// Sugar: all encode/decode streams have .collect(cb) method to accumulate data. +http.createServer(function(req, res) { + req.pipe(iconv.decodeStream('win1251')).collect(function(err, body) { + assert(typeof body == 'string'); + console.log(body); // full request body string + }); +}); +``` + +### [Deprecated] Extend Node.js own encodings +> NOTE: This doesn't work on latest Node versions. See [details](https://github.com/ashtuchkin/iconv-lite/wiki/Node-v4-compatibility). + +```javascript +// After this call all Node basic primitives will understand iconv-lite encodings. +iconv.extendNodeEncodings(); + +// Examples: +buf = new Buffer(str, 'win1251'); +buf.write(str, 'gbk'); +str = buf.toString('latin1'); +assert(Buffer.isEncoding('iso-8859-15')); +Buffer.byteLength(str, 'us-ascii'); + +http.createServer(function(req, res) { + req.setEncoding('big5'); + req.collect(function(err, body) { + console.log(body); + }); +}); + +fs.createReadStream("file.txt", "shift_jis"); + +// External modules are also supported (if they use Node primitives, which they probably do). +request = require('request'); +request({ + url: "http://github.com/", + encoding: "cp932" +}); + +// To remove extensions +iconv.undoExtendNodeEncodings(); +``` + +## Supported encodings + + * All node.js native encodings: utf8, ucs2 / utf16-le, ascii, binary, base64, hex. + * Additional unicode encodings: utf16, utf16-be, utf-7, utf-7-imap. + * All widespread singlebyte encodings: Windows 125x family, ISO-8859 family, + IBM/DOS codepages, Macintosh family, KOI8 family, all others supported by iconv library. + Aliases like 'latin1', 'us-ascii' also supported. + * All widespread multibyte encodings: CP932, CP936, CP949, CP950, GB2313, GBK, GB18030, Big5, Shift_JIS, EUC-JP. 
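+
+For example (a minimal sketch; 'shiftjis' is one of the table-backed encodings listed above), a multibyte encoding round-trips through the same API as the single-byte examples:
+
+```javascript
+var iconv = require('iconv-lite');
+
+// Encode a JS string to Shift_JIS bytes, then decode it back.
+var buf = iconv.encode("こんにちは", 'shiftjis');
+var str = iconv.decode(buf, 'shiftjis');
+```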
+ +See [all supported encodings on wiki](https://github.com/ashtuchkin/iconv-lite/wiki/Supported-Encodings). + +Most singlebyte encodings are generated automatically from [node-iconv](https://github.com/bnoordhuis/node-iconv). Thank you Ben Noordhuis and libiconv authors! + +Multibyte encodings are generated from [Unicode.org mappings](http://www.unicode.org/Public/MAPPINGS/) and [WHATWG Encoding Standard mappings](http://encoding.spec.whatwg.org/). Thank you, respective authors! + + +## Encoding/decoding speed + +Comparison with node-iconv module (1000x256kb, on MacBook Pro, Core i5/2.6 GHz, Node v0.12.0). +Note: your results may vary, so please always check on your hardware. + + operation iconv@2.1.4 iconv-lite@0.4.7 + ---------------------------------------------------------- + encode('win1251') ~96 Mb/s ~320 Mb/s + decode('win1251') ~95 Mb/s ~246 Mb/s + +## BOM handling + + * Decoding: BOM is stripped by default, unless overridden by passing `stripBOM: false` in options + (f.ex. `iconv.decode(buf, enc, {stripBOM: false})`). + A callback might also be given as a `stripBOM` parameter - it'll be called if BOM character was actually found. + * Encoding: No BOM added, unless overridden by `addBOM: true` option. + +## UTF-16 Encodings + +This library supports UTF-16LE, UTF-16BE and UTF-16 encodings. First two are straightforward, but UTF-16 is trying to be +smart about endianness in the following ways: + * Decoding: uses BOM and 'spaces heuristic' to determine input endianness. Default is UTF-16LE, but can be + overridden with `defaultEncoding: 'utf-16be'` option. Strips BOM unless `stripBOM: false`. + * Encoding: uses UTF-16LE and writes BOM by default. Use `addBOM: false` to override. + +## Other notes + +When decoding, be sure to supply a Buffer to decode() method, otherwise [bad things usually happen](https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding). +Untranslatable characters are set to � or ?. No transliteration is currently supported. +Node versions 0.10.31 and 0.11.13 are buggy, don't use them (see #65, #77). + +## Testing + +```bash +$ git clone git@github.com:ashtuchkin/iconv-lite.git +$ cd iconv-lite +$ npm install +$ npm test + +$ # To view performance: +$ node test/performance.js + +$ # To view test coverage: +$ npm run coverage +$ open coverage/lcov-report/index.html +``` + +## Adoption +[![NPM](https://nodei.co/npm-dl/iconv-lite.png)](https://nodei.co/npm/iconv-lite/) +[![Codeship Status for ashtuchkin/iconv-lite](https://www.codeship.io/projects/81670840-fa72-0131-4520-4a01a6c01acc/status)](https://www.codeship.io/projects/29053) diff --git a/node_modules/iconv-lite/encodings/dbcs-codec.js b/node_modules/iconv-lite/encodings/dbcs-codec.js new file mode 100644 index 0000000..366809e --- /dev/null +++ b/node_modules/iconv-lite/encodings/dbcs-codec.js @@ -0,0 +1,554 @@ +"use strict" + +// Multibyte codec. In this scheme, a character is represented by 1 or more bytes. +// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences. +// To save memory and loading time, we read table files only when requested. + +exports._dbcs = DBCSCodec; + +var UNASSIGNED = -1, + GB18030_CODE = -2, + SEQ_START = -10, + NODE_START = -1000, + UNASSIGNED_NODE = new Array(0x100), + DEF_CHAR = -1; + +for (var i = 0; i < 0x100; i++) + UNASSIGNED_NODE[i] = UNASSIGNED; + + +// Class DBCSCodec reads and initializes mapping tables. 
+function DBCSCodec(codecOptions, iconv) { + this.encodingName = codecOptions.encodingName; + if (!codecOptions) + throw new Error("DBCS codec is called without the data.") + if (!codecOptions.table) + throw new Error("Encoding '" + this.encodingName + "' has no data."); + + // Load tables. + var mappingTable = codecOptions.table(); + + + // Decode tables: MBCS -> Unicode. + + // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256. + // Trie root is decodeTables[0]. + // Values: >= 0 -> unicode character code. can be > 0xFFFF + // == UNASSIGNED -> unknown/unassigned sequence. + // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence. + // <= NODE_START -> index of the next node in our trie to process next byte. + // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq. + this.decodeTables = []; + this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node. + + // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here. + this.decodeTableSeq = []; + + // Actual mapping tables consist of chunks. Use them to fill up decode tables. + for (var i = 0; i < mappingTable.length; i++) + this._addDecodeChunk(mappingTable[i]); + + this.defaultCharUnicode = iconv.defaultCharUnicode; + + + // Encode tables: Unicode -> DBCS. + + // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance. + // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null. + // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.). + // == UNASSIGNED -> no conversion found. Output a default char. + // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence. + this.encodeTable = []; + + // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of + // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key + // means end of sequence (needed when one sequence is a strict subsequence of another). + // Objects are kept separately from encodeTable to increase performance. + this.encodeTableSeq = []; + + // Some chars can be decoded, but need not be encoded. + var skipEncodeChars = {}; + if (codecOptions.encodeSkipVals) + for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) { + var val = codecOptions.encodeSkipVals[i]; + if (typeof val === 'number') + skipEncodeChars[val] = true; + else + for (var j = val.from; j <= val.to; j++) + skipEncodeChars[j] = true; + } + + // Use decode trie to recursively fill out encode tables. + this._fillEncodeTable(0, 0, skipEncodeChars); + + // Add more encoding pairs when needed. + if (codecOptions.encodeAdd) { + for (var uChar in codecOptions.encodeAdd) + if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar)) + this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]); + } + + this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)]; + if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?']; + if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0); + + + // Load & create GB18030 tables when needed. + if (typeof codecOptions.gb18030 === 'function') { + this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges. 
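+ // For illustration only: a 4-byte GB18030 sequence <b1 b2 b3 b4> is decoded by computing a
+ // linear pointer ptr = (b1-0x81)*12600 + (b2-0x30)*1260 + (b3-0x81)*10 + (b4-0x30) and
+ // offsetting it into the uChars/gbChars ranges loaded above; e.g. bytes 81 30 81 30
+ // give ptr = 0, which the first range maps to U+0080.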
+ + // Add GB18030 decode tables. + var thirdByteNodeIdx = this.decodeTables.length; + var thirdByteNode = this.decodeTables[thirdByteNodeIdx] = UNASSIGNED_NODE.slice(0); + + var fourthByteNodeIdx = this.decodeTables.length; + var fourthByteNode = this.decodeTables[fourthByteNodeIdx] = UNASSIGNED_NODE.slice(0); + + for (var i = 0x81; i <= 0xFE; i++) { + var secondByteNodeIdx = NODE_START - this.decodeTables[0][i]; + var secondByteNode = this.decodeTables[secondByteNodeIdx]; + for (var j = 0x30; j <= 0x39; j++) + secondByteNode[j] = NODE_START - thirdByteNodeIdx; + } + for (var i = 0x81; i <= 0xFE; i++) + thirdByteNode[i] = NODE_START - fourthByteNodeIdx; + for (var i = 0x30; i <= 0x39; i++) + fourthByteNode[i] = GB18030_CODE + } +} + +DBCSCodec.prototype.encoder = DBCSEncoder; +DBCSCodec.prototype.decoder = DBCSDecoder; + +// Decoder helpers +DBCSCodec.prototype._getDecodeTrieNode = function(addr) { + var bytes = []; + for (; addr > 0; addr >>= 8) + bytes.push(addr & 0xFF); + if (bytes.length == 0) + bytes.push(0); + + var node = this.decodeTables[0]; + for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie. + var val = node[bytes[i]]; + + if (val == UNASSIGNED) { // Create new node. + node[bytes[i]] = NODE_START - this.decodeTables.length; + this.decodeTables.push(node = UNASSIGNED_NODE.slice(0)); + } + else if (val <= NODE_START) { // Existing node. + node = this.decodeTables[NODE_START - val]; + } + else + throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16)); + } + return node; +} + + +DBCSCodec.prototype._addDecodeChunk = function(chunk) { + // First element of chunk is the hex mbcs code where we start. + var curAddr = parseInt(chunk[0], 16); + + // Choose the decoding node where we'll write our chars. + var writeTable = this._getDecodeTrieNode(curAddr); + curAddr = curAddr & 0xFF; + + // Write all other elements of the chunk to the table. + for (var k = 1; k < chunk.length; k++) { + var part = chunk[k]; + if (typeof part === "string") { // String, write as-is. + for (var l = 0; l < part.length;) { + var code = part.charCodeAt(l++); + if (0xD800 <= code && code < 0xDC00) { // Decode surrogate + var codeTrail = part.charCodeAt(l++); + if (0xDC00 <= codeTrail && codeTrail < 0xE000) + writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00); + else + throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]); + } + else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used) + var len = 0xFFF - code + 2; + var seq = []; + for (var m = 0; m < len; m++) + seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq. + + writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length; + this.decodeTableSeq.push(seq); + } + else + writeTable[curAddr++] = code; // Basic char + } + } + else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character. 
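+ // For example (hypothetical chunk, not taken from the real tables): in ["8140", "\u3042", 3]
+ // the trailing 3 writes the three code points that follow U+3042 (U+3043..U+3045) at the next addresses.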
+ var charCode = writeTable[curAddr - 1] + 1; + for (var l = 0; l < part; l++) + writeTable[curAddr++] = charCode++; + } + else + throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]); + } + if (curAddr > 0xFF) + throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr); +} + +// Encoder helpers +DBCSCodec.prototype._getEncodeBucket = function(uCode) { + var high = uCode >> 8; // This could be > 0xFF because of astral characters. + if (this.encodeTable[high] === undefined) + this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand. + return this.encodeTable[high]; +} + +DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) { + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; + if (bucket[low] <= SEQ_START) + this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it. + else if (bucket[low] == UNASSIGNED) + bucket[low] = dbcsCode; +} + +DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) { + + // Get the root of character tree according to first character of the sequence. + var uCode = seq[0]; + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; + + var node; + if (bucket[low] <= SEQ_START) { + // There's already a sequence with - use it. + node = this.encodeTableSeq[SEQ_START-bucket[low]]; + } + else { + // There was no sequence object - allocate a new one. + node = {}; + if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence. + bucket[low] = SEQ_START - this.encodeTableSeq.length; + this.encodeTableSeq.push(node); + } + + // Traverse the character tree, allocating new nodes as needed. + for (var j = 1; j < seq.length-1; j++) { + var oldVal = node[uCode]; + if (typeof oldVal === 'object') + node = oldVal; + else { + node = node[uCode] = {} + if (oldVal !== undefined) + node[DEF_CHAR] = oldVal + } + } + + // Set the leaf to given dbcsCode. + uCode = seq[seq.length-1]; + node[uCode] = dbcsCode; +} + +DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) { + var node = this.decodeTables[nodeIdx]; + for (var i = 0; i < 0x100; i++) { + var uCode = node[i]; + var mbCode = prefix + i; + if (skipEncodeChars[mbCode]) + continue; + + if (uCode >= 0) + this._setEncodeChar(uCode, mbCode); + else if (uCode <= NODE_START) + this._fillEncodeTable(NODE_START - uCode, mbCode << 8, skipEncodeChars); + else if (uCode <= SEQ_START) + this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode); + } +} + + + +// == Encoder ================================================================== + +function DBCSEncoder(options, codec) { + // Encoder state + this.leadSurrogate = -1; + this.seqObj = undefined; + + // Static data + this.encodeTable = codec.encodeTable; + this.encodeTableSeq = codec.encodeTableSeq; + this.defaultCharSingleByte = codec.defCharSB; + this.gb18030 = codec.gb18030; +} + +DBCSEncoder.prototype.write = function(str) { + var newBuf = new Buffer(str.length * (this.gb18030 ? 4 : 3)), + leadSurrogate = this.leadSurrogate, + seqObj = this.seqObj, nextChar = -1, + i = 0, j = 0; + + while (true) { + // 0. Get next character. + if (nextChar === -1) { + if (i == str.length) break; + var uCode = str.charCodeAt(i++); + } + else { + var uCode = nextChar; + nextChar = -1; + } + + // 1. Handle surrogates. 
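+ // For example, U+1F600 is stored in a JS string as the surrogate pair 0xD83D 0xDE00;
+ // it is recombined below as 0x10000 + (0xD83D-0xD800)*0x400 + (0xDE00-0xDC00) = 0x1F600.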
+ if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates. + if (uCode < 0xDC00) { // We've got lead surrogate. + if (leadSurrogate === -1) { + leadSurrogate = uCode; + continue; + } else { + leadSurrogate = uCode; + // Double lead surrogate found. + uCode = UNASSIGNED; + } + } else { // We've got trail surrogate. + if (leadSurrogate !== -1) { + uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00); + leadSurrogate = -1; + } else { + // Incomplete surrogate pair - only trail surrogate found. + uCode = UNASSIGNED; + } + + } + } + else if (leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char. + leadSurrogate = -1; + } + + // 2. Convert uCode character. + var dbcsCode = UNASSIGNED; + if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence + var resCode = seqObj[uCode]; + if (typeof resCode === 'object') { // Sequence continues. + seqObj = resCode; + continue; + + } else if (typeof resCode == 'number') { // Sequence finished. Write it. + dbcsCode = resCode; + + } else if (resCode == undefined) { // Current character is not part of the sequence. + + // Try default character for this sequence + resCode = seqObj[DEF_CHAR]; + if (resCode !== undefined) { + dbcsCode = resCode; // Found. Write it. + nextChar = uCode; // Current character will be written too in the next iteration. + + } else { + // TODO: What if we have no default? (resCode == undefined) + // Then, we should write first char of the sequence as-is and try the rest recursively. + // Didn't do it for now because no encoding has this situation yet. + // Currently, just skip the sequence and write current char. + } + } + seqObj = undefined; + } + else if (uCode >= 0) { // Regular character + var subtable = this.encodeTable[uCode >> 8]; + if (subtable !== undefined) + dbcsCode = subtable[uCode & 0xFF]; + + if (dbcsCode <= SEQ_START) { // Sequence start + seqObj = this.encodeTableSeq[SEQ_START-dbcsCode]; + continue; + } + + if (dbcsCode == UNASSIGNED && this.gb18030) { + // Use GB18030 algorithm to find character(s) to write. + var idx = findIdx(this.gb18030.uChars, uCode); + if (idx != -1) { + var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]); + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600; + newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260; + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10; + newBuf[j++] = 0x30 + dbcsCode; + continue; + } + } + } + + // 3. Write dbcsCode character. + if (dbcsCode === UNASSIGNED) + dbcsCode = this.defaultCharSingleByte; + + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else if (dbcsCode < 0x10000) { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + else { + newBuf[j++] = dbcsCode >> 16; + newBuf[j++] = (dbcsCode >> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } + } + + this.seqObj = seqObj; + this.leadSurrogate = leadSurrogate; + return newBuf.slice(0, j); +} + +DBCSEncoder.prototype.end = function() { + if (this.leadSurrogate === -1 && this.seqObj === undefined) + return; // All clean. Most often case. + + var newBuf = new Buffer(10), j = 0; + + if (this.seqObj) { // We're in the sequence. + var dbcsCode = this.seqObj[DEF_CHAR]; + if (dbcsCode !== undefined) { // Write beginning of the sequence. 
+ if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + } else { + // See todo above. + } + this.seqObj = undefined; + } + + if (this.leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + newBuf[j++] = this.defaultCharSingleByte; + this.leadSurrogate = -1; + } + + return newBuf.slice(0, j); +} + +// Export for testing +DBCSEncoder.prototype.findIdx = findIdx; + + +// == Decoder ================================================================== + +function DBCSDecoder(options, codec) { + // Decoder state + this.nodeIdx = 0; + this.prevBuf = new Buffer(0); + + // Static data + this.decodeTables = codec.decodeTables; + this.decodeTableSeq = codec.decodeTableSeq; + this.defaultCharUnicode = codec.defaultCharUnicode; + this.gb18030 = codec.gb18030; +} + +DBCSDecoder.prototype.write = function(buf) { + var newBuf = new Buffer(buf.length*2), + nodeIdx = this.nodeIdx, + prevBuf = this.prevBuf, prevBufOffset = this.prevBuf.length, + seqStart = -this.prevBuf.length, // idx of the start of current parsed sequence. + uCode; + + if (prevBufOffset > 0) // Make prev buf overlap a little to make it easier to slice later. + prevBuf = Buffer.concat([prevBuf, buf.slice(0, 10)]); + + for (var i = 0, j = 0; i < buf.length; i++) { + var curByte = (i >= 0) ? buf[i] : prevBuf[i + prevBufOffset]; + + // Lookup in current trie node. + var uCode = this.decodeTables[nodeIdx][curByte]; + + if (uCode >= 0) { + // Normal character, just use it. + } + else if (uCode === UNASSIGNED) { // Unknown char. + // TODO: Callback with seq. + //var curSeq = (seqStart >= 0) ? buf.slice(seqStart, i+1) : prevBuf.slice(seqStart + prevBufOffset, i+1 + prevBufOffset); + i = seqStart; // Try to parse again, after skipping first byte of the sequence ('i' will be incremented by 'for' cycle). + uCode = this.defaultCharUnicode.charCodeAt(0); + } + else if (uCode === GB18030_CODE) { + var curSeq = (seqStart >= 0) ? buf.slice(seqStart, i+1) : prevBuf.slice(seqStart + prevBufOffset, i+1 + prevBufOffset); + var ptr = (curSeq[0]-0x81)*12600 + (curSeq[1]-0x30)*1260 + (curSeq[2]-0x81)*10 + (curSeq[3]-0x30); + var idx = findIdx(this.gb18030.gbChars, ptr); + uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx]; + } + else if (uCode <= NODE_START) { // Go to next trie node. + nodeIdx = NODE_START - uCode; + continue; + } + else if (uCode <= SEQ_START) { // Output a sequence of chars. + var seq = this.decodeTableSeq[SEQ_START - uCode]; + for (var k = 0; k < seq.length - 1; k++) { + uCode = seq[k]; + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + } + uCode = seq[seq.length-1]; + } + else + throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte); + + // Write the character to buffer, handling higher planes using surrogate pair. + if (uCode > 0xFFFF) { + uCode -= 0x10000; + var uCodeLead = 0xD800 + Math.floor(uCode / 0x400); + newBuf[j++] = uCodeLead & 0xFF; + newBuf[j++] = uCodeLead >> 8; + + uCode = 0xDC00 + uCode % 0x400; + } + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + + // Reset trie node. + nodeIdx = 0; seqStart = i+1; + } + + this.nodeIdx = nodeIdx; + this.prevBuf = (seqStart >= 0) ? buf.slice(seqStart) : prevBuf.slice(seqStart + prevBufOffset); + return newBuf.slice(0, j).toString('ucs2'); +} + +DBCSDecoder.prototype.end = function() { + var ret = ''; + + // Try to parse all remaining chars. 
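+ // For example, if the stream ended right after a lone lead byte, the loop below emits
+ // one defaultCharUnicode for it and then re-parses whatever bytes remain.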
+ while (this.prevBuf.length > 0) { + // Skip 1 character in the buffer. + ret += this.defaultCharUnicode; + var buf = this.prevBuf.slice(1); + + // Parse remaining as usual. + this.prevBuf = new Buffer(0); + this.nodeIdx = 0; + if (buf.length > 0) + ret += this.write(buf); + } + + this.nodeIdx = 0; + return ret; +} + +// Binary search for GB18030. Returns largest i such that table[i] <= val. +function findIdx(table, val) { + if (table[0] > val) + return -1; + + var l = 0, r = table.length; + while (l < r-1) { // always table[l] <= val < table[r] + var mid = l + Math.floor((r-l+1)/2); + if (table[mid] <= val) + l = mid; + else + r = mid; + } + return l; +} + diff --git a/node_modules/iconv-lite/encodings/dbcs-data.js b/node_modules/iconv-lite/encodings/dbcs-data.js new file mode 100644 index 0000000..2bf7415 --- /dev/null +++ b/node_modules/iconv-lite/encodings/dbcs-data.js @@ -0,0 +1,170 @@ +"use strict" + +// Description of supported double byte encodings and aliases. +// Tables are not require()-d until they are needed to speed up library load. +// require()-s are direct to support Browserify. + +module.exports = { + + // == Japanese/ShiftJIS ==================================================== + // All japanese encodings are based on JIS X set of standards: + // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF. + // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes. + // Has several variations in 1978, 1983, 1990 and 1997. + // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead. + // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233. + // 2 planes, first is superset of 0208, second - revised 0212. + // Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx) + + // Byte encodings are: + // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte + // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC. + // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI. + // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes. + // 0x00-0x7F - lower part of 0201 + // 0x8E, 0xA1-0xDF - upper part of 0201 + // (0xA1-0xFE)x2 - 0208 plane (94x94). + // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94). + // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon. + // Used as-is in ISO2022 family. + // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII, + // 0201-1976 Roman, 0208-1978, 0208-1983. + // * ISO2022-JP-1: Adds esc seq for 0212-1990. + // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7. + // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2. + // * ISO2022-JP-2004: Adds 0213-2004 Plane 1. + // + // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes. 
+ // + // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html + + + 'shiftjis': { + type: '_dbcs', + table: function() { return require('./tables/shiftjis.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + encodeSkipVals: [{from: 0xED40, to: 0xF940}], + }, + 'csshiftjis': 'shiftjis', + 'mskanji': 'shiftjis', + 'sjis': 'shiftjis', + 'windows31j': 'shiftjis', + 'xsjis': 'shiftjis', + 'windows932': 'shiftjis', + '932': 'shiftjis', + 'cp932': 'shiftjis', + + 'eucjp': { + type: '_dbcs', + table: function() { return require('./tables/eucjp.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + }, + + // TODO: KDDI extension to Shift_JIS + // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes. + // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars. + + // == Chinese/GBK ========================================================== + // http://en.wikipedia.org/wiki/GBK + + // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936 + 'gb2312': 'cp936', + 'gb231280': 'cp936', + 'gb23121980': 'cp936', + 'csgb2312': 'cp936', + 'csiso58gb231280': 'cp936', + 'euccn': 'cp936', + 'isoir58': 'gbk', + + // Microsoft's CP936 is a subset and approximation of GBK. + // TODO: Euro = 0x80 in cp936, but not in GBK (where it's valid but undefined) + 'windows936': 'cp936', + '936': 'cp936', + 'cp936': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json') }, + }, + + // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other. + 'gbk': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + }, + 'xgbk': 'gbk', + + // GB18030 is an algorithmic extension of GBK. + 'gb18030': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + gb18030: function() { return require('./tables/gb18030-ranges.json') }, + }, + + 'chinese': 'gb18030', + + // TODO: Support GB18030 (~27000 chars + whole unicode mapping, cp54936) + // http://icu-project.org/docs/papers/gb18030.html + // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml + // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0 + + // == Korean =============================================================== + // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same. + 'windows949': 'cp949', + '949': 'cp949', + 'cp949': { + type: '_dbcs', + table: function() { return require('./tables/cp949.json') }, + }, + + 'cseuckr': 'cp949', + 'csksc56011987': 'cp949', + 'euckr': 'cp949', + 'isoir149': 'cp949', + 'korean': 'cp949', + 'ksc56011987': 'cp949', + 'ksc56011989': 'cp949', + 'ksc5601': 'cp949', + + + // == Big5/Taiwan/Hong Kong ================================================ + // There are lots of tables for Big5 and cp950. Please see the following links for history: + // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html + // Variations, in roughly number of defined chars: + // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT + // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/ + // * Big5-2003 (Taiwan standard) almost superset of cp950. + // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. 
Not supported by other browsers. + // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard. + // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years. + // Plus, it has 4 combining sequences. + // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299 + // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way. + // Implementations are not consistent within browsers; sometimes labeled as just big5. + // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied. + // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31 + // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s. + // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt + // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt + // + // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder + // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong. + + 'windows950': 'cp950', + '950': 'cp950', + 'cp950': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json') }, + }, + + // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus. + 'big5': 'big5hkscs', + 'big5hkscs': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json').concat(require('./tables/big5-added.json')) }, + encodeSkipVals: [0xa2cc], + }, + + 'cnbig5': 'big5hkscs', + 'csbig5': 'big5hkscs', + 'xxbig5': 'big5hkscs', + +}; diff --git a/node_modules/iconv-lite/encodings/index.js b/node_modules/iconv-lite/encodings/index.js new file mode 100644 index 0000000..f7892fa --- /dev/null +++ b/node_modules/iconv-lite/encodings/index.js @@ -0,0 +1,22 @@ +"use strict" + +// Update this array if you add/rename/remove files in this directory. +// We support Browserify by skipping automatic module discovery and requiring modules directly. +var modules = [ + require("./internal"), + require("./utf16"), + require("./utf7"), + require("./sbcs-codec"), + require("./sbcs-data"), + require("./sbcs-data-generated"), + require("./dbcs-codec"), + require("./dbcs-data"), +]; + +// Put all encoding/alias/codec definitions to single object and export it. +for (var i = 0; i < modules.length; i++) { + var module = modules[i]; + for (var enc in module) + if (Object.prototype.hasOwnProperty.call(module, enc)) + exports[enc] = module[enc]; +} diff --git a/node_modules/iconv-lite/encodings/internal.js b/node_modules/iconv-lite/encodings/internal.js new file mode 100644 index 0000000..a8ae512 --- /dev/null +++ b/node_modules/iconv-lite/encodings/internal.js @@ -0,0 +1,187 @@ +"use strict" + +// Export Node.js internal encodings. + +module.exports = { + // Encodings + utf8: { type: "_internal", bomAware: true}, + cesu8: { type: "_internal", bomAware: true}, + unicode11utf8: "utf8", + + ucs2: { type: "_internal", bomAware: true}, + utf16le: "ucs2", + + binary: { type: "_internal" }, + base64: { type: "_internal" }, + hex: { type: "_internal" }, + + // Codec. 
+ _internal: InternalCodec, +}; + +//------------------------------------------------------------------------------ + +function InternalCodec(codecOptions, iconv) { + this.enc = codecOptions.encodingName; + this.bomAware = codecOptions.bomAware; + + if (this.enc === "base64") + this.encoder = InternalEncoderBase64; + else if (this.enc === "cesu8") { + this.enc = "utf8"; // Use utf8 for decoding. + this.encoder = InternalEncoderCesu8; + + // Add decoder for versions of Node not supporting CESU-8 + if (new Buffer("eda080", 'hex').toString().length == 3) { + this.decoder = InternalDecoderCesu8; + this.defaultCharUnicode = iconv.defaultCharUnicode; + } + } +} + +InternalCodec.prototype.encoder = InternalEncoder; +InternalCodec.prototype.decoder = InternalDecoder; + +//------------------------------------------------------------------------------ + +// We use node.js internal decoder. Its signature is the same as ours. +var StringDecoder = require('string_decoder').StringDecoder; + +if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method. + StringDecoder.prototype.end = function() {}; + + +function InternalDecoder(options, codec) { + StringDecoder.call(this, codec.enc); +} + +InternalDecoder.prototype = StringDecoder.prototype; + + +//------------------------------------------------------------------------------ +// Encoder is mostly trivial + +function InternalEncoder(options, codec) { + this.enc = codec.enc; +} + +InternalEncoder.prototype.write = function(str) { + return new Buffer(str, this.enc); +} + +InternalEncoder.prototype.end = function() { +} + + +//------------------------------------------------------------------------------ +// Except base64 encoder, which must keep its state. + +function InternalEncoderBase64(options, codec) { + this.prevStr = ''; +} + +InternalEncoderBase64.prototype.write = function(str) { + str = this.prevStr + str; + var completeQuads = str.length - (str.length % 4); + this.prevStr = str.slice(completeQuads); + str = str.slice(0, completeQuads); + + return new Buffer(str, "base64"); +} + +InternalEncoderBase64.prototype.end = function() { + return new Buffer(this.prevStr, "base64"); +} + + +//------------------------------------------------------------------------------ +// CESU-8 encoder is also special. + +function InternalEncoderCesu8(options, codec) { +} + +InternalEncoderCesu8.prototype.write = function(str) { + var buf = new Buffer(str.length * 3), bufIdx = 0; + for (var i = 0; i < str.length; i++) { + var charCode = str.charCodeAt(i); + // Naive implementation, but it works because CESU-8 is especially easy + // to convert from UTF-16 (which all JS strings are encoded in). + if (charCode < 0x80) + buf[bufIdx++] = charCode; + else if (charCode < 0x800) { + buf[bufIdx++] = 0xC0 + (charCode >>> 6); + buf[bufIdx++] = 0x80 + (charCode & 0x3f); + } + else { // charCode will always be < 0x10000 in javascript. 
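+ // For example, U+20AC ('€') becomes the three bytes 0xE2 0x82 0xAC:
+ // 0xE0+(0x20AC>>>12), 0x80+((0x20AC>>>6)&0x3f), 0x80+(0x20AC&0x3f).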
+ buf[bufIdx++] = 0xE0 + (charCode >>> 12); + buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f); + buf[bufIdx++] = 0x80 + (charCode & 0x3f); + } + } + return buf.slice(0, bufIdx); +} + +InternalEncoderCesu8.prototype.end = function() { +} + +//------------------------------------------------------------------------------ +// CESU-8 decoder is not implemented in Node v4.0+ + +function InternalDecoderCesu8(options, codec) { + this.acc = 0; + this.contBytes = 0; + this.accBytes = 0; + this.defaultCharUnicode = codec.defaultCharUnicode; +} + +InternalDecoderCesu8.prototype.write = function(buf) { + var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes, + res = ''; + for (var i = 0; i < buf.length; i++) { + var curByte = buf[i]; + if ((curByte & 0xC0) !== 0x80) { // Leading byte + if (contBytes > 0) { // Previous code is invalid + res += this.defaultCharUnicode; + contBytes = 0; + } + + if (curByte < 0x80) { // Single-byte code + res += String.fromCharCode(curByte); + } else if (curByte < 0xE0) { // Two-byte code + acc = curByte & 0x1F; + contBytes = 1; accBytes = 1; + } else if (curByte < 0xF0) { // Three-byte code + acc = curByte & 0x0F; + contBytes = 2; accBytes = 1; + } else { // Four or more are not supported for CESU-8. + res += this.defaultCharUnicode; + } + } else { // Continuation byte + if (contBytes > 0) { // We're waiting for it. + acc = (acc << 6) | (curByte & 0x3f); + contBytes--; accBytes++; + if (contBytes === 0) { + // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80) + if (accBytes === 2 && acc < 0x80 && acc > 0) + res += this.defaultCharUnicode; + else if (accBytes === 3 && acc < 0x800) + res += this.defaultCharUnicode; + else + // Actually add character. + res += String.fromCharCode(acc); + } + } else { // Unexpected continuation byte + res += this.defaultCharUnicode; + } + } + } + this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes; + return res; +} + +InternalDecoderCesu8.prototype.end = function() { + var res = 0; + if (this.contBytes > 0) + res += this.defaultCharUnicode; + return res; +} diff --git a/node_modules/iconv-lite/encodings/sbcs-codec.js b/node_modules/iconv-lite/encodings/sbcs-codec.js new file mode 100644 index 0000000..ca00171 --- /dev/null +++ b/node_modules/iconv-lite/encodings/sbcs-codec.js @@ -0,0 +1,72 @@ +"use strict" + +// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that +// correspond to encoded bytes (if 128 - then lower half is ASCII). + +exports._sbcs = SBCSCodec; +function SBCSCodec(codecOptions, iconv) { + if (!codecOptions) + throw new Error("SBCS codec is called without the data.") + + // Prepare char buffer for decoding. + if (!codecOptions.chars || (codecOptions.chars.length !== 128 && codecOptions.chars.length !== 256)) + throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)"); + + if (codecOptions.chars.length === 128) { + var asciiString = ""; + for (var i = 0; i < 128; i++) + asciiString += String.fromCharCode(i); + codecOptions.chars = asciiString + codecOptions.chars; + } + + this.decodeBuf = new Buffer(codecOptions.chars, 'ucs2'); + + // Encoding buffer. 
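+ // For example, with windows-1251 the 256-char 'chars' string (ASCII half prepended above)
+ // has 'А' (U+0410) at index 0xC0, so the loop below sets encodeBuf[0x0410] = 0xC0.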
+ var encodeBuf = new Buffer(65536); + encodeBuf.fill(iconv.defaultCharSingleByte.charCodeAt(0)); + + for (var i = 0; i < codecOptions.chars.length; i++) + encodeBuf[codecOptions.chars.charCodeAt(i)] = i; + + this.encodeBuf = encodeBuf; +} + +SBCSCodec.prototype.encoder = SBCSEncoder; +SBCSCodec.prototype.decoder = SBCSDecoder; + + +function SBCSEncoder(options, codec) { + this.encodeBuf = codec.encodeBuf; +} + +SBCSEncoder.prototype.write = function(str) { + var buf = new Buffer(str.length); + for (var i = 0; i < str.length; i++) + buf[i] = this.encodeBuf[str.charCodeAt(i)]; + + return buf; +} + +SBCSEncoder.prototype.end = function() { +} + + +function SBCSDecoder(options, codec) { + this.decodeBuf = codec.decodeBuf; +} + +SBCSDecoder.prototype.write = function(buf) { + // Strings are immutable in JS -> we use ucs2 buffer to speed up computations. + var decodeBuf = this.decodeBuf; + var newBuf = new Buffer(buf.length*2); + var idx1 = 0, idx2 = 0; + for (var i = 0; i < buf.length; i++) { + idx1 = buf[i]*2; idx2 = i*2; + newBuf[idx2] = decodeBuf[idx1]; + newBuf[idx2+1] = decodeBuf[idx1+1]; + } + return newBuf.toString('ucs2'); +} + +SBCSDecoder.prototype.end = function() { +} diff --git a/node_modules/iconv-lite/encodings/sbcs-data-generated.js b/node_modules/iconv-lite/encodings/sbcs-data-generated.js new file mode 100644 index 0000000..2308c91 --- /dev/null +++ b/node_modules/iconv-lite/encodings/sbcs-data-generated.js @@ -0,0 +1,451 @@ +"use strict" + +// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script. +module.exports = { + "437": "cp437", + "737": "cp737", + "775": "cp775", + "850": "cp850", + "852": "cp852", + "855": "cp855", + "856": "cp856", + "857": "cp857", + "858": "cp858", + "860": "cp860", + "861": "cp861", + "862": "cp862", + "863": "cp863", + "864": "cp864", + "865": "cp865", + "866": "cp866", + "869": "cp869", + "874": "windows874", + "922": "cp922", + "1046": "cp1046", + "1124": "cp1124", + "1125": "cp1125", + "1129": "cp1129", + "1133": "cp1133", + "1161": "cp1161", + "1162": "cp1162", + "1163": "cp1163", + "1250": "windows1250", + "1251": "windows1251", + "1252": "windows1252", + "1253": "windows1253", + "1254": "windows1254", + "1255": "windows1255", + "1256": "windows1256", + "1257": "windows1257", + "1258": "windows1258", + "28591": "iso88591", + "28592": "iso88592", + "28593": "iso88593", + "28594": "iso88594", + "28595": "iso88595", + "28596": "iso88596", + "28597": "iso88597", + "28598": "iso88598", + "28599": "iso88599", + "28600": "iso885910", + "28601": "iso885911", + "28603": "iso885913", + "28604": "iso885914", + "28605": "iso885915", + "28606": "iso885916", + "windows874": { + "type": "_sbcs", + "chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "win874": "windows874", + "cp874": "windows874", + "windows1250": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬­®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "win1250": "windows1250", + "cp1250": "windows1250", + "windows1251": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬­®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "win1251": "windows1251", + "cp1251": "windows1251", + "windows1252": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ 
¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "win1252": "windows1252", + "cp1252": "windows1252", + "windows1253": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬­®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "win1253": "windows1253", + "cp1253": "windows1253", + "windows1254": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "win1254": "windows1254", + "cp1254": "windows1254", + "windows1255": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹ�ֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "win1255": "windows1255", + "cp1255": "windows1255", + "windows1256": { + "type": "_sbcs", + "chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œ‌‍ں ،¢£¤¥¦§¨©ھ«¬­®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûü‎‏ے" + }, + "win1256": "windows1256", + "cp1256": "windows1256", + "windows1257": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬­®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙" + }, + "win1257": "windows1257", + "cp1257": "windows1257", + "windows1258": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "win1258": "windows1258", + "cp1258": "windows1258", + "iso88591": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28591": "iso88591", + "iso88592": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ą˘Ł¤ĽŚ§¨ŠŞŤŹ­ŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "cp28592": "iso88592", + "iso88593": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ħ˘£¤�Ĥ§¨İŞĞĴ­�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙" + }, + "cp28593": "iso88593", + "iso88594": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĸŖ¤ĨĻ§¨ŠĒĢŦ­Ž¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙" + }, + "cp28594": "iso88594", + "iso88595": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂЃЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ" + }, + "cp28595": "iso88595", + "iso88596": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ���¤�������،­�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������" + }, + "cp28596": "iso88596", + "iso88597": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ‘’£€₯¦§¨©ͺ«¬­�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "cp28597": "iso88597", + "iso88598": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �¢£¤¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "cp28598": "iso88598", + "iso88599": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "cp28599": "iso88599", + "iso885910": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ 
ĄĒĢĪĨĶ§ĻĐŠŦŽ­ŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ" + }, + "cp28600": "iso885910", + "iso885911": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "cp28601": "iso885911", + "iso885913": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ”¢£¤„¦§Ø©Ŗ«¬­®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’" + }, + "cp28603": "iso885913", + "iso885914": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ­®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ" + }, + "cp28604": "iso885914", + "iso885915": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥Š§š©ª«¬­®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28605": "iso885915", + "iso885916": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄąŁ€„Š§š©Ș«Ź­źŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ" + }, + "cp28606": "iso885916", + "cp437": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm437": "cp437", + "csibm437": "cp437", + "cp737": { + "type": "_sbcs", + "chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ " + }, + "ibm737": "cp737", + "csibm737": "cp737", + "cp775": { + "type": "_sbcs", + "chars": "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£ØפĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’­±“¾¶§÷„°∙·¹³²■ " + }, + "ibm775": "cp775", + "csibm775": "cp775", + "cp850": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm850": "cp850", + "csibm850": "cp850", + "cp852": { + "type": "_sbcs", + "chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´­˝˛ˇ˘§÷¸°¨˙űŘř■ " + }, + "ibm852": "cp852", + "csibm852": "cp852", + "cp855": { + "type": "_sbcs", + "chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№­ыЫзЗшШэЭщЩчЧ§■ " + }, + "ibm855": "cp855", + "csibm855": "cp855", + "cp856": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm856": "cp856", + "csibm856": "cp856", + "cp857": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´­±�¾¶§÷¸°¨·¹³²■ " + }, + "ibm857": "cp857", + "csibm857": "cp857", + "cp858": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm858": "cp858", + "csibm858": "cp858", + "cp860": { + "type": "_sbcs", + "chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm860": "cp860", + "csibm860": "cp860", + "cp861": { + "type": "_sbcs", + "chars": 
"ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm861": "cp861", + "csibm861": "cp861", + "cp862": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm862": "cp862", + "csibm862": "cp862", + "cp863": { + "type": "_sbcs", + "chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm863": "cp863", + "csibm863": "cp863", + "cp864": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ­ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�" + }, + "ibm864": "cp864", + "csibm864": "cp864", + "cp865": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm865": "cp865", + "csibm865": "cp865", + "cp866": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ " + }, + "ibm866": "cp866", + "csibm866": "cp866", + "cp869": { + "type": "_sbcs", + "chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄­±υφχ§ψ΅°¨ωϋΰώ■ " + }, + "ibm869": "cp869", + "csibm869": "cp869", + "cp922": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖ×ØÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ" + }, + "ibm922": "cp922", + "csibm922": "cp922", + "cp1046": { + "type": "_sbcs", + "chars": "ﺈ×÷ﹱˆ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،­ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�" + }, + "ibm1046": "cp1046", + "csibm1046": "cp1046", + "cp1124": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂҐЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ" + }, + "ibm1124": "cp1124", + "csibm1124": "cp1124", + "cp1125": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ " + }, + "ibm1125": "cp1125", + "csibm1125": "cp1125", + "cp1129": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1129": "cp1129", + "csibm1129": "cp1129", + "cp1133": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�" + }, + "ibm1133": "cp1133", + "csibm1133": "cp1133", + "cp1161": { + "type": "_sbcs", + "chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ " + }, + "ibm1161": "cp1161", + "csibm1161": "cp1161", + "cp1162": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, 
+ "ibm1162": "cp1162", + "csibm1162": "cp1162", + "cp1163": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1163": "cp1163", + "csibm1163": "cp1163", + "maccroatian": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈Ć«Č… ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ" + }, + "maccyrillic": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "macgreek": { + "type": "_sbcs", + "chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦­ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�" + }, + "maciceland": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüÝ°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macroman": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macromania": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macthai": { + "type": "_sbcs", + "chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู​–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����" + }, + "macturkish": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macukraine": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "koi8r": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8u": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8ru": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8t": { + "type": "_sbcs", + "chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬­®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "armscii8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�" + }, + "rk1048": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬­®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "tcvn": { + "type": "_sbcs", + "chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ" + }, + "georgianacademy": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ 
¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "georgianps": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "pt154": { + "type": "_sbcs", + "chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "viscii": { + "type": "_sbcs", + "chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ" + }, + "iso646cn": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "iso646jp": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "hproman8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�" + }, + "macintosh": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "ascii": { + "type": "_sbcs", + "chars": "��������������������������������������������������������������������������������������������������������������������������������" + }, + "tis620": { + "type": "_sbcs", + "chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + } +} \ No newline at end of file diff --git a/node_modules/iconv-lite/encodings/sbcs-data.js b/node_modules/iconv-lite/encodings/sbcs-data.js new file mode 100644 index 0000000..2058a71 --- /dev/null +++ b/node_modules/iconv-lite/encodings/sbcs-data.js @@ -0,0 +1,169 @@ +"use strict" + +// Manually added data to be used by sbcs codec in addition to generated one. + +module.exports = { + // Not supported by iconv, not sure why. + "10029": "maccenteuro", + "maccenteuro": { + "type": "_sbcs", + "chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ" + }, + + "808": "cp808", + "ibm808": "cp808", + "cp808": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ " + }, + + // Aliases of generated encodings. 
+ "ascii8bit": "ascii", + "usascii": "ascii", + "ansix34": "ascii", + "ansix341968": "ascii", + "ansix341986": "ascii", + "csascii": "ascii", + "cp367": "ascii", + "ibm367": "ascii", + "isoir6": "ascii", + "iso646us": "ascii", + "iso646irv": "ascii", + "us": "ascii", + + "latin1": "iso88591", + "latin2": "iso88592", + "latin3": "iso88593", + "latin4": "iso88594", + "latin5": "iso88599", + "latin6": "iso885910", + "latin7": "iso885913", + "latin8": "iso885914", + "latin9": "iso885915", + "latin10": "iso885916", + + "csisolatin1": "iso88591", + "csisolatin2": "iso88592", + "csisolatin3": "iso88593", + "csisolatin4": "iso88594", + "csisolatincyrillic": "iso88595", + "csisolatinarabic": "iso88596", + "csisolatingreek" : "iso88597", + "csisolatinhebrew": "iso88598", + "csisolatin5": "iso88599", + "csisolatin6": "iso885910", + + "l1": "iso88591", + "l2": "iso88592", + "l3": "iso88593", + "l4": "iso88594", + "l5": "iso88599", + "l6": "iso885910", + "l7": "iso885913", + "l8": "iso885914", + "l9": "iso885915", + "l10": "iso885916", + + "isoir14": "iso646jp", + "isoir57": "iso646cn", + "isoir100": "iso88591", + "isoir101": "iso88592", + "isoir109": "iso88593", + "isoir110": "iso88594", + "isoir144": "iso88595", + "isoir127": "iso88596", + "isoir126": "iso88597", + "isoir138": "iso88598", + "isoir148": "iso88599", + "isoir157": "iso885910", + "isoir166": "tis620", + "isoir179": "iso885913", + "isoir199": "iso885914", + "isoir203": "iso885915", + "isoir226": "iso885916", + + "cp819": "iso88591", + "ibm819": "iso88591", + + "cyrillic": "iso88595", + + "arabic": "iso88596", + "arabic8": "iso88596", + "ecma114": "iso88596", + "asmo708": "iso88596", + + "greek" : "iso88597", + "greek8" : "iso88597", + "ecma118" : "iso88597", + "elot928" : "iso88597", + + "hebrew": "iso88598", + "hebrew8": "iso88598", + + "turkish": "iso88599", + "turkish8": "iso88599", + + "thai": "iso885911", + "thai8": "iso885911", + + "celtic": "iso885914", + "celtic8": "iso885914", + "isoceltic": "iso885914", + + "tis6200": "tis620", + "tis62025291": "tis620", + "tis62025330": "tis620", + + "10000": "macroman", + "10006": "macgreek", + "10007": "maccyrillic", + "10079": "maciceland", + "10081": "macturkish", + + "cspc8codepage437": "cp437", + "cspc775baltic": "cp775", + "cspc850multilingual": "cp850", + "cspcp852": "cp852", + "cspc862latinhebrew": "cp862", + "cpgr": "cp869", + + "msee": "cp1250", + "mscyrl": "cp1251", + "msansi": "cp1252", + "msgreek": "cp1253", + "msturk": "cp1254", + "mshebr": "cp1255", + "msarab": "cp1256", + "winbaltrim": "cp1257", + + "cp20866": "koi8r", + "20866": "koi8r", + "ibm878": "koi8r", + "cskoi8r": "koi8r", + + "cp21866": "koi8u", + "21866": "koi8u", + "ibm1168": "koi8u", + + "strk10482002": "rk1048", + + "tcvn5712": "tcvn", + "tcvn57121": "tcvn", + + "gb198880": "iso646cn", + "cn": "iso646cn", + + "csiso14jisc6220ro": "iso646jp", + "jisc62201969ro": "iso646jp", + "jp": "iso646jp", + + "cshproman8": "hproman8", + "r8": "hproman8", + "roman8": "hproman8", + "xroman8": "hproman8", + "ibm1051": "hproman8", + + "mac": "macintosh", + "csmacintosh": "macintosh", +}; + diff --git a/node_modules/iconv-lite/encodings/tables/big5-added.json b/node_modules/iconv-lite/encodings/tables/big5-added.json new file mode 100644 index 0000000..3c3d3c2 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/big5-added.json @@ -0,0 +1,122 @@ +[ +["8740","䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻"], +["8767","綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬"], +["87a1","𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋"], 
+["8840","㇀",4,"𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒ࿿Ê̄Ế࿿Ê̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ"], +["88a1","ǜü࿿ê̄ế࿿ê̌ềêɡ⏚⏛"], +["8940","𪎩𡅅"], +["8943","攊"], +["8946","丽滝鵎釟"], +["894c","𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮"], +["89a1","琑糼緍楆竉刧"], +["89ab","醌碸酞肼"], +["89b0","贋胶𠧧"], +["89b5","肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁"], +["89c1","溚舾甙"], +["89c5","䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅"], +["8a40","𧶄唥"], +["8a43","𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓"], +["8a64","𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕"], +["8a76","䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯"], +["8aa1","𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱"], +["8aac","䠋𠆩㿺塳𢶍"], +["8ab2","𤗈𠓼𦂗𠽌𠶖啹䂻䎺"], +["8abb","䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃"], +["8ac9","𪘁𠸉𢫏𢳉"], +["8ace","𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻"], +["8adf","𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌"], +["8af6","𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭"], +["8b40","𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹"], +["8b55","𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑"], +["8ba1","𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁"], +["8bde","𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢"], +["8c40","倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋"], +["8ca1","𣏹椙橃𣱣泿"], +["8ca7","爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚"], +["8cc9","顨杫䉶圽"], +["8cce","藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶"], +["8ce6","峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻"], +["8d40","𠮟"], +["8d42","𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱"], +["8da1","㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘"], +["8e40","𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎"], +["8ea1","繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇礶𦋐駡羗𦍑羣𡙡𠁨䕜𣝦䔃𨌺翺𦒉者耈耝耨耯𪂇𦳃耻耼聡𢜔䦉𦘦𣷣𦛨朥肧𨩈脇脚墰𢛶汿𦒘𤾸擧𡒊舘𡡞橓𤩥𤪕䑺舩𠬍𦩒𣵾俹𡓽蓢荢𦬊𤦧𣔰𡝳𣷸芪椛芳䇛"], +["8f40","蕋苐茚𠸖𡞴㛁𣅽𣕚艻苢茘𣺋𦶣𦬅𦮗𣗎㶿茝嗬莅䔋𦶥莬菁菓㑾𦻔橗蕚㒖𦹂𢻯葘𥯤葱㷓䓤檧葊𣲵祘蒨𦮖𦹷𦹃蓞萏莑䒠蒓蓤𥲑䉀𥳀䕃蔴嫲𦺙䔧蕳䔖枿蘖"], +["8fa1","𨘥𨘻藁𧂈蘂𡖂𧃍䕫䕪蘨㙈𡢢号𧎚虾蝱𪃸蟮𢰧螱蟚蠏噡虬桖䘏衅衆𧗠𣶹𧗤衞袜䙛袴袵揁装睷𧜏覇覊覦覩覧覼𨨥觧𧤤𧪽誜瞓釾誐𧩙竩𧬺𣾏䜓𧬸煼謌謟𥐰𥕥謿譌譍誩𤩺讐讛誯𡛟䘕衏貛𧵔𧶏貫㜥𧵓賖𧶘𧶽贒贃𡤐賛灜贑𤳉㻐起"], +["9040","趩𨀂𡀔𤦊㭼𨆼𧄌竧躭躶軃鋔輙輭𨍥𨐒辥錃𪊟𠩐辳䤪𨧞𨔽𣶻廸𣉢迹𪀔𨚼𨔁𢌥㦀𦻗逷𨔼𧪾遡𨕬𨘋邨𨜓郄𨛦邮都酧㫰醩釄粬𨤳𡺉鈎沟鉁鉢𥖹銹𨫆𣲛𨬌𥗛"], +["90a1","𠴱錬鍫𨫡𨯫炏嫃𨫢𨫥䥥鉄𨯬𨰹𨯿鍳鑛躼閅閦鐦閠濶䊹𢙺𨛘𡉼𣸮䧟氜陻隖䅬隣𦻕懚隶磵𨫠隽双䦡𦲸𠉴𦐐𩂯𩃥𤫑𡤕𣌊霱虂霶䨏䔽䖅𤫩灵孁霛靜𩇕靗孊𩇫靟鐥僐𣂷𣂼鞉鞟鞱鞾韀韒韠𥑬韮琜𩐳響韵𩐝𧥺䫑頴頳顋顦㬎𧅵㵑𠘰𤅜"], +["9140","𥜆飊颷飈飇䫿𦴧𡛓喰飡飦飬鍸餹𤨩䭲𩡗𩤅駵騌騻騐驘𥜥㛄𩂱𩯕髠髢𩬅髴䰎鬔鬭𨘀倴鬴𦦨㣃𣁽魐魀𩴾婅𡡣鮎𤉋鰂鯿鰌𩹨鷔𩾷𪆒𪆫𪃡𪄣𪇟鵾鶃𪄴鸎梈"], +["91a1","鷄𢅛𪆓𪈠𡤻𪈳鴹𪂹𪊴麐麕麞麢䴴麪麯𤍤黁㭠㧥㴝伲㞾𨰫鼂鼈䮖鐤𦶢鼗鼖鼹嚟嚊齅馸𩂋韲葿齢齩竜龎爖䮾𤥵𤦻煷𤧸𤍈𤩑玞𨯚𡣺禟𨥾𨸶鍩鏳𨩄鋬鎁鏋𨥬𤒹爗㻫睲穃烐𤑳𤏸煾𡟯炣𡢾𣖙㻇𡢅𥐯𡟸㜢𡛻𡠹㛡𡝴𡣑𥽋㜣𡛀坛𤨥𡏾𡊨"], +["9240","𡏆𡒶蔃𣚦蔃葕𤦔𧅥𣸱𥕜𣻻𧁒䓴𣛮𩦝𦼦柹㜳㰕㷧塬𡤢栐䁗𣜿𤃡𤂋𤄏𦰡哋嚞𦚱嚒𠿟𠮨𠸍鏆𨬓鎜仸儫㠙𤐶亼𠑥𠍿佋侊𥙑婨𠆫𠏋㦙𠌊𠐔㐵伩𠋀𨺳𠉵諚𠈌亘"], +["92a1","働儍侢伃𤨎𣺊佂倮偬傁俌俥偘僼兙兛兝兞湶𣖕𣸹𣺿浲𡢄𣺉冨凃𠗠䓝𠒣𠒒𠒑赺𨪜𠜎剙劤𠡳勡鍮䙺熌𤎌𠰠𤦬𡃤槑𠸝瑹㻞璙琔瑖玘䮎𤪼𤂍叐㖄爏𤃉喴𠍅响𠯆圝鉝雴鍦埝垍坿㘾壋媙𨩆𡛺𡝯𡜐娬妸銏婾嫏娒𥥆𡧳𡡡𤊕㛵洅瑃娡𥺃"], +["9340","媁𨯗𠐓鏠璌𡌃焅䥲鐈𨧻鎽㞠尞岞幞幈𡦖𡥼𣫮廍孏𡤃𡤄㜁𡢠㛝𡛾㛓脪𨩇𡶺𣑲𨦨弌弎𡤧𡞫婫𡜻孄蘔𧗽衠恾𢡠𢘫忛㺸𢖯𢖾𩂈𦽳懀𠀾𠁆𢘛憙憘恵𢲛𢴇𤛔𩅍"], +["93a1","摱𤙥𢭪㨩𢬢𣑐𩣪𢹸挷𪑛撶挱揑𤧣𢵧护𢲡搻敫楲㯴𣂎𣊭𤦉𣊫唍𣋠𡣙𩐿曎𣊉𣆳㫠䆐𥖄𨬢𥖏𡛼𥕛𥐥磮𣄃𡠪𣈴㑤𣈏𣆂𤋉暎𦴤晫䮓昰𧡰𡷫晣𣋒𣋡昞𥡲㣑𣠺𣞼㮙𣞢𣏾瓐㮖枏𤘪梶栞㯄檾㡣𣟕𤒇樳橒櫉欅𡤒攑梘橌㯗橺歗𣿀𣲚鎠鋲𨯪𨫋"], +["9440","銉𨀞𨧜鑧涥漋𤧬浧𣽿㶏渄𤀼娽渊塇洤硂焻𤌚𤉶烱牐犇犔𤞏𤜥兹𤪤𠗫瑺𣻸𣙟𤩊𤤗𥿡㼆㺱𤫟𨰣𣼵悧㻳瓌琼鎇琷䒟𦷪䕑疃㽣𤳙𤴆㽘畕癳𪗆㬙瑨𨫌𤦫𤦎㫻"], +["94a1","㷍𤩎㻿𤧅𤣳釺圲鍂𨫣𡡤僟𥈡𥇧睸𣈲眎眏睻𤚗𣞁㩞𤣰琸璛㺿𤪺𤫇䃈𤪖𦆮錇𥖁砞碍碈磒珐祙𧝁𥛣䄎禛蒖禥樭𣻺稺秴䅮𡛦䄲鈵秱𠵌𤦌𠊙𣶺𡝮㖗啫㕰㚪𠇔𠰍竢婙𢛵𥪯𥪜娍𠉛磰娪𥯆竾䇹籝籭䈑𥮳𥺼𥺦糍𤧹𡞰粎籼粮檲緜縇緓罎𦉡"], +["9540","𦅜𧭈綗𥺂䉪𦭵𠤖柖𠁎𣗏埄𦐒𦏸𤥢翝笧𠠬𥫩𥵃笌𥸎駦虅驣樜𣐿㧢𤧷𦖭騟𦖠蒀𧄧𦳑䓪脷䐂胆脉腂𦞴飃𦩂艢艥𦩑葓𦶧蘐𧈛媆䅿𡡀嬫𡢡嫤𡣘蚠蜨𣶏蠭𧐢娂"], +["95a1","衮佅袇袿裦襥襍𥚃襔𧞅𧞄𨯵𨯙𨮜𨧹㺭蒣䛵䛏㟲訽訜𩑈彍鈫𤊄旔焩烄𡡅鵭貟賩𧷜妚矃姰䍮㛔踪躧𤰉輰轊䋴汘澻𢌡䢛潹溋𡟚鯩㚵𤤯邻邗啱䤆醻鐄𨩋䁢𨫼鐧𨰝𨰻蓥訫閙閧閗閖𨴴瑅㻂𤣿𤩂𤏪㻧𣈥随𨻧𨹦𨹥㻌𤧭𤩸𣿮琒瑫㻼靁𩂰"], +["9640","桇䨝𩂓𥟟靝鍨𨦉𨰦𨬯𦎾銺嬑譩䤼珹𤈛鞛靱餸𠼦巁𨯅𤪲頟𩓚鋶𩗗釥䓀𨭐𤩧𨭤飜𨩅㼀鈪䤥萔餻饍𧬆㷽馛䭯馪驜𨭥𥣈檏騡嫾騯𩣱䮐𩥈馼䮽䮗鍽塲𡌂堢𤦸"], +["96a1","𡓨硄𢜟𣶸棅㵽鑘㤧慐𢞁𢥫愇鱏鱓鱻鰵鰐魿鯏𩸭鮟𪇵𪃾鴡䲮𤄄鸘䲰鴌𪆴𪃭𪃳𩤯鶥蒽𦸒𦿟𦮂藼䔳𦶤𦺄𦷰萠藮𦸀𣟗𦁤秢𣖜𣙀䤭𤧞㵢鏛銾鍈𠊿碹鉷鑍俤㑀遤𥕝砽硔碶硋𡝗𣇉𤥁㚚佲濚濙瀞瀞吔𤆵垻壳垊鴖埗焴㒯𤆬燫𦱀𤾗嬨𡞵𨩉"], +["9740","愌嫎娋䊼𤒈㜬䭻𨧼鎻鎸𡣖𠼝葲𦳀𡐓𤋺𢰦𤏁妔𣶷𦝁綨𦅛𦂤𤦹𤦋𨧺鋥珢㻩璴𨭣𡢟㻡𤪳櫘珳珻㻖𤨾𤪔𡟙𤩦𠎧𡐤𤧥瑈𤤖炥𤥶銄珦鍟𠓾錱𨫎𨨖鎆𨯧𥗕䤵𨪂煫"], +["97a1","𤥃𠳿嚤𠘚𠯫𠲸唂秄𡟺緾𡛂𤩐𡡒䔮鐁㜊𨫀𤦭妰𡢿𡢃𧒄媡㛢𣵛㚰鉟婹𨪁𡡢鍴㳍𠪴䪖㦊僴㵩㵌𡎜煵䋻𨈘渏𩃤䓫浗𧹏灧沯㳖𣿭𣸭渂漌㵯𠏵畑㚼㓈䚀㻚䡱姄鉮䤾轁𨰜𦯀堒埈㛖𡑒烾𤍢𤩱𢿣𡊰𢎽梹楧𡎘𣓥𧯴𣛟𨪃𣟖𣏺𤲟樚𣚭𦲷萾䓟䓎"], +["9840","𦴦𦵑𦲂𦿞漗𧄉茽𡜺菭𦲀𧁓𡟛妉媂𡞳婡婱𡤅𤇼㜭姯𡜼㛇熎鎐暚𤊥婮娫𤊓樫𣻹𧜶𤑛𤋊焝𤉙𨧡侰𦴨峂𤓎𧹍𤎽樌𤉖𡌄炦焳𤏩㶥泟勇𤩏繥姫崯㷳彜𤩝𡟟綤萦"], +["98a1","咅𣫺𣌀𠈔坾𠣕𠘙㿥𡾞𪊶瀃𩅛嵰玏糓𨩙𩐠俈翧狍猐𧫴猸猹𥛶獁獈㺩𧬘遬燵𤣲珡臶㻊県㻑沢国琙琞琟㻢㻰㻴㻺瓓㼎㽓畂畭畲疍㽼痈痜㿀癍㿗癴㿜発𤽜熈嘣覀塩䀝睃䀹条䁅㗛瞘䁪䁯属瞾矋売砘点砜䂨砹硇硑硦葈𥔵礳栃礲䄃"], +["9940","䄉禑禙辻稆込䅧窑䆲窼艹䇄竏竛䇏両筢筬筻簒簛䉠䉺类粜䊌粸䊔糭输烀𠳏総緔緐緽羮羴犟䎗耠耥笹耮耱联㷌垴炠肷胩䏭脌猪脎脒畠脔䐁㬹腖腙腚"], 
+["99a1","䐓堺腼膄䐥膓䐭膥埯臁臤艔䒏芦艶苊苘苿䒰荗险榊萅烵葤惣蒈䔄蒾蓡蓸蔐蔸蕒䔻蕯蕰藠䕷虲蚒蚲蛯际螋䘆䘗袮裿褤襇覑𧥧訩訸誔誴豑賔賲贜䞘塟跃䟭仮踺嗘坔蹱嗵躰䠷軎転軤軭軲辷迁迊迌逳駄䢭飠鈓䤞鈨鉘鉫銱銮銿"], +["9a40","鋣鋫鋳鋴鋽鍃鎄鎭䥅䥑麿鐗匁鐝鐭鐾䥪鑔鑹锭関䦧间阳䧥枠䨤靀䨵鞲韂噔䫤惨颹䬙飱塄餎餙冴餜餷饂饝饢䭰駅䮝騼鬏窃魩鮁鯝鯱鯴䱭鰠㝯𡯂鵉鰺"], +["9aa1","黾噐鶓鶽鷀鷼银辶鹻麬麱麽黆铜黢黱黸竈齄𠂔𠊷𠎠椚铃妬𠓗塀铁㞹𠗕𠘕𠙶𡚺块煳𠫂𠫍𠮿呪吆𠯋咞𠯻𠰻𠱓𠱥𠱼惧𠲍噺𠲵𠳝𠳭𠵯𠶲𠷈楕鰯螥𠸄𠸎𠻗𠾐𠼭𠹳尠𠾼帋𡁜𡁏𡁶朞𡁻𡂈𡂖㙇𡂿𡃓𡄯𡄻卤蒭𡋣𡍵𡌶讁𡕷𡘙𡟃𡟇乸炻𡠭𡥪"], +["9b40","𡨭𡩅𡰪𡱰𡲬𡻈拃𡻕𡼕熘桕𢁅槩㛈𢉼𢏗𢏺𢜪𢡱𢥏苽𢥧𢦓𢫕覥𢫨辠𢬎鞸𢬿顇骽𢱌"], +["9b62","𢲈𢲷𥯨𢴈𢴒𢶷𢶕𢹂𢽴𢿌𣀳𣁦𣌟𣏞徱晈暿𧩹𣕧𣗳爁𤦺矗𣘚𣜖纇𠍆墵朎"], +["9ba1","椘𣪧𧙗𥿢𣸑𣺹𧗾𢂚䣐䪸𤄙𨪚𤋮𤌍𤀻𤌴𤎖𤩅𠗊凒𠘑妟𡺨㮾𣳿𤐄𤓖垈𤙴㦛𤜯𨗨𩧉㝢𢇃譞𨭎駖𤠒𤣻𤨕爉𤫀𠱸奥𤺥𤾆𠝹軚𥀬劏圿煱𥊙𥐙𣽊𤪧喼𥑆𥑮𦭒釔㑳𥔿𧘲𥕞䜘𥕢𥕦𥟇𤤿𥡝偦㓻𣏌惞𥤃䝼𨥈𥪮𥮉𥰆𡶐垡煑澶𦄂𧰒遖𦆲𤾚譢𦐂𦑊"], +["9c40","嵛𦯷輶𦒄𡤜諪𤧶𦒈𣿯𦔒䯀𦖿𦚵𢜛鑥𥟡憕娧晉侻嚹𤔡𦛼乪𤤴陖涏𦲽㘘襷𦞙𦡮𦐑𦡞營𦣇筂𩃀𠨑𦤦鄄𦤹穅鷰𦧺騦𦨭㙟𦑩𠀡禃𦨴𦭛崬𣔙菏𦮝䛐𦲤画补𦶮墶"], +["9ca1","㜜𢖍𧁋𧇍㱔𧊀𧊅銁𢅺𧊋錰𧋦𤧐氹钟𧑐𠻸蠧裵𢤦𨑳𡞱溸𤨪𡠠㦤㚹尐秣䔿暶𩲭𩢤襃𧟌𧡘囖䃟𡘊㦡𣜯𨃨𡏅熭荦𧧝𩆨婧䲷𧂯𨦫𧧽𧨊𧬋𧵦𤅺筃祾𨀉澵𪋟樃𨌘厢𦸇鎿栶靝𨅯𨀣𦦵𡏭𣈯𨁈嶅𨰰𨂃圕頣𨥉嶫𤦈斾槕叒𤪥𣾁㰑朶𨂐𨃴𨄮𡾡𨅏"], +["9d40","𨆉𨆯𨈚𨌆𨌯𨎊㗊𨑨𨚪䣺揦𨥖砈鉕𨦸䏲𨧧䏟𨧨𨭆𨯔姸𨰉輋𨿅𩃬筑𩄐𩄼㷷𩅞𤫊运犏嚋𩓧𩗩𩖰𩖸𩜲𩣑𩥉𩥪𩧃𩨨𩬎𩵚𩶛纟𩻸𩼣䲤镇𪊓熢𪋿䶑递𪗋䶜𠲜达嗁"], +["9da1","辺𢒰边𤪓䔉繿潖檱仪㓤𨬬𧢝㜺躀𡟵𨀤𨭬𨮙𧨾𦚯㷫𧙕𣲷𥘵𥥖亚𥺁𦉘嚿𠹭踎孭𣺈𤲞揞拐𡟶𡡻攰嘭𥱊吚𥌑㷆𩶘䱽嘢嘞罉𥻘奵𣵀蝰东𠿪𠵉𣚺脗鵞贘瘻鱅癎瞹鍅吲腈苷嘥脲萘肽嗪祢噃吖𠺝㗎嘅嗱曱𨋢㘭甴嗰喺咗啲𠱁𠲖廐𥅈𠹶𢱢"], +["9e40","𠺢麫絚嗞𡁵抝靭咔賍燶酶揼掹揾啩𢭃鱲𢺳冚㓟𠶧冧呍唞唓癦踭𦢊疱肶蠄螆裇膶萜𡃁䓬猄𤜆宐茋𦢓噻𢛴𧴯𤆣𧵳𦻐𧊶酰𡇙鈈𣳼𪚩𠺬𠻹牦𡲢䝎𤿂𧿹𠿫䃺"], +["9ea1","鱝攟𢶠䣳𤟠𩵼𠿬𠸊恢𧖣𠿭"], +["9ead","𦁈𡆇熣纎鵐业丄㕷嬍沲卧㚬㧜卽㚥𤘘墚𤭮舭呋垪𥪕𠥹"], +["9ec5","㩒𢑥獴𩺬䴉鯭𣳾𩼰䱛𤾩𩖞𩿞葜𣶶𧊲𦞳𣜠挮紥𣻷𣸬㨪逈勌㹴㙺䗩𠒎癀嫰𠺶硺𧼮墧䂿噼鮋嵴癔𪐴麅䳡痹㟻愙𣃚𤏲"], +["9ef5","噝𡊩垧𤥣𩸆刴𧂮㖭汊鵼"], +["9f40","籖鬹埞𡝬屓擓𩓐𦌵𧅤蚭𠴨𦴢𤫢𠵱"], +["9f4f","凾𡼏嶎霃𡷑麁遌笟鬂峑箣扨挵髿篏鬪籾鬮籂粆鰕篼鬉鼗鰛𤤾齚啳寃俽麘俲剠㸆勑坧偖妷帒韈鶫轜呩鞴饀鞺匬愰"], +["9fa1","椬叚鰊鴂䰻陁榀傦畆𡝭駚剳"], +["9fae","酙隁酜"], +["9fb2","酑𨺗捿𦴣櫊嘑醎畺抅𠏼獏籰𥰡𣳽"], +["9fc1","𤤙盖鮝个𠳔莾衂"], +["9fc9","届槀僭坺刟巵从氱𠇲伹咜哚劚趂㗾弌㗳"], +["9fdb","歒酼龥鮗頮颴骺麨麄煺笔"], +["9fe7","毺蠘罸"], +["9feb","嘠𪙊蹷齓"], +["9ff0","跔蹏鸜踁抂𨍽踨蹵竓𤩷稾磘泪詧瘇"], +["a040","𨩚鼦泎蟖痃𪊲硓咢贌狢獱謭猂瓱賫𤪻蘯徺袠䒷"], +["a055","𡠻𦸅"], +["a058","詾𢔛"], +["a05b","惽癧髗鵄鍮鮏蟵"], +["a063","蠏賷猬霡鮰㗖犲䰇籑饊𦅙慙䰄麖慽"], +["a073","坟慯抦戹拎㩜懢厪𣏵捤栂㗒"], +["a0a1","嵗𨯂迚𨸹"], +["a0a6","僙𡵆礆匲阸𠼻䁥"], +["a0ae","矾"], +["a0b0","糂𥼚糚稭聦聣絍甅瓲覔舚朌聢𧒆聛瓰脃眤覉𦟌畓𦻑螩蟎臈螌詉貭譃眫瓸蓚㘵榲趦"], +["a0d4","覩瑨涹蟁𤀑瓧㷛煶悤憜㳑煢恷"], +["a0e2","罱𨬭牐惩䭾删㰘𣳇𥻗𧙖𥔱𡥄𡋾𩤃𦷜𧂭峁𦆭𨨏𣙷𠃮𦡆𤼎䕢嬟𦍌齐麦𦉫"], +["a3c0","␀",31,"␡"], +["c6a1","①",9,"⑴",9,"ⅰ",9,"丶丿亅亠冂冖冫勹匸卩厶夊宀巛⼳广廴彐彡攴无疒癶辵隶¨ˆヽヾゝゞ〃仝々〆〇ー[]✽ぁ",23], +["c740","す",58,"ァアィイ"], +["c7a1","ゥ",81,"А",5,"ЁЖ",4], +["c840","Л",26,"ёж",25,"⇧↸↹㇏𠃌乚𠂊刂䒑"], +["c8a1","龰冈龱𧘇"], +["c8cd","¬¦'"㈱№℡゛゜⺀⺄⺆⺇⺈⺊⺌⺍⺕⺜⺝⺥⺧⺪⺬⺮⺶⺼⺾⻆⻊⻌⻍⻏⻖⻗⻞⻣"], +["c8f5","ʃɐɛɔɵœøŋʊɪ"], +["f9fe","■"], +["fa40","𠕇鋛𠗟𣿅蕌䊵珯况㙉𤥂𨧤鍄𡧛苮𣳈砼杄拟𤤳𨦪𠊠𦮳𡌅侫𢓭倈𦴩𧪄𣘀𤪱𢔓倩𠍾徤𠎀𠍇滛𠐟偽儁㑺儎顬㝃萖𤦤𠒇兠𣎴兪𠯿𢃼𠋥𢔰𠖎𣈳𡦃宂蝽𠖳𣲙冲冸"], +["faa1","鴴凉减凑㳜凓𤪦决凢卂凭菍椾𣜭彻刋刦刼劵剗劔効勅簕蕂勠蘍𦬓包𨫞啉滙𣾀𠥔𣿬匳卄𠯢泋𡜦栛珕恊㺪㣌𡛨燝䒢卭却𨚫卾卿𡖖𡘓矦厓𨪛厠厫厮玧𥝲㽙玜叁叅汉义埾叙㪫𠮏叠𣿫𢶣叶𠱷吓灹唫晗浛呭𦭓𠵴啝咏咤䞦𡜍𠻝㶴𠵍"], +["fb40","𨦼𢚘啇䳭启琗喆喩嘅𡣗𤀺䕒𤐵暳𡂴嘷曍𣊊暤暭噍噏磱囱鞇叾圀囯园𨭦㘣𡉏坆𤆥汮炋坂㚱𦱾埦𡐖堃𡑔𤍣堦𤯵塜墪㕡壠壜𡈼壻寿坃𪅐𤉸鏓㖡够梦㛃湙"], +["fba1","𡘾娤啓𡚒蔅姉𠵎𦲁𦴪𡟜姙𡟻𡞲𦶦浱𡠨𡛕姹𦹅媫婣㛦𤦩婷㜈媖瑥嫓𦾡𢕔㶅𡤑㜲𡚸広勐孶斈孼𧨎䀄䡝𠈄寕慠𡨴𥧌𠖥寳宝䴐尅𡭄尓珎尔𡲥𦬨屉䣝岅峩峯嶋𡷹𡸷崐崘嵆𡺤岺巗苼㠭𤤁𢁉𢅳芇㠶㯂帮檊幵幺𤒼𠳓厦亷廐厨𡝱帉廴𨒂"], +["fc40","廹廻㢠廼栾鐛弍𠇁弢㫞䢮𡌺强𦢈𢏐彘𢑱彣鞽𦹮彲鍀𨨶徧嶶㵟𥉐𡽪𧃸𢙨釖𠊞𨨩怱暅𡡷㥣㷇㘹垐𢞴祱㹀悞悤悳𤦂𤦏𧩓璤僡媠慤萤慂慈𦻒憁凴𠙖憇宪𣾷"], +["fca1","𢡟懓𨮝𩥝懐㤲𢦀𢣁怣慜攞掋𠄘担𡝰拕𢸍捬𤧟㨗搸揸𡎎𡟼撐澊𢸶頔𤂌𥜝擡擥鑻㩦携㩗敍漖𤨨𤨣斅敭敟𣁾斵𤥀䬷旑䃘𡠩无旣忟𣐀昘𣇷𣇸晄𣆤𣆥晋𠹵晧𥇦晳晴𡸽𣈱𨗴𣇈𥌓矅𢣷馤朂𤎜𤨡㬫槺𣟂杞杧杢𤇍𩃭柗䓩栢湐鈼栁𣏦𦶠桝"], +["fd40","𣑯槡樋𨫟楳棃𣗍椁椀㴲㨁𣘼㮀枬楡𨩊䋼椶榘㮡𠏉荣傐槹𣙙𢄪橅𣜃檝㯳枱櫈𩆜㰍欝𠤣惞欵歴𢟍溵𣫛𠎵𡥘㝀吡𣭚毡𣻼毜氷𢒋𤣱𦭑汚舦汹𣶼䓅𣶽𤆤𤤌𤤀"], +["fda1","𣳉㛥㳫𠴲鮃𣇹𢒑羏样𦴥𦶡𦷫涖浜湼漄𤥿𤂅𦹲蔳𦽴凇沜渝萮𨬡港𣸯瑓𣾂秌湏媑𣁋濸㜍澝𣸰滺𡒗𤀽䕕鏰潄潜㵎潴𩅰㴻澟𤅄濓𤂑𤅕𤀹𣿰𣾴𤄿凟𤅖𤅗𤅀𦇝灋灾炧炁烌烕烖烟䄄㷨熴熖𤉷焫煅媈煊煮岜𤍥煏鍢𤋁焬𤑚𤨧𤨢熺𨯨炽爎"], +["fe40","鑂爕夑鑃爤鍁𥘅爮牀𤥴梽牕牗㹕𣁄栍漽犂猪猫𤠣𨠫䣭𨠄猨献珏玪𠰺𦨮珉瑉𤇢𡛧𤨤昣㛅𤦷𤦍𤧻珷琕椃𤨦琹𠗃㻗瑜𢢭瑠𨺲瑇珤瑶莹瑬㜰瑴鏱樬璂䥓𤪌"], +["fea1","𤅟𤩹𨮏孆𨰃𡢞瓈𡦈甎瓩甞𨻙𡩋寗𨺬鎅畍畊畧畮𤾂㼄𤴓疎瑝疞疴瘂瘬癑癏癯癶𦏵皐臯㟸𦤑𦤎皡皥皷盌𦾟葢𥂝𥅽𡸜眞眦着撯𥈠睘𣊬瞯𨥤𨥨𡛁矴砉𡍶𤨒棊碯磇磓隥礮𥗠磗礴碱𧘌辸袄𨬫𦂃𢘜禆褀椂禀𥡗禝𧬹礼禩渪𧄦㺨秆𩄍秔"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp936.json b/node_modules/iconv-lite/encodings/tables/cp936.json new file mode 100644 index 0000000..49ddb9a --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp936.json @@ -0,0 +1,264 @@ +[ +["0","\u0000",127,"€"], +["8140","丂丄丅丆丏丒丗丟丠両丣並丩丮丯丱丳丵丷丼乀乁乂乄乆乊乑乕乗乚乛乢乣乤乥乧乨乪",5,"乲乴",9,"乿",6,"亇亊"], +["8180","亐亖亗亙亜亝亞亣亪亯亰亱亴亶亷亸亹亼亽亾仈仌仏仐仒仚仛仜仠仢仦仧仩仭仮仯仱仴仸仹仺仼仾伀伂",6,"伋伌伒",4,"伜伝伡伣伨伩伬伭伮伱伳伵伷伹伻伾",4,"佄佅佇",5,"佒佔佖佡佢佦佨佪佫佭佮佱佲併佷佸佹佺佽侀侁侂侅來侇侊侌侎侐侒侓侕侖侘侙侚侜侞侟価侢"], +["8240","侤侫侭侰",4,"侶",8,"俀俁係俆俇俈俉俋俌俍俒",4,"俙俛俠俢俤俥俧俫俬俰俲俴俵俶俷俹俻俼俽俿",11], 
+["8280","個倎倐們倓倕倖倗倛倝倞倠倢倣値倧倫倯",10,"倻倽倿偀偁偂偄偅偆偉偊偋偍偐",4,"偖偗偘偙偛偝",7,"偦",5,"偭",8,"偸偹偺偼偽傁傂傃傄傆傇傉傊傋傌傎",20,"傤傦傪傫傭",4,"傳",6,"傼"], +["8340","傽",17,"僐",5,"僗僘僙僛",10,"僨僩僪僫僯僰僱僲僴僶",4,"僼",9,"儈"], +["8380","儉儊儌",5,"儓",13,"儢",28,"兂兇兊兌兎兏児兒兓兗兘兙兛兝",4,"兣兤兦內兩兪兯兲兺兾兿冃冄円冇冊冋冎冏冐冑冓冔冘冚冝冞冟冡冣冦",4,"冭冮冴冸冹冺冾冿凁凂凃凅凈凊凍凎凐凒",5], +["8440","凘凙凚凜凞凟凢凣凥",5,"凬凮凱凲凴凷凾刄刅刉刋刌刏刐刓刔刕刜刞刟刡刢刣別刦刧刪刬刯刱刲刴刵刼刾剄",5,"剋剎剏剒剓剕剗剘"], +["8480","剙剚剛剝剟剠剢剣剤剦剨剫剬剭剮剰剱剳",9,"剾劀劃",4,"劉",6,"劑劒劔",6,"劜劤劥劦劧劮劯劰労",9,"勀勁勂勄勅勆勈勊勌勍勎勏勑勓勔動勗務",5,"勠勡勢勣勥",10,"勱",7,"勻勼勽匁匂匃匄匇匉匊匋匌匎"], +["8540","匑匒匓匔匘匛匜匞匟匢匤匥匧匨匩匫匬匭匯",9,"匼匽區卂卄卆卋卌卍卐協単卙卛卝卥卨卪卬卭卲卶卹卻卼卽卾厀厁厃厇厈厊厎厏"], +["8580","厐",4,"厖厗厙厛厜厞厠厡厤厧厪厫厬厭厯",6,"厷厸厹厺厼厽厾叀參",4,"収叏叐叒叓叕叚叜叝叞叡叢叧叴叺叾叿吀吂吅吇吋吔吘吙吚吜吢吤吥吪吰吳吶吷吺吽吿呁呂呄呅呇呉呌呍呎呏呑呚呝",4,"呣呥呧呩",7,"呴呹呺呾呿咁咃咅咇咈咉咊咍咑咓咗咘咜咞咟咠咡"], +["8640","咢咥咮咰咲咵咶咷咹咺咼咾哃哅哊哋哖哘哛哠",4,"哫哬哯哰哱哴",5,"哻哾唀唂唃唄唅唈唊",4,"唒唓唕",5,"唜唝唞唟唡唥唦"], +["8680","唨唩唫唭唲唴唵唶唸唹唺唻唽啀啂啅啇啈啋",4,"啑啒啓啔啗",4,"啝啞啟啠啢啣啨啩啫啯",5,"啹啺啽啿喅喆喌喍喎喐喒喓喕喖喗喚喛喞喠",6,"喨",8,"喲喴営喸喺喼喿",4,"嗆嗇嗈嗊嗋嗎嗏嗐嗕嗗",4,"嗞嗠嗢嗧嗩嗭嗮嗰嗱嗴嗶嗸",4,"嗿嘂嘃嘄嘅"], +["8740","嘆嘇嘊嘋嘍嘐",7,"嘙嘚嘜嘝嘠嘡嘢嘥嘦嘨嘩嘪嘫嘮嘯嘰嘳嘵嘷嘸嘺嘼嘽嘾噀",11,"噏",4,"噕噖噚噛噝",4], +["8780","噣噥噦噧噭噮噯噰噲噳噴噵噷噸噹噺噽",7,"嚇",6,"嚐嚑嚒嚔",14,"嚤",10,"嚰",6,"嚸嚹嚺嚻嚽",12,"囋",8,"囕囖囘囙囜団囥",5,"囬囮囯囲図囶囷囸囻囼圀圁圂圅圇國",6], +["8840","園",9,"圝圞圠圡圢圤圥圦圧圫圱圲圴",4,"圼圽圿坁坃坄坅坆坈坉坋坒",4,"坘坙坢坣坥坧坬坮坰坱坲坴坵坸坹坺坽坾坿垀"], +["8880","垁垇垈垉垊垍",4,"垔",6,"垜垝垞垟垥垨垪垬垯垰垱垳垵垶垷垹",8,"埄",6,"埌埍埐埑埓埖埗埛埜埞埡埢埣埥",7,"埮埰埱埲埳埵埶執埻埼埾埿堁堃堄堅堈堉堊堌堎堏堐堒堓堔堖堗堘堚堛堜堝堟堢堣堥",4,"堫",4,"報堲堳場堶",7], +["8940","堾",5,"塅",6,"塎塏塐塒塓塕塖塗塙",4,"塟",5,"塦",4,"塭",16,"塿墂墄墆墇墈墊墋墌"], +["8980","墍",4,"墔",4,"墛墜墝墠",7,"墪",17,"墽墾墿壀壂壃壄壆",10,"壒壓壔壖",13,"壥",5,"壭壯壱売壴壵壷壸壺",7,"夃夅夆夈",4,"夎夐夑夒夓夗夘夛夝夞夠夡夢夣夦夨夬夰夲夳夵夶夻"], +["8a40","夽夾夿奀奃奅奆奊奌奍奐奒奓奙奛",4,"奡奣奤奦",12,"奵奷奺奻奼奾奿妀妅妉妋妌妎妏妐妑妔妕妘妚妛妜妝妟妠妡妢妦"], +["8a80","妧妬妭妰妱妳",5,"妺妼妽妿",6,"姇姈姉姌姍姎姏姕姖姙姛姞",4,"姤姦姧姩姪姫姭",11,"姺姼姽姾娀娂娊娋娍娎娏娐娒娔娕娖娗娙娚娛娝娞娡娢娤娦娧娨娪",6,"娳娵娷",4,"娽娾娿婁",4,"婇婈婋",9,"婖婗婘婙婛",5], +["8b40","婡婣婤婥婦婨婩婫",8,"婸婹婻婼婽婾媀",17,"媓",6,"媜",13,"媫媬"], +["8b80","媭",4,"媴媶媷媹",4,"媿嫀嫃",5,"嫊嫋嫍",4,"嫓嫕嫗嫙嫚嫛嫝嫞嫟嫢嫤嫥嫧嫨嫪嫬",4,"嫲",22,"嬊",11,"嬘",25,"嬳嬵嬶嬸",7,"孁",6], +["8c40","孈",7,"孒孖孞孠孡孧孨孫孭孮孯孲孴孶孷學孹孻孼孾孿宂宆宊宍宎宐宑宒宔宖実宧宨宩宬宭宮宯宱宲宷宺宻宼寀寁寃寈寉寊寋寍寎寏"], +["8c80","寑寔",8,"寠寢寣實寧審",4,"寯寱",6,"寽対尀専尃尅將專尋尌對導尐尒尓尗尙尛尞尟尠尡尣尦尨尩尪尫尭尮尯尰尲尳尵尶尷屃屄屆屇屌屍屒屓屔屖屗屘屚屛屜屝屟屢層屧",6,"屰屲",6,"屻屼屽屾岀岃",4,"岉岊岋岎岏岒岓岕岝",4,"岤",4], +["8d40","岪岮岯岰岲岴岶岹岺岻岼岾峀峂峃峅",5,"峌",5,"峓",5,"峚",6,"峢峣峧峩峫峬峮峯峱",9,"峼",4], +["8d80","崁崄崅崈",5,"崏",4,"崕崗崘崙崚崜崝崟",4,"崥崨崪崫崬崯",4,"崵",7,"崿",7,"嵈嵉嵍",10,"嵙嵚嵜嵞",10,"嵪嵭嵮嵰嵱嵲嵳嵵",12,"嶃",21,"嶚嶛嶜嶞嶟嶠"], +["8e40","嶡",21,"嶸",12,"巆",6,"巎",12,"巜巟巠巣巤巪巬巭"], +["8e80","巰巵巶巸",4,"巿帀帄帇帉帊帋帍帎帒帓帗帞",7,"帨",4,"帯帰帲",4,"帹帺帾帿幀幁幃幆",5,"幍",6,"幖",4,"幜幝幟幠幣",14,"幵幷幹幾庁庂広庅庈庉庌庍庎庒庘庛庝庡庢庣庤庨",4,"庮",4,"庴庺庻庼庽庿",6], +["8f40","廆廇廈廋",5,"廔廕廗廘廙廚廜",11,"廩廫",8,"廵廸廹廻廼廽弅弆弇弉弌弍弎弐弒弔弖弙弚弜弝弞弡弢弣弤"], +["8f80","弨弫弬弮弰弲",6,"弻弽弾弿彁",14,"彑彔彙彚彛彜彞彟彠彣彥彧彨彫彮彯彲彴彵彶彸彺彽彾彿徃徆徍徎徏徑従徔徖徚徛徝從徟徠徢",5,"復徫徬徯",5,"徶徸徹徺徻徾",4,"忇忈忊忋忎忓忔忕忚忛応忞忟忢忣忥忦忨忩忬忯忰忲忳忴忶忷忹忺忼怇"], +["9040","怈怉怋怌怐怑怓怗怘怚怞怟怢怣怤怬怭怮怰",4,"怶",4,"怽怾恀恄",6,"恌恎恏恑恓恔恖恗恘恛恜恞恟恠恡恥恦恮恱恲恴恵恷恾悀"], +["9080","悁悂悅悆悇悈悊悋悎悏悐悑悓悕悗悘悙悜悞悡悢悤悥悧悩悪悮悰悳悵悶悷悹悺悽",7,"惇惈惉惌",4,"惒惓惔惖惗惙惛惞惡",4,"惪惱惲惵惷惸惻",4,"愂愃愄愅愇愊愋愌愐",4,"愖愗愘愙愛愜愝愞愡愢愥愨愩愪愬",18,"慀",6], +["9140","慇慉態慍慏慐慒慓慔慖",6,"慞慟慠慡慣慤慥慦慩",6,"慱慲慳慴慶慸",18,"憌憍憏",4,"憕"], +["9180","憖",6,"憞",8,"憪憫憭",9,"憸",5,"憿懀懁懃",4,"應懌",4,"懓懕",16,"懧",13,"懶",8,"戀",5,"戇戉戓戔戙戜戝戞戠戣戦戧戨戩戫戭戯戰戱戲戵戶戸",4,"扂扄扅扆扊"], +["9240","扏扐払扖扗扙扚扜",6,"扤扥扨扱扲扴扵扷扸扺扻扽抁抂抃抅抆抇抈抋",5,"抔抙抜抝択抣抦抧抩抪抭抮抯抰抲抳抴抶抷抸抺抾拀拁"], +["9280","拃拋拏拑拕拝拞拠拡拤拪拫拰拲拵拸拹拺拻挀挃挄挅挆挊挋挌挍挏挐挒挓挔挕挗挘挙挜挦挧挩挬挭挮挰挱挳",5,"挻挼挾挿捀捁捄捇捈捊捑捒捓捔捖",7,"捠捤捥捦捨捪捫捬捯捰捲捳捴捵捸捹捼捽捾捿掁掃掄掅掆掋掍掑掓掔掕掗掙",6,"採掤掦掫掯掱掲掵掶掹掻掽掿揀"], +["9340","揁揂揃揅揇揈揊揋揌揑揓揔揕揗",6,"揟揢揤",4,"揫揬揮揯揰揱揳揵揷揹揺揻揼揾搃搄搆",4,"損搎搑搒搕",5,"搝搟搢搣搤"], +["9380","搥搧搨搩搫搮",5,"搵",4,"搻搼搾摀摂摃摉摋",6,"摓摕摖摗摙",4,"摟",7,"摨摪摫摬摮",9,"摻",6,"撃撆撈",8,"撓撔撗撘撚撛撜撝撟",4,"撥撦撧撨撪撫撯撱撲撳撴撶撹撻撽撾撿擁擃擄擆",6,"擏擑擓擔擕擖擙據"], +["9440","擛擜擝擟擠擡擣擥擧",24,"攁",7,"攊",7,"攓",4,"攙",8], 
+["9480","攢攣攤攦",4,"攬攭攰攱攲攳攷攺攼攽敀",4,"敆敇敊敋敍敎敐敒敓敔敗敘敚敜敟敠敡敤敥敧敨敩敪敭敮敯敱敳敵敶數",14,"斈斉斊斍斎斏斒斔斕斖斘斚斝斞斠斢斣斦斨斪斬斮斱",7,"斺斻斾斿旀旂旇旈旉旊旍旐旑旓旔旕旘",7,"旡旣旤旪旫"], +["9540","旲旳旴旵旸旹旻",4,"昁昄昅昇昈昉昋昍昐昑昒昖昗昘昚昛昜昞昡昢昣昤昦昩昪昫昬昮昰昲昳昷",4,"昽昿晀時晄",6,"晍晎晐晑晘"], +["9580","晙晛晜晝晞晠晢晣晥晧晩",4,"晱晲晳晵晸晹晻晼晽晿暀暁暃暅暆暈暉暊暋暍暎暏暐暒暓暔暕暘",4,"暞",8,"暩",4,"暯",4,"暵暶暷暸暺暻暼暽暿",25,"曚曞",7,"曧曨曪",5,"曱曵曶書曺曻曽朁朂會"], +["9640","朄朅朆朇朌朎朏朑朒朓朖朘朙朚朜朞朠",5,"朧朩朮朰朲朳朶朷朸朹朻朼朾朿杁杄杅杇杊杋杍杒杔杕杗",4,"杝杢杣杤杦杧杫杬杮東杴杶"], +["9680","杸杹杺杻杽枀枂枃枅枆枈枊枌枍枎枏枑枒枓枔枖枙枛枟枠枡枤枦枩枬枮枱枲枴枹",7,"柂柅",9,"柕柖柗柛柟柡柣柤柦柧柨柪柫柭柮柲柵",7,"柾栁栂栃栄栆栍栐栒栔栕栘",4,"栞栟栠栢",6,"栫",6,"栴栵栶栺栻栿桇桋桍桏桒桖",5], +["9740","桜桝桞桟桪桬",7,"桵桸",8,"梂梄梇",7,"梐梑梒梔梕梖梘",9,"梣梤梥梩梪梫梬梮梱梲梴梶梷梸"], +["9780","梹",6,"棁棃",5,"棊棌棎棏棐棑棓棔棖棗棙棛",4,"棡棢棤",9,"棯棲棳棴棶棷棸棻棽棾棿椀椂椃椄椆",4,"椌椏椑椓",11,"椡椢椣椥",7,"椮椯椱椲椳椵椶椷椸椺椻椼椾楀楁楃",16,"楕楖楘楙楛楜楟"], +["9840","楡楢楤楥楧楨楩楪楬業楯楰楲",4,"楺楻楽楾楿榁榃榅榊榋榌榎",5,"榖榗榙榚榝",9,"榩榪榬榮榯榰榲榳榵榶榸榹榺榼榽"], +["9880","榾榿槀槂",7,"構槍槏槑槒槓槕",5,"槜槝槞槡",11,"槮槯槰槱槳",9,"槾樀",9,"樋",11,"標",5,"樠樢",5,"権樫樬樭樮樰樲樳樴樶",6,"樿",4,"橅橆橈",7,"橑",6,"橚"], +["9940","橜",4,"橢橣橤橦",10,"橲",6,"橺橻橽橾橿檁檂檃檅",8,"檏檒",4,"檘",7,"檡",5], +["9980","檧檨檪檭",114,"欥欦欨",6], +["9a40","欯欰欱欳欴欵欶欸欻欼欽欿歀歁歂歄歅歈歊歋歍",11,"歚",7,"歨歩歫",13,"歺歽歾歿殀殅殈"], +["9a80","殌殎殏殐殑殔殕殗殘殙殜",4,"殢",7,"殫",7,"殶殸",6,"毀毃毄毆",4,"毌毎毐毑毘毚毜",4,"毢",7,"毬毭毮毰毱毲毴毶毷毸毺毻毼毾",6,"氈",4,"氎氒気氜氝氞氠氣氥氫氬氭氱氳氶氷氹氺氻氼氾氿汃汄汅汈汋",4,"汑汒汓汖汘"], +["9b40","汙汚汢汣汥汦汧汫",4,"汱汳汵汷汸決汻汼汿沀沄沇沊沋沍沎沑沒沕沖沗沘沚沜沝沞沠沢沨沬沯沰沴沵沶沷沺泀況泂泃泆泇泈泋泍泎泏泑泒泘"], +["9b80","泙泚泜泝泟泤泦泧泩泬泭泲泴泹泿洀洂洃洅洆洈洉洊洍洏洐洑洓洔洕洖洘洜洝洟",5,"洦洨洩洬洭洯洰洴洶洷洸洺洿浀浂浄浉浌浐浕浖浗浘浛浝浟浡浢浤浥浧浨浫浬浭浰浱浲浳浵浶浹浺浻浽",4,"涃涄涆涇涊涋涍涏涐涒涖",4,"涜涢涥涬涭涰涱涳涴涶涷涹",5,"淁淂淃淈淉淊"], +["9c40","淍淎淏淐淒淓淔淕淗淚淛淜淟淢淣淥淧淨淩淪淭淯淰淲淴淵淶淸淺淽",7,"渆渇済渉渋渏渒渓渕渘渙減渜渞渟渢渦渧渨渪測渮渰渱渳渵"], +["9c80","渶渷渹渻",7,"湅",7,"湏湐湑湒湕湗湙湚湜湝湞湠",10,"湬湭湯",14,"満溁溂溄溇溈溊",4,"溑",6,"溙溚溛溝溞溠溡溣溤溦溨溩溫溬溭溮溰溳溵溸溹溼溾溿滀滃滄滅滆滈滉滊滌滍滎滐滒滖滘滙滛滜滝滣滧滪",5], +["9d40","滰滱滲滳滵滶滷滸滺",7,"漃漄漅漇漈漊",4,"漐漑漒漖",9,"漡漢漣漥漦漧漨漬漮漰漲漴漵漷",6,"漿潀潁潂"], +["9d80","潃潄潅潈潉潊潌潎",9,"潙潚潛潝潟潠潡潣潤潥潧",5,"潯潰潱潳潵潶潷潹潻潽",6,"澅澆澇澊澋澏",12,"澝澞澟澠澢",4,"澨",10,"澴澵澷澸澺",5,"濁濃",5,"濊",6,"濓",10,"濟濢濣濤濥"], +["9e40","濦",7,"濰",32,"瀒",7,"瀜",6,"瀤",6], +["9e80","瀫",9,"瀶瀷瀸瀺",17,"灍灎灐",13,"灟",11,"灮灱灲灳灴灷灹灺灻災炁炂炃炄炆炇炈炋炌炍炏炐炑炓炗炘炚炛炞",12,"炰炲炴炵炶為炾炿烄烅烆烇烉烋",12,"烚"], +["9f40","烜烝烞烠烡烢烣烥烪烮烰",6,"烸烺烻烼烾",10,"焋",4,"焑焒焔焗焛",10,"焧",7,"焲焳焴"], +["9f80","焵焷",13,"煆煇煈煉煋煍煏",12,"煝煟",4,"煥煩",4,"煯煰煱煴煵煶煷煹煻煼煾",5,"熅",4,"熋熌熍熎熐熑熒熓熕熖熗熚",4,"熡",6,"熩熪熫熭",5,"熴熶熷熸熺",8,"燄",9,"燏",4], +["a040","燖",9,"燡燢燣燤燦燨",5,"燯",9,"燺",11,"爇",19], +["a080","爛爜爞",9,"爩爫爭爮爯爲爳爴爺爼爾牀",6,"牉牊牋牎牏牐牑牓牔牕牗牘牚牜牞牠牣牤牥牨牪牫牬牭牰牱牳牴牶牷牸牻牼牽犂犃犅",4,"犌犎犐犑犓",11,"犠",11,"犮犱犲犳犵犺",6,"狅狆狇狉狊狋狌狏狑狓狔狕狖狘狚狛"], +["a1a1"," 、。·ˉˇ¨〃々—~‖…‘’“”〔〕〈",7,"〖〗【】±×÷∶∧∨∑∏∪∩∈∷√⊥∥∠⌒⊙∫∮≡≌≈∽∝≠≮≯≤≥∞∵∴♂♀°′″℃$¤¢£‰§№☆★○●◎◇◆□■△▲※→←↑↓〓"], +["a2a1","ⅰ",9], +["a2b1","⒈",19,"⑴",19,"①",9], +["a2e5","㈠",9], +["a2f1","Ⅰ",11], +["a3a1","!"#¥%",88," ̄"], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a6e0","︵︶︹︺︿﹀︽︾﹁﹂﹃﹄"], +["a6ee","︻︼︷︸︱"], +["a6f4","︳︴"], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a840","ˊˋ˙–―‥‵℅℉↖↗↘↙∕∟∣≒≦≧⊿═",35,"▁",6], +["a880","█",7,"▓▔▕▼▽◢◣◤◥☉⊕〒〝〞"], +["a8a1","āáǎàēéěèīíǐìōóǒòūúǔùǖǘǚǜüêɑ"], +["a8bd","ńň"], +["a8c0","ɡ"], +["a8c5","ㄅ",36], +["a940","〡",8,"㊣㎎㎏㎜㎝㎞㎡㏄㏎㏑㏒㏕︰¬¦"], +["a959","℡㈱"], +["a95c","‐"], +["a960","ー゛゜ヽヾ〆ゝゞ﹉",9,"﹔﹕﹖﹗﹙",8], +["a980","﹢",4,"﹨﹩﹪﹫"], +["a996","〇"], +["a9a4","─",75], +["aa40","狜狝狟狢",5,"狪狫狵狶狹狽狾狿猀猂猄",5,"猋猌猍猏猐猑猒猔猘猙猚猟猠猣猤猦猧猨猭猯猰猲猳猵猶猺猻猼猽獀",8], +["aa80","獉獊獋獌獎獏獑獓獔獕獖獘",7,"獡",10,"獮獰獱"], +["ab40","獲",11,"獿",4,"玅玆玈玊玌玍玏玐玒玓玔玕玗玘玙玚玜玝玞玠玡玣",5,"玪玬玭玱玴玵玶玸玹玼玽玾玿珁珃",4], +["ab80","珋珌珎珒",6,"珚珛珜珝珟珡珢珣珤珦珨珪珫珬珮珯珰珱珳",4], +["ac40","珸",10,"琄琇琈琋琌琍琎琑",8,"琜",5,"琣琤琧琩琫琭琯琱琲琷",4,"琽琾琿瑀瑂",11], +["ac80","瑎",6,"瑖瑘瑝瑠",12,"瑮瑯瑱",4,"瑸瑹瑺"], +["ad40","瑻瑼瑽瑿璂璄璅璆璈璉璊璌璍璏璑",10,"璝璟",7,"璪",15,"璻",12], +["ad80","瓈",9,"瓓",8,"瓝瓟瓡瓥瓧",6,"瓰瓱瓲"], 
+["ae40","瓳瓵瓸",6,"甀甁甂甃甅",7,"甎甐甒甔甕甖甗甛甝甞甠",4,"甦甧甪甮甴甶甹甼甽甿畁畂畃畄畆畇畉畊畍畐畑畒畓畕畖畗畘"], +["ae80","畝",7,"畧畨畩畫",6,"畳畵當畷畺",4,"疀疁疂疄疅疇"], +["af40","疈疉疊疌疍疎疐疓疕疘疛疜疞疢疦",4,"疭疶疷疺疻疿痀痁痆痋痌痎痏痐痑痓痗痙痚痜痝痟痠痡痥痩痬痭痮痯痲痳痵痶痷痸痺痻痽痾瘂瘄瘆瘇"], +["af80","瘈瘉瘋瘍瘎瘏瘑瘒瘓瘔瘖瘚瘜瘝瘞瘡瘣瘧瘨瘬瘮瘯瘱瘲瘶瘷瘹瘺瘻瘽癁療癄"], +["b040","癅",6,"癎",5,"癕癗",4,"癝癟癠癡癢癤",6,"癬癭癮癰",7,"癹発發癿皀皁皃皅皉皊皌皍皏皐皒皔皕皗皘皚皛"], +["b080","皜",7,"皥",8,"皯皰皳皵",9,"盀盁盃啊阿埃挨哎唉哀皑癌蔼矮艾碍爱隘鞍氨安俺按暗岸胺案肮昂盎凹敖熬翱袄傲奥懊澳芭捌扒叭吧笆八疤巴拔跋靶把耙坝霸罢爸白柏百摆佰败拜稗斑班搬扳般颁板版扮拌伴瓣半办绊邦帮梆榜膀绑棒磅蚌镑傍谤苞胞包褒剥"], +["b140","盄盇盉盋盌盓盕盙盚盜盝盞盠",4,"盦",7,"盰盳盵盶盷盺盻盽盿眀眂眃眅眆眊県眎",10,"眛眜眝眞眡眣眤眥眧眪眫"], +["b180","眬眮眰",4,"眹眻眽眾眿睂睄睅睆睈",7,"睒",7,"睜薄雹保堡饱宝抱报暴豹鲍爆杯碑悲卑北辈背贝钡倍狈备惫焙被奔苯本笨崩绷甭泵蹦迸逼鼻比鄙笔彼碧蓖蔽毕毙毖币庇痹闭敝弊必辟壁臂避陛鞭边编贬扁便变卞辨辩辫遍标彪膘表鳖憋别瘪彬斌濒滨宾摈兵冰柄丙秉饼炳"], +["b240","睝睞睟睠睤睧睩睪睭",11,"睺睻睼瞁瞂瞃瞆",5,"瞏瞐瞓",11,"瞡瞣瞤瞦瞨瞫瞭瞮瞯瞱瞲瞴瞶",4], +["b280","瞼瞾矀",12,"矎",8,"矘矙矚矝",4,"矤病并玻菠播拨钵波博勃搏铂箔伯帛舶脖膊渤泊驳捕卜哺补埠不布步簿部怖擦猜裁材才财睬踩采彩菜蔡餐参蚕残惭惨灿苍舱仓沧藏操糙槽曹草厕策侧册测层蹭插叉茬茶查碴搽察岔差诧拆柴豺搀掺蝉馋谗缠铲产阐颤昌猖"], +["b340","矦矨矪矯矰矱矲矴矵矷矹矺矻矼砃",5,"砊砋砎砏砐砓砕砙砛砞砠砡砢砤砨砪砫砮砯砱砲砳砵砶砽砿硁硂硃硄硆硈硉硊硋硍硏硑硓硔硘硙硚"], +["b380","硛硜硞",11,"硯",7,"硸硹硺硻硽",6,"场尝常长偿肠厂敞畅唱倡超抄钞朝嘲潮巢吵炒车扯撤掣彻澈郴臣辰尘晨忱沉陈趁衬撑称城橙成呈乘程惩澄诚承逞骋秤吃痴持匙池迟弛驰耻齿侈尺赤翅斥炽充冲虫崇宠抽酬畴踌稠愁筹仇绸瞅丑臭初出橱厨躇锄雏滁除楚"], +["b440","碄碅碆碈碊碋碏碐碒碔碕碖碙碝碞碠碢碤碦碨",7,"碵碶碷碸確碻碼碽碿磀磂磃磄磆磇磈磌磍磎磏磑磒磓磖磗磘磚",9], +["b480","磤磥磦磧磩磪磫磭",4,"磳磵磶磸磹磻",5,"礂礃礄礆",6,"础储矗搐触处揣川穿椽传船喘串疮窗幢床闯创吹炊捶锤垂春椿醇唇淳纯蠢戳绰疵茨磁雌辞慈瓷词此刺赐次聪葱囱匆从丛凑粗醋簇促蹿篡窜摧崔催脆瘁粹淬翠村存寸磋撮搓措挫错搭达答瘩打大呆歹傣戴带殆代贷袋待逮"], +["b540","礍",5,"礔",9,"礟",4,"礥",14,"礵",4,"礽礿祂祃祄祅祇祊",8,"祔祕祘祙祡祣"], +["b580","祤祦祩祪祫祬祮祰",6,"祹祻",4,"禂禃禆禇禈禉禋禌禍禎禐禑禒怠耽担丹单郸掸胆旦氮但惮淡诞弹蛋当挡党荡档刀捣蹈倒岛祷导到稻悼道盗德得的蹬灯登等瞪凳邓堤低滴迪敌笛狄涤翟嫡抵底地蒂第帝弟递缔颠掂滇碘点典靛垫电佃甸店惦奠淀殿碉叼雕凋刁掉吊钓调跌爹碟蝶迭谍叠"], +["b640","禓",6,"禛",11,"禨",10,"禴",4,"禼禿秂秄秅秇秈秊秌秎秏秐秓秔秖秗秙",5,"秠秡秢秥秨秪"], +["b680","秬秮秱",6,"秹秺秼秾秿稁稄稅稇稈稉稊稌稏",4,"稕稖稘稙稛稜丁盯叮钉顶鼎锭定订丢东冬董懂动栋侗恫冻洞兜抖斗陡豆逗痘都督毒犊独读堵睹赌杜镀肚度渡妒端短锻段断缎堆兑队对墩吨蹲敦顿囤钝盾遁掇哆多夺垛躲朵跺舵剁惰堕蛾峨鹅俄额讹娥恶厄扼遏鄂饿恩而儿耳尔饵洱二"], +["b740","稝稟稡稢稤",14,"稴稵稶稸稺稾穀",5,"穇",9,"穒",4,"穘",16], +["b780","穩",6,"穱穲穳穵穻穼穽穾窂窅窇窉窊窋窌窎窏窐窓窔窙窚窛窞窡窢贰发罚筏伐乏阀法珐藩帆番翻樊矾钒繁凡烦反返范贩犯饭泛坊芳方肪房防妨仿访纺放菲非啡飞肥匪诽吠肺废沸费芬酚吩氛分纷坟焚汾粉奋份忿愤粪丰封枫蜂峰锋风疯烽逢冯缝讽奉凤佛否夫敷肤孵扶拂辐幅氟符伏俘服"], +["b840","窣窤窧窩窪窫窮",4,"窴",10,"竀",10,"竌",9,"竗竘竚竛竜竝竡竢竤竧",5,"竮竰竱竲竳"], +["b880","竴",4,"竻竼竾笀笁笂笅笇笉笌笍笎笐笒笓笖笗笘笚笜笝笟笡笢笣笧笩笭浮涪福袱弗甫抚辅俯釜斧脯腑府腐赴副覆赋复傅付阜父腹负富讣附妇缚咐噶嘎该改概钙盖溉干甘杆柑竿肝赶感秆敢赣冈刚钢缸肛纲岗港杠篙皋高膏羔糕搞镐稿告哥歌搁戈鸽胳疙割革葛格蛤阁隔铬个各给根跟耕更庚羹"], +["b940","笯笰笲笴笵笶笷笹笻笽笿",5,"筆筈筊筍筎筓筕筗筙筜筞筟筡筣",10,"筯筰筳筴筶筸筺筼筽筿箁箂箃箄箆",6,"箎箏"], +["b980","箑箒箓箖箘箙箚箛箞箟箠箣箤箥箮箯箰箲箳箵箶箷箹",7,"篂篃範埂耿梗工攻功恭龚供躬公宫弓巩汞拱贡共钩勾沟苟狗垢构购够辜菇咕箍估沽孤姑鼓古蛊骨谷股故顾固雇刮瓜剐寡挂褂乖拐怪棺关官冠观管馆罐惯灌贯光广逛瑰规圭硅归龟闺轨鬼诡癸桂柜跪贵刽辊滚棍锅郭国果裹过哈"], +["ba40","篅篈築篊篋篍篎篏篐篒篔",4,"篛篜篞篟篠篢篣篤篧篨篩篫篬篭篯篰篲",4,"篸篹篺篻篽篿",7,"簈簉簊簍簎簐",5,"簗簘簙"], +["ba80","簚",4,"簠",5,"簨簩簫",12,"簹",5,"籂骸孩海氦亥害骇酣憨邯韩含涵寒函喊罕翰撼捍旱憾悍焊汗汉夯杭航壕嚎豪毫郝好耗号浩呵喝荷菏核禾和何合盒貉阂河涸赫褐鹤贺嘿黑痕很狠恨哼亨横衡恒轰哄烘虹鸿洪宏弘红喉侯猴吼厚候后呼乎忽瑚壶葫胡蝴狐糊湖"], +["bb40","籃",9,"籎",36,"籵",5,"籾",9], +["bb80","粈粊",6,"粓粔粖粙粚粛粠粡粣粦粧粨粩粫粬粭粯粰粴",4,"粺粻弧虎唬护互沪户花哗华猾滑画划化话槐徊怀淮坏欢环桓还缓换患唤痪豢焕涣宦幻荒慌黄磺蝗簧皇凰惶煌晃幌恍谎灰挥辉徽恢蛔回毁悔慧卉惠晦贿秽会烩汇讳诲绘荤昏婚魂浑混豁活伙火获或惑霍货祸击圾基机畸稽积箕"], +["bc40","粿糀糂糃糄糆糉糋糎",6,"糘糚糛糝糞糡",6,"糩",5,"糰",7,"糹糺糼",13,"紋",5], +["bc80","紑",14,"紡紣紤紥紦紨紩紪紬紭紮細",6,"肌饥迹激讥鸡姬绩缉吉极棘辑籍集及急疾汲即嫉级挤几脊己蓟技冀季伎祭剂悸济寄寂计记既忌际妓继纪嘉枷夹佳家加荚颊贾甲钾假稼价架驾嫁歼监坚尖笺间煎兼肩艰奸缄茧检柬碱硷拣捡简俭剪减荐槛鉴践贱见键箭件"], +["bd40","紷",54,"絯",7], +["bd80","絸",32,"健舰剑饯渐溅涧建僵姜将浆江疆蒋桨奖讲匠酱降蕉椒礁焦胶交郊浇骄娇嚼搅铰矫侥脚狡角饺缴绞剿教酵轿较叫窖揭接皆秸街阶截劫节桔杰捷睫竭洁结解姐戒藉芥界借介疥诫届巾筋斤金今津襟紧锦仅谨进靳晋禁近烬浸"], +["be40","継",12,"綧",6,"綯",42], +["be80","線",32,"尽劲荆兢茎睛晶鲸京惊精粳经井警景颈静境敬镜径痉靖竟竞净炯窘揪究纠玖韭久灸九酒厩救旧臼舅咎就疚鞠拘狙疽居驹菊局咀矩举沮聚拒据巨具距踞锯俱句惧炬剧捐鹃娟倦眷卷绢撅攫抉掘倔爵觉决诀绝均菌钧军君峻"], +["bf40","緻",62], +["bf80","縺縼",4,"繂",4,"繈",21,"俊竣浚郡骏喀咖卡咯开揩楷凯慨刊堪勘坎砍看康慷糠扛抗亢炕考拷烤靠坷苛柯棵磕颗科壳咳可渴克刻客课肯啃垦恳坑吭空恐孔控抠口扣寇枯哭窟苦酷库裤夸垮挎跨胯块筷侩快宽款匡筐狂框矿眶旷况亏盔岿窥葵奎魁傀"], +["c040","繞",35,"纃",23,"纜纝纞"], +["c080","纮纴纻纼绖绤绬绹缊缐缞缷缹缻",6,"罃罆",9,"罒罓馈愧溃坤昆捆困括扩廓阔垃拉喇蜡腊辣啦莱来赖蓝婪栏拦篮阑兰澜谰揽览懒缆烂滥琅榔狼廊郎朗浪捞劳牢老佬姥酪烙涝勒乐雷镭蕾磊累儡垒擂肋类泪棱楞冷厘梨犁黎篱狸离漓理李里鲤礼莉荔吏栗丽厉励砾历利傈例俐"], 
+["c140","罖罙罛罜罝罞罠罣",4,"罫罬罭罯罰罳罵罶罷罸罺罻罼罽罿羀羂",7,"羋羍羏",4,"羕",4,"羛羜羠羢羣羥羦羨",6,"羱"], +["c180","羳",4,"羺羻羾翀翂翃翄翆翇翈翉翋翍翏",4,"翖翗翙",5,"翢翣痢立粒沥隶力璃哩俩联莲连镰廉怜涟帘敛脸链恋炼练粮凉梁粱良两辆量晾亮谅撩聊僚疗燎寥辽潦了撂镣廖料列裂烈劣猎琳林磷霖临邻鳞淋凛赁吝拎玲菱零龄铃伶羚凌灵陵岭领另令溜琉榴硫馏留刘瘤流柳六龙聋咙笼窿"], +["c240","翤翧翨翪翫翬翭翯翲翴",6,"翽翾翿耂耇耈耉耊耎耏耑耓耚耛耝耞耟耡耣耤耫",5,"耲耴耹耺耼耾聀聁聄聅聇聈聉聎聏聐聑聓聕聖聗"], +["c280","聙聛",13,"聫",5,"聲",11,"隆垄拢陇楼娄搂篓漏陋芦卢颅庐炉掳卤虏鲁麓碌露路赂鹿潞禄录陆戮驴吕铝侣旅履屡缕虑氯律率滤绿峦挛孪滦卵乱掠略抡轮伦仑沦纶论萝螺罗逻锣箩骡裸落洛骆络妈麻玛码蚂马骂嘛吗埋买麦卖迈脉瞒馒蛮满蔓曼慢漫"], +["c340","聾肁肂肅肈肊肍",5,"肔肕肗肙肞肣肦肧肨肬肰肳肵肶肸肹肻胅胇",4,"胏",6,"胘胟胠胢胣胦胮胵胷胹胻胾胿脀脁脃脄脅脇脈脋"], +["c380","脌脕脗脙脛脜脝脟",12,"脭脮脰脳脴脵脷脹",4,"脿谩芒茫盲氓忙莽猫茅锚毛矛铆卯茂冒帽貌贸么玫枚梅酶霉煤没眉媒镁每美昧寐妹媚门闷们萌蒙檬盟锰猛梦孟眯醚靡糜迷谜弥米秘觅泌蜜密幂棉眠绵冕免勉娩缅面苗描瞄藐秒渺庙妙蔑灭民抿皿敏悯闽明螟鸣铭名命谬摸"], +["c440","腀",5,"腇腉腍腎腏腒腖腗腘腛",4,"腡腢腣腤腦腨腪腫腬腯腲腳腵腶腷腸膁膃",4,"膉膋膌膍膎膐膒",5,"膙膚膞",4,"膤膥"], +["c480","膧膩膫",7,"膴",5,"膼膽膾膿臄臅臇臈臉臋臍",6,"摹蘑模膜磨摩魔抹末莫墨默沫漠寞陌谋牟某拇牡亩姆母墓暮幕募慕木目睦牧穆拿哪呐钠那娜纳氖乃奶耐奈南男难囊挠脑恼闹淖呢馁内嫩能妮霓倪泥尼拟你匿腻逆溺蔫拈年碾撵捻念娘酿鸟尿捏聂孽啮镊镍涅您柠狞凝宁"], +["c540","臔",14,"臤臥臦臨臩臫臮",4,"臵",5,"臽臿舃與",4,"舎舏舑舓舕",5,"舝舠舤舥舦舧舩舮舲舺舼舽舿"], +["c580","艀艁艂艃艅艆艈艊艌艍艎艐",7,"艙艛艜艝艞艠",7,"艩拧泞牛扭钮纽脓浓农弄奴努怒女暖虐疟挪懦糯诺哦欧鸥殴藕呕偶沤啪趴爬帕怕琶拍排牌徘湃派攀潘盘磐盼畔判叛乓庞旁耪胖抛咆刨炮袍跑泡呸胚培裴赔陪配佩沛喷盆砰抨烹澎彭蓬棚硼篷膨朋鹏捧碰坯砒霹批披劈琵毗"], +["c640","艪艫艬艭艱艵艶艷艸艻艼芀芁芃芅芆芇芉芌芐芓芔芕芖芚芛芞芠芢芣芧芲芵芶芺芻芼芿苀苂苃苅苆苉苐苖苙苚苝苢苧苨苩苪苬苭苮苰苲苳苵苶苸"], +["c680","苺苼",4,"茊茋茍茐茒茓茖茘茙茝",9,"茩茪茮茰茲茷茻茽啤脾疲皮匹痞僻屁譬篇偏片骗飘漂瓢票撇瞥拼频贫品聘乒坪苹萍平凭瓶评屏坡泼颇婆破魄迫粕剖扑铺仆莆葡菩蒲埔朴圃普浦谱曝瀑期欺栖戚妻七凄漆柒沏其棋奇歧畦崎脐齐旗祈祁骑起岂乞企启契砌器气迄弃汽泣讫掐"], +["c740","茾茿荁荂荄荅荈荊",4,"荓荕",4,"荝荢荰",6,"荹荺荾",6,"莇莈莊莋莌莍莏莐莑莔莕莖莗莙莚莝莟莡",6,"莬莭莮"], +["c780","莯莵莻莾莿菂菃菄菆菈菉菋菍菎菐菑菒菓菕菗菙菚菛菞菢菣菤菦菧菨菫菬菭恰洽牵扦钎铅千迁签仟谦乾黔钱钳前潜遣浅谴堑嵌欠歉枪呛腔羌墙蔷强抢橇锹敲悄桥瞧乔侨巧鞘撬翘峭俏窍切茄且怯窃钦侵亲秦琴勤芹擒禽寝沁青轻氢倾卿清擎晴氰情顷请庆琼穷秋丘邱球求囚酋泅趋区蛆曲躯屈驱渠"], +["c840","菮華菳",4,"菺菻菼菾菿萀萂萅萇萈萉萊萐萒",5,"萙萚萛萞",5,"萩",7,"萲",5,"萹萺萻萾",7,"葇葈葉"], +["c880","葊",6,"葒",4,"葘葝葞葟葠葢葤",4,"葪葮葯葰葲葴葷葹葻葼取娶龋趣去圈颧权醛泉全痊拳犬券劝缺炔瘸却鹊榷确雀裙群然燃冉染瓤壤攘嚷让饶扰绕惹热壬仁人忍韧任认刃妊纫扔仍日戎茸蓉荣融熔溶容绒冗揉柔肉茹蠕儒孺如辱乳汝入褥软阮蕊瑞锐闰润若弱撒洒萨腮鳃塞赛三叁"], +["c940","葽",4,"蒃蒄蒅蒆蒊蒍蒏",7,"蒘蒚蒛蒝蒞蒟蒠蒢",12,"蒰蒱蒳蒵蒶蒷蒻蒼蒾蓀蓂蓃蓅蓆蓇蓈蓋蓌蓎蓏蓒蓔蓕蓗"], +["c980","蓘",4,"蓞蓡蓢蓤蓧",4,"蓭蓮蓯蓱",10,"蓽蓾蔀蔁蔂伞散桑嗓丧搔骚扫嫂瑟色涩森僧莎砂杀刹沙纱傻啥煞筛晒珊苫杉山删煽衫闪陕擅赡膳善汕扇缮墒伤商赏晌上尚裳梢捎稍烧芍勺韶少哨邵绍奢赊蛇舌舍赦摄射慑涉社设砷申呻伸身深娠绅神沈审婶甚肾慎渗声生甥牲升绳"], +["ca40","蔃",8,"蔍蔎蔏蔐蔒蔔蔕蔖蔘蔙蔛蔜蔝蔞蔠蔢",8,"蔭",9,"蔾",4,"蕄蕅蕆蕇蕋",10], +["ca80","蕗蕘蕚蕛蕜蕝蕟",4,"蕥蕦蕧蕩",8,"蕳蕵蕶蕷蕸蕼蕽蕿薀薁省盛剩胜圣师失狮施湿诗尸虱十石拾时什食蚀实识史矢使屎驶始式示士世柿事拭誓逝势是嗜噬适仕侍释饰氏市恃室视试收手首守寿授售受瘦兽蔬枢梳殊抒输叔舒淑疏书赎孰熟薯暑曙署蜀黍鼠属术述树束戍竖墅庶数漱"], +["cb40","薂薃薆薈",6,"薐",10,"薝",6,"薥薦薧薩薫薬薭薱",5,"薸薺",6,"藂",6,"藊",4,"藑藒"], +["cb80","藔藖",5,"藝",6,"藥藦藧藨藪",14,"恕刷耍摔衰甩帅栓拴霜双爽谁水睡税吮瞬顺舜说硕朔烁斯撕嘶思私司丝死肆寺嗣四伺似饲巳松耸怂颂送宋讼诵搜艘擞嗽苏酥俗素速粟僳塑溯宿诉肃酸蒜算虽隋随绥髓碎岁穗遂隧祟孙损笋蓑梭唆缩琐索锁所塌他它她塔"], +["cc40","藹藺藼藽藾蘀",4,"蘆",10,"蘒蘓蘔蘕蘗",15,"蘨蘪",13,"蘹蘺蘻蘽蘾蘿虀"], +["cc80","虁",11,"虒虓處",4,"虛虜虝號虠虡虣",7,"獭挞蹋踏胎苔抬台泰酞太态汰坍摊贪瘫滩坛檀痰潭谭谈坦毯袒碳探叹炭汤塘搪堂棠膛唐糖倘躺淌趟烫掏涛滔绦萄桃逃淘陶讨套特藤腾疼誊梯剔踢锑提题蹄啼体替嚏惕涕剃屉天添填田甜恬舔腆挑条迢眺跳贴铁帖厅听烃"], +["cd40","虭虯虰虲",6,"蚃",6,"蚎",4,"蚔蚖",5,"蚞",4,"蚥蚦蚫蚭蚮蚲蚳蚷蚸蚹蚻",4,"蛁蛂蛃蛅蛈蛌蛍蛒蛓蛕蛖蛗蛚蛜"], +["cd80","蛝蛠蛡蛢蛣蛥蛦蛧蛨蛪蛫蛬蛯蛵蛶蛷蛺蛻蛼蛽蛿蜁蜄蜅蜆蜋蜌蜎蜏蜐蜑蜔蜖汀廷停亭庭挺艇通桐酮瞳同铜彤童桶捅筒统痛偷投头透凸秃突图徒途涂屠土吐兔湍团推颓腿蜕褪退吞屯臀拖托脱鸵陀驮驼椭妥拓唾挖哇蛙洼娃瓦袜歪外豌弯湾玩顽丸烷完碗挽晚皖惋宛婉万腕汪王亡枉网往旺望忘妄威"], +["ce40","蜙蜛蜝蜟蜠蜤蜦蜧蜨蜪蜫蜬蜭蜯蜰蜲蜳蜵蜶蜸蜹蜺蜼蜽蝀",6,"蝊蝋蝍蝏蝐蝑蝒蝔蝕蝖蝘蝚",5,"蝡蝢蝦",7,"蝯蝱蝲蝳蝵"], +["ce80","蝷蝸蝹蝺蝿螀螁螄螆螇螉螊螌螎",4,"螔螕螖螘",6,"螠",4,"巍微危韦违桅围唯惟为潍维苇萎委伟伪尾纬未蔚味畏胃喂魏位渭谓尉慰卫瘟温蚊文闻纹吻稳紊问嗡翁瓮挝蜗涡窝我斡卧握沃巫呜钨乌污诬屋无芜梧吾吴毋武五捂午舞伍侮坞戊雾晤物勿务悟误昔熙析西硒矽晰嘻吸锡牺"], +["cf40","螥螦螧螩螪螮螰螱螲螴螶螷螸螹螻螼螾螿蟁",4,"蟇蟈蟉蟌",4,"蟔",6,"蟜蟝蟞蟟蟡蟢蟣蟤蟦蟧蟨蟩蟫蟬蟭蟯",9], +["cf80","蟺蟻蟼蟽蟿蠀蠁蠂蠄",5,"蠋",7,"蠔蠗蠘蠙蠚蠜",4,"蠣稀息希悉膝夕惜熄烯溪汐犀檄袭席习媳喜铣洗系隙戏细瞎虾匣霞辖暇峡侠狭下厦夏吓掀锨先仙鲜纤咸贤衔舷闲涎弦嫌显险现献县腺馅羡宪陷限线相厢镶香箱襄湘乡翔祥详想响享项巷橡像向象萧硝霄削哮嚣销消宵淆晓"], +["d040","蠤",13,"蠳",5,"蠺蠻蠽蠾蠿衁衂衃衆",5,"衎",5,"衕衖衘衚",6,"衦衧衪衭衯衱衳衴衵衶衸衹衺"], +["d080","衻衼袀袃袆袇袉袊袌袎袏袐袑袓袔袕袗",4,"袝",4,"袣袥",5,"小孝校肖啸笑效楔些歇蝎鞋协挟携邪斜胁谐写械卸蟹懈泄泻谢屑薪芯锌欣辛新忻心信衅星腥猩惺兴刑型形邢行醒幸杏性姓兄凶胸匈汹雄熊休修羞朽嗅锈秀袖绣墟戌需虚嘘须徐许蓄酗叙旭序畜恤絮婿绪续轩喧宣悬旋玄"], +["d140","袬袮袯袰袲",4,"袸袹袺袻袽袾袿裀裃裄裇裈裊裋裌裍裏裐裑裓裖裗裚",4,"裠裡裦裧裩",6,"裲裵裶裷裺裻製裿褀褁褃",5], 
+["d180","褉褋",4,"褑褔",4,"褜",4,"褢褣褤褦褧褨褩褬褭褮褯褱褲褳褵褷选癣眩绚靴薛学穴雪血勋熏循旬询寻驯巡殉汛训讯逊迅压押鸦鸭呀丫芽牙蚜崖衙涯雅哑亚讶焉咽阉烟淹盐严研蜒岩延言颜阎炎沿奄掩眼衍演艳堰燕厌砚雁唁彦焰宴谚验殃央鸯秧杨扬佯疡羊洋阳氧仰痒养样漾邀腰妖瑶"], +["d240","褸",8,"襂襃襅",24,"襠",5,"襧",19,"襼"], +["d280","襽襾覀覂覄覅覇",26,"摇尧遥窑谣姚咬舀药要耀椰噎耶爷野冶也页掖业叶曳腋夜液一壹医揖铱依伊衣颐夷遗移仪胰疑沂宜姨彝椅蚁倚已乙矣以艺抑易邑屹亿役臆逸肄疫亦裔意毅忆义益溢诣议谊译异翼翌绎茵荫因殷音阴姻吟银淫寅饮尹引隐"], +["d340","覢",30,"觃觍觓觔觕觗觘觙觛觝觟觠觡觢觤觧觨觩觪觬觭觮觰觱觲觴",6], +["d380","觻",4,"訁",5,"計",21,"印英樱婴鹰应缨莹萤营荧蝇迎赢盈影颖硬映哟拥佣臃痈庸雍踊蛹咏泳涌永恿勇用幽优悠忧尤由邮铀犹油游酉有友右佑釉诱又幼迂淤于盂榆虞愚舆余俞逾鱼愉渝渔隅予娱雨与屿禹宇语羽玉域芋郁吁遇喻峪御愈欲狱育誉"], +["d440","訞",31,"訿",8,"詉",21], +["d480","詟",25,"詺",6,"浴寓裕预豫驭鸳渊冤元垣袁原援辕园员圆猿源缘远苑愿怨院曰约越跃钥岳粤月悦阅耘云郧匀陨允运蕴酝晕韵孕匝砸杂栽哉灾宰载再在咱攒暂赞赃脏葬遭糟凿藻枣早澡蚤躁噪造皂灶燥责择则泽贼怎增憎曾赠扎喳渣札轧"], +["d540","誁",7,"誋",7,"誔",46], +["d580","諃",32,"铡闸眨栅榨咋乍炸诈摘斋宅窄债寨瞻毡詹粘沾盏斩辗崭展蘸栈占战站湛绽樟章彰漳张掌涨杖丈帐账仗胀瘴障招昭找沼赵照罩兆肇召遮折哲蛰辙者锗蔗这浙珍斟真甄砧臻贞针侦枕疹诊震振镇阵蒸挣睁征狰争怔整拯正政"], +["d640","諤",34,"謈",27], +["d680","謤謥謧",30,"帧症郑证芝枝支吱蜘知肢脂汁之织职直植殖执值侄址指止趾只旨纸志挚掷至致置帜峙制智秩稚质炙痔滞治窒中盅忠钟衷终种肿重仲众舟周州洲诌粥轴肘帚咒皱宙昼骤珠株蛛朱猪诸诛逐竹烛煮拄瞩嘱主著柱助蛀贮铸筑"], +["d740","譆",31,"譧",4,"譭",25], +["d780","讇",24,"讬讱讻诇诐诪谉谞住注祝驻抓爪拽专砖转撰赚篆桩庄装妆撞壮状椎锥追赘坠缀谆准捉拙卓桌琢茁酌啄着灼浊兹咨资姿滋淄孜紫仔籽滓子自渍字鬃棕踪宗综总纵邹走奏揍租足卒族祖诅阻组钻纂嘴醉最罪尊遵昨左佐柞做作坐座"], +["d840","谸",8,"豂豃豄豅豈豊豋豍",7,"豖豗豘豙豛",5,"豣",6,"豬",6,"豴豵豶豷豻",6,"貃貄貆貇"], +["d880","貈貋貍",6,"貕貖貗貙",20,"亍丌兀丐廿卅丕亘丞鬲孬噩丨禺丿匕乇夭爻卮氐囟胤馗毓睾鼗丶亟鼐乜乩亓芈孛啬嘏仄厍厝厣厥厮靥赝匚叵匦匮匾赜卦卣刂刈刎刭刳刿剀剌剞剡剜蒯剽劂劁劐劓冂罔亻仃仉仂仨仡仫仞伛仳伢佤仵伥伧伉伫佞佧攸佚佝"], +["d940","貮",62], +["d980","賭",32,"佟佗伲伽佶佴侑侉侃侏佾佻侪佼侬侔俦俨俪俅俚俣俜俑俟俸倩偌俳倬倏倮倭俾倜倌倥倨偾偃偕偈偎偬偻傥傧傩傺僖儆僭僬僦僮儇儋仝氽佘佥俎龠汆籴兮巽黉馘冁夔勹匍訇匐凫夙兕亠兖亳衮袤亵脔裒禀嬴蠃羸冫冱冽冼"], +["da40","贎",14,"贠赑赒赗赟赥赨赩赪赬赮赯赱赲赸",8,"趂趃趆趇趈趉趌",4,"趒趓趕",9,"趠趡"], +["da80","趢趤",12,"趲趶趷趹趻趽跀跁跂跅跇跈跉跊跍跐跒跓跔凇冖冢冥讠讦讧讪讴讵讷诂诃诋诏诎诒诓诔诖诘诙诜诟诠诤诨诩诮诰诳诶诹诼诿谀谂谄谇谌谏谑谒谔谕谖谙谛谘谝谟谠谡谥谧谪谫谮谯谲谳谵谶卩卺阝阢阡阱阪阽阼陂陉陔陟陧陬陲陴隈隍隗隰邗邛邝邙邬邡邴邳邶邺"], +["db40","跕跘跙跜跠跡跢跥跦跧跩跭跮跰跱跲跴跶跼跾",6,"踆踇踈踋踍踎踐踑踒踓踕",7,"踠踡踤",4,"踫踭踰踲踳踴踶踷踸踻踼踾"], +["db80","踿蹃蹅蹆蹌",4,"蹓",5,"蹚",11,"蹧蹨蹪蹫蹮蹱邸邰郏郅邾郐郄郇郓郦郢郜郗郛郫郯郾鄄鄢鄞鄣鄱鄯鄹酃酆刍奂劢劬劭劾哿勐勖勰叟燮矍廴凵凼鬯厶弁畚巯坌垩垡塾墼壅壑圩圬圪圳圹圮圯坜圻坂坩垅坫垆坼坻坨坭坶坳垭垤垌垲埏垧垴垓垠埕埘埚埙埒垸埴埯埸埤埝"], +["dc40","蹳蹵蹷",4,"蹽蹾躀躂躃躄躆躈",6,"躑躒躓躕",6,"躝躟",11,"躭躮躰躱躳",6,"躻",7], +["dc80","軃",10,"軏",21,"堋堍埽埭堀堞堙塄堠塥塬墁墉墚墀馨鼙懿艹艽艿芏芊芨芄芎芑芗芙芫芸芾芰苈苊苣芘芷芮苋苌苁芩芴芡芪芟苄苎芤苡茉苷苤茏茇苜苴苒苘茌苻苓茑茚茆茔茕苠苕茜荑荛荜茈莒茼茴茱莛荞茯荏荇荃荟荀茗荠茭茺茳荦荥"], +["dd40","軥",62], +["dd80","輤",32,"荨茛荩荬荪荭荮莰荸莳莴莠莪莓莜莅荼莶莩荽莸荻莘莞莨莺莼菁萁菥菘堇萘萋菝菽菖萜萸萑萆菔菟萏萃菸菹菪菅菀萦菰菡葜葑葚葙葳蒇蒈葺蒉葸萼葆葩葶蒌蒎萱葭蓁蓍蓐蓦蒽蓓蓊蒿蒺蓠蒡蒹蒴蒗蓥蓣蔌甍蔸蓰蔹蔟蔺"], +["de40","轅",32,"轪辀辌辒辝辠辡辢辤辥辦辧辪辬辭辮辯農辳辴辵辷辸辺辻込辿迀迃迆"], +["de80","迉",4,"迏迒迖迗迚迠迡迣迧迬迯迱迲迴迵迶迺迻迼迾迿逇逈逌逎逓逕逘蕖蔻蓿蓼蕙蕈蕨蕤蕞蕺瞢蕃蕲蕻薤薨薇薏蕹薮薜薅薹薷薰藓藁藜藿蘧蘅蘩蘖蘼廾弈夼奁耷奕奚奘匏尢尥尬尴扌扪抟抻拊拚拗拮挢拶挹捋捃掭揶捱捺掎掴捭掬掊捩掮掼揲揸揠揿揄揞揎摒揆掾摅摁搋搛搠搌搦搡摞撄摭撖"], +["df40","這逜連逤逥逧",5,"逰",4,"逷逹逺逽逿遀遃遅遆遈",4,"過達違遖遙遚遜",5,"遤遦遧適遪遫遬遯",4,"遶",6,"遾邁"], +["df80","還邅邆邇邉邊邌",4,"邒邔邖邘邚邜邞邟邠邤邥邧邨邩邫邭邲邷邼邽邿郀摺撷撸撙撺擀擐擗擤擢攉攥攮弋忒甙弑卟叱叽叩叨叻吒吖吆呋呒呓呔呖呃吡呗呙吣吲咂咔呷呱呤咚咛咄呶呦咝哐咭哂咴哒咧咦哓哔呲咣哕咻咿哌哙哚哜咩咪咤哝哏哞唛哧唠哽唔哳唢唣唏唑唧唪啧喏喵啉啭啁啕唿啐唼"], +["e040","郂郃郆郈郉郋郌郍郒郔郕郖郘郙郚郞郟郠郣郤郥郩郪郬郮郰郱郲郳郵郶郷郹郺郻郼郿鄀鄁鄃鄅",19,"鄚鄛鄜"], +["e080","鄝鄟鄠鄡鄤",10,"鄰鄲",6,"鄺",8,"酄唷啖啵啶啷唳唰啜喋嗒喃喱喹喈喁喟啾嗖喑啻嗟喽喾喔喙嗪嗷嗉嘟嗑嗫嗬嗔嗦嗝嗄嗯嗥嗲嗳嗌嗍嗨嗵嗤辔嘞嘈嘌嘁嘤嘣嗾嘀嘧嘭噘嘹噗嘬噍噢噙噜噌噔嚆噤噱噫噻噼嚅嚓嚯囔囗囝囡囵囫囹囿圄圊圉圜帏帙帔帑帱帻帼"], +["e140","酅酇酈酑酓酔酕酖酘酙酛酜酟酠酦酧酨酫酭酳酺酻酼醀",4,"醆醈醊醎醏醓",6,"醜",5,"醤",5,"醫醬醰醱醲醳醶醷醸醹醻"], +["e180","醼",10,"釈釋釐釒",9,"針",8,"帷幄幔幛幞幡岌屺岍岐岖岈岘岙岑岚岜岵岢岽岬岫岱岣峁岷峄峒峤峋峥崂崃崧崦崮崤崞崆崛嵘崾崴崽嵬嵛嵯嵝嵫嵋嵊嵩嵴嶂嶙嶝豳嶷巅彳彷徂徇徉後徕徙徜徨徭徵徼衢彡犭犰犴犷犸狃狁狎狍狒狨狯狩狲狴狷猁狳猃狺"], +["e240","釦",62], +["e280","鈥",32,"狻猗猓猡猊猞猝猕猢猹猥猬猸猱獐獍獗獠獬獯獾舛夥飧夤夂饣饧",5,"饴饷饽馀馄馇馊馍馐馑馓馔馕庀庑庋庖庥庠庹庵庾庳赓廒廑廛廨廪膺忄忉忖忏怃忮怄忡忤忾怅怆忪忭忸怙怵怦怛怏怍怩怫怊怿怡恸恹恻恺恂"], +["e340","鉆",45,"鉵",16], +["e380","銆",7,"銏",24,"恪恽悖悚悭悝悃悒悌悛惬悻悱惝惘惆惚悴愠愦愕愣惴愀愎愫慊慵憬憔憧憷懔懵忝隳闩闫闱闳闵闶闼闾阃阄阆阈阊阋阌阍阏阒阕阖阗阙阚丬爿戕氵汔汜汊沣沅沐沔沌汨汩汴汶沆沩泐泔沭泷泸泱泗沲泠泖泺泫泮沱泓泯泾"], +["e440","銨",5,"銯",24,"鋉",31], +["e480","鋩",32,"洹洧洌浃浈洇洄洙洎洫浍洮洵洚浏浒浔洳涑浯涞涠浞涓涔浜浠浼浣渚淇淅淞渎涿淠渑淦淝淙渖涫渌涮渫湮湎湫溲湟溆湓湔渲渥湄滟溱溘滠漭滢溥溧溽溻溷滗溴滏溏滂溟潢潆潇漤漕滹漯漶潋潴漪漉漩澉澍澌潸潲潼潺濑"], +["e540","錊",51,"錿",10], +["e580","鍊",31,"鍫濉澧澹澶濂濡濮濞濠濯瀚瀣瀛瀹瀵灏灞宀宄宕宓宥宸甯骞搴寤寮褰寰蹇謇辶迓迕迥迮迤迩迦迳迨逅逄逋逦逑逍逖逡逵逶逭逯遄遑遒遐遨遘遢遛暹遴遽邂邈邃邋彐彗彖彘尻咫屐屙孱屣屦羼弪弩弭艴弼鬻屮妁妃妍妩妪妣"], +["e640","鍬",34,"鎐",27], 
+["e680","鎬",29,"鏋鏌鏍妗姊妫妞妤姒妲妯姗妾娅娆姝娈姣姘姹娌娉娲娴娑娣娓婀婧婊婕娼婢婵胬媪媛婷婺媾嫫媲嫒嫔媸嫠嫣嫱嫖嫦嫘嫜嬉嬗嬖嬲嬷孀尕尜孚孥孳孑孓孢驵驷驸驺驿驽骀骁骅骈骊骐骒骓骖骘骛骜骝骟骠骢骣骥骧纟纡纣纥纨纩"], +["e740","鏎",7,"鏗",54], +["e780","鐎",32,"纭纰纾绀绁绂绉绋绌绐绔绗绛绠绡绨绫绮绯绱绲缍绶绺绻绾缁缂缃缇缈缋缌缏缑缒缗缙缜缛缟缡",6,"缪缫缬缭缯",4,"缵幺畿巛甾邕玎玑玮玢玟珏珂珑玷玳珀珉珈珥珙顼琊珩珧珞玺珲琏琪瑛琦琥琨琰琮琬"], +["e840","鐯",14,"鐿",43,"鑬鑭鑮鑯"], +["e880","鑰",20,"钑钖钘铇铏铓铔铚铦铻锜锠琛琚瑁瑜瑗瑕瑙瑷瑭瑾璜璎璀璁璇璋璞璨璩璐璧瓒璺韪韫韬杌杓杞杈杩枥枇杪杳枘枧杵枨枞枭枋杷杼柰栉柘栊柩枰栌柙枵柚枳柝栀柃枸柢栎柁柽栲栳桠桡桎桢桄桤梃栝桕桦桁桧桀栾桊桉栩梵梏桴桷梓桫棂楮棼椟椠棹"], +["e940","锧锳锽镃镈镋镕镚镠镮镴镵長",7,"門",42], +["e980","閫",32,"椤棰椋椁楗棣椐楱椹楠楂楝榄楫榀榘楸椴槌榇榈槎榉楦楣楹榛榧榻榫榭槔榱槁槊槟榕槠榍槿樯槭樗樘橥槲橄樾檠橐橛樵檎橹樽樨橘橼檑檐檩檗檫猷獒殁殂殇殄殒殓殍殚殛殡殪轫轭轱轲轳轵轶轸轷轹轺轼轾辁辂辄辇辋"], +["ea40","闌",27,"闬闿阇阓阘阛阞阠阣",6,"阫阬阭阯阰阷阸阹阺阾陁陃陊陎陏陑陒陓陖陗"], +["ea80","陘陙陚陜陝陞陠陣陥陦陫陭",4,"陳陸",12,"隇隉隊辍辎辏辘辚軎戋戗戛戟戢戡戥戤戬臧瓯瓴瓿甏甑甓攴旮旯旰昊昙杲昃昕昀炅曷昝昴昱昶昵耆晟晔晁晏晖晡晗晷暄暌暧暝暾曛曜曦曩贲贳贶贻贽赀赅赆赈赉赇赍赕赙觇觊觋觌觎觏觐觑牮犟牝牦牯牾牿犄犋犍犏犒挈挲掰"], +["eb40","隌階隑隒隓隕隖隚際隝",9,"隨",7,"隱隲隴隵隷隸隺隻隿雂雃雈雊雋雐雑雓雔雖",9,"雡",6,"雫"], +["eb80","雬雭雮雰雱雲雴雵雸雺電雼雽雿霂霃霅霊霋霌霐霑霒霔霕霗",4,"霝霟霠搿擘耄毪毳毽毵毹氅氇氆氍氕氘氙氚氡氩氤氪氲攵敕敫牍牒牖爰虢刖肟肜肓肼朊肽肱肫肭肴肷胧胨胩胪胛胂胄胙胍胗朐胝胫胱胴胭脍脎胲胼朕脒豚脶脞脬脘脲腈腌腓腴腙腚腱腠腩腼腽腭腧塍媵膈膂膑滕膣膪臌朦臊膻"], +["ec40","霡",8,"霫霬霮霯霱霳",4,"霺霻霼霽霿",18,"靔靕靗靘靚靜靝靟靣靤靦靧靨靪",7], +["ec80","靲靵靷",4,"靽",7,"鞆",4,"鞌鞎鞏鞐鞓鞕鞖鞗鞙",4,"臁膦欤欷欹歃歆歙飑飒飓飕飙飚殳彀毂觳斐齑斓於旆旄旃旌旎旒旖炀炜炖炝炻烀炷炫炱烨烊焐焓焖焯焱煳煜煨煅煲煊煸煺熘熳熵熨熠燠燔燧燹爝爨灬焘煦熹戾戽扃扈扉礻祀祆祉祛祜祓祚祢祗祠祯祧祺禅禊禚禧禳忑忐"], +["ed40","鞞鞟鞡鞢鞤",6,"鞬鞮鞰鞱鞳鞵",46], +["ed80","韤韥韨韮",4,"韴韷",23,"怼恝恚恧恁恙恣悫愆愍慝憩憝懋懑戆肀聿沓泶淼矶矸砀砉砗砘砑斫砭砜砝砹砺砻砟砼砥砬砣砩硎硭硖硗砦硐硇硌硪碛碓碚碇碜碡碣碲碹碥磔磙磉磬磲礅磴礓礤礞礴龛黹黻黼盱眄眍盹眇眈眚眢眙眭眦眵眸睐睑睇睃睚睨"], +["ee40","頏",62], +["ee80","顎",32,"睢睥睿瞍睽瞀瞌瞑瞟瞠瞰瞵瞽町畀畎畋畈畛畲畹疃罘罡罟詈罨罴罱罹羁罾盍盥蠲钅钆钇钋钊钌钍钏钐钔钗钕钚钛钜钣钤钫钪钭钬钯钰钲钴钶",4,"钼钽钿铄铈",6,"铐铑铒铕铖铗铙铘铛铞铟铠铢铤铥铧铨铪"], +["ef40","顯",5,"颋颎颒颕颙颣風",37,"飏飐飔飖飗飛飜飝飠",4], +["ef80","飥飦飩",30,"铩铫铮铯铳铴铵铷铹铼铽铿锃锂锆锇锉锊锍锎锏锒",4,"锘锛锝锞锟锢锪锫锩锬锱锲锴锶锷锸锼锾锿镂锵镄镅镆镉镌镎镏镒镓镔镖镗镘镙镛镞镟镝镡镢镤",8,"镯镱镲镳锺矧矬雉秕秭秣秫稆嵇稃稂稞稔"], +["f040","餈",4,"餎餏餑",28,"餯",26], +["f080","饊",9,"饖",12,"饤饦饳饸饹饻饾馂馃馉稹稷穑黏馥穰皈皎皓皙皤瓞瓠甬鸠鸢鸨",4,"鸲鸱鸶鸸鸷鸹鸺鸾鹁鹂鹄鹆鹇鹈鹉鹋鹌鹎鹑鹕鹗鹚鹛鹜鹞鹣鹦",6,"鹱鹭鹳疒疔疖疠疝疬疣疳疴疸痄疱疰痃痂痖痍痣痨痦痤痫痧瘃痱痼痿瘐瘀瘅瘌瘗瘊瘥瘘瘕瘙"], +["f140","馌馎馚",10,"馦馧馩",47], +["f180","駙",32,"瘛瘼瘢瘠癀瘭瘰瘿瘵癃瘾瘳癍癞癔癜癖癫癯翊竦穸穹窀窆窈窕窦窠窬窨窭窳衤衩衲衽衿袂袢裆袷袼裉裢裎裣裥裱褚裼裨裾裰褡褙褓褛褊褴褫褶襁襦襻疋胥皲皴矜耒耔耖耜耠耢耥耦耧耩耨耱耋耵聃聆聍聒聩聱覃顸颀颃"], +["f240","駺",62], +["f280","騹",32,"颉颌颍颏颔颚颛颞颟颡颢颥颦虍虔虬虮虿虺虼虻蚨蚍蚋蚬蚝蚧蚣蚪蚓蚩蚶蛄蚵蛎蚰蚺蚱蚯蛉蛏蚴蛩蛱蛲蛭蛳蛐蜓蛞蛴蛟蛘蛑蜃蜇蛸蜈蜊蜍蜉蜣蜻蜞蜥蜮蜚蜾蝈蜴蜱蜩蜷蜿螂蜢蝽蝾蝻蝠蝰蝌蝮螋蝓蝣蝼蝤蝙蝥螓螯螨蟒"], +["f340","驚",17,"驲骃骉骍骎骔骕骙骦骩",6,"骲骳骴骵骹骻骽骾骿髃髄髆",4,"髍髎髏髐髒體髕髖髗髙髚髛髜"], +["f380","髝髞髠髢髣髤髥髧髨髩髪髬髮髰",8,"髺髼",6,"鬄鬅鬆蟆螈螅螭螗螃螫蟥螬螵螳蟋蟓螽蟑蟀蟊蟛蟪蟠蟮蠖蠓蟾蠊蠛蠡蠹蠼缶罂罄罅舐竺竽笈笃笄笕笊笫笏筇笸笪笙笮笱笠笥笤笳笾笞筘筚筅筵筌筝筠筮筻筢筲筱箐箦箧箸箬箝箨箅箪箜箢箫箴篑篁篌篝篚篥篦篪簌篾篼簏簖簋"], +["f440","鬇鬉",5,"鬐鬑鬒鬔",10,"鬠鬡鬢鬤",10,"鬰鬱鬳",7,"鬽鬾鬿魀魆魊魋魌魎魐魒魓魕",5], +["f480","魛",32,"簟簪簦簸籁籀臾舁舂舄臬衄舡舢舣舭舯舨舫舸舻舳舴舾艄艉艋艏艚艟艨衾袅袈裘裟襞羝羟羧羯羰羲籼敉粑粝粜粞粢粲粼粽糁糇糌糍糈糅糗糨艮暨羿翎翕翥翡翦翩翮翳糸絷綦綮繇纛麸麴赳趄趔趑趱赧赭豇豉酊酐酎酏酤"], +["f540","魼",62], +["f580","鮻",32,"酢酡酰酩酯酽酾酲酴酹醌醅醐醍醑醢醣醪醭醮醯醵醴醺豕鹾趸跫踅蹙蹩趵趿趼趺跄跖跗跚跞跎跏跛跆跬跷跸跣跹跻跤踉跽踔踝踟踬踮踣踯踺蹀踹踵踽踱蹉蹁蹂蹑蹒蹊蹰蹶蹼蹯蹴躅躏躔躐躜躞豸貂貊貅貘貔斛觖觞觚觜"], +["f640","鯜",62], +["f680","鰛",32,"觥觫觯訾謦靓雩雳雯霆霁霈霏霎霪霭霰霾龀龃龅",5,"龌黾鼋鼍隹隼隽雎雒瞿雠銎銮鋈錾鍪鏊鎏鐾鑫鱿鲂鲅鲆鲇鲈稣鲋鲎鲐鲑鲒鲔鲕鲚鲛鲞",5,"鲥",4,"鲫鲭鲮鲰",7,"鲺鲻鲼鲽鳄鳅鳆鳇鳊鳋"], +["f740","鰼",62], +["f780","鱻鱽鱾鲀鲃鲄鲉鲊鲌鲏鲓鲖鲗鲘鲙鲝鲪鲬鲯鲹鲾",4,"鳈鳉鳑鳒鳚鳛鳠鳡鳌",4,"鳓鳔鳕鳗鳘鳙鳜鳝鳟鳢靼鞅鞑鞒鞔鞯鞫鞣鞲鞴骱骰骷鹘骶骺骼髁髀髅髂髋髌髑魅魃魇魉魈魍魑飨餍餮饕饔髟髡髦髯髫髻髭髹鬈鬏鬓鬟鬣麽麾縻麂麇麈麋麒鏖麝麟黛黜黝黠黟黢黩黧黥黪黯鼢鼬鼯鼹鼷鼽鼾齄"], +["f840","鳣",62], +["f880","鴢",32], +["f940","鵃",62], +["f980","鶂",32], +["fa40","鶣",62], +["fa80","鷢",32], +["fb40","鸃",27,"鸤鸧鸮鸰鸴鸻鸼鹀鹍鹐鹒鹓鹔鹖鹙鹝鹟鹠鹡鹢鹥鹮鹯鹲鹴",9,"麀"], +["fb80","麁麃麄麅麆麉麊麌",5,"麔",8,"麞麠",5,"麧麨麩麪"], +["fc40","麫",8,"麵麶麷麹麺麼麿",4,"黅黆黇黈黊黋黌黐黒黓黕黖黗黙黚點黡黣黤黦黨黫黬黭黮黰",8,"黺黽黿",6], +["fc80","鼆",4,"鼌鼏鼑鼒鼔鼕鼖鼘鼚",5,"鼡鼣",8,"鼭鼮鼰鼱"], +["fd40","鼲",4,"鼸鼺鼼鼿",4,"齅",10,"齒",38], +["fd80","齹",5,"龁龂龍",11,"龜龝龞龡",4,"郎凉秊裏隣"], +["fe40","兀嗀﨎﨏﨑﨓﨔礼﨟蘒﨡﨣﨤﨧﨨﨩"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp949.json b/node_modules/iconv-lite/encodings/tables/cp949.json new file mode 100644 index 0000000..2022a00 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp949.json @@ -0,0 +1,273 @@ +[ 
+["0","\u0000",127], +["8141","갂갃갅갆갋",4,"갘갞갟갡갢갣갥",6,"갮갲갳갴"], +["8161","갵갶갷갺갻갽갾갿걁",9,"걌걎",5,"걕"], +["8181","걖걗걙걚걛걝",18,"걲걳걵걶걹걻",4,"겂겇겈겍겎겏겑겒겓겕",6,"겞겢",5,"겫겭겮겱",6,"겺겾겿곀곂곃곅곆곇곉곊곋곍",7,"곖곘",7,"곢곣곥곦곩곫곭곮곲곴곷",4,"곾곿괁괂괃괅괇",4,"괎괐괒괓"], +["8241","괔괕괖괗괙괚괛괝괞괟괡",7,"괪괫괮",5], +["8261","괶괷괹괺괻괽",6,"굆굈굊",5,"굑굒굓굕굖굗"], +["8281","굙",7,"굢굤",7,"굮굯굱굲굷굸굹굺굾궀궃",4,"궊궋궍궎궏궑",10,"궞",5,"궥",17,"궸",7,"귂귃귅귆귇귉",6,"귒귔",7,"귝귞귟귡귢귣귥",18], +["8341","귺귻귽귾긂",5,"긊긌긎",5,"긕",7], +["8361","긝",18,"긲긳긵긶긹긻긼"], +["8381","긽긾긿깂깄깇깈깉깋깏깑깒깓깕깗",4,"깞깢깣깤깦깧깪깫깭깮깯깱",6,"깺깾",5,"꺆",5,"꺍",46,"꺿껁껂껃껅",6,"껎껒",5,"껚껛껝",8], +["8441","껦껧껩껪껬껮",5,"껵껶껷껹껺껻껽",8], +["8461","꼆꼉꼊꼋꼌꼎꼏꼑",18], +["8481","꼤",7,"꼮꼯꼱꼳꼵",6,"꼾꽀꽄꽅꽆꽇꽊",5,"꽑",10,"꽞",5,"꽦",18,"꽺",5,"꾁꾂꾃꾅꾆꾇꾉",6,"꾒꾓꾔꾖",5,"꾝",26,"꾺꾻꾽꾾"], +["8541","꾿꿁",5,"꿊꿌꿏",4,"꿕",6,"꿝",4], +["8561","꿢",5,"꿪",5,"꿲꿳꿵꿶꿷꿹",6,"뀂뀃"], +["8581","뀅",6,"뀍뀎뀏뀑뀒뀓뀕",6,"뀞",9,"뀩",26,"끆끇끉끋끍끏끐끑끒끖끘끚끛끜끞",29,"끾끿낁낂낃낅",6,"낎낐낒",5,"낛낝낞낣낤"], +["8641","낥낦낧낪낰낲낶낷낹낺낻낽",6,"냆냊",5,"냒"], +["8661","냓냕냖냗냙",6,"냡냢냣냤냦",10], +["8681","냱",22,"넊넍넎넏넑넔넕넖넗넚넞",4,"넦넧넩넪넫넭",6,"넶넺",5,"녂녃녅녆녇녉",6,"녒녓녖녗녙녚녛녝녞녟녡",22,"녺녻녽녾녿놁놃",4,"놊놌놎놏놐놑놕놖놗놙놚놛놝"], +["8741","놞",9,"놩",15], +["8761","놹",18,"뇍뇎뇏뇑뇒뇓뇕"], +["8781","뇖",5,"뇞뇠",7,"뇪뇫뇭뇮뇯뇱",7,"뇺뇼뇾",5,"눆눇눉눊눍",6,"눖눘눚",5,"눡",18,"눵",6,"눽",26,"뉙뉚뉛뉝뉞뉟뉡",6,"뉪",4], +["8841","뉯",4,"뉶",5,"뉽",6,"늆늇늈늊",4], +["8861","늏늒늓늕늖늗늛",4,"늢늤늧늨늩늫늭늮늯늱늲늳늵늶늷"], +["8881","늸",15,"닊닋닍닎닏닑닓",4,"닚닜닞닟닠닡닣닧닩닪닰닱닲닶닼닽닾댂댃댅댆댇댉",6,"댒댖",5,"댝",54,"덗덙덚덝덠덡덢덣"], +["8941","덦덨덪덬덭덯덲덳덵덶덷덹",6,"뎂뎆",5,"뎍"], +["8961","뎎뎏뎑뎒뎓뎕",10,"뎢",5,"뎩뎪뎫뎭"], +["8981","뎮",21,"돆돇돉돊돍돏돑돒돓돖돘돚돜돞돟돡돢돣돥돦돧돩",18,"돽",18,"됑",6,"됙됚됛됝됞됟됡",6,"됪됬",7,"됵",15], +["8a41","둅",10,"둒둓둕둖둗둙",6,"둢둤둦"], +["8a61","둧",4,"둭",18,"뒁뒂"], +["8a81","뒃",4,"뒉",19,"뒞",5,"뒥뒦뒧뒩뒪뒫뒭",7,"뒶뒸뒺",5,"듁듂듃듅듆듇듉",6,"듑듒듓듔듖",5,"듞듟듡듢듥듧",4,"듮듰듲",5,"듹",26,"딖딗딙딚딝"], +["8b41","딞",5,"딦딫",4,"딲딳딵딶딷딹",6,"땂땆"], +["8b61","땇땈땉땊땎땏땑땒땓땕",6,"땞땢",8], +["8b81","땫",52,"떢떣떥떦떧떩떬떭떮떯떲떶",4,"떾떿뗁뗂뗃뗅",6,"뗎뗒",5,"뗙",18,"뗭",18], +["8c41","똀",15,"똒똓똕똖똗똙",4], +["8c61","똞",6,"똦",5,"똭",6,"똵",5], +["8c81","똻",12,"뙉",26,"뙥뙦뙧뙩",50,"뚞뚟뚡뚢뚣뚥",5,"뚭뚮뚯뚰뚲",16], +["8d41","뛃",16,"뛕",8], +["8d61","뛞",17,"뛱뛲뛳뛵뛶뛷뛹뛺"], +["8d81","뛻",4,"뜂뜃뜄뜆",33,"뜪뜫뜭뜮뜱",6,"뜺뜼",7,"띅띆띇띉띊띋띍",6,"띖",9,"띡띢띣띥띦띧띩",6,"띲띴띶",5,"띾띿랁랂랃랅",6,"랎랓랔랕랚랛랝랞"], +["8e41","랟랡",6,"랪랮",5,"랶랷랹",8], +["8e61","럂",4,"럈럊",19], +["8e81","럞",13,"럮럯럱럲럳럵",6,"럾렂",4,"렊렋렍렎렏렑",6,"렚렜렞",5,"렦렧렩렪렫렭",6,"렶렺",5,"롁롂롃롅",11,"롒롔",7,"롞롟롡롢롣롥",6,"롮롰롲",5,"롹롺롻롽",7], +["8f41","뢅",7,"뢎",17], +["8f61","뢠",7,"뢩",6,"뢱뢲뢳뢵뢶뢷뢹",4], +["8f81","뢾뢿룂룄룆",5,"룍룎룏룑룒룓룕",7,"룞룠룢",5,"룪룫룭룮룯룱",6,"룺룼룾",5,"뤅",18,"뤙",6,"뤡",26,"뤾뤿륁륂륃륅",6,"륍륎륐륒",5], +["9041","륚륛륝륞륟륡",6,"륪륬륮",5,"륶륷륹륺륻륽"], +["9061","륾",5,"릆릈릋릌릏",15], +["9081","릟",12,"릮릯릱릲릳릵",6,"릾맀맂",5,"맊맋맍맓",4,"맚맜맟맠맢맦맧맩맪맫맭",6,"맶맻",4,"먂",5,"먉",11,"먖",33,"먺먻먽먾먿멁멃멄멅멆"], +["9141","멇멊멌멏멐멑멒멖멗멙멚멛멝",6,"멦멪",5], +["9161","멲멳멵멶멷멹",9,"몆몈몉몊몋몍",5], +["9181","몓",20,"몪몭몮몯몱몳",4,"몺몼몾",5,"뫅뫆뫇뫉",14,"뫚",33,"뫽뫾뫿묁묂묃묅",7,"묎묐묒",5,"묙묚묛묝묞묟묡",6], +["9241","묨묪묬",7,"묷묹묺묿",4,"뭆뭈뭊뭋뭌뭎뭑뭒"], +["9261","뭓뭕뭖뭗뭙",7,"뭢뭤",7,"뭭",4], +["9281","뭲",21,"뮉뮊뮋뮍뮎뮏뮑",18,"뮥뮦뮧뮩뮪뮫뮭",6,"뮵뮶뮸",7,"믁믂믃믅믆믇믉",6,"믑믒믔",35,"믺믻믽믾밁"], +["9341","밃",4,"밊밎밐밒밓밙밚밠밡밢밣밦밨밪밫밬밮밯밲밳밵"], +["9361","밶밷밹",6,"뱂뱆뱇뱈뱊뱋뱎뱏뱑",8], +["9381","뱚뱛뱜뱞",37,"벆벇벉벊벍벏",4,"벖벘벛",4,"벢벣벥벦벩",6,"벲벶",5,"벾벿볁볂볃볅",7,"볎볒볓볔볖볗볙볚볛볝",22,"볷볹볺볻볽"], +["9441","볾",5,"봆봈봊",5,"봑봒봓봕",8], +["9461","봞",5,"봥",6,"봭",12], +["9481","봺",5,"뵁",6,"뵊뵋뵍뵎뵏뵑",6,"뵚",9,"뵥뵦뵧뵩",22,"붂붃붅붆붋",4,"붒붔붖붗붘붛붝",6,"붥",10,"붱",6,"붹",24], +["9541","뷒뷓뷖뷗뷙뷚뷛뷝",11,"뷪",5,"뷱"], +["9561","뷲뷳뷵뷶뷷뷹",6,"븁븂븄븆",5,"븎븏븑븒븓"], +["9581","븕",6,"븞븠",35,"빆빇빉빊빋빍빏",4,"빖빘빜빝빞빟빢빣빥빦빧빩빫",4,"빲빶",4,"빾빿뺁뺂뺃뺅",6,"뺎뺒",5,"뺚",13,"뺩",14], +["9641","뺸",23,"뻒뻓"], +["9661","뻕뻖뻙",6,"뻡뻢뻦",5,"뻭",8], +["9681","뻶",10,"뼂",5,"뼊",13,"뼚뼞",33,"뽂뽃뽅뽆뽇뽉",6,"뽒뽓뽔뽖",44], 
+["9741","뾃",16,"뾕",8], +["9761","뾞",17,"뾱",7], +["9781","뾹",11,"뿆",5,"뿎뿏뿑뿒뿓뿕",6,"뿝뿞뿠뿢",89,"쀽쀾쀿"], +["9841","쁀",16,"쁒",5,"쁙쁚쁛"], +["9861","쁝쁞쁟쁡",6,"쁪",15], +["9881","쁺",21,"삒삓삕삖삗삙",6,"삢삤삦",5,"삮삱삲삷",4,"삾샂샃샄샆샇샊샋샍샎샏샑",6,"샚샞",5,"샦샧샩샪샫샭",6,"샶샸샺",5,"섁섂섃섅섆섇섉",6,"섑섒섓섔섖",5,"섡섢섥섨섩섪섫섮"], +["9941","섲섳섴섵섷섺섻섽섾섿셁",6,"셊셎",5,"셖셗"], +["9961","셙셚셛셝",6,"셦셪",5,"셱셲셳셵셶셷셹셺셻"], +["9981","셼",8,"솆",5,"솏솑솒솓솕솗",4,"솞솠솢솣솤솦솧솪솫솭솮솯솱",11,"솾",5,"쇅쇆쇇쇉쇊쇋쇍",6,"쇕쇖쇙",6,"쇡쇢쇣쇥쇦쇧쇩",6,"쇲쇴",7,"쇾쇿숁숂숃숅",6,"숎숐숒",5,"숚숛숝숞숡숢숣"], +["9a41","숤숥숦숧숪숬숮숰숳숵",16], +["9a61","쉆쉇쉉",6,"쉒쉓쉕쉖쉗쉙",6,"쉡쉢쉣쉤쉦"], +["9a81","쉧",4,"쉮쉯쉱쉲쉳쉵",6,"쉾슀슂",5,"슊",5,"슑",6,"슙슚슜슞",5,"슦슧슩슪슫슮",5,"슶슸슺",33,"싞싟싡싢싥",5,"싮싰싲싳싴싵싷싺싽싾싿쌁",6,"쌊쌋쌎쌏"], +["9b41","쌐쌑쌒쌖쌗쌙쌚쌛쌝",6,"쌦쌧쌪",8], +["9b61","쌳",17,"썆",7], +["9b81","썎",25,"썪썫썭썮썯썱썳",4,"썺썻썾",5,"쎅쎆쎇쎉쎊쎋쎍",50,"쏁",22,"쏚"], +["9c41","쏛쏝쏞쏡쏣",4,"쏪쏫쏬쏮",5,"쏶쏷쏹",5], +["9c61","쏿",8,"쐉",6,"쐑",9], +["9c81","쐛",8,"쐥",6,"쐭쐮쐯쐱쐲쐳쐵",6,"쐾",9,"쑉",26,"쑦쑧쑩쑪쑫쑭",6,"쑶쑷쑸쑺",5,"쒁",18,"쒕",6,"쒝",12], +["9d41","쒪",13,"쒹쒺쒻쒽",8], +["9d61","쓆",25], +["9d81","쓠",8,"쓪",5,"쓲쓳쓵쓶쓷쓹쓻쓼쓽쓾씂",9,"씍씎씏씑씒씓씕",6,"씝",10,"씪씫씭씮씯씱",6,"씺씼씾",5,"앆앇앋앏앐앑앒앖앚앛앜앟앢앣앥앦앧앩",6,"앲앶",5,"앾앿얁얂얃얅얆얈얉얊얋얎얐얒얓얔"], +["9e41","얖얙얚얛얝얞얟얡",7,"얪",9,"얶"], +["9e61","얷얺얿",4,"엋엍엏엒엓엕엖엗엙",6,"엢엤엦엧"], +["9e81","엨엩엪엫엯엱엲엳엵엸엹엺엻옂옃옄옉옊옋옍옎옏옑",6,"옚옝",6,"옦옧옩옪옫옯옱옲옶옸옺옼옽옾옿왂왃왅왆왇왉",6,"왒왖",5,"왞왟왡",10,"왭왮왰왲",5,"왺왻왽왾왿욁",6,"욊욌욎",5,"욖욗욙욚욛욝",6,"욦"], +["9f41","욨욪",5,"욲욳욵욶욷욻",4,"웂웄웆",5,"웎"], +["9f61","웏웑웒웓웕",6,"웞웟웢",5,"웪웫웭웮웯웱웲"], +["9f81","웳",4,"웺웻웼웾",5,"윆윇윉윊윋윍",6,"윖윘윚",5,"윢윣윥윦윧윩",6,"윲윴윶윸윹윺윻윾윿읁읂읃읅",4,"읋읎읐읙읚읛읝읞읟읡",6,"읩읪읬",7,"읶읷읹읺읻읿잀잁잂잆잋잌잍잏잒잓잕잙잛",4,"잢잧",4,"잮잯잱잲잳잵잶잷"], +["a041","잸잹잺잻잾쟂",5,"쟊쟋쟍쟏쟑",6,"쟙쟚쟛쟜"], +["a061","쟞",5,"쟥쟦쟧쟩쟪쟫쟭",13], +["a081","쟻",4,"젂젃젅젆젇젉젋",4,"젒젔젗",4,"젞젟젡젢젣젥",6,"젮젰젲",5,"젹젺젻젽젾젿졁",6,"졊졋졎",5,"졕",26,"졲졳졵졶졷졹졻",4,"좂좄좈좉좊좎",5,"좕",7,"좞좠좢좣좤"], +["a141","좥좦좧좩",18,"좾좿죀죁"], +["a161","죂죃죅죆죇죉죊죋죍",6,"죖죘죚",5,"죢죣죥"], +["a181","죦",14,"죶",5,"죾죿줁줂줃줇",4,"줎 、。·‥…¨〃­―∥\∼‘’“”〔〕〈",9,"±×÷≠≤≥∞∴°′″℃Å¢£¥♂♀∠⊥⌒∂∇≡≒§※☆★○●◎◇◆□■△▲▽▼→←↑↓↔〓≪≫√∽∝∵∫∬∈∋⊆⊇⊂⊃∪∩∧∨¬"], +["a241","줐줒",5,"줙",18], +["a261","줭",6,"줵",18], +["a281","쥈",7,"쥒쥓쥕쥖쥗쥙",6,"쥢쥤",7,"쥭쥮쥯⇒⇔∀∃´~ˇ˘˝˚˙¸˛¡¿ː∮∑∏¤℉‰◁◀▷▶♤♠♡♥♧♣⊙◈▣◐◑▒▤▥▨▧▦▩♨☏☎☜☞¶†‡↕↗↙↖↘♭♩♪♬㉿㈜№㏇™㏂㏘℡€®"], +["a341","쥱쥲쥳쥵",6,"쥽",10,"즊즋즍즎즏"], +["a361","즑",6,"즚즜즞",16], +["a381","즯",16,"짂짃짅짆짉짋",4,"짒짔짗짘짛!",58,"₩]",32," ̄"], +["a441","짞짟짡짣짥짦짨짩짪짫짮짲",5,"짺짻짽짾짿쨁쨂쨃쨄"], +["a461","쨅쨆쨇쨊쨎",5,"쨕쨖쨗쨙",12], +["a481","쨦쨧쨨쨪",28,"ㄱ",93], +["a541","쩇",4,"쩎쩏쩑쩒쩓쩕",6,"쩞쩢",5,"쩩쩪"], +["a561","쩫",17,"쩾",5,"쪅쪆"], +["a581","쪇",16,"쪙",14,"ⅰ",9], +["a5b0","Ⅰ",9], +["a5c1","Α",16,"Σ",6], +["a5e1","α",16,"σ",6], +["a641","쪨",19,"쪾쪿쫁쫂쫃쫅"], +["a661","쫆",5,"쫎쫐쫒쫔쫕쫖쫗쫚",5,"쫡",6], +["a681","쫨쫩쫪쫫쫭",6,"쫵",18,"쬉쬊─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂┒┑┚┙┖┕┎┍┞┟┡┢┦┧┩┪┭┮┱┲┵┶┹┺┽┾╀╁╃",7], +["a741","쬋",4,"쬑쬒쬓쬕쬖쬗쬙",6,"쬢",7], +["a761","쬪",22,"쭂쭃쭄"], +["a781","쭅쭆쭇쭊쭋쭍쭎쭏쭑",6,"쭚쭛쭜쭞",5,"쭥",7,"㎕㎖㎗ℓ㎘㏄㎣㎤㎥㎦㎙",9,"㏊㎍㎎㎏㏏㎈㎉㏈㎧㎨㎰",9,"㎀",4,"㎺",5,"㎐",4,"Ω㏀㏁㎊㎋㎌㏖㏅㎭㎮㎯㏛㎩㎪㎫㎬㏝㏐㏓㏃㏉㏜㏆"], +["a841","쭭",10,"쭺",14], +["a861","쮉",18,"쮝",6], +["a881","쮤",19,"쮹",11,"ÆЪĦ"], +["a8a6","IJ"], +["a8a8","ĿŁØŒºÞŦŊ"], +["a8b1","㉠",27,"ⓐ",25,"①",14,"½⅓⅔¼¾⅛⅜⅝⅞"], +["a941","쯅",14,"쯕",10], +["a961","쯠쯡쯢쯣쯥쯦쯨쯪",18], +["a981","쯽",14,"찎찏찑찒찓찕",6,"찞찟찠찣찤æđðħıijĸŀłøœßþŧŋʼn㈀",27,"⒜",25,"⑴",14,"¹²³⁴ⁿ₁₂₃₄"], +["aa41","찥찦찪찫찭찯찱",6,"찺찿",4,"챆챇챉챊챋챍챎"], +["aa61","챏",4,"챖챚",5,"챡챢챣챥챧챩",6,"챱챲"], +["aa81","챳챴챶",29,"ぁ",82], +["ab41","첔첕첖첗첚첛첝첞첟첡",6,"첪첮",5,"첶첷첹"], +["ab61","첺첻첽",6,"쳆쳈쳊",5,"쳑쳒쳓쳕",5], +["ab81","쳛",8,"쳥",6,"쳭쳮쳯쳱",12,"ァ",85], +["ac41","쳾쳿촀촂",5,"촊촋촍촎촏촑",6,"촚촜촞촟촠"], +["ac61","촡촢촣촥촦촧촩촪촫촭",11,"촺",4], +["ac81","촿",28,"쵝쵞쵟А",5,"ЁЖ",25], +["acd1","а",5,"ёж",25], +["ad41","쵡쵢쵣쵥",6,"쵮쵰쵲",5,"쵹",7], +["ad61","춁",6,"춉",10,"춖춗춙춚춛춝춞춟"], +["ad81","춠춡춢춣춦춨춪",5,"춱",18,"췅"], 
+["ae41","췆",5,"췍췎췏췑",16], +["ae61","췢",5,"췩췪췫췭췮췯췱",6,"췺췼췾",4], +["ae81","츃츅츆츇츉츊츋츍",6,"츕츖츗츘츚",5,"츢츣츥츦츧츩츪츫"], +["af41","츬츭츮츯츲츴츶",19], +["af61","칊",13,"칚칛칝칞칢",5,"칪칬"], +["af81","칮",5,"칶칷칹칺칻칽",6,"캆캈캊",5,"캒캓캕캖캗캙"], +["b041","캚",5,"캢캦",5,"캮",12], +["b061","캻",5,"컂",19], +["b081","컖",13,"컦컧컩컪컭",6,"컶컺",5,"가각간갇갈갉갊감",7,"같",4,"갠갤갬갭갯갰갱갸갹갼걀걋걍걔걘걜거걱건걷걸걺검겁것겄겅겆겉겊겋게겐겔겜겝겟겠겡겨격겪견겯결겸겹겻겼경곁계곈곌곕곗고곡곤곧골곪곬곯곰곱곳공곶과곽관괄괆"], +["b141","켂켃켅켆켇켉",6,"켒켔켖",5,"켝켞켟켡켢켣"], +["b161","켥",6,"켮켲",5,"켹",11], +["b181","콅",14,"콖콗콙콚콛콝",6,"콦콨콪콫콬괌괍괏광괘괜괠괩괬괭괴괵괸괼굄굅굇굉교굔굘굡굣구국군굳굴굵굶굻굼굽굿궁궂궈궉권궐궜궝궤궷귀귁귄귈귐귑귓규균귤그극근귿글긁금급긋긍긔기긱긴긷길긺김깁깃깅깆깊까깍깎깐깔깖깜깝깟깠깡깥깨깩깬깰깸"], +["b241","콭콮콯콲콳콵콶콷콹",6,"쾁쾂쾃쾄쾆",5,"쾍"], +["b261","쾎",18,"쾢",5,"쾩"], +["b281","쾪",5,"쾱",18,"쿅",6,"깹깻깼깽꺄꺅꺌꺼꺽꺾껀껄껌껍껏껐껑께껙껜껨껫껭껴껸껼꼇꼈꼍꼐꼬꼭꼰꼲꼴꼼꼽꼿꽁꽂꽃꽈꽉꽐꽜꽝꽤꽥꽹꾀꾄꾈꾐꾑꾕꾜꾸꾹꾼꿀꿇꿈꿉꿋꿍꿎꿔꿜꿨꿩꿰꿱꿴꿸뀀뀁뀄뀌뀐뀔뀜뀝뀨끄끅끈끊끌끎끓끔끕끗끙"], +["b341","쿌",19,"쿢쿣쿥쿦쿧쿩"], +["b361","쿪",5,"쿲쿴쿶",5,"쿽쿾쿿퀁퀂퀃퀅",5], +["b381","퀋",5,"퀒",5,"퀙",19,"끝끼끽낀낄낌낍낏낑나낙낚난낟날낡낢남납낫",4,"낱낳내낵낸낼냄냅냇냈냉냐냑냔냘냠냥너넉넋넌널넒넓넘넙넛넜넝넣네넥넨넬넴넵넷넸넹녀녁년녈념녑녔녕녘녜녠노녹논놀놂놈놉놋농높놓놔놘놜놨뇌뇐뇔뇜뇝"], +["b441","퀮",5,"퀶퀷퀹퀺퀻퀽",6,"큆큈큊",5], +["b461","큑큒큓큕큖큗큙",6,"큡",10,"큮큯"], +["b481","큱큲큳큵",6,"큾큿킀킂",18,"뇟뇨뇩뇬뇰뇹뇻뇽누눅눈눋눌눔눕눗눙눠눴눼뉘뉜뉠뉨뉩뉴뉵뉼늄늅늉느늑는늘늙늚늠늡늣능늦늪늬늰늴니닉닌닐닒님닙닛닝닢다닥닦단닫",4,"닳담답닷",4,"닿대댁댄댈댐댑댓댔댕댜더덕덖던덛덜덞덟덤덥"], +["b541","킕",14,"킦킧킩킪킫킭",5], +["b561","킳킶킸킺",5,"탂탃탅탆탇탊",5,"탒탖",4], +["b581","탛탞탟탡탢탣탥",6,"탮탲",5,"탹",11,"덧덩덫덮데덱덴델뎀뎁뎃뎄뎅뎌뎐뎔뎠뎡뎨뎬도독돈돋돌돎돐돔돕돗동돛돝돠돤돨돼됐되된될됨됩됫됴두둑둔둘둠둡둣둥둬뒀뒈뒝뒤뒨뒬뒵뒷뒹듀듄듈듐듕드득든듣들듦듬듭듯등듸디딕딘딛딜딤딥딧딨딩딪따딱딴딸"], +["b641","턅",7,"턎",17], +["b661","턠",15,"턲턳턵턶턷턹턻턼턽턾"], +["b681","턿텂텆",5,"텎텏텑텒텓텕",6,"텞텠텢",5,"텩텪텫텭땀땁땃땄땅땋때땍땐땔땜땝땟땠땡떠떡떤떨떪떫떰떱떳떴떵떻떼떽뗀뗄뗌뗍뗏뗐뗑뗘뗬또똑똔똘똥똬똴뙈뙤뙨뚜뚝뚠뚤뚫뚬뚱뛔뛰뛴뛸뜀뜁뜅뜨뜩뜬뜯뜰뜸뜹뜻띄띈띌띔띕띠띤띨띰띱띳띵라락란랄람랍랏랐랑랒랖랗"], +["b741","텮",13,"텽",6,"톅톆톇톉톊"], +["b761","톋",20,"톢톣톥톦톧"], +["b781","톩",6,"톲톴톶톷톸톹톻톽톾톿퇁",14,"래랙랜랠램랩랫랬랭랴략랸럇량러럭런럴럼럽럿렀렁렇레렉렌렐렘렙렛렝려력련렬렴렵렷렸령례롄롑롓로록론롤롬롭롯롱롸롼뢍뢨뢰뢴뢸룀룁룃룅료룐룔룝룟룡루룩룬룰룸룹룻룽뤄뤘뤠뤼뤽륀륄륌륏륑류륙륜률륨륩"], +["b841","퇐",7,"퇙",17], +["b861","퇫",8,"퇵퇶퇷퇹",13], +["b881","툈툊",5,"툑",24,"륫륭르륵른를름릅릇릉릊릍릎리릭린릴림립릿링마막만많",4,"맘맙맛망맞맡맣매맥맨맬맴맵맷맸맹맺먀먁먈먕머먹먼멀멂멈멉멋멍멎멓메멕멘멜멤멥멧멨멩며멱면멸몃몄명몇몌모목몫몬몰몲몸몹못몽뫄뫈뫘뫙뫼"], +["b941","툪툫툮툯툱툲툳툵",6,"툾퉀퉂",5,"퉉퉊퉋퉌"], +["b961","퉍",14,"퉝",6,"퉥퉦퉧퉨"], +["b981","퉩",22,"튂튃튅튆튇튉튊튋튌묀묄묍묏묑묘묜묠묩묫무묵묶문묻물묽묾뭄뭅뭇뭉뭍뭏뭐뭔뭘뭡뭣뭬뮈뮌뮐뮤뮨뮬뮴뮷므믄믈믐믓미믹민믿밀밂밈밉밋밌밍및밑바",4,"받",4,"밤밥밧방밭배백밴밸뱀뱁뱃뱄뱅뱉뱌뱍뱐뱝버벅번벋벌벎범법벗"], +["ba41","튍튎튏튒튓튔튖",5,"튝튞튟튡튢튣튥",6,"튭"], +["ba61","튮튯튰튲",5,"튺튻튽튾틁틃",4,"틊틌",5], +["ba81","틒틓틕틖틗틙틚틛틝",6,"틦",9,"틲틳틵틶틷틹틺벙벚베벡벤벧벨벰벱벳벴벵벼벽변별볍볏볐병볕볘볜보복볶본볼봄봅봇봉봐봔봤봬뵀뵈뵉뵌뵐뵘뵙뵤뵨부북분붇불붉붊붐붑붓붕붙붚붜붤붰붸뷔뷕뷘뷜뷩뷰뷴뷸븀븃븅브븍븐블븜븝븟비빅빈빌빎빔빕빗빙빚빛빠빡빤"], +["bb41","틻",4,"팂팄팆",5,"팏팑팒팓팕팗",4,"팞팢팣"], +["bb61","팤팦팧팪팫팭팮팯팱",6,"팺팾",5,"퍆퍇퍈퍉"], +["bb81","퍊",31,"빨빪빰빱빳빴빵빻빼빽뺀뺄뺌뺍뺏뺐뺑뺘뺙뺨뻐뻑뻔뻗뻘뻠뻣뻤뻥뻬뼁뼈뼉뼘뼙뼛뼜뼝뽀뽁뽄뽈뽐뽑뽕뾔뾰뿅뿌뿍뿐뿔뿜뿟뿡쀼쁑쁘쁜쁠쁨쁩삐삑삔삘삠삡삣삥사삭삯산삳살삵삶삼삽삿샀상샅새색샌샐샘샙샛샜생샤"], +["bc41","퍪",17,"퍾퍿펁펂펃펅펆펇"], +["bc61","펈펉펊펋펎펒",5,"펚펛펝펞펟펡",6,"펪펬펮"], +["bc81","펯",4,"펵펶펷펹펺펻펽",6,"폆폇폊",5,"폑",5,"샥샨샬샴샵샷샹섀섄섈섐섕서",4,"섣설섦섧섬섭섯섰성섶세섹센셀셈셉셋셌셍셔셕션셜셤셥셧셨셩셰셴셸솅소속솎손솔솖솜솝솟송솥솨솩솬솰솽쇄쇈쇌쇔쇗쇘쇠쇤쇨쇰쇱쇳쇼쇽숀숄숌숍숏숑수숙순숟술숨숩숫숭"], +["bd41","폗폙",7,"폢폤",7,"폮폯폱폲폳폵폶폷"], +["bd61","폸폹폺폻폾퐀퐂",5,"퐉",13], +["bd81","퐗",5,"퐞",25,"숯숱숲숴쉈쉐쉑쉔쉘쉠쉥쉬쉭쉰쉴쉼쉽쉿슁슈슉슐슘슛슝스슥슨슬슭슴습슷승시식신싣실싫심십싯싱싶싸싹싻싼쌀쌈쌉쌌쌍쌓쌔쌕쌘쌜쌤쌥쌨쌩썅써썩썬썰썲썸썹썼썽쎄쎈쎌쏀쏘쏙쏜쏟쏠쏢쏨쏩쏭쏴쏵쏸쐈쐐쐤쐬쐰"], +["be41","퐸",7,"푁푂푃푅",14], +["be61","푔",7,"푝푞푟푡푢푣푥",7,"푮푰푱푲"], +["be81","푳",4,"푺푻푽푾풁풃",4,"풊풌풎",5,"풕",8,"쐴쐼쐽쑈쑤쑥쑨쑬쑴쑵쑹쒀쒔쒜쒸쒼쓩쓰쓱쓴쓸쓺쓿씀씁씌씐씔씜씨씩씬씰씸씹씻씽아악안앉않알앍앎앓암압앗았앙앝앞애액앤앨앰앱앳앴앵야약얀얄얇얌얍얏양얕얗얘얜얠얩어억언얹얻얼얽얾엄",6,"엌엎"], +["bf41","풞",10,"풪",14], +["bf61","풹",18,"퓍퓎퓏퓑퓒퓓퓕"], +["bf81","퓖",5,"퓝퓞퓠",7,"퓩퓪퓫퓭퓮퓯퓱",6,"퓹퓺퓼에엑엔엘엠엡엣엥여역엮연열엶엷염",5,"옅옆옇예옌옐옘옙옛옜오옥온올옭옮옰옳옴옵옷옹옻와왁완왈왐왑왓왔왕왜왝왠왬왯왱외왹왼욀욈욉욋욍요욕욘욜욤욥욧용우욱운울욹욺움웁웃웅워웍원월웜웝웠웡웨"], +["c041","퓾",5,"픅픆픇픉픊픋픍",6,"픖픘",5], +["c061","픞",25], +["c081","픸픹픺픻픾픿핁핂핃핅",6,"핎핐핒",5,"핚핛핝핞핟핡핢핣웩웬웰웸웹웽위윅윈윌윔윕윗윙유육윤율윰윱윳융윷으윽은을읊음읍읏응",7,"읜읠읨읫이익인일읽읾잃임입잇있잉잊잎자작잔잖잗잘잚잠잡잣잤장잦재잭잰잴잼잽잿쟀쟁쟈쟉쟌쟎쟐쟘쟝쟤쟨쟬저적전절젊"], 
+["c141","핤핦핧핪핬핮",5,"핶핷핹핺핻핽",6,"햆햊햋"], +["c161","햌햍햎햏햑",19,"햦햧"], +["c181","햨",31,"점접젓정젖제젝젠젤젬젭젯젱져젼졀졈졉졌졍졔조족존졸졺좀좁좃종좆좇좋좌좍좔좝좟좡좨좼좽죄죈죌죔죕죗죙죠죡죤죵주죽준줄줅줆줌줍줏중줘줬줴쥐쥑쥔쥘쥠쥡쥣쥬쥰쥴쥼즈즉즌즐즘즙즛증지직진짇질짊짐집짓"], +["c241","헊헋헍헎헏헑헓",4,"헚헜헞",5,"헦헧헩헪헫헭헮"], +["c261","헯",4,"헶헸헺",5,"혂혃혅혆혇혉",6,"혒"], +["c281","혖",5,"혝혞혟혡혢혣혥",7,"혮",9,"혺혻징짖짙짚짜짝짠짢짤짧짬짭짯짰짱째짹짼쨀쨈쨉쨋쨌쨍쨔쨘쨩쩌쩍쩐쩔쩜쩝쩟쩠쩡쩨쩽쪄쪘쪼쪽쫀쫄쫌쫍쫏쫑쫓쫘쫙쫠쫬쫴쬈쬐쬔쬘쬠쬡쭁쭈쭉쭌쭐쭘쭙쭝쭤쭸쭹쮜쮸쯔쯤쯧쯩찌찍찐찔찜찝찡찢찧차착찬찮찰참찹찻"], +["c341","혽혾혿홁홂홃홄홆홇홊홌홎홏홐홒홓홖홗홙홚홛홝",4], +["c361","홢",4,"홨홪",5,"홲홳홵",11], +["c381","횁횂횄횆",5,"횎횏횑횒횓횕",7,"횞횠횢",5,"횩횪찼창찾채책챈챌챔챕챗챘챙챠챤챦챨챰챵처척천철첨첩첫첬청체첵첸첼쳄쳅쳇쳉쳐쳔쳤쳬쳰촁초촉촌촐촘촙촛총촤촨촬촹최쵠쵤쵬쵭쵯쵱쵸춈추축춘출춤춥춧충춰췄췌췐취췬췰췸췹췻췽츄츈츌츔츙츠측츤츨츰츱츳층"], +["c441","횫횭횮횯횱",7,"횺횼",7,"훆훇훉훊훋"], +["c461","훍훎훏훐훒훓훕훖훘훚",5,"훡훢훣훥훦훧훩",4], +["c481","훮훯훱훲훳훴훶",5,"훾훿휁휂휃휅",11,"휒휓휔치칙친칟칠칡침칩칫칭카칵칸칼캄캅캇캉캐캑캔캘캠캡캣캤캥캬캭컁커컥컨컫컬컴컵컷컸컹케켁켄켈켐켑켓켕켜켠켤켬켭켯켰켱켸코콕콘콜콤콥콧콩콰콱콴콸쾀쾅쾌쾡쾨쾰쿄쿠쿡쿤쿨쿰쿱쿳쿵쿼퀀퀄퀑퀘퀭퀴퀵퀸퀼"], +["c541","휕휖휗휚휛휝휞휟휡",6,"휪휬휮",5,"휶휷휹"], +["c561","휺휻휽",6,"흅흆흈흊",5,"흒흓흕흚",4], +["c581","흟흢흤흦흧흨흪흫흭흮흯흱흲흳흵",6,"흾흿힀힂",5,"힊힋큄큅큇큉큐큔큘큠크큭큰클큼큽킁키킥킨킬킴킵킷킹타탁탄탈탉탐탑탓탔탕태택탠탤탬탭탯탰탱탸턍터턱턴털턺텀텁텃텄텅테텍텐텔템텝텟텡텨텬텼톄톈토톡톤톨톰톱톳통톺톼퇀퇘퇴퇸툇툉툐투툭툰툴툼툽툿퉁퉈퉜"], +["c641","힍힎힏힑",6,"힚힜힞",5], +["c6a1","퉤튀튁튄튈튐튑튕튜튠튤튬튱트특튼튿틀틂틈틉틋틔틘틜틤틥티틱틴틸팀팁팃팅파팍팎판팔팖팜팝팟팠팡팥패팩팬팰팸팹팻팼팽퍄퍅퍼퍽펀펄펌펍펏펐펑페펙펜펠펨펩펫펭펴편펼폄폅폈평폐폘폡폣포폭폰폴폼폽폿퐁"], +["c7a1","퐈퐝푀푄표푠푤푭푯푸푹푼푿풀풂품풉풋풍풔풩퓌퓐퓔퓜퓟퓨퓬퓰퓸퓻퓽프픈플픔픕픗피픽핀필핌핍핏핑하학한할핥함합핫항해핵핸핼햄햅햇했행햐향허헉헌헐헒험헙헛헝헤헥헨헬헴헵헷헹혀혁현혈혐협혓혔형혜혠"], +["c8a1","혤혭호혹혼홀홅홈홉홋홍홑화확환활홧황홰홱홴횃횅회획횐횔횝횟횡효횬횰횹횻후훅훈훌훑훔훗훙훠훤훨훰훵훼훽휀휄휑휘휙휜휠휨휩휫휭휴휵휸휼흄흇흉흐흑흔흖흗흘흙흠흡흣흥흩희흰흴흼흽힁히힉힌힐힘힙힛힝"], +["caa1","伽佳假價加可呵哥嘉嫁家暇架枷柯歌珂痂稼苛茄街袈訶賈跏軻迦駕刻却各恪慤殼珏脚覺角閣侃刊墾奸姦干幹懇揀杆柬桿澗癎看磵稈竿簡肝艮艱諫間乫喝曷渴碣竭葛褐蝎鞨勘坎堪嵌感憾戡敢柑橄減甘疳監瞰紺邯鑑鑒龕"], +["cba1","匣岬甲胛鉀閘剛堈姜岡崗康强彊慷江畺疆糠絳綱羌腔舡薑襁講鋼降鱇介价個凱塏愷愾慨改槪漑疥皆盖箇芥蓋豈鎧開喀客坑更粳羹醵倨去居巨拒据據擧渠炬祛距踞車遽鉅鋸乾件健巾建愆楗腱虔蹇鍵騫乞傑杰桀儉劍劒檢"], +["cca1","瞼鈐黔劫怯迲偈憩揭擊格檄激膈覡隔堅牽犬甄絹繭肩見譴遣鵑抉決潔結缺訣兼慊箝謙鉗鎌京俓倞傾儆勁勍卿坰境庚徑慶憬擎敬景暻更梗涇炅烱璟璥瓊痙硬磬竟競絅經耕耿脛莖警輕逕鏡頃頸驚鯨係啓堺契季屆悸戒桂械"], +["cda1","棨溪界癸磎稽系繫繼計誡谿階鷄古叩告呱固姑孤尻庫拷攷故敲暠枯槁沽痼皐睾稿羔考股膏苦苽菰藁蠱袴誥賈辜錮雇顧高鼓哭斛曲梏穀谷鵠困坤崑昆梱棍滾琨袞鯤汨滑骨供公共功孔工恐恭拱控攻珙空蚣貢鞏串寡戈果瓜"], +["cea1","科菓誇課跨過鍋顆廓槨藿郭串冠官寬慣棺款灌琯瓘管罐菅觀貫關館刮恝括适侊光匡壙廣曠洸炚狂珖筐胱鑛卦掛罫乖傀塊壞怪愧拐槐魁宏紘肱轟交僑咬喬嬌嶠巧攪敎校橋狡皎矯絞翹膠蕎蛟較轎郊餃驕鮫丘久九仇俱具勾"], +["cfa1","區口句咎嘔坵垢寇嶇廐懼拘救枸柩構歐毆毬求溝灸狗玖球瞿矩究絿耉臼舅舊苟衢謳購軀逑邱鉤銶駒驅鳩鷗龜國局菊鞠鞫麴君窘群裙軍郡堀屈掘窟宮弓穹窮芎躬倦券勸卷圈拳捲權淃眷厥獗蕨蹶闕机櫃潰詭軌饋句晷歸貴"], +["d0a1","鬼龜叫圭奎揆槻珪硅窺竅糾葵規赳逵閨勻均畇筠菌鈞龜橘克剋劇戟棘極隙僅劤勤懃斤根槿瑾筋芹菫覲謹近饉契今妗擒昑檎琴禁禽芩衾衿襟金錦伋及急扱汲級給亘兢矜肯企伎其冀嗜器圻基埼夔奇妓寄岐崎己幾忌技旗旣"], +["d1a1","朞期杞棋棄機欺氣汽沂淇玘琦琪璂璣畸畿碁磯祁祇祈祺箕紀綺羈耆耭肌記譏豈起錡錤飢饑騎騏驥麒緊佶吉拮桔金喫儺喇奈娜懦懶拏拿癩",5,"那樂",4,"諾酪駱亂卵暖欄煖爛蘭難鸞捏捺南嵐枏楠湳濫男藍襤拉"], +["d2a1","納臘蠟衲囊娘廊",4,"乃來內奈柰耐冷女年撚秊念恬拈捻寧寗努勞奴弩怒擄櫓爐瑙盧",5,"駑魯",10,"濃籠聾膿農惱牢磊腦賂雷尿壘",7,"嫩訥杻紐勒",5,"能菱陵尼泥匿溺多茶"], +["d3a1","丹亶但單團壇彖斷旦檀段湍短端簞緞蛋袒鄲鍛撻澾獺疸達啖坍憺擔曇淡湛潭澹痰聃膽蕁覃談譚錟沓畓答踏遝唐堂塘幢戇撞棠當糖螳黨代垈坮大對岱帶待戴擡玳臺袋貸隊黛宅德悳倒刀到圖堵塗導屠島嶋度徒悼挑掉搗桃"], +["d4a1","棹櫂淘渡滔濤燾盜睹禱稻萄覩賭跳蹈逃途道都鍍陶韜毒瀆牘犢獨督禿篤纛讀墩惇敦旽暾沌焞燉豚頓乭突仝冬凍動同憧東桐棟洞潼疼瞳童胴董銅兜斗杜枓痘竇荳讀豆逗頭屯臀芚遁遯鈍得嶝橙燈登等藤謄鄧騰喇懶拏癩羅"], +["d5a1","蘿螺裸邏樂洛烙珞絡落諾酪駱丹亂卵欄欒瀾爛蘭鸞剌辣嵐擥攬欖濫籃纜藍襤覽拉臘蠟廊朗浪狼琅瑯螂郞來崍徠萊冷掠略亮倆兩凉梁樑粮粱糧良諒輛量侶儷勵呂廬慮戾旅櫚濾礪藜蠣閭驢驪麗黎力曆歷瀝礫轢靂憐戀攣漣"], +["d6a1","煉璉練聯蓮輦連鍊冽列劣洌烈裂廉斂殮濂簾獵令伶囹寧岺嶺怜玲笭羚翎聆逞鈴零靈領齡例澧禮醴隷勞怒撈擄櫓潞瀘爐盧老蘆虜路輅露魯鷺鹵碌祿綠菉錄鹿麓論壟弄朧瀧瓏籠聾儡瀨牢磊賂賚賴雷了僚寮廖料燎療瞭聊蓼"], +["d7a1","遼鬧龍壘婁屢樓淚漏瘻累縷蔞褸鏤陋劉旒柳榴流溜瀏琉瑠留瘤硫謬類六戮陸侖倫崙淪綸輪律慄栗率隆勒肋凜凌楞稜綾菱陵俚利厘吏唎履悧李梨浬犁狸理璃異痢籬罹羸莉裏裡里釐離鯉吝潾燐璘藺躪隣鱗麟林淋琳臨霖砬"], +["d8a1","立笠粒摩瑪痲碼磨馬魔麻寞幕漠膜莫邈万卍娩巒彎慢挽晩曼滿漫灣瞞萬蔓蠻輓饅鰻唜抹末沫茉襪靺亡妄忘忙望網罔芒茫莽輞邙埋妹媒寐昧枚梅每煤罵買賣邁魅脈貊陌驀麥孟氓猛盲盟萌冪覓免冕勉棉沔眄眠綿緬面麵滅"], +["d9a1","蔑冥名命明暝椧溟皿瞑茗蓂螟酩銘鳴袂侮冒募姆帽慕摸摹暮某模母毛牟牡瑁眸矛耗芼茅謀謨貌木沐牧目睦穆鶩歿沒夢朦蒙卯墓妙廟描昴杳渺猫竗苗錨務巫憮懋戊拇撫无楙武毋無珷畝繆舞茂蕪誣貿霧鵡墨默們刎吻問文"], +["daa1","汶紊紋聞蚊門雯勿沕物味媚尾嵋彌微未梶楣渼湄眉米美薇謎迷靡黴岷悶愍憫敏旻旼民泯玟珉緡閔密蜜謐剝博拍搏撲朴樸泊珀璞箔粕縛膊舶薄迫雹駁伴半反叛拌搬攀斑槃泮潘班畔瘢盤盼磐磻礬絆般蟠返頒飯勃拔撥渤潑"], +["dba1","發跋醱鉢髮魃倣傍坊妨尨幇彷房放方旁昉枋榜滂磅紡肪膀舫芳蒡蚌訪謗邦防龐倍俳北培徘拜排杯湃焙盃背胚裴裵褙賠輩配陪伯佰帛柏栢白百魄幡樊煩燔番磻繁蕃藩飜伐筏罰閥凡帆梵氾汎泛犯範范法琺僻劈壁擘檗璧癖"], +["dca1","碧蘗闢霹便卞弁變辨辯邊別瞥鱉鼈丙倂兵屛幷昞昺柄棅炳甁病秉竝輧餠騈保堡報寶普步洑湺潽珤甫菩補褓譜輔伏僕匐卜宓復服福腹茯蔔複覆輹輻馥鰒本乶俸奉封峯峰捧棒烽熢琫縫蓬蜂逢鋒鳳不付俯傅剖副否咐埠夫婦"], +["dda1","孚孵富府復扶敷斧浮溥父符簿缶腐腑膚艀芙莩訃負賦賻赴趺部釜阜附駙鳧北分吩噴墳奔奮忿憤扮昐汾焚盆粉糞紛芬賁雰不佛弗彿拂崩朋棚硼繃鵬丕備匕匪卑妃婢庇悲憊扉批斐枇榧比毖毗毘沸泌琵痺砒碑秕秘粃緋翡肥"], 
+["dea1","脾臂菲蜚裨誹譬費鄙非飛鼻嚬嬪彬斌檳殯浜濱瀕牝玭貧賓頻憑氷聘騁乍事些仕伺似使俟僿史司唆嗣四士奢娑寫寺射巳師徙思捨斜斯柶査梭死沙泗渣瀉獅砂社祀祠私篩紗絲肆舍莎蓑蛇裟詐詞謝賜赦辭邪飼駟麝削數朔索"], +["dfa1","傘刪山散汕珊産疝算蒜酸霰乷撒殺煞薩三參杉森渗芟蔘衫揷澁鈒颯上傷像償商喪嘗孀尙峠常床庠廂想桑橡湘爽牀狀相祥箱翔裳觴詳象賞霜塞璽賽嗇塞穡索色牲生甥省笙墅壻嶼序庶徐恕抒捿敍暑曙書栖棲犀瑞筮絮緖署"], +["e0a1","胥舒薯西誓逝鋤黍鼠夕奭席惜昔晳析汐淅潟石碩蓆釋錫仙僊先善嬋宣扇敾旋渲煽琁瑄璇璿癬禪線繕羨腺膳船蘚蟬詵跣選銑鐥饍鮮卨屑楔泄洩渫舌薛褻設說雪齧剡暹殲纖蟾贍閃陝攝涉燮葉城姓宬性惺成星晟猩珹盛省筬"], +["e1a1","聖聲腥誠醒世勢歲洗稅笹細說貰召嘯塑宵小少巢所掃搔昭梳沼消溯瀟炤燒甦疏疎瘙笑篠簫素紹蔬蕭蘇訴逍遡邵銷韶騷俗屬束涑粟續謖贖速孫巽損蓀遜飡率宋悚松淞訟誦送頌刷殺灑碎鎖衰釗修受嗽囚垂壽嫂守岫峀帥愁"], +["e2a1","戍手授搜收數樹殊水洙漱燧狩獸琇璲瘦睡秀穗竪粹綏綬繡羞脩茱蒐蓚藪袖誰讐輸遂邃酬銖銹隋隧隨雖需須首髓鬚叔塾夙孰宿淑潚熟琡璹肅菽巡徇循恂旬栒楯橓殉洵淳珣盾瞬筍純脣舜荀蓴蕣詢諄醇錞順馴戌術述鉥崇崧"], +["e3a1","嵩瑟膝蝨濕拾習褶襲丞乘僧勝升承昇繩蠅陞侍匙嘶始媤尸屎屍市弑恃施是時枾柴猜矢示翅蒔蓍視試詩諡豕豺埴寔式息拭植殖湜熄篒蝕識軾食飾伸侁信呻娠宸愼新晨燼申神紳腎臣莘薪藎蜃訊身辛辰迅失室實悉審尋心沁"], +["e4a1","沈深瀋甚芯諶什十拾雙氏亞俄兒啞娥峨我牙芽莪蛾衙訝阿雅餓鴉鵝堊岳嶽幄惡愕握樂渥鄂鍔顎鰐齷安岸按晏案眼雁鞍顔鮟斡謁軋閼唵岩巖庵暗癌菴闇壓押狎鴨仰央怏昻殃秧鴦厓哀埃崖愛曖涯碍艾隘靄厄扼掖液縊腋額"], +["e5a1","櫻罌鶯鸚也倻冶夜惹揶椰爺耶若野弱掠略約若葯蒻藥躍亮佯兩凉壤孃恙揚攘敭暘梁楊樣洋瀁煬痒瘍禳穰糧羊良襄諒讓釀陽量養圄御於漁瘀禦語馭魚齬億憶抑檍臆偃堰彦焉言諺孼蘖俺儼嚴奄掩淹嶪業円予余勵呂女如廬"], +["e6a1","旅歟汝濾璵礖礪與艅茹輿轝閭餘驪麗黎亦力域役易曆歷疫繹譯轢逆驛嚥堧姸娟宴年延憐戀捐挻撚椽沇沿涎涓淵演漣烟然煙煉燃燕璉硏硯秊筵緣練縯聯衍軟輦蓮連鉛鍊鳶列劣咽悅涅烈熱裂說閱厭廉念捻染殮炎焰琰艶苒"], +["e7a1","簾閻髥鹽曄獵燁葉令囹塋寧嶺嶸影怜映暎楹榮永泳渶潁濚瀛瀯煐營獰玲瑛瑩瓔盈穎纓羚聆英詠迎鈴鍈零霙靈領乂倪例刈叡曳汭濊猊睿穢芮藝蘂禮裔詣譽豫醴銳隸霓預五伍俉傲午吾吳嗚塢墺奧娛寤悟惡懊敖旿晤梧汚澳"], +["e8a1","烏熬獒筽蜈誤鰲鼇屋沃獄玉鈺溫瑥瘟穩縕蘊兀壅擁瓮甕癰翁邕雍饔渦瓦窩窪臥蛙蝸訛婉完宛梡椀浣玩琓琬碗緩翫脘腕莞豌阮頑曰往旺枉汪王倭娃歪矮外嵬巍猥畏了僚僥凹堯夭妖姚寥寮尿嶢拗搖撓擾料曜樂橈燎燿瑤療"], +["e9a1","窈窯繇繞耀腰蓼蟯要謠遙遼邀饒慾欲浴縟褥辱俑傭冗勇埇墉容庸慂榕涌湧溶熔瑢用甬聳茸蓉踊鎔鏞龍于佑偶優又友右宇寓尤愚憂旴牛玗瑀盂祐禑禹紆羽芋藕虞迂遇郵釪隅雨雩勖彧旭昱栯煜稶郁頊云暈橒殞澐熉耘芸蕓"], +["eaa1","運隕雲韻蔚鬱亐熊雄元原員圓園垣媛嫄寃怨愿援沅洹湲源爰猿瑗苑袁轅遠阮院願鴛月越鉞位偉僞危圍委威尉慰暐渭爲瑋緯胃萎葦蔿蝟衛褘謂違韋魏乳侑儒兪劉唯喩孺宥幼幽庾悠惟愈愉揄攸有杻柔柚柳楡楢油洧流游溜"], +["eba1","濡猶猷琉瑜由留癒硫紐維臾萸裕誘諛諭踰蹂遊逾遺酉釉鍮類六堉戮毓肉育陸倫允奫尹崙淪潤玧胤贇輪鈗閏律慄栗率聿戎瀜絨融隆垠恩慇殷誾銀隱乙吟淫蔭陰音飮揖泣邑凝應膺鷹依倚儀宜意懿擬椅毅疑矣義艤薏蟻衣誼"], +["eca1","議醫二以伊利吏夷姨履已弛彛怡易李梨泥爾珥理異痍痢移罹而耳肄苡荑裏裡貽貳邇里離飴餌匿溺瀷益翊翌翼謚人仁刃印吝咽因姻寅引忍湮燐璘絪茵藺蚓認隣靭靷鱗麟一佚佾壹日溢逸鎰馹任壬妊姙恁林淋稔臨荏賃入卄"], +["eda1","立笠粒仍剩孕芿仔刺咨姉姿子字孜恣慈滋炙煮玆瓷疵磁紫者自茨蔗藉諮資雌作勺嚼斫昨灼炸爵綽芍酌雀鵲孱棧殘潺盞岑暫潛箴簪蠶雜丈仗匠場墻壯奬將帳庄張掌暲杖樟檣欌漿牆狀獐璋章粧腸臟臧莊葬蔣薔藏裝贓醬長"], +["eea1","障再哉在宰才材栽梓渽滓災縡裁財載齋齎爭箏諍錚佇低儲咀姐底抵杵楮樗沮渚狙猪疽箸紵苧菹著藷詛貯躇這邸雎齟勣吊嫡寂摘敵滴狄炙的積笛籍績翟荻謫賊赤跡蹟迪迹適鏑佃佺傳全典前剪塡塼奠專展廛悛戰栓殿氈澱"], +["efa1","煎琠田甸畑癲筌箋箭篆纏詮輾轉鈿銓錢鐫電顚顫餞切截折浙癤竊節絶占岾店漸点粘霑鮎點接摺蝶丁井亭停偵呈姃定幀庭廷征情挺政整旌晶晸柾楨檉正汀淀淨渟湞瀞炡玎珽町睛碇禎程穽精綎艇訂諪貞鄭酊釘鉦鋌錠霆靖"], +["f0a1","靜頂鼎制劑啼堤帝弟悌提梯濟祭第臍薺製諸蹄醍除際霽題齊俎兆凋助嘲弔彫措操早晁曺曹朝條棗槽漕潮照燥爪璪眺祖祚租稠窕粗糟組繰肇藻蚤詔調趙躁造遭釣阻雕鳥族簇足鏃存尊卒拙猝倧宗從悰慫棕淙琮種終綜縱腫"], +["f1a1","踪踵鍾鐘佐坐左座挫罪主住侏做姝胄呪周嗾奏宙州廚晝朱柱株注洲湊澍炷珠疇籌紂紬綢舟蛛註誅走躊輳週酎酒鑄駐竹粥俊儁准埈寯峻晙樽浚準濬焌畯竣蠢逡遵雋駿茁中仲衆重卽櫛楫汁葺增憎曾拯烝甑症繒蒸證贈之只"], +["f2a1","咫地址志持指摯支旨智枝枳止池沚漬知砥祉祗紙肢脂至芝芷蜘誌識贄趾遲直稙稷織職唇嗔塵振搢晉晋桭榛殄津溱珍瑨璡畛疹盡眞瞋秦縉縝臻蔯袗診賑軫辰進鎭陣陳震侄叱姪嫉帙桎瓆疾秩窒膣蛭質跌迭斟朕什執潗緝輯"], +["f3a1","鏶集徵懲澄且侘借叉嗟嵯差次此磋箚茶蹉車遮捉搾着窄錯鑿齪撰澯燦璨瓚竄簒纂粲纘讚贊鑽餐饌刹察擦札紮僭參塹慘慙懺斬站讒讖倉倡創唱娼廠彰愴敞昌昶暢槍滄漲猖瘡窓脹艙菖蒼債埰寀寨彩採砦綵菜蔡采釵冊柵策"], +["f4a1","責凄妻悽處倜刺剔尺慽戚拓擲斥滌瘠脊蹠陟隻仟千喘天川擅泉淺玔穿舛薦賤踐遷釧闡阡韆凸哲喆徹撤澈綴輟轍鐵僉尖沾添甛瞻簽籤詹諂堞妾帖捷牒疊睫諜貼輒廳晴淸聽菁請靑鯖切剃替涕滯締諦逮遞體初剿哨憔抄招梢"], +["f5a1","椒楚樵炒焦硝礁礎秒稍肖艸苕草蕉貂超酢醋醮促囑燭矗蜀觸寸忖村邨叢塚寵悤憁摠總聰蔥銃撮催崔最墜抽推椎楸樞湫皺秋芻萩諏趨追鄒酋醜錐錘鎚雛騶鰍丑畜祝竺筑築縮蓄蹙蹴軸逐春椿瑃出朮黜充忠沖蟲衝衷悴膵萃"], +["f6a1","贅取吹嘴娶就炊翠聚脆臭趣醉驟鷲側仄厠惻測層侈値嗤峙幟恥梔治淄熾痔痴癡稚穉緇緻置致蚩輜雉馳齒則勅飭親七柒漆侵寢枕沈浸琛砧針鍼蟄秤稱快他咤唾墮妥惰打拖朶楕舵陀馱駝倬卓啄坼度托拓擢晫柝濁濯琢琸託"], +["f7a1","鐸呑嘆坦彈憚歎灘炭綻誕奪脫探眈耽貪塔搭榻宕帑湯糖蕩兌台太怠態殆汰泰笞胎苔跆邰颱宅擇澤撑攄兎吐土討慟桶洞痛筒統通堆槌腿褪退頹偸套妬投透鬪慝特闖坡婆巴把播擺杷波派爬琶破罷芭跛頗判坂板版瓣販辦鈑"], +["f8a1","阪八叭捌佩唄悖敗沛浿牌狽稗覇貝彭澎烹膨愎便偏扁片篇編翩遍鞭騙貶坪平枰萍評吠嬖幣廢弊斃肺蔽閉陛佈包匍匏咆哺圃布怖抛抱捕暴泡浦疱砲胞脯苞葡蒲袍褒逋鋪飽鮑幅暴曝瀑爆輻俵剽彪慓杓標漂瓢票表豹飇飄驃"], +["f9a1","品稟楓諷豊風馮彼披疲皮被避陂匹弼必泌珌畢疋筆苾馝乏逼下何厦夏廈昰河瑕荷蝦賀遐霞鰕壑學虐謔鶴寒恨悍旱汗漢澣瀚罕翰閑閒限韓割轄函含咸啣喊檻涵緘艦銜陷鹹合哈盒蛤閤闔陜亢伉姮嫦巷恒抗杭桁沆港缸肛航"], +["faa1","行降項亥偕咳垓奚孩害懈楷海瀣蟹解該諧邂駭骸劾核倖幸杏荇行享向嚮珦鄕響餉饗香噓墟虛許憲櫶獻軒歇險驗奕爀赫革俔峴弦懸晛泫炫玄玹現眩睍絃絢縣舷衒見賢鉉顯孑穴血頁嫌俠協夾峽挾浹狹脅脇莢鋏頰亨兄刑型"], +["fba1","形泂滎瀅灐炯熒珩瑩荊螢衡逈邢鎣馨兮彗惠慧暳蕙蹊醯鞋乎互呼壕壺好岵弧戶扈昊晧毫浩淏湖滸澔濠濩灝狐琥瑚瓠皓祜糊縞胡芦葫蒿虎號蝴護豪鎬頀顥惑或酷婚昏混渾琿魂忽惚笏哄弘汞泓洪烘紅虹訌鴻化和嬅樺火畵"], +["fca1","禍禾花華話譁貨靴廓擴攫確碻穫丸喚奐宦幻患換歡晥桓渙煥環紈還驩鰥活滑猾豁闊凰幌徨恍惶愰慌晃晄榥況湟滉潢煌璜皇篁簧荒蝗遑隍黃匯回廻徊恢悔懷晦會檜淮澮灰獪繪膾茴蛔誨賄劃獲宖橫鐄哮嚆孝效斅曉梟涍淆"], +["fda1","爻肴酵驍侯候厚后吼喉嗅帿後朽煦珝逅勛勳塤壎焄熏燻薰訓暈薨喧暄煊萱卉喙毁彙徽揮暉煇諱輝麾休携烋畦虧恤譎鷸兇凶匈洶胸黑昕欣炘痕吃屹紇訖欠欽歆吸恰洽翕興僖凞喜噫囍姬嬉希憙憘戱晞曦熙熹熺犧禧稀羲詰"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp950.json 
b/node_modules/iconv-lite/encodings/tables/cp950.json new file mode 100644 index 0000000..d8bc871 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp950.json @@ -0,0 +1,177 @@ +[ +["0","\u0000",127], +["a140"," ,、。.‧;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗|–︱—︳╴︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚"], +["a1a1","﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅¯ ̄_ˍ﹉﹊﹍﹎﹋﹌﹟﹠﹡+-×÷±√<>=≦≧≠∞≒≡﹢",4,"~∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂⊕⊙↑↓←→↖↗↙↘∥∣/"], +["a240","\∕﹨$¥〒¢£%@℃℉﹩﹪﹫㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁",7,"▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭"], +["a2a1","╮╰╯═╞╪╡◢◣◥◤╱╲╳0",9,"Ⅰ",9,"〡",8,"十卄卅A",25,"a",21], +["a340","wxyzΑ",16,"Σ",6,"α",16,"σ",6,"ㄅ",10], +["a3a1","ㄐ",25,"˙ˉˊˇˋ"], +["a3e1","€"], +["a440","一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才"], +["a4a1","丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙"], +["a540","世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外"], +["a5a1","央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全"], +["a640","共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年"], +["a6a1","式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣"], +["a740","作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍"], +["a7a1","均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠"], +["a840","杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒"], +["a8a1","芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵"], +["a940","咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居"], +["a9a1","屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊"], +["aa40","昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠"], +["aaa1","炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附"], +["ab40","陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品"], +["aba1","哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷"], +["ac40","拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗"], +["aca1","活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄"], +["ad40","耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥"], +["ada1","迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪"], +["ae40","哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙"], +["aea1","恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓"], +["af40","浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷"], +["afa1","砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃"], +["b040","虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡"], +["b0a1","陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀"], +["b140","娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽"], +["b1a1","情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺"], +["b240","毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶"], +["b2a1","瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓莉莠荷荻荼"], +["b340","莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途"], 
+["b3a1","部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠"], +["b440","婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍"], +["b4a1","插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋"], +["b540","溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘"], +["b5a1","窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁"], +["b640","詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑"], +["b6a1","間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼"], +["b740","媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業"], +["b7a1","楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督"], +["b840","睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫"], +["b8a1","腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊"], +["b940","辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴"], +["b9a1","飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇"], +["ba40","愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢"], +["baa1","滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬"], +["bb40","罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤"], +["bba1","說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜"], +["bc40","劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂"], +["bca1","慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃"], +["bd40","瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯"], +["bda1","翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞"], +["be40","輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉"], +["bea1","鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡"], +["bf40","濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊"], +["bfa1","縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚"], +["c040","錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇"], +["c0a1","嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬"], +["c140","瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪"], +["c1a1","薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁"], +["c240","駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘"], +["c2a1","癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦"], +["c340","鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸"], +["c3a1","獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類"], +["c440","願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼"], +["c4a1","纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴"], +["c540","護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬"], +["c5a1","禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒"], +["c640","讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲"], 
+["c940","乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕"], +["c9a1","氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋"], +["ca40","汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘"], +["caa1","吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇"], +["cb40","杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓"], +["cba1","芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢"], +["cc40","坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋"], +["cca1","怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲"], +["cd40","泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺"], +["cda1","矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏"], +["ce40","哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛"], +["cea1","峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺"], +["cf40","柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂"], +["cfa1","洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀"], +["d040","穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪"], +["d0a1","苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱"], +["d140","唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧"], +["d1a1","恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤"], +["d240","毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸"], +["d2a1","牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐"], +["d340","笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢"], +["d3a1","荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐"], +["d440","酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅"], +["d4a1","唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏"], +["d540","崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟"], +["d5a1","捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉"], +["d640","淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏"], +["d6a1","痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟"], +["d740","耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷"], +["d7a1","蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪"], +["d840","釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷"], +["d8a1","堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔"], +["d940","惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒"], +["d9a1","晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞"], +["da40","湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖"], +["daa1","琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥"], +["db40","罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳"], +["dba1","菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺"], 
+["dc40","軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈"], +["dca1","隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆"], +["dd40","媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤"], +["dda1","搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼"], +["de40","毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓"], +["dea1","煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓"], +["df40","稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯"], +["dfa1","腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤"], +["e040","觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿"], +["e0a1","遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠"], +["e140","凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠"], +["e1a1","寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉"], +["e240","榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊"], +["e2a1","漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓"], +["e340","禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞"], +["e3a1","耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻"], +["e440","裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍"], +["e4a1","銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘"], +["e540","噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉"], +["e5a1","憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒"], +["e640","澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙"], +["e6a1","獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟"], +["e740","膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢"], +["e7a1","蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧"], +["e840","踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓"], +["e8a1","銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮"], +["e940","噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺"], +["e9a1","憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸"], +["ea40","澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙"], +["eaa1","瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘"], +["eb40","蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠"], +["eba1","諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌"], +["ec40","錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕"], +["eca1","魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎"], +["ed40","檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶"], +["eda1","瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞"], +["ee40","蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞"], +["eea1","謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜"], 
+["ef40","鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰"], +["efa1","鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶"], +["f040","璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒"], +["f0a1","臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧"], +["f140","蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪"], +["f1a1","鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰"], +["f240","徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛"], +["f2a1","礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕"], +["f340","譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦"], +["f3a1","鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲"], +["f440","嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩"], +["f4a1","禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿"], +["f540","鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛"], +["f5a1","鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥"], +["f640","蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺"], +["f6a1","騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚"], +["f740","糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊"], +["f7a1","驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾"], +["f840","讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏"], +["f8a1","齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚"], +["f940","纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊"], +["f9a1","龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘碁銹裏墻恒粧嫺╔╦╗╠╬╣╚╩╝╒╤╕╞╪╡╘╧╛╓╥╖╟╫╢╙╨╜║═╭╮╰╯▓"] +] diff --git a/node_modules/iconv-lite/encodings/tables/eucjp.json b/node_modules/iconv-lite/encodings/tables/eucjp.json new file mode 100644 index 0000000..4fa61ca --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/eucjp.json @@ -0,0 +1,182 @@ +[ +["0","\u0000",127], +["8ea1","。",62], +["a1a1"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇"], +["a2a1","◆□■△▲▽▼※〒→←↑↓〓"], +["a2ba","∈∋⊆⊇⊂⊃∪∩"], +["a2ca","∧∨¬⇒⇔∀∃"], +["a2dc","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["a2f2","ʼn♯♭♪†‡¶"], +["a2fe","◯"], +["a3b0","0",9], +["a3c1","A",25], +["a3e1","a",25], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a8a1","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["ada1","①",19,"Ⅰ",9], +["adc0","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["addf","㍻〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["b0a1","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["b1a1","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応"], +["b2a1","押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["b3a1","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱"], +["b4a1","粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], 
+["b5a1","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京"], +["b6a1","供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["b7a1","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲"], +["b8a1","検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["b9a1","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込"], +["baa1","此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["bba1","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時"], +["bca1","次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["bda1","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償"], +["bea1","勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["bfa1","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾"], +["c0a1","澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["c1a1","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎"], +["c2a1","臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["c3a1","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵"], +["c4a1","帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["c5a1","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到"], +["c6a1","董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["c7a1","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦"], +["c8a1","函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["c9a1","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服"], +["caa1","福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["cba1","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満"], +["cca1","漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["cda1","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃"], +["cea1","痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["cfa1","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["d0a1","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["d1a1","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨"], +["d2a1","辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["d3a1","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉"], +["d4a1","圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["d5a1","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓"], 
+["d6a1","屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["d7a1","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚"], +["d8a1","悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["d9a1","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼"], +["daa1","據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["dba1","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍"], +["dca1","棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["dda1","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾"], +["dea1","沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["dfa1","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼"], +["e0a1","燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], +["e1a1","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰"], +["e2a1","癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e3a1","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐"], +["e4a1","筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], +["e5a1","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺"], +["e6a1","罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e7a1","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙"], +["e8a1","茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e9a1","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙"], +["eaa1","蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["eba1","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫"], +["eca1","譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], +["eda1","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸"], +["eea1","遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["efa1","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞"], +["f0a1","陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["f1a1","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷"], +["f2a1","髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["f3a1","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠"], +["f4a1","堯槇遙瑤凜熙"], +["f9a1","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德"], +["faa1","忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], 
+["fba1","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚"], +["fca1","釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["fcf1","ⅰ",9,"¬¦'""], +["8fa2af","˘ˇ¸˙˝¯˛˚~΄΅"], +["8fa2c2","¡¦¿"], +["8fa2eb","ºª©®™¤№"], +["8fa6e1","ΆΈΉΊΪ"], +["8fa6e7","Ό"], +["8fa6e9","ΎΫ"], +["8fa6ec","Ώ"], +["8fa6f1","άέήίϊΐόςύϋΰώ"], +["8fa7c2","Ђ",10,"ЎЏ"], +["8fa7f2","ђ",10,"ўџ"], +["8fa9a1","ÆĐ"], +["8fa9a4","Ħ"], +["8fa9a6","IJ"], +["8fa9a8","ŁĿ"], +["8fa9ab","ŊØŒ"], +["8fa9af","ŦÞ"], +["8fa9c1","æđðħıijĸłŀʼnŋøœßŧþ"], +["8faaa1","ÁÀÄÂĂǍĀĄÅÃĆĈČÇĊĎÉÈËÊĚĖĒĘ"], +["8faaba","ĜĞĢĠĤÍÌÏÎǏİĪĮĨĴĶĹĽĻŃŇŅÑÓÒÖÔǑŐŌÕŔŘŖŚŜŠŞŤŢÚÙÜÛŬǓŰŪŲŮŨǗǛǙǕŴÝŸŶŹŽŻ"], +["8faba1","áàäâăǎāąåãćĉčçċďéèëêěėēęǵĝğ"], +["8fabbd","ġĥíìïîǐ"], +["8fabc5","īįĩĵķĺľļńňņñóòöôǒőōõŕřŗśŝšşťţúùüûŭǔűūųůũǘǜǚǖŵýÿŷźžż"], +["8fb0a1","丂丄丅丌丒丟丣两丨丫丮丯丰丵乀乁乄乇乑乚乜乣乨乩乴乵乹乿亍亖亗亝亯亹仃仐仚仛仠仡仢仨仯仱仳仵份仾仿伀伂伃伈伋伌伒伕伖众伙伮伱你伳伵伷伹伻伾佀佂佈佉佋佌佒佔佖佘佟佣佪佬佮佱佷佸佹佺佽佾侁侂侄"], +["8fb1a1","侅侉侊侌侎侐侒侓侔侗侙侚侞侟侲侷侹侻侼侽侾俀俁俅俆俈俉俋俌俍俏俒俜俠俢俰俲俼俽俿倀倁倄倇倊倌倎倐倓倗倘倛倜倝倞倢倧倮倰倲倳倵偀偁偂偅偆偊偌偎偑偒偓偗偙偟偠偢偣偦偧偪偭偰偱倻傁傃傄傆傊傎傏傐"], +["8fb2a1","傒傓傔傖傛傜傞",4,"傪傯傰傹傺傽僀僃僄僇僌僎僐僓僔僘僜僝僟僢僤僦僨僩僯僱僶僺僾儃儆儇儈儋儌儍儎僲儐儗儙儛儜儝儞儣儧儨儬儭儯儱儳儴儵儸儹兂兊兏兓兕兗兘兟兤兦兾冃冄冋冎冘冝冡冣冭冸冺冼冾冿凂"], +["8fb3a1","凈减凑凒凓凕凘凞凢凥凮凲凳凴凷刁刂刅划刓刕刖刘刢刨刱刲刵刼剅剉剕剗剘剚剜剟剠剡剦剮剷剸剹劀劂劅劊劌劓劕劖劗劘劚劜劤劥劦劧劯劰劶劷劸劺劻劽勀勄勆勈勌勏勑勔勖勛勜勡勥勨勩勪勬勰勱勴勶勷匀匃匊匋"], +["8fb4a1","匌匑匓匘匛匜匞匟匥匧匨匩匫匬匭匰匲匵匼匽匾卂卌卋卙卛卡卣卥卬卭卲卹卾厃厇厈厎厓厔厙厝厡厤厪厫厯厲厴厵厷厸厺厽叀叅叏叒叓叕叚叝叞叠另叧叵吂吓吚吡吧吨吪启吱吴吵呃呄呇呍呏呞呢呤呦呧呩呫呭呮呴呿"], +["8fb5a1","咁咃咅咈咉咍咑咕咖咜咟咡咦咧咩咪咭咮咱咷咹咺咻咿哆哊响哎哠哪哬哯哶哼哾哿唀唁唅唈唉唌唍唎唕唪唫唲唵唶唻唼唽啁啇啉啊啍啐啑啘啚啛啞啠啡啤啦啿喁喂喆喈喎喏喑喒喓喔喗喣喤喭喲喿嗁嗃嗆嗉嗋嗌嗎嗑嗒"], +["8fb6a1","嗓嗗嗘嗛嗞嗢嗩嗶嗿嘅嘈嘊嘍",5,"嘙嘬嘰嘳嘵嘷嘹嘻嘼嘽嘿噀噁噃噄噆噉噋噍噏噔噞噠噡噢噣噦噩噭噯噱噲噵嚄嚅嚈嚋嚌嚕嚙嚚嚝嚞嚟嚦嚧嚨嚩嚫嚬嚭嚱嚳嚷嚾囅囉囊囋囏囐囌囍囙囜囝囟囡囤",4,"囱囫园"], +["8fb7a1","囶囷圁圂圇圊圌圑圕圚圛圝圠圢圣圤圥圩圪圬圮圯圳圴圽圾圿坅坆坌坍坒坢坥坧坨坫坭",4,"坳坴坵坷坹坺坻坼坾垁垃垌垔垗垙垚垜垝垞垟垡垕垧垨垩垬垸垽埇埈埌埏埕埝埞埤埦埧埩埭埰埵埶埸埽埾埿堃堄堈堉埡"], +["8fb8a1","堌堍堛堞堟堠堦堧堭堲堹堿塉塌塍塏塐塕塟塡塤塧塨塸塼塿墀墁墇墈墉墊墌墍墏墐墔墖墝墠墡墢墦墩墱墲壄墼壂壈壍壎壐壒壔壖壚壝壡壢壩壳夅夆夋夌夒夓夔虁夝夡夣夤夨夯夰夳夵夶夿奃奆奒奓奙奛奝奞奟奡奣奫奭"], +["8fb9a1","奯奲奵奶她奻奼妋妌妎妒妕妗妟妤妧妭妮妯妰妳妷妺妼姁姃姄姈姊姍姒姝姞姟姣姤姧姮姯姱姲姴姷娀娄娌娍娎娒娓娞娣娤娧娨娪娭娰婄婅婇婈婌婐婕婞婣婥婧婭婷婺婻婾媋媐媓媖媙媜媞媟媠媢媧媬媱媲媳媵媸媺媻媿"], +["8fbaa1","嫄嫆嫈嫏嫚嫜嫠嫥嫪嫮嫵嫶嫽嬀嬁嬈嬗嬴嬙嬛嬝嬡嬥嬭嬸孁孋孌孒孖孞孨孮孯孼孽孾孿宁宄宆宊宎宐宑宓宔宖宨宩宬宭宯宱宲宷宺宼寀寁寍寏寖",4,"寠寯寱寴寽尌尗尞尟尣尦尩尫尬尮尰尲尵尶屙屚屜屢屣屧屨屩"], +["8fbba1","屭屰屴屵屺屻屼屽岇岈岊岏岒岝岟岠岢岣岦岪岲岴岵岺峉峋峒峝峗峮峱峲峴崁崆崍崒崫崣崤崦崧崱崴崹崽崿嵂嵃嵆嵈嵕嵑嵙嵊嵟嵠嵡嵢嵤嵪嵭嵰嵹嵺嵾嵿嶁嶃嶈嶊嶒嶓嶔嶕嶙嶛嶟嶠嶧嶫嶰嶴嶸嶹巃巇巋巐巎巘巙巠巤"], +["8fbca1","巩巸巹帀帇帍帒帔帕帘帟帠帮帨帲帵帾幋幐幉幑幖幘幛幜幞幨幪",4,"幰庀庋庎庢庤庥庨庪庬庱庳庽庾庿廆廌廋廎廑廒廔廕廜廞廥廫异弆弇弈弎弙弜弝弡弢弣弤弨弫弬弮弰弴弶弻弽弿彀彄彅彇彍彐彔彘彛彠彣彤彧"], +["8fbda1","彯彲彴彵彸彺彽彾徉徍徏徖徜徝徢徧徫徤徬徯徰徱徸忄忇忈忉忋忐",4,"忞忡忢忨忩忪忬忭忮忯忲忳忶忺忼怇怊怍怓怔怗怘怚怟怤怭怳怵恀恇恈恉恌恑恔恖恗恝恡恧恱恾恿悂悆悈悊悎悑悓悕悘悝悞悢悤悥您悰悱悷"], +["8fbea1","悻悾惂惄惈惉惊惋惎惏惔惕惙惛惝惞惢惥惲惵惸惼惽愂愇愊愌愐",4,"愖愗愙愜愞愢愪愫愰愱愵愶愷愹慁慅慆慉慞慠慬慲慸慻慼慿憀憁憃憄憋憍憒憓憗憘憜憝憟憠憥憨憪憭憸憹憼懀懁懂懎懏懕懜懝懞懟懡懢懧懩懥"], +["8fbfa1","懬懭懯戁戃戄戇戓戕戜戠戢戣戧戩戫戹戽扂扃扄扆扌扐扑扒扔扖扚扜扤扭扯扳扺扽抍抎抏抐抦抨抳抶抷抺抾抿拄拎拕拖拚拪拲拴拼拽挃挄挊挋挍挐挓挖挘挩挪挭挵挶挹挼捁捂捃捄捆捊捋捎捒捓捔捘捛捥捦捬捭捱捴捵"], +["8fc0a1","捸捼捽捿掂掄掇掊掐掔掕掙掚掞掤掦掭掮掯掽揁揅揈揎揑揓揔揕揜揠揥揪揬揲揳揵揸揹搉搊搐搒搔搘搞搠搢搤搥搩搪搯搰搵搽搿摋摏摑摒摓摔摚摛摜摝摟摠摡摣摭摳摴摻摽撅撇撏撐撑撘撙撛撝撟撡撣撦撨撬撳撽撾撿"], +["8fc1a1","擄擉擊擋擌擎擐擑擕擗擤擥擩擪擭擰擵擷擻擿攁攄攈攉攊攏攓攔攖攙攛攞攟攢攦攩攮攱攺攼攽敃敇敉敐敒敔敟敠敧敫敺敽斁斅斊斒斕斘斝斠斣斦斮斲斳斴斿旂旈旉旎旐旔旖旘旟旰旲旴旵旹旾旿昀昄昈昉昍昑昒昕昖昝"], +["8fc2a1","昞昡昢昣昤昦昩昪昫昬昮昰昱昳昹昷晀晅晆晊晌晑晎晗晘晙晛晜晠晡曻晪晫晬晾晳晵晿晷晸晹晻暀晼暋暌暍暐暒暙暚暛暜暟暠暤暭暱暲暵暻暿曀曂曃曈曌曎曏曔曛曟曨曫曬曮曺朅朇朎朓朙朜朠朢朳朾杅杇杈杌杔杕杝"], +["8fc3a1","杦杬杮杴杶杻极构枎枏枑枓枖枘枙枛枰枱枲枵枻枼枽柹柀柂柃柅柈柉柒柗柙柜柡柦柰柲柶柷桒栔栙栝栟栨栧栬栭栯栰栱栳栻栿桄桅桊桌桕桗桘桛桫桮",4,"桵桹桺桻桼梂梄梆梈梖梘梚梜梡梣梥梩梪梮梲梻棅棈棌棏"], +["8fc4a1","棐棑棓棖棙棜棝棥棨棪棫棬棭棰棱棵棶棻棼棽椆椉椊椐椑椓椖椗椱椳椵椸椻楂楅楉楎楗楛楣楤楥楦楨楩楬楰楱楲楺楻楿榀榍榒榖榘榡榥榦榨榫榭榯榷榸榺榼槅槈槑槖槗槢槥槮槯槱槳槵槾樀樁樃樏樑樕樚樝樠樤樨樰樲"], +["8fc5a1","樴樷樻樾樿橅橆橉橊橎橐橑橒橕橖橛橤橧橪橱橳橾檁檃檆檇檉檋檑檛檝檞檟檥檫檯檰檱檴檽檾檿櫆櫉櫈櫌櫐櫔櫕櫖櫜櫝櫤櫧櫬櫰櫱櫲櫼櫽欂欃欆欇欉欏欐欑欗欛欞欤欨欫欬欯欵欶欻欿歆歊歍歒歖歘歝歠歧歫歮歰歵歽"], +["8fc6a1","歾殂殅殗殛殟殠殢殣殨殩殬殭殮殰殸殹殽殾毃毄毉毌毖毚毡毣毦毧毮毱毷毹毿氂氄氅氉氍氎氐氒氙氟氦氧氨氬氮氳氵氶氺氻氿汊汋汍汏汒汔汙汛汜汫汭汯汴汶汸汹汻沅沆沇沉沔沕沗沘沜沟沰沲沴泂泆泍泏泐泑泒泔泖"], +["8fc7a1","泚泜泠泧泩泫泬泮泲泴洄洇洊洎洏洑洓洚洦洧洨汧洮洯洱洹洼洿浗浞浟浡浥浧浯浰浼涂涇涑涒涔涖涗涘涪涬涴涷涹涽涿淄淈淊淎淏淖淛淝淟淠淢淥淩淯淰淴淶淼渀渄渞渢渧渲渶渹渻渼湄湅湈湉湋湏湑湒湓湔湗湜湝湞"], 
+["8fc8a1","湢湣湨湳湻湽溍溓溙溠溧溭溮溱溳溻溿滀滁滃滇滈滊滍滎滏滫滭滮滹滻滽漄漈漊漌漍漖漘漚漛漦漩漪漯漰漳漶漻漼漭潏潑潒潓潗潙潚潝潞潡潢潨潬潽潾澃澇澈澋澌澍澐澒澓澔澖澚澟澠澥澦澧澨澮澯澰澵澶澼濅濇濈濊"], +["8fc9a1","濚濞濨濩濰濵濹濼濽瀀瀅瀆瀇瀍瀗瀠瀣瀯瀴瀷瀹瀼灃灄灈灉灊灋灔灕灝灞灎灤灥灬灮灵灶灾炁炅炆炔",4,"炛炤炫炰炱炴炷烊烑烓烔烕烖烘烜烤烺焃",4,"焋焌焏焞焠焫焭焯焰焱焸煁煅煆煇煊煋煐煒煗煚煜煞煠"], +["8fcaa1","煨煹熀熅熇熌熒熚熛熠熢熯熰熲熳熺熿燀燁燄燋燌燓燖燙燚燜燸燾爀爇爈爉爓爗爚爝爟爤爫爯爴爸爹牁牂牃牅牎牏牐牓牕牖牚牜牞牠牣牨牫牮牯牱牷牸牻牼牿犄犉犍犎犓犛犨犭犮犱犴犾狁狇狉狌狕狖狘狟狥狳狴狺狻"], +["8fcba1","狾猂猄猅猇猋猍猒猓猘猙猞猢猤猧猨猬猱猲猵猺猻猽獃獍獐獒獖獘獝獞獟獠獦獧獩獫獬獮獯獱獷獹獼玀玁玃玅玆玎玐玓玕玗玘玜玞玟玠玢玥玦玪玫玭玵玷玹玼玽玿珅珆珉珋珌珏珒珓珖珙珝珡珣珦珧珩珴珵珷珹珺珻珽"], +["8fcca1","珿琀琁琄琇琊琑琚琛琤琦琨",9,"琹瑀瑃瑄瑆瑇瑋瑍瑑瑒瑗瑝瑢瑦瑧瑨瑫瑭瑮瑱瑲璀璁璅璆璇璉璏璐璑璒璘璙璚璜璟璠璡璣璦璨璩璪璫璮璯璱璲璵璹璻璿瓈瓉瓌瓐瓓瓘瓚瓛瓞瓟瓤瓨瓪瓫瓯瓴瓺瓻瓼瓿甆"], +["8fcda1","甒甖甗甠甡甤甧甩甪甯甶甹甽甾甿畀畃畇畈畎畐畒畗畞畟畡畯畱畹",5,"疁疅疐疒疓疕疙疜疢疤疴疺疿痀痁痄痆痌痎痏痗痜痟痠痡痤痧痬痮痯痱痹瘀瘂瘃瘄瘇瘈瘊瘌瘏瘒瘓瘕瘖瘙瘛瘜瘝瘞瘣瘥瘦瘩瘭瘲瘳瘵瘸瘹"], +["8fcea1","瘺瘼癊癀癁癃癄癅癉癋癕癙癟癤癥癭癮癯癱癴皁皅皌皍皕皛皜皝皟皠皢",6,"皪皭皽盁盅盉盋盌盎盔盙盠盦盨盬盰盱盶盹盼眀眆眊眎眒眔眕眗眙眚眜眢眨眭眮眯眴眵眶眹眽眾睂睅睆睊睍睎睏睒睖睗睜睞睟睠睢"], +["8fcfa1","睤睧睪睬睰睲睳睴睺睽瞀瞄瞌瞍瞔瞕瞖瞚瞟瞢瞧瞪瞮瞯瞱瞵瞾矃矉矑矒矕矙矞矟矠矤矦矪矬矰矱矴矸矻砅砆砉砍砎砑砝砡砢砣砭砮砰砵砷硃硄硇硈硌硎硒硜硞硠硡硣硤硨硪确硺硾碊碏碔碘碡碝碞碟碤碨碬碭碰碱碲碳"], +["8fd0a1","碻碽碿磇磈磉磌磎磒磓磕磖磤磛磟磠磡磦磪磲磳礀磶磷磺磻磿礆礌礐礚礜礞礟礠礥礧礩礭礱礴礵礻礽礿祄祅祆祊祋祏祑祔祘祛祜祧祩祫祲祹祻祼祾禋禌禑禓禔禕禖禘禛禜禡禨禩禫禯禱禴禸离秂秄秇秈秊秏秔秖秚秝秞"], +["8fd1a1","秠秢秥秪秫秭秱秸秼稂稃稇稉稊稌稑稕稛稞稡稧稫稭稯稰稴稵稸稹稺穄穅穇穈穌穕穖穙穜穝穟穠穥穧穪穭穵穸穾窀窂窅窆窊窋窐窑窔窞窠窣窬窳窵窹窻窼竆竉竌竎竑竛竨竩竫竬竱竴竻竽竾笇笔笟笣笧笩笪笫笭笮笯笰"], +["8fd2a1","笱笴笽笿筀筁筇筎筕筠筤筦筩筪筭筯筲筳筷箄箉箎箐箑箖箛箞箠箥箬箯箰箲箵箶箺箻箼箽篂篅篈篊篔篖篗篙篚篛篨篪篲篴篵篸篹篺篼篾簁簂簃簄簆簉簋簌簎簏簙簛簠簥簦簨簬簱簳簴簶簹簺籆籊籕籑籒籓籙",5], +["8fd3a1","籡籣籧籩籭籮籰籲籹籼籽粆粇粏粔粞粠粦粰粶粷粺粻粼粿糄糇糈糉糍糏糓糔糕糗糙糚糝糦糩糫糵紃紇紈紉紏紑紒紓紖紝紞紣紦紪紭紱紼紽紾絀絁絇絈絍絑絓絗絙絚絜絝絥絧絪絰絸絺絻絿綁綂綃綅綆綈綋綌綍綑綖綗綝"], +["8fd4a1","綞綦綧綪綳綶綷綹緂",4,"緌緍緎緗緙縀緢緥緦緪緫緭緱緵緶緹緺縈縐縑縕縗縜縝縠縧縨縬縭縯縳縶縿繄繅繇繎繐繒繘繟繡繢繥繫繮繯繳繸繾纁纆纇纊纍纑纕纘纚纝纞缼缻缽缾缿罃罄罇罏罒罓罛罜罝罡罣罤罥罦罭"], +["8fd5a1","罱罽罾罿羀羋羍羏羐羑羖羗羜羡羢羦羪羭羴羼羿翀翃翈翎翏翛翟翣翥翨翬翮翯翲翺翽翾翿耇耈耊耍耎耏耑耓耔耖耝耞耟耠耤耦耬耮耰耴耵耷耹耺耼耾聀聄聠聤聦聭聱聵肁肈肎肜肞肦肧肫肸肹胈胍胏胒胔胕胗胘胠胭胮"], +["8fd6a1","胰胲胳胶胹胺胾脃脋脖脗脘脜脞脠脤脧脬脰脵脺脼腅腇腊腌腒腗腠腡腧腨腩腭腯腷膁膐膄膅膆膋膎膖膘膛膞膢膮膲膴膻臋臃臅臊臎臏臕臗臛臝臞臡臤臫臬臰臱臲臵臶臸臹臽臿舀舃舏舓舔舙舚舝舡舢舨舲舴舺艃艄艅艆"], +["8fd7a1","艋艎艏艑艖艜艠艣艧艭艴艻艽艿芀芁芃芄芇芉芊芎芑芔芖芘芚芛芠芡芣芤芧芨芩芪芮芰芲芴芷芺芼芾芿苆苐苕苚苠苢苤苨苪苭苯苶苷苽苾茀茁茇茈茊茋荔茛茝茞茟茡茢茬茭茮茰茳茷茺茼茽荂荃荄荇荍荎荑荕荖荗荰荸"], +["8fd8a1","荽荿莀莂莄莆莍莒莔莕莘莙莛莜莝莦莧莩莬莾莿菀菇菉菏菐菑菔菝荓菨菪菶菸菹菼萁萆萊萏萑萕萙莭萯萹葅葇葈葊葍葏葑葒葖葘葙葚葜葠葤葥葧葪葰葳葴葶葸葼葽蒁蒅蒒蒓蒕蒞蒦蒨蒩蒪蒯蒱蒴蒺蒽蒾蓀蓂蓇蓈蓌蓏蓓"], +["8fd9a1","蓜蓧蓪蓯蓰蓱蓲蓷蔲蓺蓻蓽蔂蔃蔇蔌蔎蔐蔜蔞蔢蔣蔤蔥蔧蔪蔫蔯蔳蔴蔶蔿蕆蕏",4,"蕖蕙蕜",6,"蕤蕫蕯蕹蕺蕻蕽蕿薁薅薆薉薋薌薏薓薘薝薟薠薢薥薧薴薶薷薸薼薽薾薿藂藇藊藋藎薭藘藚藟藠藦藨藭藳藶藼"], +["8fdaa1","藿蘀蘄蘅蘍蘎蘐蘑蘒蘘蘙蘛蘞蘡蘧蘩蘶蘸蘺蘼蘽虀虂虆虒虓虖虗虘虙虝虠",4,"虩虬虯虵虶虷虺蚍蚑蚖蚘蚚蚜蚡蚦蚧蚨蚭蚱蚳蚴蚵蚷蚸蚹蚿蛀蛁蛃蛅蛑蛒蛕蛗蛚蛜蛠蛣蛥蛧蚈蛺蛼蛽蜄蜅蜇蜋蜎蜏蜐蜓蜔蜙蜞蜟蜡蜣"], +["8fdba1","蜨蜮蜯蜱蜲蜹蜺蜼蜽蜾蝀蝃蝅蝍蝘蝝蝡蝤蝥蝯蝱蝲蝻螃",6,"螋螌螐螓螕螗螘螙螞螠螣螧螬螭螮螱螵螾螿蟁蟈蟉蟊蟎蟕蟖蟙蟚蟜蟟蟢蟣蟤蟪蟫蟭蟱蟳蟸蟺蟿蠁蠃蠆蠉蠊蠋蠐蠙蠒蠓蠔蠘蠚蠛蠜蠞蠟蠨蠭蠮蠰蠲蠵"], +["8fdca1","蠺蠼衁衃衅衈衉衊衋衎衑衕衖衘衚衜衟衠衤衩衱衹衻袀袘袚袛袜袟袠袨袪袺袽袾裀裊",4,"裑裒裓裛裞裧裯裰裱裵裷褁褆褍褎褏褕褖褘褙褚褜褠褦褧褨褰褱褲褵褹褺褾襀襂襅襆襉襏襒襗襚襛襜襡襢襣襫襮襰襳襵襺"], +["8fdda1","襻襼襽覉覍覐覔覕覛覜覟覠覥覰覴覵覶覷覼觔",4,"觥觩觫觭觱觳觶觹觽觿訄訅訇訏訑訒訔訕訞訠訢訤訦訫訬訯訵訷訽訾詀詃詅詇詉詍詎詓詖詗詘詜詝詡詥詧詵詶詷詹詺詻詾詿誀誃誆誋誏誐誒誖誗誙誟誧誩誮誯誳"], +["8fdea1","誶誷誻誾諃諆諈諉諊諑諓諔諕諗諝諟諬諰諴諵諶諼諿謅謆謋謑謜謞謟謊謭謰謷謼譂",4,"譈譒譓譔譙譍譞譣譭譶譸譹譼譾讁讄讅讋讍讏讔讕讜讞讟谸谹谽谾豅豇豉豋豏豑豓豔豗豘豛豝豙豣豤豦豨豩豭豳豵豶豻豾貆"], +["8fdfa1","貇貋貐貒貓貙貛貜貤貹貺賅賆賉賋賏賖賕賙賝賡賨賬賯賰賲賵賷賸賾賿贁贃贉贒贗贛赥赩赬赮赿趂趄趈趍趐趑趕趞趟趠趦趫趬趯趲趵趷趹趻跀跅跆跇跈跊跎跑跔跕跗跙跤跥跧跬跰趼跱跲跴跽踁踄踅踆踋踑踔踖踠踡踢"], +["8fe0a1","踣踦踧踱踳踶踷踸踹踽蹀蹁蹋蹍蹎蹏蹔蹛蹜蹝蹞蹡蹢蹩蹬蹭蹯蹰蹱蹹蹺蹻躂躃躉躐躒躕躚躛躝躞躢躧躩躭躮躳躵躺躻軀軁軃軄軇軏軑軔軜軨軮軰軱軷軹軺軭輀輂輇輈輏輐輖輗輘輞輠輡輣輥輧輨輬輭輮輴輵輶輷輺轀轁"], +["8fe1a1","轃轇轏轑",4,"轘轝轞轥辝辠辡辤辥辦辵辶辸达迀迁迆迊迋迍运迒迓迕迠迣迤迨迮迱迵迶迻迾适逄逈逌逘逛逨逩逯逪逬逭逳逴逷逿遃遄遌遛遝遢遦遧遬遰遴遹邅邈邋邌邎邐邕邗邘邙邛邠邡邢邥邰邲邳邴邶邽郌邾郃"], +["8fe2a1","郄郅郇郈郕郗郘郙郜郝郟郥郒郶郫郯郰郴郾郿鄀鄄鄅鄆鄈鄍鄐鄔鄖鄗鄘鄚鄜鄞鄠鄥鄢鄣鄧鄩鄮鄯鄱鄴鄶鄷鄹鄺鄼鄽酃酇酈酏酓酗酙酚酛酡酤酧酭酴酹酺酻醁醃醅醆醊醎醑醓醔醕醘醞醡醦醨醬醭醮醰醱醲醳醶醻醼醽醿"], +["8fe3a1","釂釃釅釓釔釗釙釚釞釤釥釩釪釬",5,"釷釹釻釽鈀鈁鈄鈅鈆鈇鈉鈊鈌鈐鈒鈓鈖鈘鈜鈝鈣鈤鈥鈦鈨鈮鈯鈰鈳鈵鈶鈸鈹鈺鈼鈾鉀鉂鉃鉆鉇鉊鉍鉎鉏鉑鉘鉙鉜鉝鉠鉡鉥鉧鉨鉩鉮鉯鉰鉵",4,"鉻鉼鉽鉿銈銉銊銍銎銒銗"], +["8fe4a1","銙銟銠銤銥銧銨銫銯銲銶銸銺銻銼銽銿",4,"鋅鋆鋇鋈鋋鋌鋍鋎鋐鋓鋕鋗鋘鋙鋜鋝鋟鋠鋡鋣鋥鋧鋨鋬鋮鋰鋹鋻鋿錀錂錈錍錑錔錕錜錝錞錟錡錤錥錧錩錪錳錴錶錷鍇鍈鍉鍐鍑鍒鍕鍗鍘鍚鍞鍤鍥鍧鍩鍪鍭鍯鍰鍱鍳鍴鍶"], +["8fe5a1","鍺鍽鍿鎀鎁鎂鎈鎊鎋鎍鎏鎒鎕鎘鎛鎞鎡鎣鎤鎦鎨鎫鎴鎵鎶鎺鎩鏁鏄鏅鏆鏇鏉",4,"鏓鏙鏜鏞鏟鏢鏦鏧鏹鏷鏸鏺鏻鏽鐁鐂鐄鐈鐉鐍鐎鐏鐕鐖鐗鐟鐮鐯鐱鐲鐳鐴鐻鐿鐽鑃鑅鑈鑊鑌鑕鑙鑜鑟鑡鑣鑨鑫鑭鑮鑯鑱鑲钄钃镸镹"], +["8fe6a1","镾閄閈閌閍閎閝閞閟閡閦閩閫閬閴閶閺閽閿闆闈闉闋闐闑闒闓闙闚闝闞闟闠闤闦阝阞阢阤阥阦阬阱阳阷阸阹阺阼阽陁陒陔陖陗陘陡陮陴陻陼陾陿隁隂隃隄隉隑隖隚隝隟隤隥隦隩隮隯隳隺雊雒嶲雘雚雝雞雟雩雯雱雺霂"], +["8fe7a1","霃霅霉霚霛霝霡霢霣霨霱霳靁靃靊靎靏靕靗靘靚靛靣靧靪靮靳靶靷靸靻靽靿鞀鞉鞕鞖鞗鞙鞚鞞鞟鞢鞬鞮鞱鞲鞵鞶鞸鞹鞺鞼鞾鞿韁韄韅韇韉韊韌韍韎韐韑韔韗韘韙韝韞韠韛韡韤韯韱韴韷韸韺頇頊頙頍頎頔頖頜頞頠頣頦"], 
+["8fe8a1","頫頮頯頰頲頳頵頥頾顄顇顊顑顒顓顖顗顙顚顢顣顥顦顪顬颫颭颮颰颴颷颸颺颻颿飂飅飈飌飡飣飥飦飧飪飳飶餂餇餈餑餕餖餗餚餛餜餟餢餦餧餫餱",4,"餹餺餻餼饀饁饆饇饈饍饎饔饘饙饛饜饞饟饠馛馝馟馦馰馱馲馵"], +["8fe9a1","馹馺馽馿駃駉駓駔駙駚駜駞駧駪駫駬駰駴駵駹駽駾騂騃騄騋騌騐騑騖騞騠騢騣騤騧騭騮騳騵騶騸驇驁驄驊驋驌驎驑驔驖驝骪骬骮骯骲骴骵骶骹骻骾骿髁髃髆髈髎髐髒髕髖髗髛髜髠髤髥髧髩髬髲髳髵髹髺髽髿",4], +["8feaa1","鬄鬅鬈鬉鬋鬌鬍鬎鬐鬒鬖鬙鬛鬜鬠鬦鬫鬭鬳鬴鬵鬷鬹鬺鬽魈魋魌魕魖魗魛魞魡魣魥魦魨魪",4,"魳魵魷魸魹魿鮀鮄鮅鮆鮇鮉鮊鮋鮍鮏鮐鮔鮚鮝鮞鮦鮧鮩鮬鮰鮱鮲鮷鮸鮻鮼鮾鮿鯁鯇鯈鯎鯐鯗鯘鯝鯟鯥鯧鯪鯫鯯鯳鯷鯸"], +["8feba1","鯹鯺鯽鯿鰀鰂鰋鰏鰑鰖鰘鰙鰚鰜鰞鰢鰣鰦",4,"鰱鰵鰶鰷鰽鱁鱃鱄鱅鱉鱊鱎鱏鱐鱓鱔鱖鱘鱛鱝鱞鱟鱣鱩鱪鱜鱫鱨鱮鱰鱲鱵鱷鱻鳦鳲鳷鳹鴋鴂鴑鴗鴘鴜鴝鴞鴯鴰鴲鴳鴴鴺鴼鵅鴽鵂鵃鵇鵊鵓鵔鵟鵣鵢鵥鵩鵪鵫鵰鵶鵷鵻"], +["8feca1","鵼鵾鶃鶄鶆鶊鶍鶎鶒鶓鶕鶖鶗鶘鶡鶪鶬鶮鶱鶵鶹鶼鶿鷃鷇鷉鷊鷔鷕鷖鷗鷚鷞鷟鷠鷥鷧鷩鷫鷮鷰鷳鷴鷾鸊鸂鸇鸎鸐鸑鸒鸕鸖鸙鸜鸝鹺鹻鹼麀麂麃麄麅麇麎麏麖麘麛麞麤麨麬麮麯麰麳麴麵黆黈黋黕黟黤黧黬黭黮黰黱黲黵"], +["8feda1","黸黿鼂鼃鼉鼏鼐鼑鼒鼔鼖鼗鼙鼚鼛鼟鼢鼦鼪鼫鼯鼱鼲鼴鼷鼹鼺鼼鼽鼿齁齃",4,"齓齕齖齗齘齚齝齞齨齩齭",4,"齳齵齺齽龏龐龑龒龔龖龗龞龡龢龣龥"] +] diff --git a/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json b/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json new file mode 100644 index 0000000..85c6934 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json @@ -0,0 +1 @@ +{"uChars":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],"gbChars":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]} \ No newline at end of file diff --git a/node_modules/iconv-lite/encodings/tables/gbk-added.json b/node_modules/iconv-lite/encodings/tables/gbk-added.json new file mode 100644 index 0000000..8abfa9f --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/gbk-added.json @@ -0,0 +1,55 @@ +[ +["a140","",62], +["a180","",32], +["a240","",62], +["a280","",32], +["a2ab","",5], +["a2e3","€"], +["a2ef",""], 
+["a2fd",""], +["a340","",62], +["a380","",31," "], +["a440","",62], +["a480","",32], +["a4f4","",10], +["a540","",62], +["a580","",32], +["a5f7","",7], +["a640","",62], +["a680","",32], +["a6b9","",7], +["a6d9","",6], +["a6ec",""], +["a6f3",""], +["a6f6","",8], +["a740","",62], +["a780","",32], +["a7c2","",14], +["a7f2","",12], +["a896","",10], +["a8bc",""], +["a8bf","ǹ"], +["a8c1",""], +["a8ea","",20], +["a958",""], +["a95b",""], +["a95d",""], +["a989","〾⿰",11], +["a997","",12], +["a9f0","",14], +["aaa1","",93], +["aba1","",93], +["aca1","",93], +["ada1","",93], +["aea1","",93], +["afa1","",93], +["d7fa","",4], +["f8a1","",93], +["f9a1","",93], +["faa1","",93], +["fba1","",93], +["fca1","",93], +["fda1","",93], +["fe50","⺁⺄㑳㑇⺈⺋㖞㘚㘎⺌⺗㥮㤘㧏㧟㩳㧐㭎㱮㳠⺧⺪䁖䅟⺮䌷⺳⺶⺷䎱䎬⺻䏝䓖䙡䙌"], +["fe80","䜣䜩䝼䞍⻊䥇䥺䥽䦂䦃䦅䦆䦟䦛䦷䦶䲣䲟䲠䲡䱷䲢䴓",6,"䶮",93] +] diff --git a/node_modules/iconv-lite/encodings/tables/shiftjis.json b/node_modules/iconv-lite/encodings/tables/shiftjis.json new file mode 100644 index 0000000..5a3a43c --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/shiftjis.json @@ -0,0 +1,125 @@ +[ +["0","\u0000",128], +["a1","。",62], +["8140"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×"], +["8180","÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓"], +["81b8","∈∋⊆⊇⊂⊃∪∩"], +["81c8","∧∨¬⇒⇔∀∃"], +["81da","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["81f0","ʼn♯♭♪†‡¶"], +["81fc","◯"], +["824f","0",9], +["8260","A",25], +["8281","a",25], +["829f","ぁ",82], +["8340","ァ",62], +["8380","ム",22], +["839f","Α",16,"Σ",6], +["83bf","α",16,"σ",6], +["8440","А",5,"ЁЖ",25], +["8470","а",5,"ёж",7], +["8480","о",17], +["849f","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["8740","①",19,"Ⅰ",9], +["875f","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["877e","㍻"], +["8780","〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["889f","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["8940","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円"], +["8980","園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["8a40","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫"], +["8a80","橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], +["8b40","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救"], +["8b80","朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["8c40","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨"], +["8c80","劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["8d40","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降"], +["8d80","項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["8e40","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止"], +["8e80","死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["8f40","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳"], +["8f80","準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["9040","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨"], 
+["9080","逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["9140","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻"], +["9180","操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["9240","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄"], +["9280","逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["9340","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬"], +["9380","凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["9440","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅"], +["9480","楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["9540","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷"], +["9580","斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["9640","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆"], +["9680","摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["9740","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲"], +["9780","沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["9840","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["989f","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["9940","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭"], +["9980","凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["9a40","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸"], +["9a80","噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["9b40","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀"], +["9b80","它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["9c40","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠"], +["9c80","怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["9d40","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫"], +["9d80","捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["9e40","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎"], +["9e80","梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["9f40","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯"], +["9f80","麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["e040","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝"], +["e080","烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], 
+["e140","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿"], +["e180","痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e240","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰"], +["e280","窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], +["e340","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷"], +["e380","縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e440","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤"], +["e480","艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e540","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬"], +["e580","蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["e640","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧"], +["e680","諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], +["e740","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜"], +["e780","轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["e840","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙"], +["e880","閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["e940","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃"], +["e980","騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["ea40","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯"], +["ea80","黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙"], +["ed40","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏"], +["ed80","塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], +["ee40","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙"], +["ee80","蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["eeef","ⅰ",9,"¬¦'""], +["f040","",62], +["f080","",124], +["f140","",62], +["f180","",124], +["f240","",62], +["f280","",124], +["f340","",62], +["f380","",124], +["f440","",62], +["f480","",124], +["f540","",62], +["f580","",124], +["f640","",62], +["f680","",124], +["f740","",62], +["f780","",124], +["f840","",62], +["f880","",124], +["f940",""], +["fa40","ⅰ",9,"Ⅰ",9,"¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊"], +["fa80","兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯"], +["fb40","涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神"], +["fb80","祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙"], +["fc40","髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"] +] diff --git a/node_modules/iconv-lite/encodings/utf16.js b/node_modules/iconv-lite/encodings/utf16.js new file mode 100644 index 0000000..399f551 --- /dev/null +++ b/node_modules/iconv-lite/encodings/utf16.js @@ -0,0 +1,174 @@ +"use strict" + 
+// == UTF16-BE codec. ========================================================== + +exports.utf16be = Utf16BECodec; +function Utf16BECodec() { +} + +Utf16BECodec.prototype.encoder = Utf16BEEncoder; +Utf16BECodec.prototype.decoder = Utf16BEDecoder; +Utf16BECodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf16BEEncoder() { +} + +Utf16BEEncoder.prototype.write = function(str) { + var buf = new Buffer(str, 'ucs2'); + for (var i = 0; i < buf.length; i += 2) { + var tmp = buf[i]; buf[i] = buf[i+1]; buf[i+1] = tmp; + } + return buf; +} + +Utf16BEEncoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf16BEDecoder() { + this.overflowByte = -1; +} + +Utf16BEDecoder.prototype.write = function(buf) { + if (buf.length == 0) + return ''; + + var buf2 = new Buffer(buf.length + 1), + i = 0, j = 0; + + if (this.overflowByte !== -1) { + buf2[0] = buf[0]; + buf2[1] = this.overflowByte; + i = 1; j = 2; + } + + for (; i < buf.length-1; i += 2, j+= 2) { + buf2[j] = buf[i+1]; + buf2[j+1] = buf[i]; + } + + this.overflowByte = (i == buf.length-1) ? buf[buf.length-1] : -1; + + return buf2.slice(0, j).toString('ucs2'); +} + +Utf16BEDecoder.prototype.end = function() { +} + + +// == UTF-16 codec ============================================================= +// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic. +// Defaults to UTF-16LE, as it's prevalent and default in Node. +// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le +// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'}); + +// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false). + +exports.utf16 = Utf16Codec; +function Utf16Codec(codecOptions, iconv) { + this.iconv = iconv; +} + +Utf16Codec.prototype.encoder = Utf16Encoder; +Utf16Codec.prototype.decoder = Utf16Decoder; + + +// -- Encoding (pass-through) + +function Utf16Encoder(options, codec) { + options = options || {}; + if (options.addBOM === undefined) + options.addBOM = true; + this.encoder = codec.iconv.getEncoder('utf-16le', options); +} + +Utf16Encoder.prototype.write = function(str) { + return this.encoder.write(str); +} + +Utf16Encoder.prototype.end = function() { + return this.encoder.end(); +} + + +// -- Decoding + +function Utf16Decoder(options, codec) { + this.decoder = null; + this.initialBytes = []; + this.initialBytesLen = 0; + + this.options = options || {}; + this.iconv = codec.iconv; +} + +Utf16Decoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBytes.push(buf); + this.initialBytesLen += buf.length; + + if (this.initialBytesLen < 16) // We need more bytes to use space heuristic (see below) + return ''; + + // We have enough bytes -> detect endianness. + var buf = Buffer.concat(this.initialBytes), + encoding = detectEncoding(buf, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + this.initialBytes.length = this.initialBytesLen = 0; + } + + return this.decoder.write(buf); +} + +Utf16Decoder.prototype.end = function() { + if (!this.decoder) { + var buf = Buffer.concat(this.initialBytes), + encoding = detectEncoding(buf, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var res = this.decoder.write(buf), + trail = this.decoder.end(); + + return trail ? 
(res + trail) : res; + } + return this.decoder.end(); +} + +function detectEncoding(buf, defaultEncoding) { + var enc = defaultEncoding || 'utf-16le'; + + if (buf.length >= 2) { + // Check BOM. + if (buf[0] == 0xFE && buf[1] == 0xFF) // UTF-16BE BOM + enc = 'utf-16be'; + else if (buf[0] == 0xFF && buf[1] == 0xFE) // UTF-16LE BOM + enc = 'utf-16le'; + else { + // No BOM found. Try to deduce encoding from initial content. + // Most of the time, the content has ASCII chars (U+00**), but the opposite (U+**00) is uncommon. + // So, we count ASCII as if it was LE or BE, and decide from that. + var asciiCharsLE = 0, asciiCharsBE = 0, // Counts of chars in both positions + _len = Math.min(buf.length - (buf.length % 2), 64); // Len is always even. + + for (var i = 0; i < _len; i += 2) { + if (buf[i] === 0 && buf[i+1] !== 0) asciiCharsBE++; + if (buf[i] !== 0 && buf[i+1] === 0) asciiCharsLE++; + } + + if (asciiCharsBE > asciiCharsLE) + enc = 'utf-16be'; + else if (asciiCharsBE < asciiCharsLE) + enc = 'utf-16le'; + } + } + + return enc; +} + + diff --git a/node_modules/iconv-lite/encodings/utf7.js b/node_modules/iconv-lite/encodings/utf7.js new file mode 100644 index 0000000..bab5099 --- /dev/null +++ b/node_modules/iconv-lite/encodings/utf7.js @@ -0,0 +1,289 @@ +"use strict" + +// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152 +// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3 + +exports.utf7 = Utf7Codec; +exports.unicode11utf7 = 'utf7'; // Alias UNICODE-1-1-UTF-7 +function Utf7Codec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7Codec.prototype.encoder = Utf7Encoder; +Utf7Codec.prototype.decoder = Utf7Decoder; +Utf7Codec.prototype.bomAware = true; + + +// -- Encoding + +var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g; + +function Utf7Encoder(options, codec) { + this.iconv = codec.iconv; +} + +Utf7Encoder.prototype.write = function(str) { + // Naive implementation. + // Non-direct chars are encoded as "+-"; single "+" char is encoded as "+-". + return new Buffer(str.replace(nonDirectChars, function(chunk) { + return "+" + (chunk === '+' ? '' : + this.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '')) + + "-"; + }.bind(this))); +} + +Utf7Encoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf7Decoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64Regex = /[A-Za-z0-9\/+]/; +var base64Chars = []; +for (var i = 0; i < 256; i++) + base64Chars[i] = base64Regex.test(String.fromCharCode(i)); + +var plusChar = '+'.charCodeAt(0), + minusChar = '-'.charCodeAt(0), + andChar = '&'.charCodeAt(0); + +Utf7Decoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '+' + if (buf[i] == plusChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64Chars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) {// "+-" -> "+" + res += "+"; + } else { + var b64str = base64Accum + buf.slice(lastI, i).toString(); + res += this.iconv.decode(new Buffer(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus is absorbed after base64. 
+ i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + buf.slice(lastI).toString(); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. + b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(new Buffer(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7Decoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(new Buffer(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + +// UTF-7-IMAP codec. +// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3) +// Differences: +// * Base64 part is started by "&" instead of "+" +// * Direct characters are 0x20-0x7E, except "&" (0x26) +// * In Base64, "," is used instead of "/" +// * Base64 must not be used to represent direct characters. +// * No implicit shift back from Base64 (should always end with '-') +// * String must end in non-shifted position. +// * "-&" while in base64 is not allowed. + + +exports.utf7imap = Utf7IMAPCodec; +function Utf7IMAPCodec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder; +Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder; +Utf7IMAPCodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf7IMAPEncoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = new Buffer(6); + this.base64AccumIdx = 0; +} + +Utf7IMAPEncoder.prototype.write = function(str) { + var inBase64 = this.inBase64, + base64Accum = this.base64Accum, + base64AccumIdx = this.base64AccumIdx, + buf = new Buffer(str.length*5 + 10), bufIdx = 0; + + for (var i = 0; i < str.length; i++) { + var uChar = str.charCodeAt(i); + if (0x20 <= uChar && uChar <= 0x7E) { // Direct character or '&'. + if (inBase64) { + if (base64AccumIdx > 0) { + bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + inBase64 = false; + } + + if (!inBase64) { + buf[bufIdx++] = uChar; // Write direct character + + if (uChar === andChar) // Ampersand -> '&-' + buf[bufIdx++] = minusChar; + } + + } else { // Non-direct character + if (!inBase64) { + buf[bufIdx++] = andChar; // Write '&', then go to base64 mode. 
+ inBase64 = true; + } + if (inBase64) { + base64Accum[base64AccumIdx++] = uChar >> 8; + base64Accum[base64AccumIdx++] = uChar & 0xFF; + + if (base64AccumIdx == base64Accum.length) { + bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx); + base64AccumIdx = 0; + } + } + } + } + + this.inBase64 = inBase64; + this.base64AccumIdx = base64AccumIdx; + + return buf.slice(0, bufIdx); +} + +Utf7IMAPEncoder.prototype.end = function() { + var buf = new Buffer(10), bufIdx = 0; + if (this.inBase64) { + if (this.base64AccumIdx > 0) { + bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + this.base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + this.inBase64 = false; + } + + return buf.slice(0, bufIdx); +} + + +// -- Decoding + +function Utf7IMAPDecoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64IMAPChars = base64Chars.slice(); +base64IMAPChars[','.charCodeAt(0)] = true; + +Utf7IMAPDecoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + // It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end). + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '&' + if (buf[i] == andChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64IMAPChars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) { // "&-" -> "&" + res += "&"; + } else { + var b64str = base64Accum + buf.slice(lastI, i).toString().replace(/,/g, '/'); + res += this.iconv.decode(new Buffer(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus may be absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + buf.slice(lastI).toString().replace(/,/g, '/'); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. 
+ b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(new Buffer(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7IMAPDecoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(new Buffer(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + diff --git a/node_modules/iconv-lite/lib/bom-handling.js b/node_modules/iconv-lite/lib/bom-handling.js new file mode 100644 index 0000000..3f0ed93 --- /dev/null +++ b/node_modules/iconv-lite/lib/bom-handling.js @@ -0,0 +1,52 @@ +"use strict" + +var BOMChar = '\uFEFF'; + +exports.PrependBOM = PrependBOMWrapper +function PrependBOMWrapper(encoder, options) { + this.encoder = encoder; + this.addBOM = true; +} + +PrependBOMWrapper.prototype.write = function(str) { + if (this.addBOM) { + str = BOMChar + str; + this.addBOM = false; + } + + return this.encoder.write(str); +} + +PrependBOMWrapper.prototype.end = function() { + return this.encoder.end(); +} + + +//------------------------------------------------------------------------------ + +exports.StripBOM = StripBOMWrapper; +function StripBOMWrapper(decoder, options) { + this.decoder = decoder; + this.pass = false; + this.options = options || {}; +} + +StripBOMWrapper.prototype.write = function(buf) { + var res = this.decoder.write(buf); + if (this.pass || !res) + return res; + + if (res[0] === BOMChar) { + res = res.slice(1); + if (typeof this.options.stripBOM === 'function') + this.options.stripBOM(); + } + + this.pass = true; + return res; +} + +StripBOMWrapper.prototype.end = function() { + return this.decoder.end(); +} + diff --git a/node_modules/iconv-lite/lib/extend-node.js b/node_modules/iconv-lite/lib/extend-node.js new file mode 100644 index 0000000..1d8c953 --- /dev/null +++ b/node_modules/iconv-lite/lib/extend-node.js @@ -0,0 +1,214 @@ +"use strict" + +// == Extend Node primitives to use iconv-lite ================================= + +module.exports = function (iconv) { + var original = undefined; // Place to keep original methods. + + // Node authors rewrote Buffer internals to make it compatible with + // Uint8Array and we cannot patch key functions since then. 
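The flag assigned just below gates all of the monkey-patching in this file. A minimal sketch of how an application might consult it, assuming only the public iconv-lite API; the CP1251 byte values and variable names are sample data for illustration:

    var iconv = require('iconv-lite');
    var someBuffer = new Buffer([0xcf, 0xf0, 0xe8, 0xe2, 0xe5, 0xf2]); // 'Привет' in windows-1251
    var text;

    if (iconv.supportsNodeEncodingsExtension) {
        iconv.extendNodeEncodings();
        // Buffer#toString/#write, Buffer.isEncoding/byteLength and Readable#setEncoding
        // now accept iconv-lite encoding names directly.
        text = someBuffer.toString('win1251');
    } else {
        // On newer Node the patch is refused, so fall back to the plain API.
        text = iconv.decode(someBuffer, 'win1251');
    }
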
+ iconv.supportsNodeEncodingsExtension = !(new Buffer(0) instanceof Uint8Array); + + iconv.extendNodeEncodings = function extendNodeEncodings() { + if (original) return; + original = {}; + + if (!iconv.supportsNodeEncodingsExtension) { + console.error("ACTION NEEDED: require('iconv-lite').extendNodeEncodings() is not supported in your version of Node"); + console.error("See more info at https://github.com/ashtuchkin/iconv-lite/wiki/Node-v4-compatibility"); + return; + } + + var nodeNativeEncodings = { + 'hex': true, 'utf8': true, 'utf-8': true, 'ascii': true, 'binary': true, + 'base64': true, 'ucs2': true, 'ucs-2': true, 'utf16le': true, 'utf-16le': true, + }; + + Buffer.isNativeEncoding = function(enc) { + return enc && nodeNativeEncodings[enc.toLowerCase()]; + } + + // -- SlowBuffer ----------------------------------------------------------- + var SlowBuffer = require('buffer').SlowBuffer; + + original.SlowBufferToString = SlowBuffer.prototype.toString; + SlowBuffer.prototype.toString = function(encoding, start, end) { + encoding = String(encoding || 'utf8').toLowerCase(); + + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.SlowBufferToString.call(this, encoding, start, end); + + // Otherwise, use our decoding method. + if (typeof start == 'undefined') start = 0; + if (typeof end == 'undefined') end = this.length; + return iconv.decode(this.slice(start, end), encoding); + } + + original.SlowBufferWrite = SlowBuffer.prototype.write; + SlowBuffer.prototype.write = function(string, offset, length, encoding) { + // Support both (string, offset, length, encoding) + // and the legacy (string, encoding, offset, length) + if (isFinite(offset)) { + if (!isFinite(length)) { + encoding = length; + length = undefined; + } + } else { // legacy + var swap = encoding; + encoding = offset; + offset = length; + length = swap; + } + + offset = +offset || 0; + var remaining = this.length - offset; + if (!length) { + length = remaining; + } else { + length = +length; + if (length > remaining) { + length = remaining; + } + } + encoding = String(encoding || 'utf8').toLowerCase(); + + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.SlowBufferWrite.call(this, string, offset, length, encoding); + + if (string.length > 0 && (length < 0 || offset < 0)) + throw new RangeError('attempt to write beyond buffer bounds'); + + // Otherwise, use our encoding method. + var buf = iconv.encode(string, encoding); + if (buf.length < length) length = buf.length; + buf.copy(this, offset, 0, length); + return length; + } + + // -- Buffer --------------------------------------------------------------- + + original.BufferIsEncoding = Buffer.isEncoding; + Buffer.isEncoding = function(encoding) { + return Buffer.isNativeEncoding(encoding) || iconv.encodingExists(encoding); + } + + original.BufferByteLength = Buffer.byteLength; + Buffer.byteLength = SlowBuffer.byteLength = function(str, encoding) { + encoding = String(encoding || 'utf8').toLowerCase(); + + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferByteLength.call(this, str, encoding); + + // Slow, I know, but we don't have a better way yet. 
+ return iconv.encode(str, encoding).length; + } + + original.BufferToString = Buffer.prototype.toString; + Buffer.prototype.toString = function(encoding, start, end) { + encoding = String(encoding || 'utf8').toLowerCase(); + + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferToString.call(this, encoding, start, end); + + // Otherwise, use our decoding method. + if (typeof start == 'undefined') start = 0; + if (typeof end == 'undefined') end = this.length; + return iconv.decode(this.slice(start, end), encoding); + } + + original.BufferWrite = Buffer.prototype.write; + Buffer.prototype.write = function(string, offset, length, encoding) { + var _offset = offset, _length = length, _encoding = encoding; + // Support both (string, offset, length, encoding) + // and the legacy (string, encoding, offset, length) + if (isFinite(offset)) { + if (!isFinite(length)) { + encoding = length; + length = undefined; + } + } else { // legacy + var swap = encoding; + encoding = offset; + offset = length; + length = swap; + } + + encoding = String(encoding || 'utf8').toLowerCase(); + + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferWrite.call(this, string, _offset, _length, _encoding); + + offset = +offset || 0; + var remaining = this.length - offset; + if (!length) { + length = remaining; + } else { + length = +length; + if (length > remaining) { + length = remaining; + } + } + + if (string.length > 0 && (length < 0 || offset < 0)) + throw new RangeError('attempt to write beyond buffer bounds'); + + // Otherwise, use our encoding method. + var buf = iconv.encode(string, encoding); + if (buf.length < length) length = buf.length; + buf.copy(this, offset, 0, length); + return length; + + // TODO: Set _charsWritten. + } + + + // -- Readable ------------------------------------------------------------- + if (iconv.supportsStreams) { + var Readable = require('stream').Readable; + + original.ReadableSetEncoding = Readable.prototype.setEncoding; + Readable.prototype.setEncoding = function setEncoding(enc, options) { + // Use our own decoder, it has the same interface. + // We cannot use original function as it doesn't handle BOM-s. + this._readableState.decoder = iconv.getDecoder(enc, options); + this._readableState.encoding = enc; + } + + Readable.prototype.collect = iconv._collect; + } + } + + // Remove iconv-lite Node primitive extensions. 
+ iconv.undoExtendNodeEncodings = function undoExtendNodeEncodings() { + if (!iconv.supportsNodeEncodingsExtension) + return; + if (!original) + throw new Error("require('iconv-lite').undoExtendNodeEncodings(): Nothing to undo; extendNodeEncodings() is not called.") + + delete Buffer.isNativeEncoding; + + var SlowBuffer = require('buffer').SlowBuffer; + + SlowBuffer.prototype.toString = original.SlowBufferToString; + SlowBuffer.prototype.write = original.SlowBufferWrite; + + Buffer.isEncoding = original.BufferIsEncoding; + Buffer.byteLength = original.BufferByteLength; + Buffer.prototype.toString = original.BufferToString; + Buffer.prototype.write = original.BufferWrite; + + if (iconv.supportsStreams) { + var Readable = require('stream').Readable; + + Readable.prototype.setEncoding = original.ReadableSetEncoding; + delete Readable.prototype.collect; + } + + original = undefined; + } +} diff --git a/node_modules/iconv-lite/lib/index.js b/node_modules/iconv-lite/lib/index.js new file mode 100644 index 0000000..ac1403c --- /dev/null +++ b/node_modules/iconv-lite/lib/index.js @@ -0,0 +1,141 @@ +"use strict" + +var bomHandling = require('./bom-handling'), + iconv = module.exports; + +// All codecs and aliases are kept here, keyed by encoding name/alias. +// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`. +iconv.encodings = null; + +// Characters emitted in case of error. +iconv.defaultCharUnicode = '�'; +iconv.defaultCharSingleByte = '?'; + +// Public API. +iconv.encode = function encode(str, encoding, options) { + str = "" + (str || ""); // Ensure string. + + var encoder = iconv.getEncoder(encoding, options); + + var res = encoder.write(str); + var trail = encoder.end(); + + return (trail && trail.length > 0) ? Buffer.concat([res, trail]) : res; +} + +iconv.decode = function decode(buf, encoding, options) { + if (typeof buf === 'string') { + if (!iconv.skipDecodeWarning) { + console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding'); + iconv.skipDecodeWarning = true; + } + + buf = new Buffer("" + (buf || ""), "binary"); // Ensure buffer. + } + + var decoder = iconv.getDecoder(encoding, options); + + var res = decoder.write(buf); + var trail = decoder.end(); + + return trail ? (res + trail) : res; +} + +iconv.encodingExists = function encodingExists(enc) { + try { + iconv.getCodec(enc); + return true; + } catch (e) { + return false; + } +} + +// Legacy aliases to convert functions +iconv.toEncoding = iconv.encode; +iconv.fromEncoding = iconv.decode; + +// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache. +iconv._codecDataCache = {}; +iconv.getCodec = function getCodec(encoding) { + if (!iconv.encodings) + iconv.encodings = require("../encodings"); // Lazy load all encoding definitions. + + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + var enc = (''+encoding).toLowerCase().replace(/[^0-9a-z]|:\d{4}$/g, ""); + + // Traverse iconv.encodings to find actual codec. + var codecOptions = {}; + while (true) { + var codec = iconv._codecDataCache[enc]; + if (codec) + return codec; + + var codecDef = iconv.encodings[enc]; + + switch (typeof codecDef) { + case "string": // Direct alias to other encoding. + enc = codecDef; + break; + + case "object": // Alias with options. Can be layered. 
+ for (var key in codecDef) + codecOptions[key] = codecDef[key]; + + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + enc = codecDef.type; + break; + + case "function": // Codec itself. + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + // The codec function must load all tables and return object with .encoder and .decoder methods. + // It'll be called only once (for each different options object). + codec = new codecDef(codecOptions, iconv); + + iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later. + return codec; + + default: + throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')"); + } + } +} + +iconv.getEncoder = function getEncoder(encoding, options) { + var codec = iconv.getCodec(encoding), + encoder = new codec.encoder(options, codec); + + if (codec.bomAware && options && options.addBOM) + encoder = new bomHandling.PrependBOM(encoder, options); + + return encoder; +} + +iconv.getDecoder = function getDecoder(encoding, options) { + var codec = iconv.getCodec(encoding), + decoder = new codec.decoder(options, codec); + + if (codec.bomAware && !(options && options.stripBOM === false)) + decoder = new bomHandling.StripBOM(decoder, options); + + return decoder; +} + + +// Load extensions in Node. All of them are omitted in Browserify build via 'browser' field in package.json. +var nodeVer = typeof process !== 'undefined' && process.versions && process.versions.node; +if (nodeVer) { + + // Load streaming support in Node v0.10+ + var nodeVerArr = nodeVer.split(".").map(Number); + if (nodeVerArr[0] > 0 || nodeVerArr[1] >= 10) { + require("./streams")(iconv); + } + + // Load Node primitive extensions. + require("./extend-node")(iconv); +} + diff --git a/node_modules/iconv-lite/lib/streams.js b/node_modules/iconv-lite/lib/streams.js new file mode 100644 index 0000000..c95b26c --- /dev/null +++ b/node_modules/iconv-lite/lib/streams.js @@ -0,0 +1,120 @@ +"use strict" + +var Transform = require("stream").Transform; + + +// == Exports ================================================================== +module.exports = function(iconv) { + + // Additional Public API. + iconv.encodeStream = function encodeStream(encoding, options) { + return new IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options); + } + + iconv.decodeStream = function decodeStream(encoding, options) { + return new IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options); + } + + iconv.supportsStreams = true; + + + // Not published yet. + iconv.IconvLiteEncoderStream = IconvLiteEncoderStream; + iconv.IconvLiteDecoderStream = IconvLiteDecoderStream; + iconv._collect = IconvLiteDecoderStream.prototype.collect; +}; + + +// == Encoder stream ======================================================= +function IconvLiteEncoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.decodeStrings = false; // We accept only strings, so we don't need to decode them. 
+ Transform.call(this, options); +} + +IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteEncoderStream } +}); + +IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) { + if (typeof chunk != 'string') + return done(new Error("Iconv encoding stream needs strings as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res); + done(); + } + catch (e) { + done(e); + } +} + +IconvLiteEncoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res); + done(); + } + catch (e) { + done(e); + } +} + +IconvLiteEncoderStream.prototype.collect = function(cb) { + var chunks = []; + this.on('error', cb); + this.on('data', function(chunk) { chunks.push(chunk); }); + this.on('end', function() { + cb(null, Buffer.concat(chunks)); + }); + return this; +} + + +// == Decoder stream ======================================================= +function IconvLiteDecoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.encoding = this.encoding = 'utf8'; // We output strings. + Transform.call(this, options); +} + +IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteDecoderStream } +}); + +IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) { + if (!Buffer.isBuffer(chunk)) + return done(new Error("Iconv decoding stream needs buffers as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } +} + +IconvLiteDecoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } +} + +IconvLiteDecoderStream.prototype.collect = function(cb) { + var res = ''; + this.on('error', cb); + this.on('data', function(chunk) { res += chunk; }); + this.on('end', function() { + cb(null, res); + }); + return this; +} + diff --git a/node_modules/iconv-lite/package.json b/node_modules/iconv-lite/package.json new file mode 100644 index 0000000..826225e --- /dev/null +++ b/node_modules/iconv-lite/package.json @@ -0,0 +1,52 @@ +{ + "name": "iconv-lite", + "description": "Convert character encodings in pure javascript.", + "version": "0.4.13", + "license": "MIT", + + "keywords": ["iconv", "convert", "charset", "icu"], + "author": "Alexander Shtuchkin ", + "contributors": [ + "Jinwu Zhan (https://github.com/jenkinv)", + "Adamansky Anton (https://github.com/adamansky)", + "George Stagas (https://github.com/stagas)", + "Mike D Pilsbury (https://github.com/pekim)", + "Niggler (https://github.com/Niggler)", + "wychi (https://github.com/wychi)", + "David Kuo (https://github.com/david50407)", + "ChangZhuo Chen (https://github.com/czchen)", + "Lee Treveil (https://github.com/leetreveil)", + "Brian White (https://github.com/mscdex)", + "Mithgol (https://github.com/Mithgol)", + "Nazar Leush (https://github.com/nleush)" + ], + + "main": "./lib/index.js", + "homepage": "https://github.com/ashtuchkin/iconv-lite", + "bugs": "https://github.com/ashtuchkin/iconv-lite/issues", + "repository": { + "type": "git", + "url": "git://github.com/ashtuchkin/iconv-lite.git" + }, + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "coverage": "istanbul cover _mocha -- --grep .", + "coverage-open": "open coverage/lcov-report/index.html", + "test": "mocha --reporter 
spec --grep ." + }, + "browser": { + "./extend-node": false, + "./streams": false + }, + "devDependencies": { + "mocha": "*", + "request": "2.47", + "unorm": "*", + "errto": "*", + "async": "*", + "istanbul": "*", + "iconv": "2.1" + } +} diff --git a/node_modules/ieee754/.travis.yml b/node_modules/ieee754/.travis.yml new file mode 100644 index 0000000..6c45b34 --- /dev/null +++ b/node_modules/ieee754/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: +- 'node' +env: + global: + - secure: f3NrmOV/A7oACn47J1mkIpH8Sn/LINtluZvo/9pGo3Ss4+D2lyt7UawpedHtnYgU9WEyjPSi7pDWopUrIzusQ2trLYRJr8WAOEyHlgaepDyy4BW3ghGMKHMsS05kilYLP8nu1sRd6y1AcUYKw+kUrrSPanI7kViWVQ5d5DuwXO8= + - secure: a6teILh33z5fbGQbh5/EkFfAyXfa2fPJG1upy9K+jLAbG4WZxXD+YmXG9Tz33/2NJm6UplGfTJ8IQEXgxEfAFk3ao3xfKxzm3i64XxtroSlXIFNSiQKogxDfLEtWDoNNCodPHaV3ATEqxGJ5rkkUeU1+ROWW0sjG5JR26k8/Hfg= diff --git a/node_modules/ieee754/.zuul.yml b/node_modules/ieee754/.zuul.yml new file mode 100644 index 0000000..b5ba0c4 --- /dev/null +++ b/node_modules/ieee754/.zuul.yml @@ -0,0 +1,20 @@ +ui: tape +scripts: + - "./test/_polyfill.js" +browsers: + - name: chrome + version: latest + - name: firefox + version: latest + - name: safari + version: latest + - name: ie + version: 11 + - name: microsoftedge + version: latest + - name: opera + version: latest + - name: android + version: latest + - name: iphone + version: latest diff --git a/node_modules/ieee754/LICENSE b/node_modules/ieee754/LICENSE new file mode 100644 index 0000000..f37a2eb --- /dev/null +++ b/node_modules/ieee754/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2008, Fair Oaks Labs, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of Fair Oaks Labs, Inc. nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/ieee754/README.md b/node_modules/ieee754/README.md new file mode 100644 index 0000000..11f4d40 --- /dev/null +++ b/node_modules/ieee754/README.md @@ -0,0 +1,47 @@ +# ieee754 [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][npm-url] + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[travis-image]: https://img.shields.io/travis/feross/ieee754/master.svg +[travis-url]: https://travis-ci.org/feross/ieee754 +[npm-image]: https://img.shields.io/npm/v/ieee754.svg +[npm-url]: https://npmjs.org/package/ieee754 +[downloads-image]: https://img.shields.io/npm/dm/ieee754.svg +[saucelabs-image]: https://saucelabs.com/browser-matrix/ieee754.svg +[saucelabs-url]: https://saucelabs.com/u/ieee754 + +### Read/write IEEE754 floating point numbers from/to a Buffer or array-like object. + +## install + +``` +npm install ieee754 +``` + +## methods + +`var ieee754 = require('ieee754')` + +The `ieee754` object has the following functions: + +``` +ieee754.read = function (buffer, offset, isLE, mLen, nBytes) +ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) +``` + +The arguments mean the following: + +- buffer = the buffer +- offset = offset into the buffer +- value = value to set (only for `write`) +- isLe = is little endian? +- mLen = mantissa length +- nBytes = number of bytes + +## what is ieee754? + +The IEEE Standard for Floating-Point Arithmetic (IEEE 754) is a technical standard for floating-point computation. [Read more](http://en.wikipedia.org/wiki/IEEE_floating_point). + +## license + +BSD 3 Clause. Copyright (c) 2008, Fair Oaks Labs, Inc. diff --git a/node_modules/ieee754/index.js b/node_modules/ieee754/index.js new file mode 100644 index 0000000..95e190c --- /dev/null +++ b/node_modules/ieee754/index.js @@ -0,0 +1,84 @@ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = nBytes * 8 - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? -1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = nBytes * 8 - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 
1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = (value * c - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} diff --git a/node_modules/ieee754/package.json b/node_modules/ieee754/package.json new file mode 100644 index 0000000..681f0de --- /dev/null +++ b/node_modules/ieee754/package.json @@ -0,0 +1,37 @@ +{ + "name": "ieee754", + "description": "Read/write IEEE754 floating point numbers from/to a Buffer or array-like object", + "version": "1.1.8", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "contributors": [ + "Romain Beauxis " + ], + "devDependencies": { + "standard": "*", + "tape": "^4.0.0", + "zuul": "^3.0.0" + }, + "keywords": [ + "IEEE 754", + "buffer", + "convert", + "floating point", + "ieee754" + ], + "license": "BSD-3-Clause", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/ieee754.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "zuul -- test/*.js", + "test-browser-local": "zuul --local -- test/*.js", + "test-node": "tape test/*.js" + } +} diff --git a/node_modules/ieee754/test/basic.js b/node_modules/ieee754/test/basic.js new file mode 100644 index 0000000..58fae2b --- /dev/null +++ b/node_modules/ieee754/test/basic.js @@ -0,0 +1,23 @@ +var ieee754 = require('../') +var test = require('tape') + +var EPSILON = 0.00001 + +test('read float', function (t) { + var buf = new Buffer(4) + buf.writeFloatLE(42.42, 0) + var num = ieee754.read(buf, 0, true, 23, 4) + t.ok(Math.abs(num - 42.42) < EPSILON) + + t.end() +}) + +test('write float', function (t) { + var buf = new Buffer(4) + ieee754.write(buf, 42.42, 0, true, 23, 4) + + var num = buf.readFloatLE(0) + t.ok(Math.abs(num - 42.42) < EPSILON) + + t.end() +}) diff --git a/node_modules/ims-lti/.editorconfig b/node_modules/ims-lti/.editorconfig new file mode 100644 index 0000000..ce1f5bb --- /dev/null +++ b/node_modules/ims-lti/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +trim_trailing_whitespace = true +insert_final_newline = true +indent_style = space +indent_size = 2 diff --git a/node_modules/ims-lti/LICENSE b/node_modules/ims-lti/LICENSE new file mode 100644 index 0000000..a141f01 --- /dev/null +++ b/node_modules/ims-lti/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2014 Owen Smith +Original Copyright (c) 2013 OfficeHours + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission 
notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ims-lti/README.md b/node_modules/ims-lti/README.md new file mode 100644 index 0000000..9c664ca --- /dev/null +++ b/node_modules/ims-lti/README.md @@ -0,0 +1,154 @@ +# ims-lti + +[![Build Status](https://travis-ci.org/omsmith/ims-lti.svg?branch=master)](https://travis-ci.org/omsmith/ims-lti) [![Coverage Status](https://coveralls.io/repos/omsmith/ims-lti/badge.png)](https://coveralls.io/r/omsmith/ims-lti) + +This is a nodejs library used to help create Tool Providers for the +[IMS LTI standard](http://www.imsglobal.org/lti/index.html). Tool Consumer implementation is left as an exercise to the reader :P + +## Install +``` +npm install ims-lti --save +``` + +To require the library in your project: +```coffeescript +require 'ims-lti' +``` + +## Supported LTI Versions + +* 1.0 - [Implementation Guide](http://www.imsglobal.org/lti/blti/bltiv1p0/ltiBLTIimgv1p0.html) +* 1.1 - [Implementation Guide](http://www.imsglobal.org/LTI/v1p1/ltiIMGv1p1.html) +* 1.1.1 - [Implementation Guide](http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html) + +## Usage + +The LTI standard won't be covered here, but it would be good to familiarize yourself with the specs. [LTI documentation](http://www.imsglobal.org/lti/index.html) + +This library doesn't help you manage or distribute the consumer keys and secrets. The POST +parameters will contain the `oauth_consumer_key`, and your application should use that to look up the consumer secret from your own datastore. + +This library offers a few interfaces for managing the OAuth nonces to make sure the same nonce +isn't used twice with the same timestamp. [Read the LTI documentation on OAuth](http://www.imsglobal.org/LTI/v1p1pd/ltiIMGv1p1pd.html#_Toc309649687). They will be covered below. + +### Setting up a Tool Provider (TP) +As a TP, your app will receive a POST request with [LTI launch data](http://www.imsglobal.org/lti/v1p1pd/ltiIMGv1p1pd.html#_Toc309649684) that will be signed with OAuth using a key/secret that both the TP and Tool Consumer (TC) share. This is all covered in the [LTI security model](http://www.imsglobal.org/lti/v1p1pd/ltiIMGv1p1pd.html#_Toc309649685). + +Once you find the `oauth_consumer_secret` based on the `oauth_consumer_key` in the POST request, you can initialize a `Provider` object with them and a few other optional parameters: + +```coffeescript +lti = require 'ims-lti' + +provider = new lti.Provider consumer_key, consumer_secret, [nonce_store=MemoryStore], [signature_method=HMAC_SHA1] +``` + +Once the provider has been initialized, a request object can be validated against it. During validation, OAuth signatures are checked against the passed consumer_secret and signature_method (HMAC_SHA1 assumed). isValid returns true if the request is an LTI request and is properly signed.
+ +```coffeescript +provider.valid_request req, (err, isValid) -> + # isValid = Boolean | always false if err + # err = Error object with method describing error if err, null if no error +``` + +After validating the request, the provider object both stores the request's parameters (excluding oauth parameters) and provides convenience accessors to common ones including `provider.student`, `provider.ta`, `provider.username`, and more. All request data can be accessed through `provider.body` in an effort to namespace the values. + +Currently there is no implementation for posting back to the Tool Consumer, although there is a boolean accessor `provider.outcome_service` that will return true if the TC will accept a POSTback. + +### Nonce Stores + +`ims-lti` does not standardize the way in which the OAuth nonce/timestamp is to be stored. Since it is a crucial part of OAuth security, this library implements an interface to allow the user to implement their own nonce stores. + +#### Nonce Interface +All custom nonce stores should extend the NonceStore class and implement `isNew` and `setUsed`. +```coffeescript +class NonceStore + isNew: (nonce,timestamp,callback)=> + # Sets any new nonce to used + setUsed: (nonce,timestamp,callback)=> +``` + +Two nonce stores have been implemented for convenience. + +#### MemoryNonceStore +The default nonce store (if none is specified) is the Memory Nonce Store. This store simply keeps an array of nonce/timestamp keys. Timestamps must be valid within a 5-minute grace period. + +#### RedisNonceStore +A superior nonce store is the RedisNonceStore. This store requires a second input to the constructor, a Redis client. The Redis client is used to store the nonce keys and set them to expire within a set amount of time (default 5 minutes). A RedisNonceStore is initialized like: + +```coffeescript +RedisNonceStore = require '../lib/redis-nonce-store' +client = require('redis').createClient() +store = new RedisNonceStore('consumer_key', client) + +provider = new lti.Provider consumer_key, consumer_secret, store +``` + +### Outcomes Extension + +The outcomes feature is part of the LTI 1.1 specification and is new to ims-lti 1.0. All of the behind-the-scenes work necessary to get the ball rolling with it is already implemented for you; all you need to do is submit grades. + +```coffeescript +provider = new lti.Provider consumer_key, consumer_secret + +provider.valid_request req, (err, is_valid) -> + # Check if the request is valid and if the outcomes service exists. + if (!is_valid || !provider.outcome_service) return false + + # Check if the outcome service supports the result data extension using the + # text format. Available formats include text and url. + console.log provider.outcome_service.supports_result_data('text') + + # Replace accepts a value between 0 and 1.
+ provider.outcome_service.send_replace_result .5, (err, result) -> + console.log result # True or false + + provider.outcome_service.send_read_result (err, result) -> + console.log result # Value of the result already submitted from this embed + + provider.outcome_service.send_delete_result (err, result) -> + console.log result # True or false + + provider.outcome_service.send_replace_result_with_text .5, 'Hello, world!', (err, result) -> + console.log result # True or false + + provider.outcome_service.send_replace_result_with_url .5, 'https://google.com', (err, result) -> + console.log result # True or false +``` + +### Content Extension + +The content extension is an extension supported by most LMS platforms. It provides LTI providers a way to send content back to the LMS in the form of URLs, images, files, oembeds, iframes, and even LTI launch URLs. + +```coffeescript +provider = new lti.Provider consumer_key, consumer_secret + +provider.valid_request req, (err, is_valid) -> + # Check if the request is valid and if the content extension is loaded. + if (!is_valid || !provider.ext_content) return false + + provider.ext_content.has_return_type 'file' # Does the consumer support files + provider.ext_content.has_file_extension 'jpg' # Does the consumer support jpg + + # All send requests take a response object as the first parameter. How the + # response object is manipulated can be overridden by replacing + # lti.Extensions.Content.redirector with your own function that accepts two + # parameters, the response object and the url to redirect to. + provider.ext_content.send_file res, file_url, text, content_mime_type + + provider.ext_content.send_iframe res, iframe_url, title_attribute, width, height + + provider.ext_content.send_image_url res, image_url, text, width, height + + provider.ext_content.send_lti_launch_url res, launch_url, title_attribute, text + + provider.ext_content.send_oembed res, oembed_url, endpoint + + provider.ext_content.send_url res, hyperlink_url, text, title_attribute, target_attribute +``` + +## Running Tests +To run the test suite, first install the dependencies and then run the tests: +``` +npm install +make test +``` diff --git a/node_modules/ims-lti/lib/consumer.js b/node_modules/ims-lti/lib/consumer.js new file mode 100644 index 0000000..beddf24 --- /dev/null +++ b/node_modules/ims-lti/lib/consumer.js @@ -0,0 +1,17 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var Consumer, exports; + + Consumer = (function() { + function Consumer() { + console.error('Consumer Not Implemented'); + false; + } + + return Consumer; + + })(); + + exports = module.exports = Consumer; + +}).call(this); diff --git a/node_modules/ims-lti/lib/errors.js b/node_modules/ims-lti/lib/errors.js new file mode 100644 index 0000000..b032b60 --- /dev/null +++ b/node_modules/ims-lti/lib/errors.js @@ -0,0 +1,101 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var ConsumerError, ExtensionError, NonceError, OutcomeResponseError, ParameterError, SignatureError, StoreError, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + ConsumerError = (function(superClass) { + extend(ConsumerError, superClass); + + function ConsumerError(message) { + this.message = message; + ConsumerError.__super__.constructor.apply(this, arguments); + } + + return
ConsumerError; + + })(Error); + + ExtensionError = (function(superClass) { + extend(ExtensionError, superClass); + + function ExtensionError(message) { + this.message = message; + ExtensionError.__super__.constructor.apply(this, arguments); + } + + return ExtensionError; + + })(Error); + + StoreError = (function(superClass) { + extend(StoreError, superClass); + + function StoreError(message) { + this.message = message; + StoreError.__super__.constructor.apply(this, arguments); + } + + return StoreError; + + })(Error); + + ParameterError = (function(superClass) { + extend(ParameterError, superClass); + + function ParameterError(message) { + this.message = message; + ParameterError.__super__.constructor.apply(this, arguments); + } + + return ParameterError; + + })(Error); + + SignatureError = (function(superClass) { + extend(SignatureError, superClass); + + function SignatureError(message) { + this.message = message; + SignatureError.__super__.constructor.apply(this, arguments); + } + + return SignatureError; + + })(Error); + + NonceError = (function(superClass) { + extend(NonceError, superClass); + + function NonceError(message) { + this.message = message; + NonceError.__super__.constructor.apply(this, arguments); + } + + return NonceError; + + })(Error); + + OutcomeResponseError = (function(superClass) { + extend(OutcomeResponseError, superClass); + + function OutcomeResponseError(message) { + this.message = message; + OutcomeResponseError.__super__.constructor.apply(this, arguments); + } + + return OutcomeResponseError; + + })(Error); + + module.exports = { + ConsumerError: ConsumerError, + ExtensionError: ExtensionError, + StoreError: StoreError, + ParameterError: ParameterError, + SignatureError: SignatureError, + NonceError: NonceError, + OutcomeResponseError: OutcomeResponseError + }; + +}).call(this); diff --git a/node_modules/ims-lti/lib/extensions/content.js b/node_modules/ims-lti/lib/extensions/content.js new file mode 100644 index 0000000..f3d2d57 --- /dev/null +++ b/node_modules/ims-lti/lib/extensions/content.js @@ -0,0 +1,147 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var ContentExtension, FILE_RETURN_TYPE, IFRAME_RETURN_TYPE, IMAGE_URL_RETURN_TYPE, LTI_LAUNCH_URL_RETURN_TYPE, OEMBED_RETURN_TYPE, URL_RETURN_TYPE, errors, optional_url_property_setter, parse_url, url; + + url = require('url'); + + errors = require('../errors'); + + FILE_RETURN_TYPE = 'file'; + + IFRAME_RETURN_TYPE = 'iframe'; + + IMAGE_URL_RETURN_TYPE = 'image_url'; + + LTI_LAUNCH_URL_RETURN_TYPE = 'lti_launch_url'; + + OEMBED_RETURN_TYPE = 'oembed'; + + URL_RETURN_TYPE = 'url'; + + parse_url = function(raw_url) { + var return_url; + return_url = url.parse(raw_url, true); + delete return_url.path; + return return_url; + }; + + optional_url_property_setter = function(return_url) { + return function(property, value) { + if (typeof value !== 'undefined') { + return return_url.query[property] = value; + } + }; + }; + + ContentExtension = (function() { + function ContentExtension(params) { + this.return_types = params.ext_content_return_types.split(','); + this.return_url = params.ext_content_return_url || params.launch_presentation_return_url; + this.file_extensions = (params.ext_content_file_extensions && params.ext_content_file_extensions.split(',')) || []; + } + + ContentExtension.prototype.has_return_type = function(return_type) { + return this.return_types.indexOf(return_type) !== -1; + }; + + ContentExtension.prototype.has_file_extension = function(extension) { + return 
this.file_extensions.indexOf(extension) !== -1; + }; + + ContentExtension.prototype.send_file = function(res, file_url, text, content_type) { + var return_url, set_if_exists; + this._validate_return_type(FILE_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = FILE_RETURN_TYPE; + return_url.query.url = file_url; + return_url.query.text = text; + set_if_exists('content_type', content_type); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype.send_iframe = function(res, iframe_url, title, width, height) { + var return_url, set_if_exists; + this._validate_return_type(IFRAME_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = IFRAME_RETURN_TYPE; + return_url.query.url = iframe_url; + set_if_exists("title", title); + set_if_exists("width", width); + set_if_exists("height", height); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype.send_image_url = function(res, image_url, text, width, height) { + var return_url, set_if_exists; + this._validate_return_type(IMAGE_URL_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = IMAGE_URL_RETURN_TYPE; + return_url.query.url = image_url; + set_if_exists("text", text); + set_if_exists("width", width); + set_if_exists("height", height); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype.send_lti_launch_url = function(res, launch_url, title, text) { + var return_url, set_if_exists; + this._validate_return_type(LTI_LAUNCH_URL_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = LTI_LAUNCH_URL_RETURN_TYPE; + return_url.query.url = launch_url; + set_if_exists("title", title); + set_if_exists("text", text); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype.send_oembed = function(res, oembed_url, endpoint) { + var return_url, set_if_exists; + this._validate_return_type(OEMBED_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = OEMBED_RETURN_TYPE; + return_url.query.url = oembed_url; + set_if_exists("endpoint", endpoint); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype.send_url = function(res, hyperlink, text, title, target) { + var return_url, set_if_exists; + this._validate_return_type(URL_RETURN_TYPE); + return_url = parse_url(this.return_url, true); + set_if_exists = optional_url_property_setter(return_url); + return_url.query.return_type = URL_RETURN_TYPE; + return_url.query.url = hyperlink; + set_if_exists('text', text); + set_if_exists('title', title); + set_if_exists('target', target); + return exports.redirector(res, url.format(return_url)); + }; + + ContentExtension.prototype._validate_return_type = function(return_type) { + if (this.has_return_type(return_type) === false) { + throw new errors.ExtensionError('Invalid return type, valid options are ' + this.return_types.join(', ')); + } + }; + + return ContentExtension; + + })(); + + exports.init = function(provider) { + if (provider.body.ext_content_return_types) { + return 
provider.ext_content = new ContentExtension(provider.body); + } else { + return provider.ext_content = false; + } + }; + + exports.redirector = function(res, url) { + return res.redirect(303, url); + }; + +}).call(this); diff --git a/node_modules/ims-lti/lib/extensions/index.js b/node_modules/ims-lti/lib/extensions/index.js new file mode 100644 index 0000000..4fb3df2 --- /dev/null +++ b/node_modules/ims-lti/lib/extensions/index.js @@ -0,0 +1,7 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + exports.Content = require('./content'); + + exports.Outcomes = require('./outcomes'); + +}).call(this); diff --git a/node_modules/ims-lti/lib/extensions/outcomes.js b/node_modules/ims-lti/lib/extensions/outcomes.js new file mode 100644 index 0000000..c5e3620 --- /dev/null +++ b/node_modules/ims-lti/lib/extensions/outcomes.js @@ -0,0 +1,293 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var HMAC_SHA1, OutcomeDocument, OutcomeService, crypto, errors, http, https, navigateXml, url, utils, uuid, xml2js, xml_builder; + + crypto = require('crypto'); + + http = require('http'); + + https = require('https'); + + url = require('url'); + + uuid = require('node-uuid'); + + xml2js = require('xml2js'); + + xml_builder = require('xmlbuilder'); + + errors = require('../errors'); + + HMAC_SHA1 = require('../hmac-sha1'); + + utils = require('../utils'); + + navigateXml = function(xmlObject, path) { + var i, len, part, ref, ref1; + ref = path.split('.'); + for (i = 0, len = ref.length; i < len; i++) { + part = ref[i]; + xmlObject = xmlObject != null ? (ref1 = xmlObject[part]) != null ? ref1[0] : void 0 : void 0; + } + return xmlObject; + }; + + OutcomeDocument = (function() { + function OutcomeDocument(type, source_did, outcome_service) { + var xmldec; + this.outcome_service = outcome_service; + xmldec = { + version: '1.0', + encoding: 'UTF-8' + }; + this.doc = xml_builder.create('imsx_POXEnvelopeRequest', xmldec); + this.doc.attribute('xmlns', 'http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0'); + this.head = this.doc.ele('imsx_POXHeader').ele('imsx_POXRequestHeaderInfo'); + this.body = this.doc.ele('imsx_POXBody').ele(type + 'Request').ele('resultRecord'); + this.head.ele('imsx_version', 'V1.0'); + this.head.ele('imsx_messageIdentifier', uuid.v1()); + this.body.ele('sourcedGUID').ele('sourcedId', source_did); + } + + OutcomeDocument.prototype.add_score = function(score, language) { + var eScore; + if (typeof score !== 'number' || score < 0 || score > 1.0) { + throw new errors.ParameterError('Score must be a floating point number >= 0 and <= 1'); + } + eScore = this._result_ele().ele('resultScore'); + eScore.ele('language', language); + return eScore.ele('textString', score); + }; + + OutcomeDocument.prototype.add_text = function(text) { + return this._add_payload('text', text); + }; + + OutcomeDocument.prototype.add_url = function(url) { + return this._add_payload('url', url); + }; + + OutcomeDocument.prototype.finalize = function() { + return this.doc.end({ + pretty: true + }); + }; + + OutcomeDocument.prototype._result_ele = function() { + return this.result || (this.result = this.body.ele('result')); + }; + + OutcomeDocument.prototype._add_payload = function(type, value) { + if (this.has_payload) { + throw new errors.ExtensionError('Result data payload has already been set'); + } + if (!this.outcome_service.supports_result_data(type)) { + throw new errors.ExtensionError('Result data type is not supported'); + } + this._result_ele().ele('resultData').ele(type, value); + return 
this.has_payload = true; + }; + + return OutcomeDocument; + + })(); + + OutcomeService = (function() { + OutcomeService.prototype.REQUEST_REPLACE = 'replaceResult'; + + OutcomeService.prototype.REQUEST_READ = 'readResult'; + + OutcomeService.prototype.REQUEST_DELETE = 'deleteResult'; + + function OutcomeService(options) { + var parts; + if (options == null) { + options = {}; + } + this.consumer_key = options.consumer_key; + this.consumer_secret = options.consumer_secret; + this.service_url = options.service_url; + this.source_did = options.source_did; + this.result_data_types = options.result_data_types || []; + this.signer = options.signer || (new HMAC_SHA1()); + this.cert_authority = options.cert_authority || null; + this.language = options.language || 'en'; + parts = this.service_url_parts = url.parse(this.service_url, true); + this.service_url_oauth = parts.protocol + '//' + parts.host + parts.pathname; + } + + OutcomeService.prototype.send_replace_result = function(score, callback) { + var doc, err, error; + doc = new OutcomeDocument(this.REQUEST_REPLACE, this.source_did, this); + try { + doc.add_score(score, this.language); + return this._send_request(doc, callback); + } catch (error) { + err = error; + return callback(err, false); + } + }; + + OutcomeService.prototype.send_replace_result_with_text = function(score, text, callback) { + var doc, err, error; + doc = new OutcomeDocument(this.REQUEST_REPLACE, this.source_did, this); + try { + doc.add_score(score, this.language, doc.add_text(text)); + return this._send_request(doc, callback); + } catch (error) { + err = error; + return callback(err, false); + } + }; + + OutcomeService.prototype.send_replace_result_with_url = function(score, url, callback) { + var doc, err, error; + doc = new OutcomeDocument(this.REQUEST_REPLACE, this.source_did, this); + try { + doc.add_score(score, this.language, doc.add_url(url)); + return this._send_request(doc, callback); + } catch (error) { + err = error; + return callback(err, false); + } + }; + + OutcomeService.prototype.send_read_result = function(callback) { + var doc; + doc = new OutcomeDocument(this.REQUEST_READ, this.source_did, this); + return this._send_request(doc, (function(_this) { + return function(err, result, xml) { + var score; + if (err) { + return callback(err, result); + } + score = parseFloat(navigateXml(xml, 'imsx_POXBody.readResultResponse.result.resultScore.textString'), 10); + if (isNaN(score)) { + return callback(new errors.OutcomeResponseError('Invalid score response'), false); + } else { + return callback(null, score); + } + }; + })(this)); + }; + + OutcomeService.prototype.send_delete_result = function(callback) { + var doc; + doc = new OutcomeDocument(this.REQUEST_DELETE, this.source_did, this); + return this._send_request(doc, callback); + }; + + OutcomeService.prototype.supports_result_data = function(type) { + return this.result_data_types.length && (!type || this.result_data_types.indexOf(type) !== -1); + }; + + OutcomeService.prototype._send_request = function(doc, callback) { + var body, is_ssl, options, req, xml; + xml = doc.finalize(); + body = ''; + is_ssl = this.service_url_parts.protocol === 'https:'; + options = { + hostname: this.service_url_parts.hostname, + path: this.service_url_parts.path, + method: 'POST', + headers: this._build_headers(xml) + }; + if (this.cert_authority && is_ssl) { + options.ca = this.cert_authority; + } else { + options.agent = is_ssl ? 
https.globalAgent : http.globalAgent; + } + if (this.service_url_parts.port) { + options.port = this.service_url_parts.port; + } + req = (is_ssl ? https : http).request(options, (function(_this) { + return function(res) { + res.setEncoding('utf8'); + res.on('data', function(chunk) { + return body += chunk; + }); + return res.on('end', function() { + return _this._process_response(body, callback); + }); + }; + })(this)); + req.on('error', (function(_this) { + return function(err) { + return callback(err, false); + }; + })(this)); + req.write(xml); + return req.end(); + }; + + OutcomeService.prototype._build_headers = function(body) { + var headers, key, val; + headers = { + oauth_version: '1.0', + oauth_nonce: uuid.v4(), + oauth_timestamp: Math.round(Date.now() / 1000), + oauth_consumer_key: this.consumer_key, + oauth_body_hash: crypto.createHash('sha1').update(body).digest('base64'), + oauth_signature_method: 'HMAC-SHA1' + }; + headers.oauth_signature = this.signer.build_signature_raw(this.service_url_oauth, this.service_url_parts, 'POST', headers, this.consumer_secret); + return { + Authorization: 'OAuth realm="",' + ((function() { + var results; + results = []; + for (key in headers) { + val = headers[key]; + results.push(key + "=\"" + (utils.special_encode(val)) + "\""); + } + return results; + })()).join(','), + 'Content-Type': 'application/xml', + 'Content-Length': body.length + }; + }; + + OutcomeService.prototype._process_response = function(body, callback) { + return xml2js.parseString(body, { + trim: true + }, (function(_this) { + return function(err, result) { + var code, msg, response; + if (err) { + return callback(new errors.OutcomeResponseError('The server responsed with an invalid XML document'), false); + } + response = result != null ? 
result.imsx_POXEnvelopeResponse : void 0; + code = navigateXml(response, 'imsx_POXHeader.imsx_POXResponseHeaderInfo.imsx_statusInfo.imsx_codeMajor'); + if (code !== 'success') { + msg = navigateXml(response, 'imsx_POXHeader.imsx_POXResponseHeaderInfo.imsx_statusInfo.imsx_description'); + return callback(new errors.OutcomeResponseError(msg), false); + } else { + return callback(null, true, response); + } + }; + })(this)); + }; + + return OutcomeService; + + })(); + + exports.init = function(provider) { + var accepted_vals; + if (provider.body.lis_outcome_service_url && provider.body.lis_result_sourcedid) { + accepted_vals = provider.body.ext_outcome_data_values_accepted; + return provider.outcome_service = new OutcomeService({ + consumer_key: provider.consumer_key, + consumer_secret: provider.consumer_secret, + service_url: provider.body.lis_outcome_service_url, + source_did: provider.body.lis_result_sourcedid, + result_data_types: accepted_vals && accepted_vals.split(',') || [], + signer: provider.signer + }); + } else { + return provider.outcome_service = false; + } + }; + + exports.OutcomeService = OutcomeService; + +}).call(this); diff --git a/node_modules/ims-lti/lib/hmac-sha1.js b/node_modules/ims-lti/lib/hmac-sha1.js new file mode 100644 index 0000000..2605ed6 --- /dev/null +++ b/node_modules/ims-lti/lib/hmac-sha1.js @@ -0,0 +1,89 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var HMAC_SHA1, _clean_request_body, crypto, exports, url, utils; + + crypto = require('crypto'); + + url = require('url'); + + utils = require('./utils'); + + _clean_request_body = function(body, query) { + var cleanParams, encodeParam, out; + out = []; + encodeParam = function(key, val) { + return key + "=" + (utils.special_encode(val)); + }; + cleanParams = function(params) { + var i, key, len, val, vals; + if (typeof params !== 'object') { + return; + } + for (key in params) { + vals = params[key]; + if (key === 'oauth_signature') { + continue; + } + if (Array.isArray(vals) === true) { + for (i = 0, len = vals.length; i < len; i++) { + val = vals[i]; + out.push(encodeParam(key, val)); + } + } else { + out.push(encodeParam(key, vals)); + } + } + }; + cleanParams(body); + cleanParams(query); + return utils.special_encode(out.sort().join('&')); + }; + + HMAC_SHA1 = (function() { + function HMAC_SHA1() {} + + HMAC_SHA1.prototype.toString = function() { + return 'HMAC_SHA1'; + }; + + HMAC_SHA1.prototype.build_signature_raw = function(req_url, parsed_url, method, params, consumer_secret, token) { + var sig; + sig = [method.toUpperCase(), utils.special_encode(req_url), _clean_request_body(params, parsed_url.query)]; + return this.sign_string(sig.join('&'), consumer_secret, token); + }; + + HMAC_SHA1.prototype.build_signature = function(req, body, consumer_secret, token) { + var encrypted, hapiRawReq, hitUrl, originalUrl, parsedUrl, protocol; + hapiRawReq = req.raw && req.raw.req; + if (hapiRawReq) { + req = hapiRawReq; + } + originalUrl = req.originalUrl || req.url; + protocol = req.protocol; + if (body.tool_consumer_info_product_family_code === 'canvas') { + originalUrl = url.parse(originalUrl).pathname; + } + if (protocol === void 0) { + encrypted = req.connection.encrypted; + protocol = (encrypted && 'https') || 'http'; + } + parsedUrl = url.parse(originalUrl, true); + hitUrl = protocol + '://' + req.headers.host + parsedUrl.pathname; + return this.build_signature_raw(hitUrl, parsedUrl, req.method, body, consumer_secret, token); + }; + + HMAC_SHA1.prototype.sign_string = function(str, key, token) { + 
key = key + "&"; + if (token) { + key += token; + } + return crypto.createHmac('sha1', key).update(str).digest('base64'); + }; + + return HMAC_SHA1; + + })(); + + exports = module.exports = HMAC_SHA1; + +}).call(this); diff --git a/node_modules/ims-lti/lib/ims-lti.js b/node_modules/ims-lti/lib/ims-lti.js new file mode 100644 index 0000000..5c9ce83 --- /dev/null +++ b/node_modules/ims-lti/lib/ims-lti.js @@ -0,0 +1,22 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var exports, extensions; + + extensions = require('./extensions'); + + exports = module.exports = { + version: '0.0.0', + Provider: require('./provider'), + Consumer: require('./consumer'), + OutcomeService: extensions.Outcomes.OutcomeService, + Errors: require('./errors'), + Stores: { + RedisStore: require('./redis-nonce-store'), + MemoryStore: require('./memory-nonce-store'), + NonceStore: require('./nonce-store') + }, + Extensions: extensions, + supported_versions: ['LTI-1p0'] + }; + +}).call(this); diff --git a/node_modules/ims-lti/lib/memory-nonce-store.js b/node_modules/ims-lti/lib/memory-nonce-store.js new file mode 100644 index 0000000..e7f77c7 --- /dev/null +++ b/node_modules/ims-lti/lib/memory-nonce-store.js @@ -0,0 +1,74 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var EXPIRE_IN_SEC, MemoryNonceStore, NonceStore, exports, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NonceStore = require('./nonce-store'); + + EXPIRE_IN_SEC = 5 * 60; + + MemoryNonceStore = (function(superClass) { + extend(MemoryNonceStore, superClass); + + function MemoryNonceStore() { + this.used = Object.create(null); + } + + MemoryNonceStore.prototype.isNew = function(nonce, timestamp, next) { + var firstTimeSeen; + if (next == null) { + next = function() {}; + } + if (typeof nonce === 'undefined' || nonce === null || typeof nonce === 'function' || typeof timestamp === 'function' || typeof timestamp === 'undefined') { + return next(new Error('Invalid parameters'), false); + } + this._clearNonces(); + firstTimeSeen = this.used[nonce] === void 0; + if (!firstTimeSeen) { + return next(new Error('Nonce already seen'), false); + } + return this.setUsed(nonce, timestamp, function(err) { + var currentTime, timestampIsFresh; + if (typeof timestamp !== 'undefined' && timestamp !== null) { + timestamp = parseInt(timestamp, 10); + currentTime = Math.round(Date.now() / 1000); + timestampIsFresh = (currentTime - timestamp) <= EXPIRE_IN_SEC; + if (timestampIsFresh) { + return next(null, true); + } else { + return next(new Error('Expired timestamp'), false); + } + } else { + return next(new Error('Timestamp required'), false); + } + }); + }; + + MemoryNonceStore.prototype.setUsed = function(nonce, timestamp, next) { + if (next == null) { + next = function() {}; + } + this.used[nonce] = timestamp + EXPIRE_IN_SEC; + return next(null); + }; + + MemoryNonceStore.prototype._clearNonces = function() { + var expiry, nonce, now, ref; + now = Math.round(Date.now() / 1000); + ref = this.used; + for (nonce in ref) { + expiry = ref[nonce]; + if (expiry <= now) { + delete this.used[nonce]; + } + } + }; + + return MemoryNonceStore; + + })(NonceStore); + + exports = module.exports = MemoryNonceStore; + +}).call(this); diff --git a/node_modules/ims-lti/lib/nonce-store.js 
b/node_modules/ims-lti/lib/nonce-store.js new file mode 100644 index 0000000..1d872d4 --- /dev/null +++ b/node_modules/ims-lti/lib/nonce-store.js @@ -0,0 +1,42 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var NonceStore, exports, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }; + + NonceStore = (function() { + function NonceStore() { + this.setUsed = bind(this.setUsed, this); + this.isNew = bind(this.isNew, this); + } + + NonceStore.prototype.isNonceStore = function() { + return true; + }; + + NonceStore.prototype.isNew = function() { + var arg, i; + for (i in arguments) { + arg = arguments[i]; + if (typeof arg === 'function') { + return arg(new Error("NOT IMPLEMENTED"), false); + } + } + }; + + NonceStore.prototype.setUsed = function() { + var arg, i; + for (i in arguments) { + arg = arguments[i]; + if (typeof arg === 'function') { + return arg(new Error("NOT IMPLEMENTED"), false); + } + } + }; + + return NonceStore; + + })(); + + exports = module.exports = NonceStore; + +}).call(this); diff --git a/node_modules/ims-lti/lib/provider.js b/node_modules/ims-lti/lib/provider.js new file mode 100644 index 0000000..5a29a51 --- /dev/null +++ b/node_modules/ims-lti/lib/provider.js @@ -0,0 +1,148 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var HMAC_SHA1, MemoryNonceStore, Provider, errors, exports, extensions, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }; + + HMAC_SHA1 = require('./hmac-sha1'); + + MemoryNonceStore = require('./memory-nonce-store'); + + errors = require('./errors'); + + extensions = require('./extensions'); + + Provider = (function() { + function Provider(consumer_key, consumer_secret, nonceStore, signature_method) { + if (signature_method == null) { + signature_method = new HMAC_SHA1(); + } + this.parse_request = bind(this.parse_request, this); + this.valid_request = bind(this.valid_request, this); + if (typeof consumer_key === 'undefined' || consumer_key === null) { + throw new errors.ConsumerError('Must specify consumer_key'); + } + if (typeof consumer_secret === 'undefined' || consumer_secret === null) { + throw new errors.ConsumerError('Must specify consumer_secret'); + } + if (!nonceStore) { + nonceStore = new MemoryNonceStore(); + } + if (!(typeof nonceStore.isNonceStore === "function" ? 
nonceStore.isNonceStore() : void 0)) { + throw new errors.ParameterError('Fourth argument must be a nonceStore object'); + } + this.consumer_key = consumer_key; + this.consumer_secret = consumer_secret; + this.signer = signature_method; + this.nonceStore = nonceStore; + this.body = {}; + } + + Provider.prototype.valid_request = function(req, body, callback) { + if (!callback) { + callback = body; + body = void 0; + } + body = body || req.body || req.payload; + callback = callback || function() {}; + this.parse_request(req, body); + if (!this._valid_parameters(body)) { + return callback(new errors.ParameterError('Invalid LTI parameters'), false); + } + return this._valid_oauth(req, body, callback); + }; + + Provider.prototype._valid_parameters = function(body) { + var correct_message_type, correct_version, has_resource_link_id; + if (!body) { + return false; + } + correct_message_type = body.lti_message_type === 'basic-lti-launch-request'; + correct_version = require('./ims-lti').supported_versions.indexOf(body.lti_version) !== -1; + has_resource_link_id = body.resource_link_id != null; + return correct_message_type && correct_version && has_resource_link_id; + }; + + Provider.prototype._valid_oauth = function(req, body, callback) { + var generated, valid_signature; + generated = this.signer.build_signature(req, body, this.consumer_secret); + valid_signature = generated === body.oauth_signature; + if (!valid_signature) { + return callback(new errors.SignatureError('Invalid Signature'), false); + } + return this.nonceStore.isNew(body.oauth_nonce, body.oauth_timestamp, function(err, valid) { + if (!valid) { + return callback(new errors.NonceError('Expired nonce'), false); + } else { + return callback(null, true); + } + }); + }; + + Provider.prototype.parse_request = function(req, body) { + var extension, extension_name, id, key, results, val; + body = body || req.body || req.payload; + for (key in body) { + val = body[key]; + if (key.match(/^oauth_/)) { + continue; + } + this.body[key] = val; + } + if (typeof this.body.roles === 'string') { + this.body.roles = this.body.roles.split(','); + } + this.admin = this.has_role('Administrator'); + this.alumni = this.has_role('Alumni'); + this.content_developer = this.has_role('ContentDeveloper'); + this.guest = this.has_role('Guest'); + this.instructor = this.has_role('Instructor') || this.has_role('Faculty') || this.has_role('Staff'); + this.manager = this.has_role('Manager'); + this.member = this.has_role('Member'); + this.mentor = this.has_role('Mentor'); + this.none = this.has_role('None'); + this.observer = this.has_role('Observer'); + this.other = this.has_role('Other'); + this.prospective_student = this.has_role('ProspectiveStudent'); + this.student = this.has_role('Learner') || this.has_role('Student'); + this.ta = this.has_role('TeachingAssistant'); + this.launch_request = this.body.lti_message_type === 'basic-lti-launch-request'; + this.username = this.body.lis_person_name_given || this.body.lis_person_name_family || this.body.lis_person_name_full || ''; + this.userId = this.body.user_id; + if (typeof this.body.role_scope_mentor === 'string') { + this.mentor_user_ids = (function() { + var i, len, ref, results; + ref = this.body.role_scope_mentor.split(','); + results = []; + for (i = 0, len = ref.length; i < len; i++) { + id = ref[i]; + results.push(decodeURIComponent(id)); + } + return results; + }).call(this); + } + this.context_id = this.body.context_id; + this.context_label = this.body.context_label; + this.context_title = 
this.body.context_title; + results = []; + for (extension_name in extensions) { + extension = extensions[extension_name]; + results.push(extension.init(this)); + } + return results; + }; + + Provider.prototype.has_role = function(role) { + var regex; + regex = new RegExp("^(urn:lti:(sys|inst)?role:ims/lis/)?" + role + "(/.+)?$", 'i'); + return this.body.roles && this.body.roles.some(function(r) { + return regex.test(r); + }); + }; + + return Provider; + + })(); + + exports = module.exports = Provider; + +}).call(this); diff --git a/node_modules/ims-lti/lib/redis-nonce-store.js b/node_modules/ims-lti/lib/redis-nonce-store.js new file mode 100644 index 0000000..52cc21c --- /dev/null +++ b/node_modules/ims-lti/lib/redis-nonce-store.js @@ -0,0 +1,63 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + var EXPIRE_IN_SEC, NonceStore, RedisNonceStore, exports, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + NonceStore = require('./nonce-store'); + + EXPIRE_IN_SEC = 5 * 60; + + RedisNonceStore = (function(superClass) { + extend(RedisNonceStore, superClass); + + function RedisNonceStore(redisClient) { + if (typeof redisClient === 'string' && arguments.length === 2) { + redisClient = arguments[1]; + } + this.redis = redisClient; + } + + RedisNonceStore.prototype.isNew = function(nonce, timestamp, next) { + var currentTime, freshTimestamp; + if (next == null) { + next = function() {}; + } + if (typeof nonce === 'undefined' || nonce === null || typeof nonce === 'function' || typeof timestamp === 'function' || typeof timestamp === 'undefined') { + return next(new Error('Invalid parameters'), false); + } + if (typeof timestamp === 'undefined' || timestamp === null) { + return next(new Error('Timestamp required'), false); + } + currentTime = Math.round(Date.now() / 1000); + freshTimestamp = (currentTime - parseInt(timestamp, 10)) <= EXPIRE_IN_SEC; + if (!freshTimestamp) { + return next(new Error('Expired timestamp'), false); + } + return this.redis.get(nonce, (function(_this) { + return function(err, seen) { + if (seen) { + return next(new Error('Nonce already seen'), false); + } + _this.setUsed(nonce, timestamp); + return next(null, true); + }; + })(this)); + }; + + RedisNonceStore.prototype.setUsed = function(nonce, timestamp, next) { + if (next == null) { + next = function() {}; + } + this.redis.set(nonce, timestamp); + this.redis.expire(nonce, EXPIRE_IN_SEC); + return next(null); + }; + + return RedisNonceStore; + + })(NonceStore); + + exports = module.exports = RedisNonceStore; + +}).call(this); diff --git a/node_modules/ims-lti/lib/utils.js b/node_modules/ims-lti/lib/utils.js new file mode 100644 index 0000000..2823f08 --- /dev/null +++ b/node_modules/ims-lti/lib/utils.js @@ -0,0 +1,7 @@ +// Generated by CoffeeScript 1.10.0 +(function() { + exports.special_encode = function(string) { + return encodeURIComponent(string).replace(/[!'()]/g, escape).replace(/\*/g, '%2A'); + }; + +}).call(this); diff --git a/node_modules/ims-lti/node_modules/.bin/uuid b/node_modules/ims-lti/node_modules/.bin/uuid new file mode 120000 index 0000000..56fb209 --- /dev/null +++ b/node_modules/ims-lti/node_modules/.bin/uuid @@ -0,0 +1 @@ +../../../node-uuid/bin/uuid \ No newline at end of file diff --git a/node_modules/ims-lti/package.json 
b/node_modules/ims-lti/package.json new file mode 100644 index 0000000..a5bca1c --- /dev/null +++ b/node_modules/ims-lti/package.json @@ -0,0 +1,51 @@ +{ + "name": "ims-lti", + "version": "3.0.2", + "description": "Module for building an LTI Tool Provider and accept LTI launch requests", + "author": { + "name": "James Rundquist", + "email": "james@courseshark.com" + }, + "contributors": [ + { + "name": "Owen Smith", + "email": "owen@omsmith.ca" + }, + { + "name": "David Kosub", + "email": "dave@davidkosub.com" + } + ], + "license": "MIT", + "keywords": [ + "ims", + "lti", + "provider" + ], + "repository": { + "type": "git", + "url": "https://github.com/omsmith/ims-lti.git" + }, + "bugs": { + "url": "http://github.com/omsmith/ims-lti/issues" + }, + "main": "./lib/ims-lti", + "scripts": { + "prepublish": "make build" + }, + "dependencies": { + "xml2js": "~0.4.0", + "xmlbuilder": "~2.4.0", + "node-uuid": "~1.4.0" + }, + "devDependencies": { + "coffee-coverage": "^0.4.4", + "coffee-script": "^1.8.0", + "coveralls": "^2.11.2", + "mocha": "^2.1.0", + "mocha-lcov-reporter": "0.0.1", + "mocha-multi": "^0.6.0", + "redis": "^0.12.1", + "should": "^4.6.1" + } +} diff --git a/node_modules/indexof/.npmignore b/node_modules/indexof/.npmignore new file mode 100644 index 0000000..48a2e24 --- /dev/null +++ b/node_modules/indexof/.npmignore @@ -0,0 +1,2 @@ +components +build diff --git a/node_modules/indexof/Makefile b/node_modules/indexof/Makefile new file mode 100644 index 0000000..3f6119d --- /dev/null +++ b/node_modules/indexof/Makefile @@ -0,0 +1,11 @@ + +build: components index.js + @component build + +components: + @Component install + +clean: + rm -fr build components template.js + +.PHONY: clean diff --git a/node_modules/indexof/Readme.md b/node_modules/indexof/Readme.md new file mode 100644 index 0000000..99c8dfc --- /dev/null +++ b/node_modules/indexof/Readme.md @@ -0,0 +1,15 @@ + +# indexOf + + Lame indexOf thing, thanks microsoft + +## Example + +```js +var index = require('indexof'); +index(arr, obj); +``` + +## License + + MIT \ No newline at end of file diff --git a/node_modules/indexof/component.json b/node_modules/indexof/component.json new file mode 100644 index 0000000..e3430d7 --- /dev/null +++ b/node_modules/indexof/component.json @@ -0,0 +1,10 @@ +{ + "name": "indexof", + "description": "Microsoft sucks", + "version": "0.0.1", + "keywords": ["index", "array", "indexOf"], + "dependencies": {}, + "scripts": [ + "index.js" + ] +} \ No newline at end of file diff --git a/node_modules/indexof/index.js b/node_modules/indexof/index.js new file mode 100644 index 0000000..9d9667b --- /dev/null +++ b/node_modules/indexof/index.js @@ -0,0 +1,10 @@ + +var indexOf = [].indexOf; + +module.exports = function(arr, obj){ + if (indexOf) return arr.indexOf(obj); + for (var i = 0; i < arr.length; ++i) { + if (arr[i] === obj) return i; + } + return -1; +}; \ No newline at end of file diff --git a/node_modules/indexof/package.json b/node_modules/indexof/package.json new file mode 100644 index 0000000..79d009f --- /dev/null +++ b/node_modules/indexof/package.json @@ -0,0 +1,12 @@ +{ + "name": "indexof", + "description": "Microsoft sucks", + "version": "0.0.1", + "keywords": ["index", "array", "indexOf"], + "dependencies": {}, + "component": { + "scripts": { + "indexof/index.js": "index.js" + } + } +} \ No newline at end of file diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 
@@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 0000000..6dc8929 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. + setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js new file mode 100644 index 0000000..48202b3 --- /dev/null +++ b/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json new file mode 100644 index 0000000..6084d35 --- /dev/null +++ b/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. 
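For illustration, a minimal sketch of the constructor pattern this enables (the `Animal`/`Dog` names are invented for this example; the behaviour shown follows the standard `util.inherits` contract that this package mirrors):

```js
var inherits = require('inherits');

function Animal(name) {
  this.name = name;
}
Animal.prototype.speak = function () {
  return this.name + ' makes a noise';
};

function Dog(name) {
  // call the superclass constructor
  Animal.call(this, name);
}
// Dog.prototype now delegates to Animal.prototype,
// and Dog.super_ points at Animal
inherits(Dog, Animal);

// define subclass methods *after* inherits(), since it replaces the prototype
Dog.prototype.speak = function () {
  return this.name + ' barks';
};

new Dog('Rex').speak(); // => 'Rex barks'
```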
+ +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 0000000..3b94763 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,7 @@ +try { + var util = require('util'); + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..c1e78a7 --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,23 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 0000000..7cf62b9 --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.3", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "node test" + }, + "devDependencies": { + "tap": "^7.1.0" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/node_modules/ini/LICENSE b/node_modules/ini/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/ini/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/ini/README.md b/node_modules/ini/README.md new file mode 100644 index 0000000..33df258 --- /dev/null +++ b/node_modules/ini/README.md @@ -0,0 +1,102 @@ +An ini format parser and serializer for node. + +Sections are treated as nested objects. Items before the first +heading are saved on the object directly. + +## Usage + +Consider an ini-file `config.ini` that looks like this: + + ; this comment is being ignored + scope = global + + [database] + user = dbuser + password = dbpassword + database = use_this_database + + [paths.default] + datadir = /var/lib/data + array[] = first value + array[] = second value + array[] = third value + +You can read, manipulate and write the ini-file like so: + + var fs = require('fs') + , ini = require('ini') + + var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8')) + + config.scope = 'local' + config.database.database = 'use_another_database' + config.paths.default.tmpdir = '/tmp' + delete config.paths.default.datadir + config.paths.default.array.push('fourth value') + + fs.writeFileSync('./config_modified.ini', ini.stringify(config, { section: 'section' })) + +This will result in a file called `config_modified.ini` being written +to the filesystem with the following content: + + [section] + scope=local + [section.database] + user=dbuser + password=dbpassword + database=use_another_database + [section.paths.default] + tmpdir=/tmp + array[]=first value + array[]=second value + array[]=third value + array[]=fourth value + + +## API + +### decode(inistring) + +Decode the ini-style formatted `inistring` into a nested object. + +### parse(inistring) + +Alias for `decode(inistring)` + +### encode(object, [options]) + +Encode the object `object` into an ini-style formatted string. If the +optional parameter `section` is given, then all top-level properties +of the object are put into this section and the `section`-string is +prepended to all sub-sections, see the usage example above. + +The `options` object may contain the following: + +* `section` A string which will be the first `section` in the encoded + ini data. Defaults to none. +* `whitespace` Boolean to specify whether to put whitespace around the + `=` character. By default, whitespace is omitted, to be friendly to + some persnickety old parsers that don't tolerate it well. But some + find that it's more human-readable and pretty with the whitespace. + +For backwards compatibility reasons, if a `string` options is passed +in, then it is assumed to be the `section` value. + +### stringify(object, [options]) + +Alias for `encode(object, [options])` + +### safe(val) + +Escapes the string `val` such that it is safe to be used as a key or +value in an ini-file. Basically escapes quotes. For example + + ini.safe('"unsafe string"') + +would result in + + "\"unsafe string\"" + +### unsafe(val) + +Unescapes the string `val` diff --git a/node_modules/ini/ini.js b/node_modules/ini/ini.js new file mode 100644 index 0000000..ddf5bd9 --- /dev/null +++ b/node_modules/ini/ini.js @@ -0,0 +1,190 @@ + +exports.parse = exports.decode = decode +exports.stringify = exports.encode = encode + +exports.safe = safe +exports.unsafe = unsafe + +var eol = process.platform === "win32" ? "\r\n" : "\n" + +function encode (obj, opt) { + var children = [] + , out = "" + + if (typeof opt === "string") { + opt = { + section: opt, + whitespace: false + } + } else { + opt = opt || {} + opt.whitespace = opt.whitespace === true + } + + var separator = opt.whitespace ? 
" = " : "=" + + Object.keys(obj).forEach(function (k, _, __) { + var val = obj[k] + if (val && Array.isArray(val)) { + val.forEach(function(item) { + out += safe(k + "[]") + separator + safe(item) + "\n" + }) + } + else if (val && typeof val === "object") { + children.push(k) + } else { + out += safe(k) + separator + safe(val) + eol + } + }) + + if (opt.section && out.length) { + out = "[" + safe(opt.section) + "]" + eol + out + } + + children.forEach(function (k, _, __) { + var nk = dotSplit(k).join('\\.') + var section = (opt.section ? opt.section + "." : "") + nk + var child = encode(obj[k], { + section: section, + whitespace: opt.whitespace + }) + if (out.length && child.length) { + out += eol + } + out += child + }) + + return out +} + +function dotSplit (str) { + return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002') + .replace(/\\\./g, '\u0001') + .split(/\./).map(function (part) { + return part.replace(/\1/g, '\\.') + .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001') + }) +} + +function decode (str) { + var out = {} + , p = out + , section = null + , state = "START" + // section |key = value + , re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i + , lines = str.split(/[\r\n]+/g) + , section = null + + lines.forEach(function (line, _, __) { + if (!line || line.match(/^\s*[;#]/)) return + var match = line.match(re) + if (!match) return + if (match[1] !== undefined) { + section = unsafe(match[1]) + p = out[section] = out[section] || {} + return + } + var key = unsafe(match[2]) + , value = match[3] ? unsafe((match[4] || "")) : true + switch (value) { + case 'true': + case 'false': + case 'null': value = JSON.parse(value) + } + + // Convert keys with '[]' suffix to an array + if (key.length > 2 && key.slice(-2) === "[]") { + key = key.substring(0, key.length - 2) + if (!p[key]) { + p[key] = [] + } + else if (!Array.isArray(p[key])) { + p[key] = [p[key]] + } + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) { + p[key].push(value) + } + else { + p[key] = value + } + }) + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + Object.keys(out).filter(function (k, _, __) { + if (!out[k] || typeof out[k] !== "object" || Array.isArray(out[k])) return false + // see if the parent section is also an object. + // if so, add it to that, and mark this one for deletion + var parts = dotSplit(k) + , p = out + , l = parts.pop() + , nl = l.replace(/\\\./g, '.') + parts.forEach(function (part, _, __) { + if (!p[part] || typeof p[part] !== "object") p[part] = {} + p = p[part] + }) + if (p === out && nl === l) return false + p[nl] = out[k] + return true + }).forEach(function (del, _, __) { + delete out[del] + }) + + return out +} + +function isQuoted (val) { + return (val.charAt(0) === "\"" && val.slice(-1) === "\"") + || (val.charAt(0) === "'" && val.slice(-1) === "'") +} + +function safe (val) { + return ( typeof val !== "string" + || val.match(/[=\r\n]/) + || val.match(/^\[/) + || (val.length > 1 + && isQuoted(val)) + || val !== val.trim() ) + ? 
JSON.stringify(val) + : val.replace(/;/g, '\\;').replace(/#/g, "\\#") +} + +function unsafe (val, doUnesc) { + val = (val || "").trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") { + val = val.substr(1, val.length - 2); + } + try { val = JSON.parse(val) } catch (_) {} + } else { + // walk the val to find the first not-escaped ; character + var esc = false + var unesc = ""; + for (var i = 0, l = val.length; i < l; i++) { + var c = val.charAt(i) + if (esc) { + if ("\\;#".indexOf(c) !== -1) + unesc += c + else + unesc += "\\" + c + esc = false + } else if (";#".indexOf(c) !== -1) { + break + } else if (c === "\\") { + esc = true + } else { + unesc += c + } + } + if (esc) + unesc += "\\" + return unesc + } + return val +} diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json new file mode 100644 index 0000000..53445b6 --- /dev/null +++ b/node_modules/ini/package.json @@ -0,0 +1,25 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "ini", + "description": "An ini encoder/decoder for node", + "version": "1.3.4", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/ini.git" + }, + "main": "ini.js", + "scripts": { + "test": "tap test/*.js" + }, + "engines": { + "node": "*" + }, + "dependencies": {}, + "devDependencies": { + "tap": "^1.2.0" + }, + "license": "ISC", + "files": [ + "ini.js" + ] +} diff --git a/node_modules/interpret/CHANGELOG b/node_modules/interpret/CHANGELOG new file mode 100644 index 0000000..9f94ae5 --- /dev/null +++ b/node_modules/interpret/CHANGELOG @@ -0,0 +1,98 @@ +v0.6.6: + date: 2015-09-21 + changes: + - add support for ts-node (formerly typescript-node) +v0.6.5: + date: 2015-07-22 + changes: + - add support for typescript 1.5 via typescript-node +v0.6.4: + date: 2015-07-07 + changes: + - add support for earlgrey +v0.6.3: + date: 2015-07-03 + changes: + - prefer babel/core to babel +v0.6.2: + date: 2015-05-20 + changes: + - update module list for iced coffee-script +v0.6.1: + date: 2015-05-20 + changes: + - Fix toml loader. +v0.6.0: + date: 2015-05-19 + changes: + - Combine fallbacks and loaders into `extensions`. + - Provide implementation guidance. +v0.5.1: + date: 2015-03-01 + changes: + - Add support for CirruScript. +v0.5.0: + date: 2015-02-27 + changes: + - Refactor es6 support via Babel (formerly 6to5) +v0.4.3: + date: 2015-02-09 + changes: + - Switch support from typescript-require to typescript-register. +v0.4.2: + date: 2015-01-16 + changes: + - Add support for wisp. +v0.4.1: + date: 2015-01-10 + changes: + - Add support for 6to5 (es6) +v0.4.0: + date: 2014-01-09 + changes: + - Add support for fallback (legacy) modules + - Add support for module configurations +v0.3.10: + date: 2014-12-17 + changes: + - Add support for json5. +v0.3.9: + date: 2014-12-08 + changes: + - Add support for literate iced coffee. +v0.3.8: + date: 2014-11-20 + changes: + - Add support for [cjsx](https://github.com/jsdf/coffee-react). +v0.3.7: + date: 2014-09-08 + changes: + - Add support for [TypeScript](http://www.typescriptlang.org/). +v0.3.6: + date: 2014-08-25 + changes: + - Add support for coffee.md. +v0.3.5: + date: 2014-07-03 + changes: + - Add support for jsx. +v0.3.4: + date: 2014-06-27 + changes: + - Make .js first jsVariant entry. +v0.3.3: + date: 2014-06-02 + changes: + - Fix casing on livescript dependency. +v0.3.0: + date: 2014-04-20 + changes: + - Simplify loading of coffee-script and iced-coffee-script. 
+v0.2.0: + date: 2014-04-20 + changes: + - Move module loading into rechoir. +v0.1.0: + date: 2014-04-20 + changes: + - Initial public release. diff --git a/node_modules/interpret/LICENSE b/node_modules/interpret/LICENSE new file mode 100644 index 0000000..a55f5b7 --- /dev/null +++ b/node_modules/interpret/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2014 Tyler Kellen + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/interpret/README.md b/node_modules/interpret/README.md new file mode 100644 index 0000000..5b897ff --- /dev/null +++ b/node_modules/interpret/README.md @@ -0,0 +1,92 @@ +# interpret +> A dictionary of file extensions and associated module loaders. + +[![NPM](https://nodei.co/npm/interpret.png)](https://nodei.co/npm/interpret/) + +## What is it +This is used by [Liftoff](http://github.com/tkellen/node-liftoff) to automatically require dependencies for configuration files, and by [rechoir](http://github.com/tkellen/node-rechoir) for registering module loaders. + +## API + +### extensions +Map file types to modules which provide a [require.extensions] loader. 
+ +```js +{ + '.babel.js': { + module: 'babel/register', + register: function (module) { + module({ + // register on .js extension due to https://github.com/joyent/node/blob/v0.12.0/lib/module.js#L353 + // which only captures the final extension (.babel.js -> .js) + extensions: '.js' + }) + } + }, + '.cirru': 'cirru-script/lib/register', + '.cjsx': 'node-cjsx/register', + '.co': 'coco', + '.coffee': ['coffee-script/register', 'coffee-script'], + '.coffee.md': ['coffee-script/register', 'coffee-script'], + '.csv': 'require-csv', + '.eg': 'earlgrey/register', + '.iced': ['iced-coffee-script/register', 'iced-coffee-script'], + '.iced.md': 'iced-coffee-script/register', + '.ini': 'require-ini', + '.js': null, + '.json': null, + '.json5': 'json5/lib/require', + '.jsx': [ + { + module: 'babel/register', + register: function (module) { + module({ + extensions: '.jsx' + }); + }, + }, + { + module: 'node-jsx', + register: function (module) { + module.install({ + extension: '.jsx', + harmony: true + }); + } + } + ], + '.litcoffee': ['coffee-script/register', 'coffee-script'], + '.liticed': 'iced-coffee-script/register', + '.ls': ['livescript', 'LiveScript'], + '.node': null, + '.toml': { + module: 'toml-require', + register: function (module) { + module.install(); + } + }, + '.ts': ['typescript-node/register', 'typescript-register', 'typescript-require'], + '.tsx': ['typescript-node/register'], + '.wisp': 'wisp/engine/node', + '.xml': 'require-xml', + '.yaml': 'require-yaml', + '.yml': 'require-yaml' +}; +``` + +### jsVariants +Same as above, but only include the extensions which are javascript variants. + +## How to use it + +Consumers should use the exported `extensions` or `jsVariants` object to determine which module should be loaded for a given extension. If a matching extension is found, consumers should do the following: + +1. If the value is null, do nothing. + +2. If the value is a string, try to require it. + +3. If the value is an object, try to require the `module` property. If successful, the `register` property (a function) should be called with the module passed as the first argument. + +4. If the value is an array, iterate over it, attempting step #2 or #3 until one of the attempts does not throw. 
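For illustration, a minimal consumer-side sketch of steps 1–4 above (the `config.babel.js` filename and the final error message are invented for this example; only `jsVariants` and the shape of its values come from interpret itself):

```js
var interpret = require('interpret');
var path = require('path');

// hypothetical config file we want to be able to require()
var file = 'config.babel.js';

// pick the longest matching extension ('.babel.js' wins over '.js')
var ext = Object.keys(interpret.jsVariants)
  .filter(function (e) { return file.slice(-e.length) === e; })
  .sort(function (a, b) { return b.length - a.length; })[0];

var attempts = interpret.jsVariants[ext];
if (attempts && !Array.isArray(attempts)) attempts = [attempts];

// step 1: null means the runtime already understands the file, do nothing
if (attempts) {
  // steps 2-4: try each candidate until one can be required without throwing
  var registered = attempts.some(function (attempt) {
    var moduleName = typeof attempt === 'string' ? attempt : attempt.module;
    try {
      var loader = require(moduleName);
      // step 3: objects carry a register() hook that receives the loaded module
      if (typeof attempt !== 'string' && attempt.register) {
        attempt.register(loader);
      }
      return true;
    } catch (err) {
      return false; // candidate not installed; fall through to the next one
    }
  });
  if (!registered) {
    throw new Error('no loader could be registered for ' + ext);
  }
}

// with a loader registered, the file can now be required as usual
var config = require(path.resolve(file));
```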
+ +[require.extensions]: http://nodejs.org/api/globals.html#globals_require_extensions diff --git a/node_modules/interpret/index.js b/node_modules/interpret/index.js new file mode 100644 index 0000000..3e498ff --- /dev/null +++ b/node_modules/interpret/index.js @@ -0,0 +1,105 @@ +const extensions = { + '.babel.js': [ + { + module: 'babel-core/register', + register: function (module) { + module({ + // register on .js extension due to https://github.com/joyent/node/blob/v0.12.0/lib/module.js#L353 + // which only captures the final extension (.babel.js -> .js) + extensions: '.js' + }); + } + }, + { + module: 'babel/register', + register: function (module) { + module({ + extensions: '.js' + }); + } + } + ], + '.cirru': 'cirru-script/lib/register', + '.cjsx': 'node-cjsx/register', + '.co': 'coco', + '.coffee': ['coffee-script/register', 'coffee-script'], + '.coffee.md': ['coffee-script/register', 'coffee-script'], + '.csv': 'require-csv', + '.eg': 'earlgrey/register', + '.iced': ['iced-coffee-script/register', 'iced-coffee-script'], + '.iced.md': 'iced-coffee-script/register', + '.ini': 'require-ini', + '.js': null, + '.json': null, + '.json5': 'json5/lib/require', + '.jsx': [ + { + module: 'babel-core/register', + register: function (module) { + module({ + extensions: '.jsx' + }); + } + }, + { + module: 'babel/register', + register: function (module) { + module({ + extensions: '.jsx' + }); + }, + }, + { + module: 'node-jsx', + register: function (module) { + module.install({ + extension: '.jsx', + harmony: true + }); + } + } + ], + '.litcoffee': ['coffee-script/register', 'coffee-script'], + '.liticed': 'iced-coffee-script/register', + '.ls': ['livescript', 'LiveScript'], + '.node': null, + '.toml': { + module: 'toml-require', + register: function (module) { + module.install(); + } + }, + '.ts': ['ts-node/register', 'typescript-node/register', 'typescript-register', 'typescript-require'], + '.tsx': ['ts-node/register', 'typescript-node/register'], + '.wisp': 'wisp/engine/node', + '.xml': 'require-xml', + '.yaml': 'require-yaml', + '.yml': 'require-yaml' +}; + +const jsVariantExtensions = [ + '.js', + '.babel.js', + '.cirru', + '.cjsx', + '.co', + '.coffee', + '.coffee.md', + '.eg', + '.iced', + '.iced.md', + '.jsx', + '.litcoffee', + '.liticed', + '.ls', + '.ts', + '.wisp' +]; + +module.exports = { + extensions: extensions, + jsVariants: jsVariantExtensions.reduce(function (result, ext) { + result[ext] = extensions[ext]; + return result; + }, {}) +}; diff --git a/node_modules/interpret/package.json b/node_modules/interpret/package.json new file mode 100644 index 0000000..e22ea75 --- /dev/null +++ b/node_modules/interpret/package.json @@ -0,0 +1,52 @@ +{ + "name": "interpret", + "description": "A dictionary of file extensions and associated module loaders.", + "version": "0.6.6", + "homepage": "https://github.com/tkellen/node-interpret", + "author": { + "name": "Tyler Kellen", + "url": "http://goingslowly.com/" + }, + "repository": { + "type": "git", + "url": "git://github.com/tkellen/node-interpret.git" + }, + "bugs": { + "url": "https://github.com/tkellen/node-interpret/issues" + }, + "license": "MIT", + "main": "index.js", + "keywords": [ + "cirru-script", + "cjsx", + "co", + "coco", + "coffee-script", + "coffee", + "coffee.md", + "csv", + "earlgrey", + "es", + "es6", + "iced", + "iced.md", + "iced-coffee-script", + "ini", + "js", + "json", + "json5", + "jsx", + "react", + "litcoffee", + "liticed", + "ls", + "livescript", + "toml", + "ts", + "typescript", + "wisp", + "xml", + "yaml", + "yml" 
+ ] +} diff --git a/node_modules/invariant/CHANGELOG.md b/node_modules/invariant/CHANGELOG.md new file mode 100644 index 0000000..5326f1a --- /dev/null +++ b/node_modules/invariant/CHANGELOG.md @@ -0,0 +1,33 @@ +2.1.1 / 2015-09-20 +================== + + * Use correct SPDX license. + * Test "browser.js" using browserify. + * Switch from "envify" to "loose-envify". + +2.1.0 / 2015-06-03 +================== + + * Add "envify" as a dependency. + * Fixed license field in "package.json". + +2.0.0 / 2015-02-21 +================== + + * Switch to using the "browser" field. There are now browser and server versions that respect the "format" in production. + +1.0.2 / 2014-09-24 +================== + + * Added tests, npmignore and gitignore. + * Clarifications in README. + +1.0.1 / 2014-09-24 +================== + + * Actually include 'invariant.js'. + +1.0.0 / 2014-09-24 +================== + + * Initial release. diff --git a/node_modules/invariant/README.md b/node_modules/invariant/README.md new file mode 100644 index 0000000..813ee26 --- /dev/null +++ b/node_modules/invariant/README.md @@ -0,0 +1,35 @@ +# invariant + +[![Build Status](https://travis-ci.org/zertosh/invariant.svg?branch=master)](https://travis-ci.org/zertosh/invariant) + +A mirror of Facebook's `invariant` (e.g. [React](https://github.com/facebook/react/blob/v0.13.3/src/vendor/core/invariant.js), [flux](https://github.com/facebook/flux/blob/2.0.2/src/invariant.js)). + +## Install + +With [npm](http://npmjs.org) do: + +```sh +npm install invariant +``` + +## `invariant(condition, message)` + +```js +var invariant = require('invariant'); + +invariant(someTruthyVal, 'This will not throw'); +// No errors + +invariant(someFalseyVal, 'This will throw an error with this message'); +// Error: Invariant Violation: This will throw an error with this message +``` + +**Note:** When `process.env.NODE_ENV` is not `production`, the message is required. If omitted, `invariant` will throw regardless of the truthiness of the condition. When `process.env.NODE_ENV` is `production`, the message is optional – so they can be minified away. + +### Browser + +When used with [browserify](https://github.com/substack/node-browserify), it'll use `browser.js` (instead of `invariant.js`) and the [envify](https://github.com/hughsk/envify) transform will inline the value of `process.env.NODE_ENV`. + +### Node + +The node version is optimized around the performance implications of accessing `process.env`. The value of `process.env.NODE_ENV` is cached, and repeatedly used instead of reading `proces.env`. See [Server rendering is slower with npm react #812](https://github.com/facebook/react/issues/812) diff --git a/node_modules/invariant/browser.js b/node_modules/invariant/browser.js new file mode 100644 index 0000000..29ead50 --- /dev/null +++ b/node_modules/invariant/browser.js @@ -0,0 +1,51 @@ +/** + * Copyright 2013-2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the BSD-style license found in the + * LICENSE file in the root directory of this source tree. An additional grant + * of patent rights can be found in the PATENTS file in the same directory. + */ + +'use strict'; + +/** + * Use invariant() to assert state which your program assumes to be true. + * + * Provide sprintf-style format (only %s is supported) and arguments + * to provide information about what broke and what you were + * expecting. 
+ * + * The invariant message will be stripped in production, but the invariant + * will remain to ensure logic does not differ in production. + */ + +var invariant = function(condition, format, a, b, c, d, e, f) { + if (process.env.NODE_ENV !== 'production') { + if (format === undefined) { + throw new Error('invariant requires an error message argument'); + } + } + + if (!condition) { + var error; + if (format === undefined) { + error = new Error( + 'Minified exception occurred; use the non-minified dev environment ' + + 'for the full error message and additional helpful warnings.' + ); + } else { + var args = [a, b, c, d, e, f]; + var argIndex = 0; + error = new Error( + format.replace(/%s/g, function() { return args[argIndex++]; }) + ); + error.name = 'Invariant Violation'; + } + + error.framesToPop = 1; // we don't care about invariant's own frame + throw error; + } +}; + +module.exports = invariant; diff --git a/node_modules/invariant/invariant.js b/node_modules/invariant/invariant.js new file mode 100644 index 0000000..1859a2a --- /dev/null +++ b/node_modules/invariant/invariant.js @@ -0,0 +1,53 @@ +/** + * Copyright 2013-2015, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the BSD-style license found in the + * LICENSE file in the root directory of this source tree. An additional grant + * of patent rights can be found in the PATENTS file in the same directory. + */ + +'use strict'; + +/** + * Use invariant() to assert state which your program assumes to be true. + * + * Provide sprintf-style format (only %s is supported) and arguments + * to provide information about what broke and what you were + * expecting. + * + * The invariant message will be stripped in production, but the invariant + * will remain to ensure logic does not differ in production. + */ + +var NODE_ENV = process.env.NODE_ENV; + +var invariant = function(condition, format, a, b, c, d, e, f) { + if (NODE_ENV !== 'production') { + if (format === undefined) { + throw new Error('invariant requires an error message argument'); + } + } + + if (!condition) { + var error; + if (format === undefined) { + error = new Error( + 'Minified exception occurred; use the non-minified dev environment ' + + 'for the full error message and additional helpful warnings.' 
+ ); + } else { + var args = [a, b, c, d, e, f]; + var argIndex = 0; + error = new Error( + format.replace(/%s/g, function() { return args[argIndex++]; }) + ); + error.name = 'Invariant Violation'; + } + + error.framesToPop = 1; // we don't care about invariant's own frame + throw error; + } +}; + +module.exports = invariant; diff --git a/node_modules/invariant/node_modules/.bin/loose-envify b/node_modules/invariant/node_modules/.bin/loose-envify new file mode 120000 index 0000000..2a6b8df --- /dev/null +++ b/node_modules/invariant/node_modules/.bin/loose-envify @@ -0,0 +1 @@ +../../../loose-envify/cli.js \ No newline at end of file diff --git a/node_modules/invariant/package.json b/node_modules/invariant/package.json new file mode 100644 index 0000000..3d3ca24 --- /dev/null +++ b/node_modules/invariant/package.json @@ -0,0 +1,32 @@ +{ + "name": "invariant", + "version": "2.2.1", + "description": "invariant", + "keywords": [ + "test" + ], + "license": "BSD-3-Clause", + "author": "Andres Suarez ", + "files": [ + "invariant.js", + "browser.js" + ], + "repository": "zertosh/invariant", + "scripts": { + "test": "NODE_ENV=production tap test/*.js && NODE_ENV=development tap test/*.js" + }, + "dependencies": { + "loose-envify": "^1.0.0" + }, + "devDependencies": { + "browserify": "^11.0.1", + "tap": "^1.4.0" + }, + "main": "invariant.js", + "browser": "browser.js", + "browserify": { + "transform": [ + "loose-envify" + ] + } +} diff --git a/node_modules/ipaddr.js/.npmignore b/node_modules/ipaddr.js/.npmignore new file mode 100644 index 0000000..7a1537b --- /dev/null +++ b/node_modules/ipaddr.js/.npmignore @@ -0,0 +1,2 @@ +.idea +node_modules diff --git a/node_modules/ipaddr.js/.travis.yml b/node_modules/ipaddr.js/.travis.yml new file mode 100644 index 0000000..aa3d14a --- /dev/null +++ b/node_modules/ipaddr.js/.travis.yml @@ -0,0 +1,10 @@ +language: node_js + +node_js: + - "0.10" + - "0.11" + - "0.12" + - "4.0" + - "4.1" + - "4.2" + - "5" diff --git a/node_modules/ipaddr.js/Cakefile b/node_modules/ipaddr.js/Cakefile new file mode 100644 index 0000000..7fd355a --- /dev/null +++ b/node_modules/ipaddr.js/Cakefile @@ -0,0 +1,18 @@ +fs = require 'fs' +CoffeeScript = require 'coffee-script' +nodeunit = require 'nodeunit' +UglifyJS = require 'uglify-js' + +task 'build', 'build the JavaScript files from CoffeeScript source', build = (cb) -> + source = fs.readFileSync 'src/ipaddr.coffee' + fs.writeFileSync 'lib/ipaddr.js', CoffeeScript.compile source.toString() + + invoke 'test' + invoke 'compress' + +task 'test', 'run the bundled tests', (cb) -> + nodeunit.reporters.default.run ['test'] + +task 'compress', 'uglify the resulting javascript', (cb) -> + result = UglifyJS.minify('lib/ipaddr.js') + fs.writeFileSync('ipaddr.min.js', result.code) diff --git a/node_modules/ipaddr.js/LICENSE b/node_modules/ipaddr.js/LICENSE new file mode 100644 index 0000000..3493f0d --- /dev/null +++ b/node_modules/ipaddr.js/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2011 Peter Zotov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of 
the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/ipaddr.js/README.md b/node_modules/ipaddr.js/README.md new file mode 100644 index 0000000..bfcb1c9 --- /dev/null +++ b/node_modules/ipaddr.js/README.md @@ -0,0 +1,209 @@ +# ipaddr.js — an IPv6 and IPv4 address manipulation library [![Build Status](https://travis-ci.org/whitequark/ipaddr.js.svg)](https://travis-ci.org/whitequark/ipaddr.js) + +ipaddr.js is a small (1.9K minified and gzipped) library for manipulating +IP addresses in JavaScript environments. It runs on both CommonJS runtimes +(e.g. [nodejs]) and in a web browser. + +ipaddr.js allows you to verify and parse string representation of an IP +address, match it against a CIDR range or range list, determine if it falls +into some reserved ranges (examples include loopback and private ranges), +and convert between IPv4 and IPv4-mapped IPv6 addresses. + +[nodejs]: http://nodejs.org + +## Installation + +`npm install ipaddr.js` + +or + +`bower install ipaddr.js` + +## API + +ipaddr.js defines one object in the global scope: `ipaddr`. In CommonJS, +it is exported from the module: + +```js +var ipaddr = require('ipaddr.js'); +``` + +The API consists of several global methods and two classes: ipaddr.IPv6 and ipaddr.IPv4. + +### Global methods + +There are three global methods defined: `ipaddr.isValid`, `ipaddr.parse` and +`ipaddr.process`. All of them receive a string as a single parameter. + +The `ipaddr.isValid` method returns `true` if the address is a valid IPv4 or +IPv6 address, and `false` otherwise. It does not throw any exceptions. + +The `ipaddr.parse` method returns an object representing the IP address, +or throws an `Error` if the passed string is not a valid representation of an +IP address. + +The `ipaddr.process` method works just like the `ipaddr.parse` one, but it +automatically converts IPv4-mapped IPv6 addresses to their IPv4 couterparts +before returning. It is useful when you have a Node.js instance listening +on an IPv6 socket, and the `net.ivp6.bindv6only` sysctl parameter (or its +equivalent on non-Linux OS) is set to 0. In this case, you can accept IPv4 +connections on your IPv6-only socket, but the remote address will be mangled. +Use `ipaddr.process` method to automatically demangle it. + +### Object representation + +Parsing methods return an object which descends from `ipaddr.IPv6` or +`ipaddr.IPv4`. These objects share some properties, but most of them differ. + +#### Shared properties + +One can determine the type of address by calling `addr.kind()`. It will return +either `"ipv6"` or `"ipv4"`. + +An address can be converted back to its string representation with `addr.toString()`. +Note that this method: + * does not return the original string used to create the object (in fact, there is + no way of getting that string) + * returns a compact representation (when it is applicable) + +A `match(range, bits)` method can be used to check if the address falls into a +certain CIDR range. +Note that an address can be (obviously) matched only against an address of the same type. 
+ +For example: + +```js +var addr = ipaddr.parse("2001:db8:1234::1"); +var range = ipaddr.parse("2001:db8::"); + +addr.match(range, 32); // => true +``` + +Alternatively, `match` can also be called as `match([range, bits])`. In this way, +it can be used together with the `parseCIDR(string)` method, which parses an IP +address together with a CIDR range. + +For example: + +```js +var addr = ipaddr.parse("2001:db8:1234::1"); + +addr.match(ipaddr.parseCIDR("2001:db8::/32")); // => true +``` + +A `range()` method returns one of predefined names for several special ranges defined +by IP protocols. The exact names (and their respective CIDR ranges) can be looked up +in the source: [IPv6 ranges] and [IPv4 ranges]. Some common ones include `"unicast"` +(the default one) and `"reserved"`. + +You can match against your own range list by using +`ipaddr.subnetMatch(address, rangeList, defaultName)` method. It can work with both +IPv6 and IPv4 addresses, and accepts a name-to-subnet map as the range list. For example: + +```js +var rangeList = { + documentationOnly: [ ipaddr.parse('2001:db8::'), 32 ], + tunnelProviders: [ + [ ipaddr.parse('2001:470::'), 32 ], // he.net + [ ipaddr.parse('2001:5c0::'), 32 ] // freenet6 + ] +}; +ipaddr.subnetMatch(ipaddr.parse('2001:470:8:66::1'), rangeList, 'unknown'); // => "he.net" +``` + +The addresses can be converted to their byte representation with `toByteArray()`. +(Actually, JavaScript mostly does not know about byte buffers. They are emulated with +arrays of numbers, each in range of 0..255.) + +```js +var bytes = ipaddr.parse('2a00:1450:8007::68').toByteArray(); // ipv6.google.com +bytes // => [42, 0x00, 0x14, 0x50, 0x80, 0x07, 0x00, , 0x00, 0x68 ] +``` + +The `ipaddr.IPv4` and `ipaddr.IPv6` objects have some methods defined, too. All of them +have the same interface for both protocols, and are similar to global methods. + +`ipaddr.IPvX.isValid(string)` can be used to check if the string is a valid address +for particular protocol, and `ipaddr.IPvX.parse(string)` is the error-throwing parser. + +[IPv6 ranges]: https://github.com/whitequark/ipaddr.js/blob/master/src/ipaddr.coffee#L186 +[IPv4 ranges]: https://github.com/whitequark/ipaddr.js/blob/master/src/ipaddr.coffee#L71 + +#### IPv6 properties + +Sometimes you will want to convert IPv6 not to a compact string representation (with +the `::` substitution); the `toNormalizedString()` method will return an address where +all zeroes are explicit. + +For example: + +```js +var addr = ipaddr.parse("2001:0db8::0001"); +addr.toString(); // => "2001:db8::1" +addr.toNormalizedString(); // => "2001:db8:0:0:0:0:0:1" +``` + +The `isIPv4MappedAddress()` method will return `true` if this address is an IPv4-mapped +one, and `toIPv4Address()` will return an IPv4 object address. + +To access the underlying binary representation of the address, use `addr.parts`. + +```js +var addr = ipaddr.parse("2001:db8:10::1234:DEAD"); +addr.parts // => [0x2001, 0xdb8, 0x10, 0, 0, 0, 0x1234, 0xdead] +``` + +#### IPv4 properties + +`toIPv4MappedAddress()` will return a corresponding IPv4-mapped IPv6 address. + +To access the underlying representation of the address, use `addr.octets`. + +```js +var addr = ipaddr.parse("192.168.1.1"); +addr.octets // => [192, 168, 1, 1] +``` + +`prefixLengthFromSubnetMask()` will return a CIDR prefix length for a valid IPv4 netmask or +false if the netmask is not valid. 
+ +```js +ipaddr.IPv4.parse('255.255.255.240').prefixLengthFromSubnetMask() == 28 +ipaddr.IPv4.parse('255.192.164.0').prefixLengthFromSubnetMask() == null +``` + +#### Conversion + +IPv4 and IPv6 can be converted bidirectionally to and from network byte order (MSB) byte arrays. + +The `fromByteArray()` method will take an array and create an appropriate IPv4 or IPv6 object +if the input satisfies the requirements. For IPv4 it has to be an array of four 8-bit values, +while for IPv6 it has to be an array of sixteen 8-bit values. + +For example: +```js +var addr = ipaddr.fromByteArray([0x7f, 0, 0, 1]); +addr.toString(); // => "127.0.0.1" +``` + +or + +```js +var addr = ipaddr.fromByteArray([0x20, 1, 0xd, 0xb8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) +addr.toString(); // => "2001:db8::1" +``` + +Both objects also offer a `toByteArray()` method, which returns an array in network byte order (MSB). + +For example: +```js +var addr = ipaddr.parse("127.0.0.1"); +addr.toByteArray(); // => [0x7f, 0, 0, 1] +``` + +or + +```js +var addr = ipaddr.parse("2001:db8::1"); +addr.toByteArray(); // => [0x20, 1, 0xd, 0xb8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1] +``` diff --git a/node_modules/ipaddr.js/bower.json b/node_modules/ipaddr.js/bower.json new file mode 100644 index 0000000..e494f97 --- /dev/null +++ b/node_modules/ipaddr.js/bower.json @@ -0,0 +1,29 @@ +{ + "name": "ipaddr.js", + "version": "1.1.1", + "homepage": "https://github.com/whitequark/ipaddr.js", + "authors": [ + "whitequark " + ], + "description": "IP address manipulation library in JavaScript (CoffeeScript, actually)", + "main": "lib/ipaddr.js", + "moduleType": [ + "globals", + "node" + ], + "keywords": [ + "javscript", + "ip", + "address", + "ipv4", + "ipv6" + ], + "license": "MIT", + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ] +} diff --git a/node_modules/ipaddr.js/ipaddr.min.js b/node_modules/ipaddr.js/ipaddr.min.js new file mode 100644 index 0000000..ffa32ff --- /dev/null +++ b/node_modules/ipaddr.js/ipaddr.min.js @@ -0,0 +1 @@ +(function(){var r,t,n,e,i,o,a,s;t={},s=this,"undefined"!=typeof module&&null!==module&&module.exports?module.exports=t:s.ipaddr=t,a=function(r,t,n,e){var i,o;if(r.length!==t.length)throw new Error("ipaddr: cannot match CIDR for objects with different lengths");for(i=0;e>0;){if(o=n-e,0>o&&(o=0),r[i]>>o!==t[i]>>o)return!1;e-=n,i+=1}return!0},t.subnetMatch=function(r,t,n){var e,i,o,a,s;null==n&&(n="unicast");for(e in t)for(i=t[e],!i[0]||i[0]instanceof Array||(i=[i]),a=0,s=i.length;s>a;a++)if(o=i[a],r.match.apply(r,o))return e;return n},t.IPv4=function(){function r(r){var t,n,e;if(4!==r.length)throw new Error("ipaddr: ipv4 octet count should be 4");for(n=0,e=r.length;e>n;n++)if(t=r[n],!(t>=0&&255>=t))throw new Error("ipaddr: ipv4 octet should fit in 8 bits");this.octets=r}return r.prototype.kind=function(){return"ipv4"},r.prototype.toString=function(){return this.octets.join(".")},r.prototype.toByteArray=function(){return this.octets.slice(0)},r.prototype.match=function(r,t){var n;if(void 0===t&&(n=r,r=n[0],t=n[1]),"ipv4"!==r.kind())throw new Error("ipaddr: cannot match ipv4 address with non-ipv4 one");return a(this.octets,r.octets,8,t)},r.prototype.SpecialRanges={unspecified:[[new r([0,0,0,0]),8]],broadcast:[[new r([255,255,255,255]),32]],multicast:[[new r([224,0,0,0]),4]],linkLocal:[[new r([169,254,0,0]),16]],loopback:[[new r([127,0,0,0]),8]],"private":[[new r([10,0,0,0]),8],[new r([172,16,0,0]),12],[new r([192,168,0,0]),16]],reserved:[[new r([192,0,0,0]),24],[new 
r([192,0,2,0]),24],[new r([192,88,99,0]),24],[new r([198,51,100,0]),24],[new r([203,0,113,0]),24],[new r([240,0,0,0]),4]]},r.prototype.range=function(){return t.subnetMatch(this,this.SpecialRanges)},r.prototype.toIPv4MappedAddress=function(){return t.IPv6.parse("::ffff:"+this.toString())},r.prototype.prefixLengthFromSubnetMask=function(){var r,t,n,e,i,o,a;for(o={0:8,128:7,192:6,224:5,240:4,248:3,252:2,254:1,255:0},r=0,e=!1,t=a=3;a>=0;t=a+=-1){if(n=this.octets[t],!(n in o))return null;if(i=o[n],e&&0!==i)return null;8!==i&&(e=!0),r+=i}return 32-r},r}(),n="(0?\\d+|0x[a-f0-9]+)",e={fourOctet:new RegExp("^"+n+"\\."+n+"\\."+n+"\\."+n+"$","i"),longValue:new RegExp("^"+n+"$","i")},t.IPv4.parser=function(r){var t,n,i,o,a;if(n=function(r){return"0"===r[0]&&"x"!==r[1]?parseInt(r,8):parseInt(r)},t=r.match(e.fourOctet))return function(){var r,e,o,a;for(o=t.slice(1,6),a=[],r=0,e=o.length;e>r;r++)i=o[r],a.push(n(i));return a}();if(t=r.match(e.longValue)){if(a=n(t[1]),a>4294967295||0>a)throw new Error("ipaddr: address outside defined range");return function(){var r,t;for(t=[],o=r=0;24>=r;o=r+=8)t.push(a>>o&255);return t}().reverse()}return null},t.IPv6=function(){function r(r){var t,n,e,i,o,a;if(16===r.length)for(this.parts=[],t=e=0;14>=e;t=e+=2)this.parts.push(r[t]<<8|r[t+1]);else{if(8!==r.length)throw new Error("ipaddr: ipv6 part count should be 8 or 16");this.parts=r}for(a=this.parts,i=0,o=a.length;o>i;i++)if(n=a[i],!(n>=0&&65535>=n))throw new Error("ipaddr: ipv6 part should fit in 16 bits")}return r.prototype.kind=function(){return"ipv6"},r.prototype.toString=function(){var r,t,n,e,i,o,a;for(i=function(){var r,n,e,i;for(e=this.parts,i=[],r=0,n=e.length;n>r;r++)t=e[r],i.push(t.toString(16));return i}.call(this),r=[],n=function(t){return r.push(t)},e=0,o=0,a=i.length;a>o;o++)switch(t=i[o],e){case 0:n("0"===t?"":t),e=1;break;case 1:"0"===t?e=2:n(t);break;case 2:"0"!==t&&(n(""),n(t),e=3);break;case 3:n(t)}return 2===e&&(n(""),n("")),r.join(":")},r.prototype.toByteArray=function(){var r,t,n,e,i;for(r=[],i=this.parts,n=0,e=i.length;e>n;n++)t=i[n],r.push(t>>8),r.push(255&t);return r},r.prototype.toNormalizedString=function(){var r;return function(){var t,n,e,i;for(e=this.parts,i=[],t=0,n=e.length;n>t;t++)r=e[t],i.push(r.toString(16));return i}.call(this).join(":")},r.prototype.match=function(r,t){var n;if(void 0===t&&(n=r,r=n[0],t=n[1]),"ipv6"!==r.kind())throw new Error("ipaddr: cannot match ipv6 address with non-ipv6 one");return a(this.parts,r.parts,16,t)},r.prototype.SpecialRanges={unspecified:[new r([0,0,0,0,0,0,0,0]),128],linkLocal:[new r([65152,0,0,0,0,0,0,0]),10],multicast:[new r([65280,0,0,0,0,0,0,0]),8],loopback:[new r([0,0,0,0,0,0,0,1]),128],uniqueLocal:[new r([64512,0,0,0,0,0,0,0]),7],ipv4Mapped:[new r([0,0,0,0,0,65535,0,0]),96],rfc6145:[new r([0,0,0,0,65535,0,0,0]),96],rfc6052:[new r([100,65435,0,0,0,0,0,0]),96],"6to4":[new r([8194,0,0,0,0,0,0,0]),16],teredo:[new r([8193,0,0,0,0,0,0,0]),32],reserved:[[new r([8193,3512,0,0,0,0,0,0]),32]]},r.prototype.range=function(){return t.subnetMatch(this,this.SpecialRanges)},r.prototype.isIPv4MappedAddress=function(){return"ipv4Mapped"===this.range()},r.prototype.toIPv4Address=function(){var r,n,e;if(!this.isIPv4MappedAddress())throw new Error("ipaddr: trying to convert a generic ipv6 address to ipv4");return e=this.parts.slice(-2),r=e[0],n=e[1],new t.IPv4([r>>8,255&r,n>>8,255&n])},r}(),i="(?:[0-9a-f]+::?)+",o={"native":new RegExp("^(::)?("+i+")?([0-9a-f]+)?(::)?$","i"),transitional:new 
RegExp("^((?:"+i+")|(?:::)(?:"+i+")?)"+(""+n+"\\."+n+"\\."+n+"\\."+n+"$"),"i")},r=function(r,t){var n,e,i,o,a;if(r.indexOf("::")!==r.lastIndexOf("::"))return null;for(n=0,e=-1;(e=r.indexOf(":",e+1))>=0;)n++;if("::"===r.substr(0,2)&&n--,"::"===r.substr(-2,2)&&n--,n>t)return null;for(a=t-n,o=":";a--;)o+="0:";return r=r.replace("::",o),":"===r[0]&&(r=r.slice(1)),":"===r[r.length-1]&&(r=r.slice(0,-1)),function(){var t,n,e,o;for(e=r.split(":"),o=[],t=0,n=e.length;n>t;t++)i=e[t],o.push(parseInt(i,16));return o}()},t.IPv6.parser=function(t){var n,e,i,a,s,p;if(t.match(o["native"]))return r(t,8);if((n=t.match(o.transitional))&&(a=r(n[1].slice(0,-1),6))){for(i=[parseInt(n[2]),parseInt(n[3]),parseInt(n[4]),parseInt(n[5])],s=0,p=i.length;p>s;s++)if(e=i[s],!(e>=0&&255>=e))return null;return a.push(i[0]<<8|i[1]),a.push(i[2]<<8|i[3]),a}return null},t.IPv4.isIPv4=t.IPv6.isIPv6=function(r){return null!==this.parser(r)},t.IPv4.isValid=function(r){var t;try{return new this(this.parser(r)),!0}catch(n){return t=n,!1}},t.IPv6.isValid=function(r){var t;if("string"==typeof r&&-1===r.indexOf(":"))return!1;try{return new this(this.parser(r)),!0}catch(n){return t=n,!1}},t.IPv4.parse=t.IPv6.parse=function(r){var t;if(t=this.parser(r),null===t)throw new Error("ipaddr: string is not formatted like ip address");return new this(t)},t.IPv4.parseCIDR=function(r){var t,n;if((n=r.match(/^(.+)\/(\d+)$/))&&(t=parseInt(n[2]),t>=0&&32>=t))return[this.parse(n[1]),t];throw new Error("ipaddr: string is not formatted like an IPv4 CIDR range")},t.IPv6.parseCIDR=function(r){var t,n;if((n=r.match(/^(.+)\/(\d+)$/))&&(t=parseInt(n[2]),t>=0&&128>=t))return[this.parse(n[1]),t];throw new Error("ipaddr: string is not formatted like an IPv6 CIDR range")},t.isValid=function(r){return t.IPv6.isValid(r)||t.IPv4.isValid(r)},t.parse=function(r){if(t.IPv6.isValid(r))return t.IPv6.parse(r);if(t.IPv4.isValid(r))return t.IPv4.parse(r);throw new Error("ipaddr: the address has neither IPv6 nor IPv4 format")},t.parseCIDR=function(r){var n;try{return t.IPv6.parseCIDR(r)}catch(e){n=e;try{return t.IPv4.parseCIDR(r)}catch(e){throw n=e,new Error("ipaddr: the address has neither IPv6 nor IPv4 CIDR format")}}},t.fromByteArray=function(r){var n;if(n=r.length,4===n)return new t.IPv4(r);if(16===n)return new t.IPv6(r);throw new Error("ipaddr: the binary input is neither an IPv6 nor IPv4 address")},t.process=function(r){var t;return t=this.parse(r),"ipv6"===t.kind()&&t.isIPv4MappedAddress()?t.toIPv4Address():t}}).call(this); \ No newline at end of file diff --git a/node_modules/ipaddr.js/lib/ipaddr.js b/node_modules/ipaddr.js/lib/ipaddr.js new file mode 100644 index 0000000..36774e7 --- /dev/null +++ b/node_modules/ipaddr.js/lib/ipaddr.js @@ -0,0 +1,526 @@ +(function() { + var expandIPv6, ipaddr, ipv4Part, ipv4Regexes, ipv6Part, ipv6Regexes, matchCIDR, root; + + ipaddr = {}; + + root = this; + + if ((typeof module !== "undefined" && module !== null) && module.exports) { + module.exports = ipaddr; + } else { + root['ipaddr'] = ipaddr; + } + + matchCIDR = function(first, second, partSize, cidrBits) { + var part, shift; + if (first.length !== second.length) { + throw new Error("ipaddr: cannot match CIDR for objects with different lengths"); + } + part = 0; + while (cidrBits > 0) { + shift = partSize - cidrBits; + if (shift < 0) { + shift = 0; + } + if (first[part] >> shift !== second[part] >> shift) { + return false; + } + cidrBits -= partSize; + part += 1; + } + return true; + }; + + ipaddr.subnetMatch = function(address, rangeList, defaultName) { + var rangeName, 
rangeSubnets, subnet, _i, _len; + if (defaultName == null) { + defaultName = 'unicast'; + } + for (rangeName in rangeList) { + rangeSubnets = rangeList[rangeName]; + if (rangeSubnets[0] && !(rangeSubnets[0] instanceof Array)) { + rangeSubnets = [rangeSubnets]; + } + for (_i = 0, _len = rangeSubnets.length; _i < _len; _i++) { + subnet = rangeSubnets[_i]; + if (address.match.apply(address, subnet)) { + return rangeName; + } + } + } + return defaultName; + }; + + ipaddr.IPv4 = (function() { + function IPv4(octets) { + var octet, _i, _len; + if (octets.length !== 4) { + throw new Error("ipaddr: ipv4 octet count should be 4"); + } + for (_i = 0, _len = octets.length; _i < _len; _i++) { + octet = octets[_i]; + if (!((0 <= octet && octet <= 255))) { + throw new Error("ipaddr: ipv4 octet should fit in 8 bits"); + } + } + this.octets = octets; + } + + IPv4.prototype.kind = function() { + return 'ipv4'; + }; + + IPv4.prototype.toString = function() { + return this.octets.join("."); + }; + + IPv4.prototype.toByteArray = function() { + return this.octets.slice(0); + }; + + IPv4.prototype.match = function(other, cidrRange) { + var _ref; + if (cidrRange === void 0) { + _ref = other, other = _ref[0], cidrRange = _ref[1]; + } + if (other.kind() !== 'ipv4') { + throw new Error("ipaddr: cannot match ipv4 address with non-ipv4 one"); + } + return matchCIDR(this.octets, other.octets, 8, cidrRange); + }; + + IPv4.prototype.SpecialRanges = { + unspecified: [[new IPv4([0, 0, 0, 0]), 8]], + broadcast: [[new IPv4([255, 255, 255, 255]), 32]], + multicast: [[new IPv4([224, 0, 0, 0]), 4]], + linkLocal: [[new IPv4([169, 254, 0, 0]), 16]], + loopback: [[new IPv4([127, 0, 0, 0]), 8]], + "private": [[new IPv4([10, 0, 0, 0]), 8], [new IPv4([172, 16, 0, 0]), 12], [new IPv4([192, 168, 0, 0]), 16]], + reserved: [[new IPv4([192, 0, 0, 0]), 24], [new IPv4([192, 0, 2, 0]), 24], [new IPv4([192, 88, 99, 0]), 24], [new IPv4([198, 51, 100, 0]), 24], [new IPv4([203, 0, 113, 0]), 24], [new IPv4([240, 0, 0, 0]), 4]] + }; + + IPv4.prototype.range = function() { + return ipaddr.subnetMatch(this, this.SpecialRanges); + }; + + IPv4.prototype.toIPv4MappedAddress = function() { + return ipaddr.IPv6.parse("::ffff:" + (this.toString())); + }; + + IPv4.prototype.prefixLengthFromSubnetMask = function() { + var cidr, i, octet, stop, zeros, zerotable, _i; + zerotable = { + 0: 8, + 128: 7, + 192: 6, + 224: 5, + 240: 4, + 248: 3, + 252: 2, + 254: 1, + 255: 0 + }; + cidr = 0; + stop = false; + for (i = _i = 3; _i >= 0; i = _i += -1) { + octet = this.octets[i]; + if (octet in zerotable) { + zeros = zerotable[octet]; + if (stop && zeros !== 0) { + return null; + } + if (zeros !== 8) { + stop = true; + } + cidr += zeros; + } else { + return null; + } + } + return 32 - cidr; + }; + + return IPv4; + + })(); + + ipv4Part = "(0?\\d+|0x[a-f0-9]+)"; + + ipv4Regexes = { + fourOctet: new RegExp("^" + ipv4Part + "\\." + ipv4Part + "\\." + ipv4Part + "\\." 
+ ipv4Part + "$", 'i'), + longValue: new RegExp("^" + ipv4Part + "$", 'i') + }; + + ipaddr.IPv4.parser = function(string) { + var match, parseIntAuto, part, shift, value; + parseIntAuto = function(string) { + if (string[0] === "0" && string[1] !== "x") { + return parseInt(string, 8); + } else { + return parseInt(string); + } + }; + if (match = string.match(ipv4Regexes.fourOctet)) { + return (function() { + var _i, _len, _ref, _results; + _ref = match.slice(1, 6); + _results = []; + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + part = _ref[_i]; + _results.push(parseIntAuto(part)); + } + return _results; + })(); + } else if (match = string.match(ipv4Regexes.longValue)) { + value = parseIntAuto(match[1]); + if (value > 0xffffffff || value < 0) { + throw new Error("ipaddr: address outside defined range"); + } + return ((function() { + var _i, _results; + _results = []; + for (shift = _i = 0; _i <= 24; shift = _i += 8) { + _results.push((value >> shift) & 0xff); + } + return _results; + })()).reverse(); + } else { + return null; + } + }; + + ipaddr.IPv6 = (function() { + function IPv6(parts) { + var i, part, _i, _j, _len, _ref; + if (parts.length === 16) { + this.parts = []; + for (i = _i = 0; _i <= 14; i = _i += 2) { + this.parts.push((parts[i] << 8) | parts[i + 1]); + } + } else if (parts.length === 8) { + this.parts = parts; + } else { + throw new Error("ipaddr: ipv6 part count should be 8 or 16"); + } + _ref = this.parts; + for (_j = 0, _len = _ref.length; _j < _len; _j++) { + part = _ref[_j]; + if (!((0 <= part && part <= 0xffff))) { + throw new Error("ipaddr: ipv6 part should fit in 16 bits"); + } + } + } + + IPv6.prototype.kind = function() { + return 'ipv6'; + }; + + IPv6.prototype.toString = function() { + var compactStringParts, part, pushPart, state, stringParts, _i, _len; + stringParts = (function() { + var _i, _len, _ref, _results; + _ref = this.parts; + _results = []; + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + part = _ref[_i]; + _results.push(part.toString(16)); + } + return _results; + }).call(this); + compactStringParts = []; + pushPart = function(part) { + return compactStringParts.push(part); + }; + state = 0; + for (_i = 0, _len = stringParts.length; _i < _len; _i++) { + part = stringParts[_i]; + switch (state) { + case 0: + if (part === '0') { + pushPart(''); + } else { + pushPart(part); + } + state = 1; + break; + case 1: + if (part === '0') { + state = 2; + } else { + pushPart(part); + } + break; + case 2: + if (part !== '0') { + pushPart(''); + pushPart(part); + state = 3; + } + break; + case 3: + pushPart(part); + } + } + if (state === 2) { + pushPart(''); + pushPart(''); + } + return compactStringParts.join(":"); + }; + + IPv6.prototype.toByteArray = function() { + var bytes, part, _i, _len, _ref; + bytes = []; + _ref = this.parts; + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + part = _ref[_i]; + bytes.push(part >> 8); + bytes.push(part & 0xff); + } + return bytes; + }; + + IPv6.prototype.toNormalizedString = function() { + var part; + return ((function() { + var _i, _len, _ref, _results; + _ref = this.parts; + _results = []; + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + part = _ref[_i]; + _results.push(part.toString(16)); + } + return _results; + }).call(this)).join(":"); + }; + + IPv6.prototype.match = function(other, cidrRange) { + var _ref; + if (cidrRange === void 0) { + _ref = other, other = _ref[0], cidrRange = _ref[1]; + } + if (other.kind() !== 'ipv6') { + throw new Error("ipaddr: cannot match ipv6 address with non-ipv6 
one"); + } + return matchCIDR(this.parts, other.parts, 16, cidrRange); + }; + + IPv6.prototype.SpecialRanges = { + unspecified: [new IPv6([0, 0, 0, 0, 0, 0, 0, 0]), 128], + linkLocal: [new IPv6([0xfe80, 0, 0, 0, 0, 0, 0, 0]), 10], + multicast: [new IPv6([0xff00, 0, 0, 0, 0, 0, 0, 0]), 8], + loopback: [new IPv6([0, 0, 0, 0, 0, 0, 0, 1]), 128], + uniqueLocal: [new IPv6([0xfc00, 0, 0, 0, 0, 0, 0, 0]), 7], + ipv4Mapped: [new IPv6([0, 0, 0, 0, 0, 0xffff, 0, 0]), 96], + rfc6145: [new IPv6([0, 0, 0, 0, 0xffff, 0, 0, 0]), 96], + rfc6052: [new IPv6([0x64, 0xff9b, 0, 0, 0, 0, 0, 0]), 96], + '6to4': [new IPv6([0x2002, 0, 0, 0, 0, 0, 0, 0]), 16], + teredo: [new IPv6([0x2001, 0, 0, 0, 0, 0, 0, 0]), 32], + reserved: [[new IPv6([0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]), 32]] + }; + + IPv6.prototype.range = function() { + return ipaddr.subnetMatch(this, this.SpecialRanges); + }; + + IPv6.prototype.isIPv4MappedAddress = function() { + return this.range() === 'ipv4Mapped'; + }; + + IPv6.prototype.toIPv4Address = function() { + var high, low, _ref; + if (!this.isIPv4MappedAddress()) { + throw new Error("ipaddr: trying to convert a generic ipv6 address to ipv4"); + } + _ref = this.parts.slice(-2), high = _ref[0], low = _ref[1]; + return new ipaddr.IPv4([high >> 8, high & 0xff, low >> 8, low & 0xff]); + }; + + return IPv6; + + })(); + + ipv6Part = "(?:[0-9a-f]+::?)+"; + + ipv6Regexes = { + "native": new RegExp("^(::)?(" + ipv6Part + ")?([0-9a-f]+)?(::)?$", 'i'), + transitional: new RegExp(("^((?:" + ipv6Part + ")|(?:::)(?:" + ipv6Part + ")?)") + ("" + ipv4Part + "\\." + ipv4Part + "\\." + ipv4Part + "\\." + ipv4Part + "$"), 'i') + }; + + expandIPv6 = function(string, parts) { + var colonCount, lastColon, part, replacement, replacementCount; + if (string.indexOf('::') !== string.lastIndexOf('::')) { + return null; + } + colonCount = 0; + lastColon = -1; + while ((lastColon = string.indexOf(':', lastColon + 1)) >= 0) { + colonCount++; + } + if (string.substr(0, 2) === '::') { + colonCount--; + } + if (string.substr(-2, 2) === '::') { + colonCount--; + } + if (colonCount > parts) { + return null; + } + replacementCount = parts - colonCount; + replacement = ':'; + while (replacementCount--) { + replacement += '0:'; + } + string = string.replace('::', replacement); + if (string[0] === ':') { + string = string.slice(1); + } + if (string[string.length - 1] === ':') { + string = string.slice(0, -1); + } + return (function() { + var _i, _len, _ref, _results; + _ref = string.split(":"); + _results = []; + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + part = _ref[_i]; + _results.push(parseInt(part, 16)); + } + return _results; + })(); + }; + + ipaddr.IPv6.parser = function(string) { + var match, octet, octets, parts, _i, _len; + if (string.match(ipv6Regexes['native'])) { + return expandIPv6(string, 8); + } else if (match = string.match(ipv6Regexes['transitional'])) { + parts = expandIPv6(match[1].slice(0, -1), 6); + if (parts) { + octets = [parseInt(match[2]), parseInt(match[3]), parseInt(match[4]), parseInt(match[5])]; + for (_i = 0, _len = octets.length; _i < _len; _i++) { + octet = octets[_i]; + if (!((0 <= octet && octet <= 255))) { + return null; + } + } + parts.push(octets[0] << 8 | octets[1]); + parts.push(octets[2] << 8 | octets[3]); + return parts; + } + } + return null; + }; + + ipaddr.IPv4.isIPv4 = ipaddr.IPv6.isIPv6 = function(string) { + return this.parser(string) !== null; + }; + + ipaddr.IPv4.isValid = function(string) { + var e; + try { + new this(this.parser(string)); + return true; + } catch (_error) { 
+ e = _error; + return false; + } + }; + + ipaddr.IPv6.isValid = function(string) { + var e; + if (typeof string === "string" && string.indexOf(":") === -1) { + return false; + } + try { + new this(this.parser(string)); + return true; + } catch (_error) { + e = _error; + return false; + } + }; + + ipaddr.IPv4.parse = ipaddr.IPv6.parse = function(string) { + var parts; + parts = this.parser(string); + if (parts === null) { + throw new Error("ipaddr: string is not formatted like ip address"); + } + return new this(parts); + }; + + ipaddr.IPv4.parseCIDR = function(string) { + var maskLength, match; + if (match = string.match(/^(.+)\/(\d+)$/)) { + maskLength = parseInt(match[2]); + if (maskLength >= 0 && maskLength <= 32) { + return [this.parse(match[1]), maskLength]; + } + } + throw new Error("ipaddr: string is not formatted like an IPv4 CIDR range"); + }; + + ipaddr.IPv6.parseCIDR = function(string) { + var maskLength, match; + if (match = string.match(/^(.+)\/(\d+)$/)) { + maskLength = parseInt(match[2]); + if (maskLength >= 0 && maskLength <= 128) { + return [this.parse(match[1]), maskLength]; + } + } + throw new Error("ipaddr: string is not formatted like an IPv6 CIDR range"); + }; + + ipaddr.isValid = function(string) { + return ipaddr.IPv6.isValid(string) || ipaddr.IPv4.isValid(string); + }; + + ipaddr.parse = function(string) { + if (ipaddr.IPv6.isValid(string)) { + return ipaddr.IPv6.parse(string); + } else if (ipaddr.IPv4.isValid(string)) { + return ipaddr.IPv4.parse(string); + } else { + throw new Error("ipaddr: the address has neither IPv6 nor IPv4 format"); + } + }; + + ipaddr.parseCIDR = function(string) { + var e; + try { + return ipaddr.IPv6.parseCIDR(string); + } catch (_error) { + e = _error; + try { + return ipaddr.IPv4.parseCIDR(string); + } catch (_error) { + e = _error; + throw new Error("ipaddr: the address has neither IPv6 nor IPv4 CIDR format"); + } + } + }; + + ipaddr.fromByteArray = function(bytes) { + var length; + length = bytes.length; + if (length === 4) { + return new ipaddr.IPv4(bytes); + } else if (length === 16) { + return new ipaddr.IPv6(bytes); + } else { + throw new Error("ipaddr: the binary input is neither an IPv6 nor IPv4 address"); + } + }; + + ipaddr.process = function(string) { + var addr; + addr = this.parse(string); + if (addr.kind() === 'ipv6' && addr.isIPv4MappedAddress()) { + return addr.toIPv4Address(); + } else { + return addr; + } + }; + +}).call(this); diff --git a/node_modules/ipaddr.js/package.json b/node_modules/ipaddr.js/package.json new file mode 100644 index 0000000..34bad9e --- /dev/null +++ b/node_modules/ipaddr.js/package.json @@ -0,0 +1,23 @@ +{ + "name": "ipaddr.js", + "description": "A library for manipulating IPv4 and IPv6 addresses in JavaScript.", + "version": "1.1.1", + "author": "whitequark ", + "directories": { + "lib": "./lib" + }, + "dependencies": {}, + "devDependencies": { + "coffee-script": "~1.6", + "nodeunit": ">=0.8.2 <0.8.7", + "uglify-js": "latest" + }, + "scripts": { + "test": "cake build test" + }, + "keywords": ["ip", "ipv4", "ipv6"], + "repository": "git://github.com/whitequark/ipaddr.js", + "main": "./lib/ipaddr", + "engines": { "node": ">= 0.10" }, + "license": "MIT" +} diff --git a/node_modules/ipaddr.js/src/ipaddr.coffee b/node_modules/ipaddr.js/src/ipaddr.coffee new file mode 100644 index 0000000..d2abf91 --- /dev/null +++ b/node_modules/ipaddr.js/src/ipaddr.coffee @@ -0,0 +1,450 @@ +# Define the main object +ipaddr = {} + +root = this + +# Export for both the CommonJS and browser-like environment +if 
module? && module.exports + module.exports = ipaddr +else + root['ipaddr'] = ipaddr + +# A generic CIDR (Classless Inter-Domain Routing) RFC1518 range matcher. +matchCIDR = (first, second, partSize, cidrBits) -> + if first.length != second.length + throw new Error "ipaddr: cannot match CIDR for objects with different lengths" + + part = 0 + while cidrBits > 0 + shift = partSize - cidrBits + shift = 0 if shift < 0 + + if first[part] >> shift != second[part] >> shift + return false + + cidrBits -= partSize + part += 1 + + return true + +# An utility function to ease named range matching. See examples below. +ipaddr.subnetMatch = (address, rangeList, defaultName='unicast') -> + for rangeName, rangeSubnets of rangeList + # ECMA5 Array.isArray isn't available everywhere + if rangeSubnets[0] && !(rangeSubnets[0] instanceof Array) + rangeSubnets = [ rangeSubnets ] + + for subnet in rangeSubnets + return rangeName if address.match.apply(address, subnet) + + return defaultName + +# An IPv4 address (RFC791). +class ipaddr.IPv4 + # Constructs a new IPv4 address from an array of four octets + # in network order (MSB first) + # Verifies the input. + constructor: (octets) -> + if octets.length != 4 + throw new Error "ipaddr: ipv4 octet count should be 4" + + for octet in octets + if !(0 <= octet <= 255) + throw new Error "ipaddr: ipv4 octet should fit in 8 bits" + + @octets = octets + + # The 'kind' method exists on both IPv4 and IPv6 classes. + kind: -> + return 'ipv4' + + # Returns the address in convenient, decimal-dotted format. + toString: -> + return @octets.join "." + + # Returns an array of byte-sized values in network order (MSB first) + toByteArray: -> + return @octets.slice(0) # octets.clone + + # Checks if this address matches other one within given CIDR range. + match: (other, cidrRange) -> + if cidrRange == undefined + [other, cidrRange] = other + + if other.kind() != 'ipv4' + throw new Error "ipaddr: cannot match ipv4 address with non-ipv4 one" + + return matchCIDR(this.octets, other.octets, 8, cidrRange) + + # Special IPv4 address ranges. + SpecialRanges: + unspecified: [ + [ new IPv4([0, 0, 0, 0]), 8 ] + ] + broadcast: [ + [ new IPv4([255, 255, 255, 255]), 32 ] + ] + multicast: [ # RFC3171 + [ new IPv4([224, 0, 0, 0]), 4 ] + ] + linkLocal: [ # RFC3927 + [ new IPv4([169, 254, 0, 0]), 16 ] + ] + loopback: [ # RFC5735 + [ new IPv4([127, 0, 0, 0]), 8 ] + ] + private: [ # RFC1918 + [ new IPv4([10, 0, 0, 0]), 8 ] + [ new IPv4([172, 16, 0, 0]), 12 ] + [ new IPv4([192, 168, 0, 0]), 16 ] + ] + reserved: [ # Reserved and testing-only ranges; RFCs 5735, 5737, 2544, 1700 + [ new IPv4([192, 0, 0, 0]), 24 ] + [ new IPv4([192, 0, 2, 0]), 24 ] + [ new IPv4([192, 88, 99, 0]), 24 ] + [ new IPv4([198, 51, 100, 0]), 24 ] + [ new IPv4([203, 0, 113, 0]), 24 ] + [ new IPv4([240, 0, 0, 0]), 4 ] + ] + + # Checks if the address corresponds to one of the special ranges. + range: -> + return ipaddr.subnetMatch(this, @SpecialRanges) + + # Convrets this IPv4 address to an IPv4-mapped IPv6 address. 
+ toIPv4MappedAddress: -> + return ipaddr.IPv6.parse "::ffff:#{@toString()}" + + # returns a number of leading ones in IPv4 address, making sure that + # the rest is a solid sequence of 0's (valid netmask) + # returns either the CIDR length or null if mask is not valid + prefixLengthFromSubnetMask: -> + # number of zeroes in octet + zerotable = + 0: 8 + 128: 7 + 192: 6 + 224: 5 + 240: 4 + 248: 3 + 252: 2 + 254: 1 + 255: 0 + + cidr = 0 + # non-zero encountered stop scanning for zeroes + stop = false + for i in [3..0] by -1 + octet = @octets[i] + if octet of zerotable + zeros = zerotable[octet] + if stop and zeros != 0 + return null + unless zeros == 8 + stop = true + cidr += zeros + else + return null + return 32 - cidr + +# A list of regular expressions that match arbitrary IPv4 addresses, +# for which a number of weird notations exist. +# Note that an address like 0010.0xa5.1.1 is considered legal. +ipv4Part = "(0?\\d+|0x[a-f0-9]+)" +ipv4Regexes = + fourOctet: new RegExp "^#{ipv4Part}\\.#{ipv4Part}\\.#{ipv4Part}\\.#{ipv4Part}$", 'i' + longValue: new RegExp "^#{ipv4Part}$", 'i' + +# Classful variants (like a.b, where a is an octet, and b is a 24-bit +# value representing last three octets; this corresponds to a class C +# address) are omitted due to classless nature of modern Internet. +ipaddr.IPv4.parser = (string) -> + parseIntAuto = (string) -> + if string[0] == "0" && string[1] != "x" + parseInt(string, 8) + else + parseInt(string) + + # parseInt recognizes all that octal & hexadecimal weirdness for us + if match = string.match(ipv4Regexes.fourOctet) + return (parseIntAuto(part) for part in match[1..5]) + else if match = string.match(ipv4Regexes.longValue) + value = parseIntAuto(match[1]) + if value > 0xffffffff || value < 0 + throw new Error "ipaddr: address outside defined range" + return ((value >> shift) & 0xff for shift in [0..24] by 8).reverse() + else + return null + +# An IPv6 address (RFC2460) +class ipaddr.IPv6 + # Constructs an IPv6 address from an array of eight 16-bit parts + # or sixteen 8-bit parts in network order (MSB first). + # Throws an error if the input is invalid. + constructor: (parts) -> + if parts.length == 16 + @parts = [] + for i in [0..14] by 2 + @parts.push((parts[i] << 8) | parts[i + 1]) + else if parts.length == 8 + @parts = parts + else + throw new Error "ipaddr: ipv6 part count should be 8 or 16" + + for part in @parts + if !(0 <= part <= 0xffff) + throw new Error "ipaddr: ipv6 part should fit in 16 bits" + + # The 'kind' method exists on both IPv4 and IPv6 classes. 
+ kind: -> + return 'ipv6' + + # Returns the address in compact, human-readable format like + # 2001:db8:8:66::1 + toString: -> + stringParts = (part.toString(16) for part in @parts) + + compactStringParts = [] + pushPart = (part) -> compactStringParts.push part + + state = 0 + for part in stringParts + switch state + when 0 + if part == '0' + pushPart('') + else + pushPart(part) + + state = 1 + when 1 + if part == '0' + state = 2 + else + pushPart(part) + when 2 + unless part == '0' + pushPart('') + pushPart(part) + state = 3 + when 3 + pushPart(part) + + if state == 2 + pushPart('') + pushPart('') + + return compactStringParts.join ":" + + # Returns an array of byte-sized values in network order (MSB first) + toByteArray: -> + bytes = [] + for part in @parts + bytes.push(part >> 8) + bytes.push(part & 0xff) + + return bytes + + # Returns the address in expanded format with all zeroes included, like + # 2001:db8:8:66:0:0:0:1 + toNormalizedString: -> + return (part.toString(16) for part in @parts).join ":" + + # Checks if this address matches other one within given CIDR range. + match: (other, cidrRange) -> + if cidrRange == undefined + [other, cidrRange] = other + + if other.kind() != 'ipv6' + throw new Error "ipaddr: cannot match ipv6 address with non-ipv6 one" + + return matchCIDR(this.parts, other.parts, 16, cidrRange) + + # Special IPv6 ranges + SpecialRanges: + unspecified: [ new IPv6([0, 0, 0, 0, 0, 0, 0, 0]), 128 ] # RFC4291, here and after + linkLocal: [ new IPv6([0xfe80, 0, 0, 0, 0, 0, 0, 0]), 10 ] + multicast: [ new IPv6([0xff00, 0, 0, 0, 0, 0, 0, 0]), 8 ] + loopback: [ new IPv6([0, 0, 0, 0, 0, 0, 0, 1]), 128 ] + uniqueLocal: [ new IPv6([0xfc00, 0, 0, 0, 0, 0, 0, 0]), 7 ] + ipv4Mapped: [ new IPv6([0, 0, 0, 0, 0, 0xffff, 0, 0]), 96 ] + rfc6145: [ new IPv6([0, 0, 0, 0, 0xffff, 0, 0, 0]), 96 ] # RFC6145 + rfc6052: [ new IPv6([0x64, 0xff9b, 0, 0, 0, 0, 0, 0]), 96 ] # RFC6052 + '6to4': [ new IPv6([0x2002, 0, 0, 0, 0, 0, 0, 0]), 16 ] # RFC3056 + teredo: [ new IPv6([0x2001, 0, 0, 0, 0, 0, 0, 0]), 32 ] # RFC6052, RFC6146 + reserved: [ + [ new IPv6([ 0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]), 32 ] # RFC4291 + ] + + # Checks if the address corresponds to one of the special ranges. + range: -> + return ipaddr.subnetMatch(this, @SpecialRanges) + + # Checks if this address is an IPv4-mapped IPv6 address. + isIPv4MappedAddress: -> + return @range() == 'ipv4Mapped' + + # Converts this address to IPv4 address if it is an IPv4-mapped IPv6 address. + # Throws an error otherwise. + toIPv4Address: -> + unless @isIPv4MappedAddress() + throw new Error "ipaddr: trying to convert a generic ipv6 address to ipv4" + + [high, low] = @parts[-2..-1] + + return new ipaddr.IPv4([high >> 8, high & 0xff, low >> 8, low & 0xff]) + +# IPv6-matching regular expressions. +# For IPv6, the task is simpler: it is enough to match the colon-delimited +# hexadecimal IPv6 and a transitional variant with dotted-decimal IPv4 at +# the end. +ipv6Part = "(?:[0-9a-f]+::?)+" +ipv6Regexes = + native: new RegExp "^(::)?(#{ipv6Part})?([0-9a-f]+)?(::)?$", 'i' + transitional: new RegExp "^((?:#{ipv6Part})|(?:::)(?:#{ipv6Part})?)" + + "#{ipv4Part}\\.#{ipv4Part}\\.#{ipv4Part}\\.#{ipv4Part}$", 'i' + +# Expand :: in an IPv6 address or address part consisting of `parts` groups. +expandIPv6 = (string, parts) -> + # More than one '::' means invalid adddress + if string.indexOf('::') != string.lastIndexOf('::') + return null + + # How many parts do we already have? 
+ colonCount = 0 + lastColon = -1 + while (lastColon = string.indexOf(':', lastColon + 1)) >= 0 + colonCount++ + + # 0::0 is two parts more than :: + colonCount-- if string.substr(0, 2) == '::' + colonCount-- if string.substr(-2, 2) == '::' + + # The following loop would hang if colonCount > parts + if colonCount > parts + return null + + # replacement = ':' + '0:' * (parts - colonCount) + replacementCount = parts - colonCount + replacement = ':' + while replacementCount-- + replacement += '0:' + + # Insert the missing zeroes + string = string.replace('::', replacement) + + # Trim any garbage which may be hanging around if :: was at the edge in + # the source string + string = string[1..-1] if string[0] == ':' + string = string[0..-2] if string[string.length-1] == ':' + + return (parseInt(part, 16) for part in string.split(":")) + +# Parse an IPv6 address. +ipaddr.IPv6.parser = (string) -> + if string.match(ipv6Regexes['native']) + return expandIPv6(string, 8) + + else if match = string.match(ipv6Regexes['transitional']) + parts = expandIPv6(match[1][0..-2], 6) + if parts + octets = [parseInt(match[2]), parseInt(match[3]), + parseInt(match[4]), parseInt(match[5])] + for octet in octets + if !(0 <= octet <= 255) + return null + + parts.push(octets[0] << 8 | octets[1]) + parts.push(octets[2] << 8 | octets[3]) + return parts + + return null + +# Checks if a given string is formatted like IPv4/IPv6 address. +ipaddr.IPv4.isIPv4 = ipaddr.IPv6.isIPv6 = (string) -> + return @parser(string) != null + +# Checks if a given string is a valid IPv4/IPv6 address. +ipaddr.IPv4.isValid = (string) -> + try + new this(@parser(string)) + return true + catch e + return false + +ipaddr.IPv6.isValid = (string) -> + # Since IPv6.isValid is always called first, this shortcut + # provides a substantial performance gain. + if typeof string == "string" and string.indexOf(":") == -1 + return false + + try + new this(@parser(string)) + return true + catch e + return false + +# Tries to parse and validate a string with IPv4/IPv6 address. +# Throws an error if it fails. 
+ipaddr.IPv4.parse = ipaddr.IPv6.parse = (string) -> + parts = @parser(string) + if parts == null + throw new Error "ipaddr: string is not formatted like ip address" + + return new this(parts) + +ipaddr.IPv4.parseCIDR = (string) -> + if match = string.match(/^(.+)\/(\d+)$/) + maskLength = parseInt(match[2]) + if maskLength >= 0 and maskLength <= 32 + return [@parse(match[1]), maskLength] + + throw new Error "ipaddr: string is not formatted like an IPv4 CIDR range" + +ipaddr.IPv6.parseCIDR = (string) -> + if match = string.match(/^(.+)\/(\d+)$/) + maskLength = parseInt(match[2]) + if maskLength >= 0 and maskLength <= 128 + return [@parse(match[1]), maskLength] + + throw new Error "ipaddr: string is not formatted like an IPv6 CIDR range" + +# Checks if the address is valid IP address +ipaddr.isValid = (string) -> + return ipaddr.IPv6.isValid(string) || ipaddr.IPv4.isValid(string) + +# Try to parse an address and throw an error if it is impossible +ipaddr.parse = (string) -> + if ipaddr.IPv6.isValid(string) + return ipaddr.IPv6.parse(string) + else if ipaddr.IPv4.isValid(string) + return ipaddr.IPv4.parse(string) + else + throw new Error "ipaddr: the address has neither IPv6 nor IPv4 format" + +ipaddr.parseCIDR = (string) -> + try + return ipaddr.IPv6.parseCIDR(string) + catch e + try + return ipaddr.IPv4.parseCIDR(string) + catch e + throw new Error "ipaddr: the address has neither IPv6 nor IPv4 CIDR format" + +# Try to parse an array in network order (MSB first) for IPv4 and IPv6 +ipaddr.fromByteArray = (bytes) -> + length = bytes.length + if length == 4 + return new ipaddr.IPv4(bytes) + else if length == 16 + return new ipaddr.IPv6(bytes) + else + throw new Error "ipaddr: the binary input is neither an IPv6 nor IPv4 address" + +# Parse an address and return plain IPv4 address if it is an IPv4-mapped address +ipaddr.process = (string) -> + addr = @parse(string) + if addr.kind() == 'ipv6' && addr.isIPv4MappedAddress() + return addr.toIPv4Address() + else + return addr diff --git a/node_modules/ipaddr.js/test/ipaddr.test.coffee b/node_modules/ipaddr.js/test/ipaddr.test.coffee new file mode 100644 index 0000000..5f07526 --- /dev/null +++ b/node_modules/ipaddr.js/test/ipaddr.test.coffee @@ -0,0 +1,339 @@ +ipaddr = require '../lib/ipaddr' + +module.exports = + 'should define main classes': (test) -> + test.ok(ipaddr.IPv4?, 'defines IPv4 class') + test.ok(ipaddr.IPv6?, 'defines IPv6 class') + test.done() + + 'can construct IPv4 from octets': (test) -> + test.doesNotThrow -> + new ipaddr.IPv4([192, 168, 1, 2]) + test.done() + + 'refuses to construct invalid IPv4': (test) -> + test.throws -> + new ipaddr.IPv4([300, 1, 2, 3]) + test.throws -> + new ipaddr.IPv4([8, 8, 8]) + test.done() + + 'converts IPv4 to string correctly': (test) -> + addr = new ipaddr.IPv4([192, 168, 1, 1]) + test.equal(addr.toString(), '192.168.1.1') + test.done() + + 'returns correct kind for IPv4': (test) -> + addr = new ipaddr.IPv4([1, 2, 3, 4]) + test.equal(addr.kind(), 'ipv4') + test.done() + + 'allows to access IPv4 octets': (test) -> + addr = new ipaddr.IPv4([42, 0, 0, 0]) + test.equal(addr.octets[0], 42) + test.done() + + 'checks IPv4 address format': (test) -> + test.equal(ipaddr.IPv4.isIPv4('192.168.007.0xa'), true) + test.equal(ipaddr.IPv4.isIPv4('1024.0.0.1'), true) + test.equal(ipaddr.IPv4.isIPv4('8.0xa.wtf.6'), false) + test.done() + + 'validates IPv4 addresses': (test) -> + test.equal(ipaddr.IPv4.isValid('192.168.007.0xa'), true) + test.equal(ipaddr.IPv4.isValid('1024.0.0.1'), false) + 
test.equal(ipaddr.IPv4.isValid('8.0xa.wtf.6'), false) + test.done() + + 'parses IPv4 in several weird formats': (test) -> + test.deepEqual(ipaddr.IPv4.parse('192.168.1.1').octets, [192, 168, 1, 1]) + test.deepEqual(ipaddr.IPv4.parse('0xc0.168.1.1').octets, [192, 168, 1, 1]) + test.deepEqual(ipaddr.IPv4.parse('192.0250.1.1').octets, [192, 168, 1, 1]) + test.deepEqual(ipaddr.IPv4.parse('0xc0a80101').octets, [192, 168, 1, 1]) + test.deepEqual(ipaddr.IPv4.parse('030052000401').octets, [192, 168, 1, 1]) + test.deepEqual(ipaddr.IPv4.parse('3232235777').octets, [192, 168, 1, 1]) + test.done() + + 'barfs at invalid IPv4': (test) -> + test.throws -> + ipaddr.IPv4.parse('10.0.0.wtf') + test.done() + + 'matches IPv4 CIDR correctly': (test) -> + addr = new ipaddr.IPv4([10, 5, 0, 1]) + test.equal(addr.match(ipaddr.IPv4.parse('0.0.0.0'), 0), true) + test.equal(addr.match(ipaddr.IPv4.parse('11.0.0.0'), 8), false) + test.equal(addr.match(ipaddr.IPv4.parse('10.0.0.0'), 8), true) + test.equal(addr.match(ipaddr.IPv4.parse('10.0.0.1'), 8), true) + test.equal(addr.match(ipaddr.IPv4.parse('10.0.0.10'), 8), true) + test.equal(addr.match(ipaddr.IPv4.parse('10.5.5.0'), 16), true) + test.equal(addr.match(ipaddr.IPv4.parse('10.4.5.0'), 16), false) + test.equal(addr.match(ipaddr.IPv4.parse('10.4.5.0'), 15), true) + test.equal(addr.match(ipaddr.IPv4.parse('10.5.0.2'), 32), false) + test.equal(addr.match(addr, 32), true) + test.done() + + 'parses IPv4 CIDR correctly': (test) -> + addr = new ipaddr.IPv4([10, 5, 0, 1]) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('0.0.0.0/0')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('11.0.0.0/8')), false) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.0.0.0/8')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.0.0.1/8')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.0.0.10/8')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.5.5.0/16')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.4.5.0/16')), false) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.4.5.0/15')), true) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.5.0.2/32')), false) + test.equal(addr.match(ipaddr.IPv4.parseCIDR('10.5.0.1/32')), true) + test.throws -> + ipaddr.IPv4.parseCIDR('10.5.0.1') + test.throws -> + ipaddr.IPv4.parseCIDR('0.0.0.0/-1') + test.throws -> + ipaddr.IPv4.parseCIDR('0.0.0.0/33') + test.done() + + 'detects reserved IPv4 networks': (test) -> + test.equal(ipaddr.IPv4.parse('0.0.0.0').range(), 'unspecified') + test.equal(ipaddr.IPv4.parse('0.1.0.0').range(), 'unspecified') + test.equal(ipaddr.IPv4.parse('10.1.0.1').range(), 'private') + test.equal(ipaddr.IPv4.parse('192.168.2.1').range(), 'private') + test.equal(ipaddr.IPv4.parse('224.100.0.1').range(), 'multicast') + test.equal(ipaddr.IPv4.parse('169.254.15.0').range(), 'linkLocal') + test.equal(ipaddr.IPv4.parse('127.1.1.1').range(), 'loopback') + test.equal(ipaddr.IPv4.parse('255.255.255.255').range(), 'broadcast') + test.equal(ipaddr.IPv4.parse('240.1.2.3').range(), 'reserved') + test.equal(ipaddr.IPv4.parse('8.8.8.8').range(), 'unicast') + test.done() + + 'can construct IPv6 from 16bit parts': (test) -> + test.doesNotThrow -> + new ipaddr.IPv6([0x2001, 0xdb8, 0xf53a, 0, 0, 0, 0, 1]) + test.done() + + 'can construct IPv6 from 8bit parts': (test) -> + test.doesNotThrow -> + new ipaddr.IPv6([0x20, 0x01, 0xd, 0xb8, 0xf5, 0x3a, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) + test.deepEqual(new ipaddr.IPv6([0x20, 0x01, 0xd, 0xb8, 0xf5, 0x3a, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]), + new ipaddr.IPv6([0x2001, 0xdb8, 0xf53a, 
0, 0, 0, 0, 1])) + test.done() + + 'refuses to construct invalid IPv6': (test) -> + test.throws -> + new ipaddr.IPv6([0xfffff, 0, 0, 0, 0, 0, 0, 1]) + test.throws -> + new ipaddr.IPv6([0xfffff, 0, 0, 0, 0, 0, 1]) + test.throws -> + new ipaddr.IPv6([0xffff, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]) + test.done() + + 'converts IPv6 to string correctly': (test) -> + addr = new ipaddr.IPv6([0x2001, 0xdb8, 0xf53a, 0, 0, 0, 0, 1]) + test.equal(addr.toNormalizedString(), '2001:db8:f53a:0:0:0:0:1') + test.equal(addr.toString(), '2001:db8:f53a::1') + test.equal(new ipaddr.IPv6([0, 0, 0, 0, 0, 0, 0, 1]).toString(), '::1') + test.equal(new ipaddr.IPv6([0x2001, 0xdb8, 0, 0, 0, 0, 0, 0]).toString(), '2001:db8::') + test.done() + + 'returns correct kind for IPv6': (test) -> + addr = new ipaddr.IPv6([0x2001, 0xdb8, 0xf53a, 0, 0, 0, 0, 1]) + test.equal(addr.kind(), 'ipv6') + test.done() + + 'allows to access IPv6 address parts': (test) -> + addr = new ipaddr.IPv6([0x2001, 0xdb8, 0xf53a, 0, 0, 42, 0, 1]) + test.equal(addr.parts[5], 42) + test.done() + + 'checks IPv6 address format': (test) -> + test.equal(ipaddr.IPv6.isIPv6('2001:db8:F53A::1'), true) + test.equal(ipaddr.IPv6.isIPv6('200001::1'), true) + test.equal(ipaddr.IPv6.isIPv6('::ffff:192.168.1.1'), true) + test.equal(ipaddr.IPv6.isIPv6('::ffff:300.168.1.1'), false) + test.equal(ipaddr.IPv6.isIPv6('::ffff:300.168.1.1:0'), false) + test.equal(ipaddr.IPv6.isIPv6('fe80::wtf'), false) + test.done() + + 'validates IPv6 addresses': (test) -> + test.equal(ipaddr.IPv6.isValid('2001:db8:F53A::1'), true) + test.equal(ipaddr.IPv6.isValid('200001::1'), false) + test.equal(ipaddr.IPv6.isValid('::ffff:192.168.1.1'), true) + test.equal(ipaddr.IPv6.isValid('::ffff:300.168.1.1'), false) + test.equal(ipaddr.IPv6.isValid('::ffff:300.168.1.1:0'), false) + test.equal(ipaddr.IPv6.isValid('::ffff:222.1.41.9000'), false) + test.equal(ipaddr.IPv6.isValid('2001:db8::F53A::1'), false) + test.equal(ipaddr.IPv6.isValid('fe80::wtf'), false) + test.equal(ipaddr.IPv6.isValid('2002::2:'), false) + test.equal(ipaddr.IPv6.isValid(undefined), false) + test.done() + + 'parses IPv6 in different formats': (test) -> + test.deepEqual(ipaddr.IPv6.parse('2001:db8:F53A:0:0:0:0:1').parts, [0x2001, 0xdb8, 0xf53a, 0, 0, 0, 0, 1]) + test.deepEqual(ipaddr.IPv6.parse('fe80::10').parts, [0xfe80, 0, 0, 0, 0, 0, 0, 0x10]) + test.deepEqual(ipaddr.IPv6.parse('2001:db8:F53A::').parts, [0x2001, 0xdb8, 0xf53a, 0, 0, 0, 0, 0]) + test.deepEqual(ipaddr.IPv6.parse('::1').parts, [0, 0, 0, 0, 0, 0, 0, 1]) + test.deepEqual(ipaddr.IPv6.parse('::').parts, [0, 0, 0, 0, 0, 0, 0, 0]) + test.done() + + 'barfs at invalid IPv6': (test) -> + test.throws -> + ipaddr.IPv6.parse('fe80::0::1') + test.done() + + 'matches IPv6 CIDR correctly': (test) -> + addr = ipaddr.IPv6.parse('2001:db8:f53a::1') + test.equal(addr.match(ipaddr.IPv6.parse('::'), 0), true) + test.equal(addr.match(ipaddr.IPv6.parse('2001:db8:f53a::1:1'), 64), true) + test.equal(addr.match(ipaddr.IPv6.parse('2001:db8:f53b::1:1'), 48), false) + test.equal(addr.match(ipaddr.IPv6.parse('2001:db8:f531::1:1'), 44), true) + test.equal(addr.match(ipaddr.IPv6.parse('2001:db8:f500::1'), 40), true) + test.equal(addr.match(ipaddr.IPv6.parse('2001:db9:f500::1'), 40), false) + test.equal(addr.match(addr, 128), true) + test.done() + + 'parses IPv6 CIDR correctly': (test) -> + addr = ipaddr.IPv6.parse('2001:db8:f53a::1') + test.equal(addr.match(ipaddr.IPv6.parseCIDR('::/0')), true) + test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db8:f53a::1:1/64')), true) + 
test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db8:f53b::1:1/48')), false) + test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db8:f531::1:1/44')), true) + test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db8:f500::1/40')), true) + test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db9:f500::1/40')), false) + test.equal(addr.match(ipaddr.IPv6.parseCIDR('2001:db8:f53a::1/128')), true) + test.throws -> + ipaddr.IPv6.parseCIDR('2001:db8:f53a::1') + test.throws -> + ipaddr.IPv6.parseCIDR('2001:db8:f53a::1/-1') + test.throws -> + ipaddr.IPv6.parseCIDR('2001:db8:f53a::1/129') + test.done() + + 'converts between IPv4-mapped IPv6 addresses and IPv4 addresses': (test) -> + addr = ipaddr.IPv4.parse('77.88.21.11') + mapped = addr.toIPv4MappedAddress() + test.deepEqual(mapped.parts, [0, 0, 0, 0, 0, 0xffff, 0x4d58, 0x150b]) + test.deepEqual(mapped.toIPv4Address().octets, addr.octets) + test.done() + + 'refuses to convert non-IPv4-mapped IPv6 address to IPv4 address': (test) -> + test.throws -> + ipaddr.IPv6.parse('2001:db8::1').toIPv4Address() + test.done() + + 'detects reserved IPv6 networks': (test) -> + test.equal(ipaddr.IPv6.parse('::').range(), 'unspecified') + test.equal(ipaddr.IPv6.parse('fe80::1234:5678:abcd:0123').range(), 'linkLocal') + test.equal(ipaddr.IPv6.parse('ff00::1234').range(), 'multicast') + test.equal(ipaddr.IPv6.parse('::1').range(), 'loopback') + test.equal(ipaddr.IPv6.parse('fc00::').range(), 'uniqueLocal') + test.equal(ipaddr.IPv6.parse('::ffff:192.168.1.10').range(), 'ipv4Mapped') + test.equal(ipaddr.IPv6.parse('::ffff:0:192.168.1.10').range(), 'rfc6145') + test.equal(ipaddr.IPv6.parse('64:ff9b::1234').range(), 'rfc6052') + test.equal(ipaddr.IPv6.parse('2002:1f63:45e8::1').range(), '6to4') + test.equal(ipaddr.IPv6.parse('2001::4242').range(), 'teredo') + test.equal(ipaddr.IPv6.parse('2001:db8::3210').range(), 'reserved') + test.equal(ipaddr.IPv6.parse('2001:470:8:66::1').range(), 'unicast') + test.done() + + 'is able to determine IP address type': (test) -> + test.equal(ipaddr.parse('8.8.8.8').kind(), 'ipv4') + test.equal(ipaddr.parse('2001:db8:3312::1').kind(), 'ipv6') + test.done() + + 'throws an error if tried to parse an invalid address': (test) -> + test.throws -> + ipaddr.parse('::some.nonsense') + test.done() + + 'correctly processes IPv4-mapped addresses': (test) -> + test.equal(ipaddr.process('8.8.8.8').kind(), 'ipv4') + test.equal(ipaddr.process('2001:db8:3312::1').kind(), 'ipv6') + test.equal(ipaddr.process('::ffff:192.168.1.1').kind(), 'ipv4') + test.done() + + 'correctly converts IPv6 and IPv4 addresses to byte arrays': (test) -> + test.deepEqual(ipaddr.parse('1.2.3.4').toByteArray(), + [0x1, 0x2, 0x3, 0x4]); + # Fuck yeah. The first byte of Google's IPv6 address is 42. 42! 
+ test.deepEqual(ipaddr.parse('2a00:1450:8007::68').toByteArray(), + [42, 0x00, 0x14, 0x50, 0x80, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x68 ]) + test.done() + + 'correctly parses 1 as an IPv4 address': (test) -> + test.equal(ipaddr.IPv6.isValid('1'), false) + test.equal(ipaddr.IPv4.isValid('1'), true) + test.deepEqual(new ipaddr.IPv4([0, 0, 0, 1]), ipaddr.parse('1')) + test.done() + + 'correctly detects IPv4 and IPv6 CIDR addresses': (test) -> + test.deepEqual([ipaddr.IPv6.parse('fc00::'), 64], + ipaddr.parseCIDR('fc00::/64')) + test.deepEqual([ipaddr.IPv4.parse('1.2.3.4'), 5], + ipaddr.parseCIDR('1.2.3.4/5')) + test.done() + + 'does not consider a very large or very small number a valid IP address': (test) -> + test.equal(ipaddr.isValid('4999999999'), false) + test.equal(ipaddr.isValid('-1'), false) + test.done() + + 'does not hang on ::8:8:8:8:8:8:8:8:8': (test) -> + test.equal(ipaddr.IPv6.isValid('::8:8:8:8:8:8:8:8:8'), false) + test.done() + + 'subnetMatch does not fail on empty range': (test) -> + ipaddr.subnetMatch(new ipaddr.IPv4([1,2,3,4]), {}, false) + ipaddr.subnetMatch(new ipaddr.IPv4([1,2,3,4]), {subnet: []}, false) + test.done() + + 'subnetMatch returns default subnet on empty range': (test) -> + test.equal(ipaddr.subnetMatch(new ipaddr.IPv4([1,2,3,4]), {}, false), false) + test.equal(ipaddr.subnetMatch(new ipaddr.IPv4([1,2,3,4]), {subnet: []}, false), false) + test.done() + + 'is able to determine IP address type from byte array input': (test) -> + test.equal(ipaddr.fromByteArray([0x7f, 0, 0, 1]).kind(), 'ipv4') + test.equal(ipaddr.fromByteArray([0x20, 0x01, 0xd, 0xb8, 0xf5, 0x3a, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]).kind(), 'ipv6') + test.throws -> + ipaddr.fromByteArray([1]) + test.done() + + 'prefixLengthFromSubnetMask returns proper CIDR notation for standard IPv4 masks': (test) -> + test.equal(ipaddr.IPv4.parse('255.255.255.255').prefixLengthFromSubnetMask(), 32) + test.equal(ipaddr.IPv4.parse('255.255.255.254').prefixLengthFromSubnetMask(), 31) + test.equal(ipaddr.IPv4.parse('255.255.255.252').prefixLengthFromSubnetMask(), 30) + test.equal(ipaddr.IPv4.parse('255.255.255.248').prefixLengthFromSubnetMask(), 29) + test.equal(ipaddr.IPv4.parse('255.255.255.240').prefixLengthFromSubnetMask(), 28) + test.equal(ipaddr.IPv4.parse('255.255.255.224').prefixLengthFromSubnetMask(), 27) + test.equal(ipaddr.IPv4.parse('255.255.255.192').prefixLengthFromSubnetMask(), 26) + test.equal(ipaddr.IPv4.parse('255.255.255.128').prefixLengthFromSubnetMask(), 25) + test.equal(ipaddr.IPv4.parse('255.255.255.0').prefixLengthFromSubnetMask(), 24) + test.equal(ipaddr.IPv4.parse('255.255.254.0').prefixLengthFromSubnetMask(), 23) + test.equal(ipaddr.IPv4.parse('255.255.252.0').prefixLengthFromSubnetMask(), 22) + test.equal(ipaddr.IPv4.parse('255.255.248.0').prefixLengthFromSubnetMask(), 21) + test.equal(ipaddr.IPv4.parse('255.255.240.0').prefixLengthFromSubnetMask(), 20) + test.equal(ipaddr.IPv4.parse('255.255.224.0').prefixLengthFromSubnetMask(), 19) + test.equal(ipaddr.IPv4.parse('255.255.192.0').prefixLengthFromSubnetMask(), 18) + test.equal(ipaddr.IPv4.parse('255.255.128.0').prefixLengthFromSubnetMask(), 17) + test.equal(ipaddr.IPv4.parse('255.255.0.0').prefixLengthFromSubnetMask(), 16) + test.equal(ipaddr.IPv4.parse('255.254.0.0').prefixLengthFromSubnetMask(), 15) + test.equal(ipaddr.IPv4.parse('255.252.0.0').prefixLengthFromSubnetMask(), 14) + test.equal(ipaddr.IPv4.parse('255.248.0.0').prefixLengthFromSubnetMask(), 13) + 
test.equal(ipaddr.IPv4.parse('255.240.0.0').prefixLengthFromSubnetMask(), 12) + test.equal(ipaddr.IPv4.parse('255.224.0.0').prefixLengthFromSubnetMask(), 11) + test.equal(ipaddr.IPv4.parse('255.192.0.0').prefixLengthFromSubnetMask(), 10) + test.equal(ipaddr.IPv4.parse('255.128.0.0').prefixLengthFromSubnetMask(), 9) + test.equal(ipaddr.IPv4.parse('255.0.0.0').prefixLengthFromSubnetMask(), 8) + test.equal(ipaddr.IPv4.parse('254.0.0.0').prefixLengthFromSubnetMask(), 7) + test.equal(ipaddr.IPv4.parse('252.0.0.0').prefixLengthFromSubnetMask(), 6) + test.equal(ipaddr.IPv4.parse('248.0.0.0').prefixLengthFromSubnetMask(), 5) + test.equal(ipaddr.IPv4.parse('240.0.0.0').prefixLengthFromSubnetMask(), 4) + test.equal(ipaddr.IPv4.parse('224.0.0.0').prefixLengthFromSubnetMask(), 3) + test.equal(ipaddr.IPv4.parse('192.0.0.0').prefixLengthFromSubnetMask(), 2) + test.equal(ipaddr.IPv4.parse('128.0.0.0').prefixLengthFromSubnetMask(), 1) + test.equal(ipaddr.IPv4.parse('0.0.0.0').prefixLengthFromSubnetMask(), 0) + # negative cases + test.equal(ipaddr.IPv4.parse('192.168.255.0').prefixLengthFromSubnetMask(), null) + test.equal(ipaddr.IPv4.parse('255.0.255.0').prefixLengthFromSubnetMask(), null) + test.done() + diff --git a/node_modules/is-binary-path/index.js b/node_modules/is-binary-path/index.js new file mode 100644 index 0000000..6c8c7e7 --- /dev/null +++ b/node_modules/is-binary-path/index.js @@ -0,0 +1,12 @@ +'use strict'; +var path = require('path'); +var binaryExtensions = require('binary-extensions'); +var exts = Object.create(null); + +binaryExtensions.forEach(function (el) { + exts[el] = true; +}); + +module.exports = function (filepath) { + return path.extname(filepath).slice(1).toLowerCase() in exts; +}; diff --git a/node_modules/is-binary-path/license b/node_modules/is-binary-path/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/is-binary-path/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
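For reference, the check implemented in `is-binary-path/index.js` above boils down to an extension lookup. The sketch below shows equivalent standalone logic; the hard-coded list is only a stand-in for the real `binary-extensions` array that the module depends on.

```js
// Same idea as is-binary-path/index.js: look the file extension up in a set.
// The short extension list here is a stand-in for the `binary-extensions` package.
var path = require('path');

var exts = Object.create(null);
['png', 'jpg', 'gif', 'zip'].forEach(function (el) {
  exts[el] = true;
});

function isBinaryPath(filepath) {
  // path.extname returns e.g. ".PNG"; strip the dot and lowercase before the lookup.
  return path.extname(filepath).slice(1).toLowerCase() in exts;
}

isBinaryPath('photos/cat.PNG'); // => true
isBinaryPath('notes/todo.txt'); // => false
```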
diff --git a/node_modules/is-binary-path/package.json b/node_modules/is-binary-path/package.json new file mode 100644 index 0000000..cd21d88 --- /dev/null +++ b/node_modules/is-binary-path/package.json @@ -0,0 +1,39 @@ +{ + "name": "is-binary-path", + "version": "1.0.1", + "description": "Check if a filepath is a binary file", + "license": "MIT", + "repository": "sindresorhus/is-binary-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "bin", + "binary", + "ext", + "extensions", + "extension", + "file", + "path", + "check", + "detect", + "is" + ], + "dependencies": { + "binary-extensions": "^1.0.0" + }, + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/is-binary-path/readme.md b/node_modules/is-binary-path/readme.md new file mode 100644 index 0000000..a17d6a2 --- /dev/null +++ b/node_modules/is-binary-path/readme.md @@ -0,0 +1,34 @@ +# is-binary-path [![Build Status](https://travis-ci.org/sindresorhus/is-binary-path.svg?branch=master)](https://travis-ci.org/sindresorhus/is-binary-path) + +> Check if a filepath is a binary file + + +## Install + +``` +$ npm install --save is-binary-path +``` + + +## Usage + +```js +var isBinaryPath = require('is-binary-path'); + +isBinaryPath('src/unicorn.png'); +//=> true + +isBinaryPath('src/unicorn.txt'); +//=> false +``` + + +## Related + +- [`binary-extensions`](https://github.com/sindresorhus/binary-extensions) - List of binary file extensions +- [`is-text-path`](https://github.com/sindresorhus/is-text-path) - Check if a filepath is a text file + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-buffer/.travis.yml b/node_modules/is-buffer/.travis.yml new file mode 100644 index 0000000..24885a3 --- /dev/null +++ b/node_modules/is-buffer/.travis.yml @@ -0,0 +1,8 @@ +sudo: false +language: node_js +node_js: + - node +env: + global: + - secure: du27W3wTgZ3G183axW7w0I01lOIurx8kilMH9p45VMfNXCu8lo6FLtLIQZxJ1FYMoJLQ1yfJTu2G0rq39SotDfJumsk6tF7BjTY/HKCocZaHqCMgw0W2bcylb5kMAdLhBNPlzejpPoWa1x1axbAHNFOLQNVosG/Bavu3/kuIIps= + - secure: Ax/5aekM40o67NuTkvQqx1DhfP86ZlHTtKbv5yI+WFmbjD3FQM8b8G1J/o7doaBDev7Mp+1zDJOK2pFGtt+JGRl0lM2JUmLh6yh/b28obXyei5iuUkqzKJLfKZHMbY5QW/1i4DUM+zSXe6Kava0qnqYg5wBBnrF6gLdsVsCGNQk= diff --git a/node_modules/is-buffer/.zuul.yml b/node_modules/is-buffer/.zuul.yml new file mode 100644 index 0000000..eb3cae6 --- /dev/null +++ b/node_modules/is-buffer/.zuul.yml @@ -0,0 +1,14 @@ +ui: tape +browsers: + - name: chrome + version: 39..latest + - name: firefox + version: 34..latest + - name: safari + version: 5..latest + - name: microsoftedge + version: latest + - name: ie + version: 8..latest + - name: android + version: 5.0..latest diff --git a/node_modules/is-buffer/LICENSE b/node_modules/is-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/is-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the 
following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-buffer/README.md b/node_modules/is-buffer/README.md new file mode 100644 index 0000000..cb6f356 --- /dev/null +++ b/node_modules/is-buffer/README.md @@ -0,0 +1,49 @@ +# is-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][npm-url] + +#### Determine if an object is a [`Buffer`](http://nodejs.org/api/buffer.html) (including the [browserify Buffer](https://github.com/feross/buffer)) + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[travis-image]: https://img.shields.io/travis/feross/is-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/is-buffer +[npm-image]: https://img.shields.io/npm/v/is-buffer.svg +[npm-url]: https://npmjs.org/package/is-buffer +[downloads-image]: https://img.shields.io/npm/dm/is-buffer.svg +[saucelabs-image]: https://saucelabs.com/browser-matrix/is-buffer.svg +[saucelabs-url]: https://saucelabs.com/u/is-buffer + +## Why not use `Buffer.isBuffer`? + +This module lets you check if an object is a `Buffer` without using `Buffer.isBuffer` (which includes the whole [buffer](https://github.com/feross/buffer) module in [browserify](http://browserify.org/)). + +It's future-proof and works in node too! + +## install + +```bash +npm install is-buffer +``` + +## usage + +```js +var isBuffer = require('is-buffer') + +isBuffer(new Buffer(4)) // true + +isBuffer(undefined) // false +isBuffer(null) // false +isBuffer('') // false +isBuffer(true) // false +isBuffer(false) // false +isBuffer(0) // false +isBuffer(1) // false +isBuffer(1.0) // false +isBuffer('string') // false +isBuffer({}) // false +isBuffer(function foo () {}) // false +``` + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/is-buffer/index.js b/node_modules/is-buffer/index.js new file mode 100644 index 0000000..36c808e --- /dev/null +++ b/node_modules/is-buffer/index.js @@ -0,0 +1,21 @@ +/*! + * Determine if an object is a Buffer + * + * @author Feross Aboukhadijeh + * @license MIT + */ + +// The _isBuffer check is for Safari 5-7 support, because it's missing +// Object.prototype.constructor. Remove this eventually +module.exports = function (obj) { + return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer) +} + +function isBuffer (obj) { + return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) +} + +// For Node v0.10 support. Remove this eventually. 
+function isSlowBuffer (obj) { + return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0)) +} diff --git a/node_modules/is-buffer/package.json b/node_modules/is-buffer/package.json new file mode 100644 index 0000000..7915353 --- /dev/null +++ b/node_modules/is-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "is-buffer", + "description": "Determine if an object is a Buffer", + "version": "1.1.4", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org/" + }, + "bugs": { + "url": "https://github.com/feross/is-buffer/issues" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^7.0.0", + "tape": "^4.0.0", + "zuul": "^3.0.0" + }, + "keywords": [ + "buffer", + "buffers", + "type", + "core buffer", + "browser buffer", + "browserify", + "typed array", + "uint32array", + "int16array", + "int32array", + "float32array", + "float64array", + "browser", + "arraybuffer", + "dataview" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/is-buffer.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "zuul -- test/*.js", + "test-browser-local": "zuul --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "testling": { + "files": "test/*.js" + } +} diff --git a/node_modules/is-buffer/test/basic.js b/node_modules/is-buffer/test/basic.js new file mode 100644 index 0000000..43b7c82 --- /dev/null +++ b/node_modules/is-buffer/test/basic.js @@ -0,0 +1,25 @@ +var buffer = require('buffer') +var isBuffer = require('../') +var test = require('tape') + +test('is-buffer', function (t) { + t.equal(isBuffer(new Buffer(4)), true, 'new Buffer(4)') + t.equal(isBuffer(buffer.SlowBuffer(100)), true, 'SlowBuffer(100)') + + t.equal(isBuffer(undefined), false, 'undefined') + t.equal(isBuffer(null), false, 'null') + t.equal(isBuffer(''), false, 'empty string') + t.equal(isBuffer(true), false, 'true') + t.equal(isBuffer(false), false, 'false') + t.equal(isBuffer(0), false, '0') + t.equal(isBuffer(1), false, '1') + t.equal(isBuffer(1.0), false, '1.0') + t.equal(isBuffer('string'), false, 'string') + t.equal(isBuffer({}), false, '{}') + t.equal(isBuffer([]), false, '[]') + t.equal(isBuffer(function foo () {}), false, 'function foo () {}') + t.equal(isBuffer({ isBuffer: null }), false, '{ isBuffer: null }') + t.equal(isBuffer({ isBuffer: function () { throw new Error() } }), false, '{ isBuffer: function () { throw new Error() } }') + + t.end() +}) diff --git a/node_modules/is-dotfile/LICENSE b/node_modules/is-dotfile/LICENSE new file mode 100644 index 0000000..33754da --- /dev/null +++ b/node_modules/is-dotfile/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-dotfile/README.md b/node_modules/is-dotfile/README.md new file mode 100644 index 0000000..d17b55c --- /dev/null +++ b/node_modules/is-dotfile/README.md @@ -0,0 +1,74 @@ +# is-dotfile [![NPM version](https://badge.fury.io/js/is-dotfile.svg)](http://badge.fury.io/js/is-dotfile) + +> Return true if a file path is (or has) a dotfile. Returns false if the path is a dot directory. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-dotfile --save +``` + +## Usage + +```js +var isDotfile = require('is-dotfile'); +``` + +**false** + +All of the following return `false`: + +```js +isDotfile('a/b/c.js'); +isDotfile('/.git/foo'); +isDotfile('a/b/c/.git/foo'); +//=> false +``` + +**true** + +All of the following return `true`: + +```js +isDotfile('a/b/.gitignore'); +isDotfile('.gitignore'); +isDotfile('/.gitignore'); +//=> true +``` + +## Related projects + +* [dotfile-regex](https://www.npmjs.com/package/dotfile-regex): Regular expresson for matching dotfiles. | [homepage](https://github.com/regexps/dotfile-regex) +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob) +* [is-dotdir](https://www.npmjs.com/package/is-dotdir): Returns true if a path is a dot-directory. | [homepage](https://github.com/jonschlinkert/is-dotdir) +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern.… [more](https://www.npmjs.com/package/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob) + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-dotfile/issues/new). + +## Authors + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on October 20, 2015._ \ No newline at end of file diff --git a/node_modules/is-dotfile/index.js b/node_modules/is-dotfile/index.js new file mode 100644 index 0000000..5a0a0ca --- /dev/null +++ b/node_modules/is-dotfile/index.js @@ -0,0 +1,15 @@ +/*! + * is-dotfile + * + * Copyright (c) 2015 Jon Schlinkert, contributors. + * Licensed under the MIT license. + */ + +module.exports = function(str) { + if (str.charCodeAt(0) === 46 /* . */ && str.indexOf('/', 1) === -1) { + return true; + } + + var last = str.lastIndexOf('/'); + return last !== -1 ? str.charCodeAt(last + 1) === 46 /* . 
*/ : false; +}; diff --git a/node_modules/is-dotfile/package.json b/node_modules/is-dotfile/package.json new file mode 100644 index 0000000..91bde22 --- /dev/null +++ b/node_modules/is-dotfile/package.json @@ -0,0 +1,53 @@ +{ + "name": "is-dotfile", + "description": "Return true if a file path is (or has) a dotfile. Returns false if the path is a dot directory.", + "version": "1.0.2", + "homepage": "https://github.com/jonschlinkert/is-dotfile", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-dotfile", + "bugs": { + "url": "https://github.com/jonschlinkert/is-dotfile/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "benchmarked": "^0.1.3", + "dotfile-regex": "^0.1.2", + "mocha": "*" + }, + "keywords": [ + "detect", + "dot", + "dotfile", + "expression", + "file", + "filepath", + "find", + "fs", + "is", + "match", + "path", + "regex", + "regexp", + "regular" + ], + "verb": { + "related": { + "list": [ + "has-glob", + "is-glob", + "dotfile-regex", + "is-dotdir" + ] + } + } +} diff --git a/node_modules/is-equal-shallow/LICENSE b/node_modules/is-equal-shallow/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/is-equal-shallow/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-equal-shallow/README.md b/node_modules/is-equal-shallow/README.md new file mode 100644 index 0000000..1142276 --- /dev/null +++ b/node_modules/is-equal-shallow/README.md @@ -0,0 +1,90 @@ +# is-equal-shallow [![NPM version](https://badge.fury.io/js/is-equal-shallow.svg)](http://badge.fury.io/js/is-equal-shallow) [![Build Status](https://travis-ci.org/jonschlinkert/is-equal-shallow.svg)](https://travis-ci.org/jonschlinkert/is-equal-shallow) + +> Does a shallow comparison of two objects, returning false if the keys or values differ. + +The purpose of this lib is to do the fastest comparison possible of two objects when the values will predictably be primitives. + +* only compares objects. +* only compares the first level of each object +* values must be primitives. If a value is not a primitive, even if the values are the same, `false` is returned. 
+ +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-equal-shallow --save +``` + +## Usage + +```js +var equals = require('is-equal-shallow'); +equals(object_a, object_b); +``` + +**Examples** + +```js +equals({a: true, b: true}, {a: true, b: true}); +//=> 'true' + +equals({a: true, b: false}, {c: false, b: false}); +//=> 'false' + +equals({a: true, b: false}, {a: false, b: false}); +//=> 'false' +``` + +Strict comparison for equality: + +```js +equals({a: true, b: true}, {a: true, b: 'true'}); +//=> 'false' +``` + +When values are not primitives, `false` is always returned: + +```js +equals({ b: {}}, { b: {}}); +//=> 'false' + +equals({ b: []}, { b: []}); +//=> 'false' +``` + +## Related projects + +Other object utils: + +* [clone-deep](https://github.com/jonschlinkert/clone-deep): Recursively (deep) clone JavaScript native types, like Object, Array, RegExp, Date as well as primitives. +* [for-in](https://github.com/jonschlinkert/for-in): Iterate over the own and inherited enumerable properties of an objecte, and return an object… [more](https://github.com/jonschlinkert/for-in) +* [for-own](https://github.com/jonschlinkert/for-own): Iterate over the own enumerable properties of an object, and return an object with properties… [more](https://github.com/jonschlinkert/for-own) +* [is-plain-object](https://github.com/jonschlinkert/is-plain-object): Returns true if an object was created by the `Object` constructor. +* [isobject](https://github.com/jonschlinkert/isobject): Returns true if the value is an object and not an array or null. + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-equal-shallow/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on June 22, 2015._ \ No newline at end of file diff --git a/node_modules/is-equal-shallow/index.js b/node_modules/is-equal-shallow/index.js new file mode 100644 index 0000000..1006eef --- /dev/null +++ b/node_modules/is-equal-shallow/index.js @@ -0,0 +1,27 @@ +/*! + * is-equal-shallow + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +'use strict'; + +var isPrimitive = require('is-primitive'); + +module.exports = function isEqual(a, b) { + if (!a && !b) { return true; } + if (!a && b || a && !b) { return false; } + + var numKeysA = 0, numKeysB = 0, key; + for (key in b) { + numKeysB++; + if (!isPrimitive(b[key]) || !a.hasOwnProperty(key) || (a[key] !== b[key])) { + return false; + } + } + for (key in a) { + numKeysA++; + } + return numKeysA === numKeysB; +}; diff --git a/node_modules/is-equal-shallow/package.json b/node_modules/is-equal-shallow/package.json new file mode 100644 index 0000000..e35a8f5 --- /dev/null +++ b/node_modules/is-equal-shallow/package.json @@ -0,0 +1,54 @@ +{ + "name": "is-equal-shallow", + "description": "Does a shallow comparison of two objects, returning false if the keys or values differ.", + "version": "0.1.3", + "homepage": "https://github.com/jonschlinkert/is-equal-shallow", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/is-equal-shallow.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/is-equal-shallow/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-primitive": "^2.0.0" + }, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "compare", + "comparison", + "equal", + "equals", + "is", + "is-equal", + "key", + "object", + "same", + "shallow", + "value" + ], + "verbiage": { + "related": { + "description": "Other object utils:", + "list": ["is-plain-object", "isobject", "for-in", "for-own", "clone-deep"] + } + } +} diff --git a/node_modules/is-extendable/LICENSE b/node_modules/is-extendable/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/is-extendable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-extendable/README.md b/node_modules/is-extendable/README.md new file mode 100644 index 0000000..e4cfaeb --- /dev/null +++ b/node_modules/is-extendable/README.md @@ -0,0 +1,72 @@ +# is-extendable [![NPM version](https://badge.fury.io/js/is-extendable.svg)](http://badge.fury.io/js/is-extendable) + +> Returns true if a value is any of the object types: array, regexp, plain object, function or date. This is useful for determining if a value can be extended, e.g. 
"can the value have keys?" + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-extendable --save +``` + +## Usage + +```js +var isExtendable = require('is-extendable'); +``` + +Returns true if the value is any of the following: + +* `array` +* `regexp` +* `plain object` +* `function` +* `date` +* `error` + +## Notes + +All objects in JavaScript can have keys, but it's a pain to check for this, since we ether need to verify that the value is not `null` or `undefined` and: + +* the value is not a primitive, or +* that the object is an `object`, `function` + +Also note that an `extendable` object is not the same as an [extensible object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/isExtensible), which is one that (in es6) is not sealed, frozen, or marked as non-extensible using `preventExtensions`. + +## Related projects + +* [assign-deep](https://github.com/jonschlinkert/assign-deep): Deeply assign the enumerable properties of source objects to a destination object. +* [extend-shallow](https://github.com/jonschlinkert/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. +* [isobject](https://github.com/jonschlinkert/isobject): Returns true if the value is an object and not an array or null. +* [is-plain-object](https://github.com/jonschlinkert/is-plain-object): Returns true if an object was created by the `Object` constructor. +* [is-equal-shallow](https://github.com/jonschlinkert/is-equal-shallow): Does a shallow comparison of two objects, returning false if the keys or values differ. +* [kind-of](https://github.com/jonschlinkert/kind-of): Get the native type of a value. + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-extendable/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on July 04, 2015._ \ No newline at end of file diff --git a/node_modules/is-extendable/index.js b/node_modules/is-extendable/index.js new file mode 100644 index 0000000..4ee71a4 --- /dev/null +++ b/node_modules/is-extendable/index.js @@ -0,0 +1,13 @@ +/*! + * is-extendable + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +module.exports = function isExtendable(val) { + return typeof val !== 'undefined' && val !== null + && (typeof val === 'object' || typeof val === 'function'); +}; diff --git a/node_modules/is-extendable/package.json b/node_modules/is-extendable/package.json new file mode 100644 index 0000000..5dd006e --- /dev/null +++ b/node_modules/is-extendable/package.json @@ -0,0 +1,51 @@ +{ + "name": "is-extendable", + "description": "Returns true if a value is any of the object types: array, regexp, plain object, function or date. This is useful for determining if a value can be extended, e.g. 
\"can the value have keys?\"", + "version": "0.1.1", + "homepage": "https://github.com/jonschlinkert/is-extendable", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extendable", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extendable/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "*" + }, + "keywords": [ + "array", + "assign", + "check", + "date", + "extend", + "extensible", + "function", + "is", + "object", + "regex", + "test" + ], + "verbiage": { + "related": { + "list": [ + "isobject", + "is-plain-object", + "kind-of", + "is-extendable", + "is-equal-shallow", + "extend-shallow", + "assign-deep" + ] + } + } +} diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md new file mode 100644 index 0000000..80e7128 --- /dev/null +++ b/node_modules/is-extglob/README.md @@ -0,0 +1,75 @@ +# is-extglob [![NPM version](https://badge.fury.io/js/is-extglob.svg)](http://badge.fury.io/js/is-extglob) [![Build Status](https://travis-ci.org/jonschlinkert/is-extglob.svg)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. + +## Install with [npm](npmjs.org) + +```bash +npm i is-extglob --save +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## Related +* [extglob](https://github.com/jonschlinkert/extglob): Extended globs. extglobs add the expressive power of regular expressions to glob patterns. +* [micromatch](https://github.com/jonschlinkert/micromatch): Glob matching for javascript/node.js. 
A faster alternative to minimatch (10-45x faster on avg), with all the features you're used to using in your Grunt and gulp tasks. +* [parse-glob](https://github.com/jonschlinkert/parse-glob): Parse a glob pattern into an object of tokens. + +## Run tests +Install dev dependencies. + +```bash +npm i -d && npm test +``` + + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-extglob/issues) + + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 06, 2015._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js new file mode 100644 index 0000000..803047f --- /dev/null +++ b/node_modules/is-extglob/index.js @@ -0,0 +1,11 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + return typeof str === 'string' + && /[@?!+*]\(/.test(str); +}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json new file mode 100644 index 0000000..1056eac --- /dev/null +++ b/node_modules/is-extglob/package.json @@ -0,0 +1,48 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "1.0.0", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js" + }, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "extglob", + "expression", + "glob", + "globbing", + "globstar", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ] +} \ No newline at end of file diff --git a/node_modules/is-finite/index.js b/node_modules/is-finite/index.js new file mode 100644 index 0000000..8067387 --- /dev/null +++ b/node_modules/is-finite/index.js @@ -0,0 +1,6 @@ +'use strict'; +var numberIsNan = require('number-is-nan'); + +module.exports = Number.isFinite || function (val) { + return !(typeof val !== 'number' || numberIsNan(val) || val === Infinity || val === -Infinity); +}; diff --git a/node_modules/is-finite/license b/node_modules/is-finite/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/is-finite/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission 
notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-finite/package.json b/node_modules/is-finite/package.json new file mode 100644 index 0000000..8d65d25 --- /dev/null +++ b/node_modules/is-finite/package.json @@ -0,0 +1,36 @@ +{ + "name": "is-finite", + "version": "1.0.2", + "description": "ES2015 Number.isFinite() ponyfill", + "license": "MIT", + "repository": "sindresorhus/is-finite", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "es2015", + "ponyfill", + "polyfill", + "shim", + "number", + "finite", + "is" + ], + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "devDependencies": { + "ava": "*" + } +} diff --git a/node_modules/is-finite/readme.md b/node_modules/is-finite/readme.md new file mode 100644 index 0000000..567710c --- /dev/null +++ b/node_modules/is-finite/readme.md @@ -0,0 +1,28 @@ +# is-finite [![Build Status](https://travis-ci.org/sindresorhus/is-finite.svg?branch=master)](https://travis-ci.org/sindresorhus/is-finite) + +> ES2015 [`Number.isFinite()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite) [ponyfill](https://ponyfill.com) + + +## Install + +```sh +$ npm install --save is-finite +``` + + +## Usage + +```js +var numIsFinite = require('is-finite'); + +numIsFinite(4); +//=> true + +numIsFinite(Infinity); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-fullwidth-code-point/index.js b/node_modules/is-fullwidth-code-point/index.js new file mode 100644 index 0000000..a7d3e38 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/index.js @@ -0,0 +1,46 @@ +'use strict'; +var numberIsNan = require('number-is-nan'); + +module.exports = function (x) { + if (numberIsNan(x)) { + return false; + } + + // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369 + + // code points are derived from: + // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt + if (x >= 0x1100 && ( + x <= 0x115f || // Hangul Jamo + 0x2329 === x || // LEFT-POINTING ANGLE BRACKET + 0x232a === x || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + 0x3250 <= x && x <= 0x4dbf || + // CJK Unified Ideographs .. Yi Radicals + 0x4e00 <= x && x <= 0xa4c6 || + // Hangul Jamo Extended-A + 0xa960 <= x && x <= 0xa97c || + // Hangul Syllables + 0xac00 <= x && x <= 0xd7a3 || + // CJK Compatibility Ideographs + 0xf900 <= x && x <= 0xfaff || + // Vertical Forms + 0xfe10 <= x && x <= 0xfe19 || + // CJK Compatibility Forms .. 
Small Form Variants + 0xfe30 <= x && x <= 0xfe6b || + // Halfwidth and Fullwidth Forms + 0xff01 <= x && x <= 0xff60 || + 0xffe0 <= x && x <= 0xffe6 || + // Kana Supplement + 0x1b000 <= x && x <= 0x1b001 || + // Enclosed Ideographic Supplement + 0x1f200 <= x && x <= 0x1f251 || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + 0x20000 <= x && x <= 0x3fffd)) { + return true; + } + + return false; +} diff --git a/node_modules/is-fullwidth-code-point/license b/node_modules/is-fullwidth-code-point/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/is-fullwidth-code-point/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-fullwidth-code-point/package.json b/node_modules/is-fullwidth-code-point/package.json new file mode 100644 index 0000000..b678d40 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/package.json @@ -0,0 +1,45 @@ +{ + "name": "is-fullwidth-code-point", + "version": "1.0.0", + "description": "Check if the character represented by a given Unicode code point is fullwidth", + "license": "MIT", + "repository": "sindresorhus/is-fullwidth-code-point", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "fullwidth", + "full-width", + "full", + "width", + "unicode", + "character", + "char", + "string", + "str", + "codepoint", + "code", + "point", + "is", + "detect", + "check" + ], + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "devDependencies": { + "ava": "0.0.4", + "code-point-at": "^1.0.0" + } +} diff --git a/node_modules/is-fullwidth-code-point/readme.md b/node_modules/is-fullwidth-code-point/readme.md new file mode 100644 index 0000000..4936464 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/readme.md @@ -0,0 +1,39 @@ +# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) + +> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) + + +## Install + +``` +$ npm install --save is-fullwidth-code-point +``` + + +## Usage + +```js +var isFullwidthCodePoint = 
require('is-fullwidth-code-point'); + +isFullwidthCodePoint('谢'.codePointAt()); +//=> true + +isFullwidthCodePoint('a'.codePointAt()); +//=> false +``` + + +## API + +### isFullwidthCodePoint(input) + +#### input + +Type: `number` + +[Code point](https://en.wikipedia.org/wiki/Code_point) of a character. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md new file mode 100644 index 0000000..b162542 --- /dev/null +++ b/node_modules/is-glob/README.md @@ -0,0 +1,105 @@ +# is-glob [![NPM version](https://badge.fury.io/js/is-glob.svg)](http://badge.fury.io/js/is-glob) [![Build Status](https://travis-ci.org/jonschlinkert/is-glob.svg)](https://travis-ci.org/jonschlinkert/is-glob) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Also take a look at [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). 
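The paragraph above motivates is-glob as a cheap pre-check so that heavier glob machinery only runs when needed. A minimal sketch of that pattern, assuming node-glob is installed separately as `glob` (the `resolveFiles` helper is purely illustrative, not part of the upstream README):

```js
// Skip glob expansion entirely when the input is a plain path.
var isGlob = require('is-glob');
var glob = require('glob'); // node-glob, assumed available

function resolveFiles(pattern) {
  // Only pay the cost of node-glob when the string is actually a glob.
  return isGlob(pattern) ? glob.sync(pattern) : [pattern];
}

console.log(resolveFiles('src/*.js'));     // expanded via node-glob
console.log(resolveFiles('src/index.js')); // returned as-is, no glob work
```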
+ +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-glob --save +``` + +## Usage + +```js +var isGlob = require('is-glob'); +``` + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +isGlob('abc/?.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +## Related + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob) +* [is-extglob](https://www.npmjs.com/package/is-extglob): Returns true if a string has an extglob. | [homepage](https://github.com/jonschlinkert/is-extglob) +* [is-posix-bracket](https://www.npmjs.com/package/is-posix-bracket): Returns true if the given string is a POSIX bracket expression (POSIX character class). | [homepage](https://github.com/jonschlinkert/is-posix-bracket) +* [is-valid-glob](https://www.npmjs.com/package/is-valid-glob): Return true if a value is a valid glob pattern or patterns. | [homepage](https://github.com/jonschlinkert/is-valid-glob) +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just… [more](https://www.npmjs.com/package/micromatch) | [homepage](https://github.com/jonschlinkert/micromatch) + +## Run tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-glob/issues/new). + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on October 02, 2015._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js new file mode 100644 index 0000000..ef27bba --- /dev/null +++ b/node_modules/is-glob/index.js @@ -0,0 +1,14 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +var isExtglob = require('is-extglob'); + +module.exports = function isGlob(str) { + return typeof str === 'string' + && (/[*!?{}(|)[\]]/.test(str) + || isExtglob(str)); +}; \ No newline at end of file diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json new file mode 100644 index 0000000..7cf6eef --- /dev/null +++ b/node_modules/is-glob/package.json @@ -0,0 +1,60 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "2.0.1", + "homepage": "https://github.com/jonschlinkert/is-glob", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": "jonschlinkert/is-glob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-extglob": "^1.0.0" + }, + "devDependencies": { + "mocha": "*" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "extglob", + "expression", + "glob", + "globbing", + "globstar", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "related": { + "list": [ + "has-glob", + "is-extglob", + "is-posix-bracket", + "is-valid-glob", + "micromatch" + ] + } + } +} \ No newline at end of file diff --git a/node_modules/is-my-json-valid/.npmignore b/node_modules/is-my-json-valid/.npmignore new file mode 100644 index 0000000..dbb0721 --- /dev/null +++ b/node_modules/is-my-json-valid/.npmignore @@ -0,0 +1,2 @@ +node_modules +cosmicrealms.com diff --git a/node_modules/is-my-json-valid/.travis.yml b/node_modules/is-my-json-valid/.travis.yml new file mode 100644 index 0000000..6e5919d --- /dev/null +++ b/node_modules/is-my-json-valid/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" diff --git a/node_modules/is-my-json-valid/LICENSE b/node_modules/is-my-json-valid/LICENSE new file mode 100644 index 0000000..757562e --- /dev/null +++ b/node_modules/is-my-json-valid/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/is-my-json-valid/README.md b/node_modules/is-my-json-valid/README.md new file mode 100644 index 0000000..104a425 --- /dev/null +++ b/node_modules/is-my-json-valid/README.md @@ -0,0 +1,173 @@ +# is-my-json-valid + +A [JSONSchema](http://json-schema.org/) validator that uses code generation +to be extremely fast + +``` +npm install is-my-json-valid +``` + +It passes the entire JSONSchema v4 test suite except for `remoteRefs` and `maxLength`/`minLength` when using unicode surrogate pairs. 
+ +[![build status](http://img.shields.io/travis/mafintosh/is-my-json-valid.svg?style=flat)](http://travis-ci.org/mafintosh/is-my-json-valid) + +## Usage + +Simply pass a schema to compile it + +``` js +var validator = require('is-my-json-valid') + +var validate = validator({ + required: true, + type: 'object', + properties: { + hello: { + required: true, + type: 'string' + } + } +}) + +console.log('should be valid', validate({hello: 'world'})) +console.log('should not be valid', validate({})) + +// get the last list of errors by checking validate.errors +// the following will print [{field: 'data.hello', message: 'is required'}] +console.log(validate.errors) +``` + +You can also pass the schema as a string + +``` js +var validate = validator('{"type": ... }') +``` + +Optionally you can use the require submodule to load a schema from `__dirname` + +``` js +var validator = require('is-my-json-valid/require') +var validate = validator('my-schema.json') +``` + +## Custom formats + +is-my-json-valid supports the formats specified in JSON schema v4 (such as date-time). +If you want to add your own custom formats pass them as the formats options to the validator + +``` js +var validate = validator({ + type: 'string', + required: true, + format: 'only-a' +}, { + formats: { + 'only-a': /^a+$/ + } +}) + +console.log(validate('aa')) // true +console.log(validate('ab')) // false +``` + +## External schemas + +You can pass in external schemas that you reference using the `$ref` attribute as the `schemas` option + +``` js +var ext = { + required: true, + type: 'string' +} + +var schema = { + $ref: '#ext' // references another schema called ext +} + +// pass the external schemas as an option +var validate = validator(schema, {schemas: {ext: ext}}) + +validate('hello') // returns true +validate(42) // return false +``` + +## Filtering away additional properties + +is-my-json-valid supports filtering away properties not in the schema + +``` js +var filter = validator.filter({ + required: true, + type: 'object', + properties: { + hello: {type: 'string', required: true} + }, + additionalProperties: false +}) + +var doc = {hello: 'world', notInSchema: true} +console.log(filter(doc)) // {hello: 'world'} +``` + +## Verbose mode outputs the value on errors + +is-my-json-valid outputs the value causing an error when verbose is set to true + +``` js +var validate = validator({ + required: true, + type: 'object', + properties: { + hello: { + required: true, + type: 'string' + } + } +}, { + verbose: true +}) + +validate({hello: 100}); +console.log(validate.errors) // {field: 'data.hello', message: 'is the wrong type', value: 100, type: 'string'} +``` + +## Greedy mode tries to validate as much as possible + +By default is-my-json-valid bails on first validation error but when greedy is +set to true it tries to validate as much as possible: + +``` js +var validate = validator({ + type: 'object', + properties: { + x: { + type: 'number' + } + }, + required: ['x', 'y'] +}, { + greedy: true +}); + +validate({x: 'string'}); +console.log(validate.errors) // [{field: 'data.y', message: 'is required'}, + // {field: 'data.x', message: 'is the wrong type'}] +``` + +## Performance + +is-my-json-valid uses code generation to turn your JSON schema into basic javascript code that is easily optimizeable by v8. 
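Because the schema is turned into generated JavaScript once up front, the usual pattern is to compile a single time and reuse the returned function on every document. A minimal sketch using the same API as the Usage section above (the `docs` array is made up for illustration):

``` js
var validator = require('is-my-json-valid')

// Compile once: this is the code-generation step and the expensive part.
var validate = validator({
  type: 'object',
  properties: {
    hello: { required: true, type: 'string' }
  }
})

// Reuse the generated validator in a hot loop; each call is plain JS.
var docs = [{hello: 'world'}, {}, {hello: 42}]
docs.forEach(function (doc) {
  console.log(validate(doc), validate.errors)
})
```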
+ +At the time of writing, is-my-json-valid is the __fastest validator__ when running + +* [json-schema-benchmark](https://github.com/Muscula/json-schema-benchmark) +* [cosmicreals.com benchmark](http://cosmicrealms.com/blog/2014/08/29/benchmark-of-node-dot-js-json-validation-modules-part-3/) +* [jsck benchmark](https://github.com/pandastrike/jsck/issues/72#issuecomment-70992684) +* [themis benchmark](https://cdn.rawgit.com/playlyfe/themis/master/benchmark/results.html) +* [z-schema benchmark](https://rawgit.com/zaggino/z-schema/master/benchmark/results.html) + +If you know any other relevant benchmarks open a PR and I'll add them. + +## License + +MIT diff --git a/node_modules/is-my-json-valid/example.js b/node_modules/is-my-json-valid/example.js new file mode 100644 index 0000000..f70f4df --- /dev/null +++ b/node_modules/is-my-json-valid/example.js @@ -0,0 +1,18 @@ +var validator = require('./') + +var validate = validator({ + type: 'object', + properties: { + hello: { + required: true, + type: 'string' + } + } +}) + +console.log('should be valid', validate({hello: 'world'})) +console.log('should not be valid', validate({})) + +// get the last error message by checking validate.error +// the following will print "data.hello is required" +console.log('the errors were:', validate.errors) diff --git a/node_modules/is-my-json-valid/formats.js b/node_modules/is-my-json-valid/formats.js new file mode 100644 index 0000000..9cb8380 --- /dev/null +++ b/node_modules/is-my-json-valid/formats.js @@ -0,0 +1,14 @@ +exports['date-time'] = /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-[0-9]{2}[tT ]\d{2}:\d{2}:\d{2}(\.\d+)?([zZ]|[+-]\d{2}:\d{2})$/ +exports['date'] = /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-[0-9]{2}$/ +exports['time'] = /^\d{2}:\d{2}:\d{2}$/ +exports['email'] = /^\S+@\S+$/ +exports['ip-address'] = exports['ipv4'] = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/ +exports['ipv6'] = /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/ +exports['uri'] = /^[a-zA-Z][a-zA-Z0-9+-.]*:[^\s]*$/ +exports['color'] = /(#?([0-9A-Fa-f]{3,6})\b)|(aqua)|(black)|(blue)|(fuchsia)|(gray)|(green)|(lime)|(maroon)|(navy)|(olive)|(orange)|(purple)|(red)|(silver)|(teal)|(white)|(yellow)|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\))/ +exports['hostname'] = 
/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$/ +exports['alpha'] = /^[a-zA-Z]+$/ +exports['alphanumeric'] = /^[a-zA-Z0-9]+$/ +exports['style'] = /\s*(.+?):\s*([^;]+);?/g +exports['phone'] = /^\+(?:[0-9] ?){6,14}[0-9]$/ +exports['utc-millisec'] = /^[0-9]{1,15}\.?[0-9]{0,15}$/ diff --git a/node_modules/is-my-json-valid/index.js b/node_modules/is-my-json-valid/index.js new file mode 100644 index 0000000..779cfe2 --- /dev/null +++ b/node_modules/is-my-json-valid/index.js @@ -0,0 +1,594 @@ +var genobj = require('generate-object-property') +var genfun = require('generate-function') +var jsonpointer = require('jsonpointer') +var xtend = require('xtend') +var formats = require('./formats') + +var get = function(obj, additionalSchemas, ptr) { + + var visit = function(sub) { + if (sub && sub.id === ptr) return sub + if (typeof sub !== 'object' || !sub) return null + return Object.keys(sub).reduce(function(res, k) { + return res || visit(sub[k]) + }, null) + } + + var res = visit(obj) + if (res) return res + + ptr = ptr.replace(/^#/, '') + ptr = ptr.replace(/\/$/, '') + + try { + return jsonpointer.get(obj, decodeURI(ptr)) + } catch (err) { + var end = ptr.indexOf('#') + var other + // external reference + if (end !== 0) { + // fragment doesn't exist. + if (end === -1) { + other = additionalSchemas[ptr] + } else { + var ext = ptr.slice(0, end) + other = additionalSchemas[ext] + var fragment = ptr.slice(end).replace(/^#/, '') + try { + return jsonpointer.get(other, fragment) + } catch (err) {} + } + } else { + other = additionalSchemas[ptr] + } + return other || null + } +} + +var formatName = function(field) { + field = JSON.stringify(field) + var pattern = /\[([^\[\]"]+)\]/ + while (pattern.test(field)) field = field.replace(pattern, '."+$1+"') + return field +} + +var types = {} + +types.any = function() { + return 'true' +} + +types.null = function(name) { + return name+' === null' +} + +types.boolean = function(name) { + return 'typeof '+name+' === "boolean"' +} + +types.array = function(name) { + return 'Array.isArray('+name+')' +} + +types.object = function(name) { + return 'typeof '+name+' === "object" && '+name+' && !Array.isArray('+name+')' +} + +types.number = function(name) { + return 'typeof '+name+' === "number"' +} + +types.integer = function(name) { + return 'typeof '+name+' === "number" && (Math.floor('+name+') === '+name+' || '+name+' > 9007199254740992 || '+name+' < -9007199254740992)' +} + +types.string = function(name) { + return 'typeof '+name+' === "string"' +} + +var unique = function(array) { + var list = [] + for (var i = 0; i < array.length; i++) { + list.push(typeof array[i] === 'object' ? JSON.stringify(array[i]) : array[i]) + } + for (var i = 1; i < list.length; i++) { + if (list.indexOf(list[i]) !== i) return false + } + return true +} + +var isMultipleOf = function(name, multipleOf) { + var res; + var factor = ((multipleOf | 0) !== multipleOf) ? Math.pow(10, multipleOf.toString().split('.').pop().length) : 1 + if (factor > 1) { + var factorName = ((name | 0) !== name) ? Math.pow(10, name.toString().split('.').pop().length) : 1 + if (factorName > factor) res = true + else res = Math.round(factor * name) % (factor * multipleOf) + } + else res = name % multipleOf; + return !res; +} + +var toType = function(node) { + return node.type +} + +var compile = function(schema, cache, root, reporter, opts) { + var fmts = opts ? 
xtend(formats, opts.formats) : formats + var scope = {unique:unique, formats:fmts, isMultipleOf:isMultipleOf} + var verbose = opts ? !!opts.verbose : false; + var greedy = opts && opts.greedy !== undefined ? + opts.greedy : false; + + var syms = {} + var gensym = function(name) { + return name+(syms[name] = (syms[name] || 0)+1) + } + + var reversePatterns = {} + var patterns = function(p) { + if (reversePatterns[p]) return reversePatterns[p] + var n = gensym('pattern') + scope[n] = new RegExp(p) + reversePatterns[p] = n + return n + } + + var vars = ['i','j','k','l','m','n','o','p','q','r','s','t','u','v','x','y','z'] + var genloop = function() { + var v = vars.shift() + vars.push(v+v[0]) + return v + } + + var visit = function(name, node, reporter, filter) { + var properties = node.properties + var type = node.type + var tuple = false + + if (Array.isArray(node.items)) { // tuple type + properties = {} + node.items.forEach(function(item, i) { + properties[i] = item + }) + type = 'array' + tuple = true + } + + var indent = 0 + var error = function(msg, prop, value) { + validate('errors++') + if (reporter === true) { + validate('if (validate.errors === null) validate.errors = []') + if (verbose) { + validate('validate.errors.push({field:%s,message:%s,value:%s,type:%s})', formatName(prop || name), JSON.stringify(msg), value || name, JSON.stringify(type)) + } else { + validate('validate.errors.push({field:%s,message:%s})', formatName(prop || name), JSON.stringify(msg)) + } + } + } + + if (node.required === true) { + indent++ + validate('if (%s === undefined) {', name) + error('is required') + validate('} else {') + } else { + indent++ + validate('if (%s !== undefined) {', name) + } + + var valid = [].concat(type) + .map(function(t) { + return types[t || 'any'](name) + }) + .join(' || ') || 'true' + + if (valid !== 'true') { + indent++ + validate('if (!(%s)) {', valid) + error('is the wrong type') + validate('} else {') + } + + if (tuple) { + if (node.additionalItems === false) { + validate('if (%s.length > %d) {', name, node.items.length) + error('has additional items') + validate('}') + } else if (node.additionalItems) { + var i = genloop() + validate('for (var %s = %d; %s < %s.length; %s++) {', i, node.items.length, i, name, i) + visit(name+'['+i+']', node.additionalItems, reporter, filter) + validate('}') + } + } + + if (node.format && fmts[node.format]) { + if (type !== 'string' && formats[node.format]) validate('if (%s) {', types.string(name)) + var n = gensym('format') + scope[n] = fmts[node.format] + + if (typeof scope[n] === 'function') validate('if (!%s(%s)) {', n, name) + else validate('if (!%s.test(%s)) {', n, name) + error('must be '+node.format+' format') + validate('}') + if (type !== 'string' && formats[node.format]) validate('}') + } + + if (Array.isArray(node.required)) { + var isUndefined = function(req) { + return genobj(name, req) + ' === undefined' + } + + var checkRequired = function (req) { + var prop = genobj(name, req); + validate('if (%s === undefined) {', prop) + error('is required', prop) + validate('missing++') + validate('}') + } + validate('if ((%s)) {', type !== 'object' ? 
types.object(name) : 'true') + validate('var missing = 0') + node.required.map(checkRequired) + validate('}'); + if (!greedy) { + validate('if (missing === 0) {') + indent++ + } + } + + if (node.uniqueItems) { + if (type !== 'array') validate('if (%s) {', types.array(name)) + validate('if (!(unique(%s))) {', name) + error('must be unique') + validate('}') + if (type !== 'array') validate('}') + } + + if (node.enum) { + var complex = node.enum.some(function(e) { + return typeof e === 'object' + }) + + var compare = complex ? + function(e) { + return 'JSON.stringify('+name+')'+' !== JSON.stringify('+JSON.stringify(e)+')' + } : + function(e) { + return name+' !== '+JSON.stringify(e) + } + + validate('if (%s) {', node.enum.map(compare).join(' && ') || 'false') + error('must be an enum value') + validate('}') + } + + if (node.dependencies) { + if (type !== 'object') validate('if (%s) {', types.object(name)) + + Object.keys(node.dependencies).forEach(function(key) { + var deps = node.dependencies[key] + if (typeof deps === 'string') deps = [deps] + + var exists = function(k) { + return genobj(name, k) + ' !== undefined' + } + + if (Array.isArray(deps)) { + validate('if (%s !== undefined && !(%s)) {', genobj(name, key), deps.map(exists).join(' && ') || 'true') + error('dependencies not set') + validate('}') + } + if (typeof deps === 'object') { + validate('if (%s !== undefined) {', genobj(name, key)) + visit(name, deps, reporter, filter) + validate('}') + } + }) + + if (type !== 'object') validate('}') + } + + if (node.additionalProperties || node.additionalProperties === false) { + if (type !== 'object') validate('if (%s) {', types.object(name)) + + var i = genloop() + var keys = gensym('keys') + + var toCompare = function(p) { + return keys+'['+i+'] !== '+JSON.stringify(p) + } + + var toTest = function(p) { + return '!'+patterns(p)+'.test('+keys+'['+i+'])' + } + + var additionalProp = Object.keys(properties || {}).map(toCompare) + .concat(Object.keys(node.patternProperties || {}).map(toTest)) + .join(' && ') || 'true' + + validate('var %s = Object.keys(%s)', keys, name) + ('for (var %s = 0; %s < %s.length; %s++) {', i, i, keys, i) + ('if (%s) {', additionalProp) + + if (node.additionalProperties === false) { + if (filter) validate('delete %s', name+'['+keys+'['+i+']]') + error('has additional properties', null, JSON.stringify(name+'.') + ' + ' + keys + '['+i+']') + } else { + visit(name+'['+keys+'['+i+']]', node.additionalProperties, reporter, filter) + } + + validate + ('}') + ('}') + + if (type !== 'object') validate('}') + } + + if (node.$ref) { + var sub = get(root, opts && opts.schemas || {}, node.$ref) + if (sub) { + var fn = cache[node.$ref] + if (!fn) { + cache[node.$ref] = function proxy(data) { + return fn(data) + } + fn = compile(sub, cache, root, false, opts) + } + var n = gensym('ref') + scope[n] = fn + validate('if (!(%s(%s))) {', n, name) + error('referenced schema does not match') + validate('}') + } + } + + if (node.not) { + var prev = gensym('prev') + validate('var %s = errors', prev) + visit(name, node.not, false, filter) + validate('if (%s === errors) {', prev) + error('negative schema matches') + validate('} else {') + ('errors = %s', prev) + ('}') + } + + if (node.items && !tuple) { + if (type !== 'array') validate('if (%s) {', types.array(name)) + + var i = genloop() + validate('for (var %s = 0; %s < %s.length; %s++) {', i, i, name, i) + visit(name+'['+i+']', node.items, reporter, filter) + validate('}') + + if (type !== 'array') validate('}') + } + + if 
(node.patternProperties) { + if (type !== 'object') validate('if (%s) {', types.object(name)) + var keys = gensym('keys') + var i = genloop() + validate + ('var %s = Object.keys(%s)', keys, name) + ('for (var %s = 0; %s < %s.length; %s++) {', i, i, keys, i) + + Object.keys(node.patternProperties).forEach(function(key) { + var p = patterns(key) + validate('if (%s.test(%s)) {', p, keys+'['+i+']') + visit(name+'['+keys+'['+i+']]', node.patternProperties[key], reporter, filter) + validate('}') + }) + + validate('}') + if (type !== 'object') validate('}') + } + + if (node.pattern) { + var p = patterns(node.pattern) + if (type !== 'string') validate('if (%s) {', types.string(name)) + validate('if (!(%s.test(%s))) {', p, name) + error('pattern mismatch') + validate('}') + if (type !== 'string') validate('}') + } + + if (node.allOf) { + node.allOf.forEach(function(sch) { + visit(name, sch, reporter, filter) + }) + } + + if (node.anyOf && node.anyOf.length) { + var prev = gensym('prev') + + node.anyOf.forEach(function(sch, i) { + if (i === 0) { + validate('var %s = errors', prev) + } else { + validate('if (errors !== %s) {', prev) + ('errors = %s', prev) + } + visit(name, sch, false, false) + }) + node.anyOf.forEach(function(sch, i) { + if (i) validate('}') + }) + validate('if (%s !== errors) {', prev) + error('no schemas match') + validate('}') + } + + if (node.oneOf && node.oneOf.length) { + var prev = gensym('prev') + var passes = gensym('passes') + + validate + ('var %s = errors', prev) + ('var %s = 0', passes) + + node.oneOf.forEach(function(sch, i) { + visit(name, sch, false, false) + validate('if (%s === errors) {', prev) + ('%s++', passes) + ('} else {') + ('errors = %s', prev) + ('}') + }) + + validate('if (%s !== 1) {', passes) + error('no (or more than one) schemas match') + validate('}') + } + + if (node.multipleOf !== undefined) { + if (type !== 'number' && type !== 'integer') validate('if (%s) {', types.number(name)) + + validate('if (!isMultipleOf(%s, %d)) {', name, node.multipleOf) + + error('has a remainder') + validate('}') + + if (type !== 'number' && type !== 'integer') validate('}') + } + + if (node.maxProperties !== undefined) { + if (type !== 'object') validate('if (%s) {', types.object(name)) + + validate('if (Object.keys(%s).length > %d) {', name, node.maxProperties) + error('has more properties than allowed') + validate('}') + + if (type !== 'object') validate('}') + } + + if (node.minProperties !== undefined) { + if (type !== 'object') validate('if (%s) {', types.object(name)) + + validate('if (Object.keys(%s).length < %d) {', name, node.minProperties) + error('has less properties than allowed') + validate('}') + + if (type !== 'object') validate('}') + } + + if (node.maxItems !== undefined) { + if (type !== 'array') validate('if (%s) {', types.array(name)) + + validate('if (%s.length > %d) {', name, node.maxItems) + error('has more items than allowed') + validate('}') + + if (type !== 'array') validate('}') + } + + if (node.minItems !== undefined) { + if (type !== 'array') validate('if (%s) {', types.array(name)) + + validate('if (%s.length < %d) {', name, node.minItems) + error('has less items than allowed') + validate('}') + + if (type !== 'array') validate('}') + } + + if (node.maxLength !== undefined) { + if (type !== 'string') validate('if (%s) {', types.string(name)) + + validate('if (%s.length > %d) {', name, node.maxLength) + error('has longer length than allowed') + validate('}') + + if (type !== 'string') validate('}') + } + + if (node.minLength !== undefined) 
{ + if (type !== 'string') validate('if (%s) {', types.string(name)) + + validate('if (%s.length < %d) {', name, node.minLength) + error('has less length than allowed') + validate('}') + + if (type !== 'string') validate('}') + } + + if (node.minimum !== undefined) { + if (type !== 'number' && type !== 'integer') validate('if (%s) {', types.number(name)) + + validate('if (%s %s %d) {', name, node.exclusiveMinimum ? '<=' : '<', node.minimum) + error('is less than minimum') + validate('}') + + if (type !== 'number' && type !== 'integer') validate('}') + } + + if (node.maximum !== undefined) { + if (type !== 'number' && type !== 'integer') validate('if (%s) {', types.number(name)) + + validate('if (%s %s %d) {', name, node.exclusiveMaximum ? '>=' : '>', node.maximum) + error('is more than maximum') + validate('}') + + if (type !== 'number' && type !== 'integer') validate('}') + } + + if (properties) { + Object.keys(properties).forEach(function(p) { + if (Array.isArray(type) && type.indexOf('null') !== -1) validate('if (%s !== null) {', name) + + visit(genobj(name, p), properties[p], reporter, filter) + + if (Array.isArray(type) && type.indexOf('null') !== -1) validate('}') + }) + } + + while (indent--) validate('}') + } + + var validate = genfun + ('function validate(data) {') + // Since undefined is not a valid JSON value, we coerce to null and other checks will catch this + ('if (data === undefined) data = null') + ('validate.errors = null') + ('var errors = 0') + + visit('data', schema, reporter, opts && opts.filter) + + validate + ('return errors === 0') + ('}') + + validate = validate.toFunction(scope) + validate.errors = null + + if (Object.defineProperty) { + Object.defineProperty(validate, 'error', { + get: function() { + if (!validate.errors) return '' + return validate.errors.map(function(err) { + return err.field + ' ' + err.message; + }).join('\n') + } + }) + } + + validate.toJSON = function() { + return schema + } + + return validate +} + +module.exports = function(schema, opts) { + if (typeof schema === 'string') schema = JSON.parse(schema) + return compile(schema, {}, schema, true, opts) +} + +module.exports.filter = function(schema, opts) { + var validate = module.exports(schema, xtend(opts, {filter: true})) + return function(sch) { + validate(sch) + return sch + } +} diff --git a/node_modules/is-my-json-valid/package.json b/node_modules/is-my-json-valid/package.json new file mode 100644 index 0000000..12a98f8 --- /dev/null +++ b/node_modules/is-my-json-valid/package.json @@ -0,0 +1,34 @@ +{ + "name": "is-my-json-valid", + "version": "2.15.0", + "description": "A JSONSchema validator that uses code generation to be extremely fast", + "main": "index.js", + "dependencies": { + "generate-function": "^2.0.0", + "generate-object-property": "^1.1.0", + "jsonpointer": "^4.0.0", + "xtend": "^4.0.0" + }, + "devDependencies": { + "tape": "^2.13.4" + }, + "scripts": { + "test": "tape test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/is-my-json-valid" + }, + "keywords": [ + "json", + "schema", + "orderly", + "jsonschema" + ], + "author": "Mathias Buus", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/is-my-json-valid/issues" + }, + "homepage": "https://github.com/mafintosh/is-my-json-valid" +} diff --git a/node_modules/is-my-json-valid/require.js b/node_modules/is-my-json-valid/require.js new file mode 100644 index 0000000..0bfb8a2 --- /dev/null +++ b/node_modules/is-my-json-valid/require.js @@ -0,0 +1,12 @@ +var fs = 
require('fs') +var path = require('path') +var compile = require('./') + +delete require.cache[require.resolve(__filename)] + +module.exports = function(file, opts) { + file = path.join(path.dirname(module.parent.filename), file) + if (!fs.existsSync(file) && fs.existsSync(file+'.schema')) file += '.schema' + if (!fs.existsSync(file) && fs.existsSync(file+'.json')) file += '.json' + return compile(fs.readFileSync(file, 'utf-8'), opts) +} diff --git a/node_modules/is-my-json-valid/test/fixtures/cosmic.js b/node_modules/is-my-json-valid/test/fixtures/cosmic.js new file mode 100644 index 0000000..4e0a34b --- /dev/null +++ b/node_modules/is-my-json-valid/test/fixtures/cosmic.js @@ -0,0 +1,84 @@ +exports.valid = { + fullName : "John Doe", + age : 47, + state : "Massachusetts", + city : "Boston", + zip : 16417, + married : false, + dozen : 12, + dozenOrBakersDozen : 13, + favoriteEvenNumber : 14, + topThreeFavoriteColors : [ "red", "blue", "green" ], + favoriteSingleDigitWholeNumbers : [ 7 ], + favoriteFiveLetterWord : "coder", + emailAddresses : + [ + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@letters-in-local.org", + "01234567890@numbers-in-local.net", + "&'*+-./=?^_{}~@other-valid-characters-in-local.net", + "mixed-1234-in-{+^}-local@sld.net", + "a@single-character-in-local.org", + "\"quoted\"@sld.com", + "\"\\e\\s\\c\\a\\p\\e\\d\"@sld.com", + "\"quoted-at-sign@sld.org\"@sld.com", + "\"escaped\\\"quote\"@sld.com", + "\"back\\slash\"@sld.com", + "one-character-third-level@a.example.com", + "single-character-in-sld@x.org", + "local@dash-in-sld.com", + "letters-in-sld@123.com", + "one-letter-sld@x.org", + "uncommon-tld@sld.museum", + "uncommon-tld@sld.travel", + "uncommon-tld@sld.mobi", + "country-code-tld@sld.uk", + "country-code-tld@sld.rw", + "local@sld.newTLD", + "the-total-length@of-an-entire-address.cannot-be-longer-than-two-hundred-and-fifty-four-characters.and-this-address-is-254-characters-exactly.so-it-should-be-valid.and-im-going-to-add-some-more-words-here.to-increase-the-lenght-blah-blah-blah-blah-bla.org", + "the-character-limit@for-each-part.of-the-domain.is-sixty-three-characters.this-is-exactly-sixty-three-characters-so-it-is-valid-blah-blah.com", + "local@sub.domains.com" + ], + ipAddresses : [ "127.0.0.1", "24.48.64.2", "192.168.1.1", "209.68.44.3", "2.2.2.2" ] +} + +exports.invalid = { + fullName : null, + age : -1, + state : 47, + city : false, + zip : [null], + married : "yes", + dozen : 50, + dozenOrBakersDozen : "over 9000", + favoriteEvenNumber : 15, + topThreeFavoriteColors : [ "red", 5 ], + favoriteSingleDigitWholeNumbers : [ 78, 2, 999 ], + favoriteFiveLetterWord : "codernaut", + emailAddresses : [], + ipAddresses : [ "999.0.099.1", "294.48.64.2346", false, "2221409.64214128.42414.235233", "124124.12412412" ] +} + +exports.schema = { // from cosmic thingy + name : "test", + type : "object", + additionalProperties : false, + required : ["fullName", "age", "zip", "married", "dozen", "dozenOrBakersDozen", "favoriteEvenNumber", "topThreeFavoriteColors", "favoriteSingleDigitWholeNumbers", "favoriteFiveLetterWord", "emailAddresses", "ipAddresses"], + properties : + { + fullName : { type : "string" }, + age : { type : "integer", minimum : 0 }, + optionalItem : { type : "string" }, + state : { type : "string" }, + city : { type : "string" }, + zip : { type : "integer", minimum : 0, maximum : 99999 }, + married : { type : "boolean" }, + dozen : { type : "integer", minimum : 12, maximum : 12 }, + dozenOrBakersDozen : { type : "integer", minimum : 12, maximum : 13 }, 
+ favoriteEvenNumber : { type : "integer", multipleOf : 2 }, + topThreeFavoriteColors : { type : "array", minItems : 3, maxItems : 3, uniqueItems : true, items : { type : "string" }}, + favoriteSingleDigitWholeNumbers : { type : "array", minItems : 1, maxItems : 10, uniqueItems : true, items : { type : "integer", minimum : 0, maximum : 9 }}, + favoriteFiveLetterWord : { type : "string", minLength : 5, maxLength : 5 }, + emailAddresses : { type : "array", minItems : 1, uniqueItems : true, items : { type : "string", format : "email" }}, + ipAddresses : { type : "array", uniqueItems : true, items : { type : "string", format : "ipv4" }}, + } + } \ No newline at end of file diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/additionalItems.json b/node_modules/is-my-json-valid/test/json-schema-draft4/additionalItems.json new file mode 100644 index 0000000..521745c --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/additionalItems.json @@ -0,0 +1,82 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "items is schema, no additionalItems", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "no additional items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/additionalProperties.json b/node_modules/is-my-json-valid/test/json-schema-draft4/additionalProperties.json new file mode 100644 index 0000000..40831f9 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/additionalProperties.json @@ -0,0 +1,88 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + 
"valid": true + } + ] + }, + { + "description": + "additionalProperties allows a schema which should validate", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/allOf.json b/node_modules/is-my-json-valid/test/json-schema-draft4/allOf.json new file mode 100644 index 0000000..bbb5f89 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/allOf.json @@ -0,0 +1,112 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/anyOf.json b/node_modules/is-my-json-valid/test/json-schema-draft4/anyOf.json new file mode 100644 index 0000000..a58714a --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/anyOf.json @@ -0,0 +1,68 @@ +[ + 
{ + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/bignum.json b/node_modules/is-my-json-valid/test/json-schema-draft4/bignum.json new file mode 100644 index 0000000..ccc7c17 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/bignum.json @@ -0,0 +1,107 @@ +[ + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": {"type": "string"}, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"maximum": 18446744073709551615}, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "maximum": 972783798187987123879878123.18878137, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"minimum": -18446744073709551615}, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "minimum": -972783798187987123879878123.18878137, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/default.json b/node_modules/is-my-json-valid/test/json-schema-draft4/default.json new file mode 100644 index 
0000000..1762977 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/default.json @@ -0,0 +1,49 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/definitions.json b/node_modules/is-my-json-valid/test/json-schema-draft4/definitions.json new file mode 100644 index 0000000..cf935a3 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/definitions.json @@ -0,0 +1,32 @@ +[ + { + "description": "valid definition", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "valid definition schema", + "data": { + "definitions": { + "foo": {"type": "integer"} + } + }, + "valid": true + } + ] + }, + { + "description": "invalid definition", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "invalid definition schema", + "data": { + "definitions": { + "foo": {"type": 1} + } + }, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/dependencies.json b/node_modules/is-my-json-valid/test/json-schema-draft4/dependencies.json new file mode 100644 index 0000000..7b9b16a --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/dependencies.json @@ -0,0 +1,113 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + 
{ + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/enum.json b/node_modules/is-my-json-valid/test/json-schema-draft4/enum.json new file mode 100644 index 0000000..f124436 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/enum.json @@ -0,0 +1,72 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/format.json b/node_modules/is-my-json-valid/test/json-schema-draft4/format.json new file mode 100644 index 0000000..53c5d25 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/format.json @@ -0,0 +1,143 @@ +[ + { + "description": "validation of date-time strings", + "schema": {"format": "date-time"}, + "tests": [ + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + } + ] + }, + { + "description": "validation of URIs", + "schema": {"format": "uri"}, + "tests": [ + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "validation of e-mail addresses", + "schema": {"format": "email"}, + "tests": [ + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + }, + { + "description": "validation of IP addresses", + "schema": {"format": "ipv4"}, + "tests": [ + { + "description": "a valid IP 
address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + } + ] + }, + { + "description": "validation of IPv6 addresses", + "schema": {"format": "ipv6"}, + "tests": [ + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + } + ] + }, + { + "description": "validation of host names", + "schema": {"format": "hostname"}, + "tests": [ + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/items.json b/node_modules/is-my-json-valid/test/json-schema-draft4/items.json new file mode 100644 index 0000000..f5e18a1 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/items.json @@ -0,0 +1,46 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/maxItems.json b/node_modules/is-my-json-valid/test/json-schema-draft4/maxItems.json new file mode 100644 index 0000000..3b53a6b --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/maxItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/maxLength.json 
b/node_modules/is-my-json-valid/test/json-schema-draft4/maxLength.json new file mode 100644 index 0000000..48eb129 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/maxLength.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/maxProperties.json b/node_modules/is-my-json-valid/test/json-schema-draft4/maxProperties.json new file mode 100644 index 0000000..d282446 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/maxProperties.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "foobar", + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/maximum.json b/node_modules/is-my-json-valid/test/json-schema-draft4/maximum.json new file mode 100644 index 0000000..86c7b89 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/maximum.json @@ -0,0 +1,42 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMaximum validation", + "schema": { + "maximum": 3.0, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "below the maximum is still valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/minItems.json b/node_modules/is-my-json-valid/test/json-schema-draft4/minItems.json new file mode 100644 index 0000000..ed51188 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/minItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/minLength.json b/node_modules/is-my-json-valid/test/json-schema-draft4/minLength.json new file mode 100644 index 0000000..e9c14b1 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/minLength.json @@ -0,0 +1,28 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": 
"longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/minProperties.json b/node_modules/is-my-json-valid/test/json-schema-draft4/minProperties.json new file mode 100644 index 0000000..a72c7d2 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/minProperties.json @@ -0,0 +1,28 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "", + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/minimum.json b/node_modules/is-my-json-valid/test/json-schema-draft4/minimum.json new file mode 100644 index 0000000..d5bf000 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/minimum.json @@ -0,0 +1,42 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMinimum validation", + "schema": { + "minimum": 1.1, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "above the minimum is still valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json b/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json new file mode 100644 index 0000000..c13b267 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json @@ -0,0 +1,96 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "by decimal number where floating point precision is wrong", + "schema": {"multipleOf": 0.01}, + "tests": [ + { + "description": "Number 2 is multiple of 0.01", + "data": 2, + "valid": true + }, + { + "description": "Number 2.1 is 
multiple of 0.01", + "data": 2.1, + "valid": true + }, + { + "description": "Number 2.2 is multiple of 0.01", + "data": 2.2, + "valid": true + }, + { + "description": "Number 2.3 is multiple of 0.01", + "data": 2.3, + "valid": true + }, + { + "description": "Number 2.4 is multiple of 0.01", + "data": 2.4, + "valid": true + }, + { + "description": "Number 1.211 is not multiple of 0.01", + "data": 1.211, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/not.json b/node_modules/is-my-json-valid/test/json-schema-draft4/not.json new file mode 100644 index 0000000..cbb7f46 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/not.json @@ -0,0 +1,96 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + } + +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndFormat.json b/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndFormat.json new file mode 100644 index 0000000..d7fce9f --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndFormat.json @@ -0,0 +1,18 @@ +[ + { + "description": "validation of null and format", + "schema": {"type": ["null", "string"], "format": "date-time"}, + "tests": [ + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "allow null", + "data": null, + "valid": true + } + ] + } +] \ No newline at end of file diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndObject.json b/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndObject.json new file mode 100644 index 0000000..c65c02c --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/nullAndObject.json @@ -0,0 +1,18 @@ +[ + { + "description": "multiple types of null and object containing properties", + "schema": { + "type": ["null", "object"], + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/oneOf.json b/node_modules/is-my-json-valid/test/json-schema-draft4/oneOf.json new file mode 100644 index 0000000..1eaa4e4 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/oneOf.json @@ 
-0,0 +1,68 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/pattern.json b/node_modules/is-my-json-valid/test/json-schema-draft4/pattern.json new file mode 100644 index 0000000..befc4b5 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/pattern.json @@ -0,0 +1,23 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores non-strings", + "data": true, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/patternProperties.json b/node_modules/is-my-json-valid/test/json-schema-draft4/patternProperties.json new file mode 100644 index 0000000..18586e5 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/patternProperties.json @@ -0,0 +1,110 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { 
+ "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/properties.json b/node_modules/is-my-json-valid/test/json-schema-draft4/properties.json new file mode 100644 index 0000000..cd1644d --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/properties.json @@ -0,0 +1,92 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores non-objects", + "data": [], + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/ref.json b/node_modules/is-my-json-valid/test/json-schema-draft4/ref.json new file mode 100644 index 0000000..d8214bc --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/ref.json @@ -0,0 +1,128 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer 
ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "tilda~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"}, + "properties": { + "tilda": {"$ref": "#/tilda~0field"}, + "slash": {"$ref": "#/slash~1field"}, + "percent": {"$ref": "#/percent%25field"} + } + }, + "tests": [ + { + "description": "slash", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilda", + "data": {"tilda": "aoeu"}, + "valid": false + }, + { + "description": "percent", + "data": {"percent": "aoeu"}, + "valid": false + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "$ref": "#/definitions/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/refRemote.json b/node_modules/is-my-json-valid/test/json-schema-draft4/refRemote.json new file mode 100644 index 0000000..4ca8047 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/refRemote.json @@ -0,0 +1,74 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "change resolution scope", + "schema": { + "id": "http://localhost:1234/", + "items": { + "id": "folder/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "changed scope ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "changed scope ref invalid", + "data": [["a"]], + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/required.json b/node_modules/is-my-json-valid/test/json-schema-draft4/required.json new file mode 100644 index 0000000..612f73f --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/required.json @@ -0,0 +1,39 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": 
{}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/type.json b/node_modules/is-my-json-valid/test/json-schema-draft4/type.json new file mode 100644 index 0000000..257f051 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/type.json @@ -0,0 +1,330 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + 
"data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is not an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a boolean", + "data": true, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "a boolean is not null", + "data": true, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema-draft4/uniqueItems.json b/node_modules/is-my-json-valid/test/json-schema-draft4/uniqueItems.json new file mode 100644 index 0000000..c1f4ab9 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema-draft4/uniqueItems.json @@ -0,0 +1,79 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": 
false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + } + ] + } +] diff --git a/node_modules/is-my-json-valid/test/json-schema.js b/node_modules/is-my-json-valid/test/json-schema.js new file mode 100644 index 0000000..e68a263 --- /dev/null +++ b/node_modules/is-my-json-valid/test/json-schema.js @@ -0,0 +1,23 @@ +var tape = require('tape') +var fs = require('fs') +var validator = require('../') + +var files = fs.readdirSync(__dirname+'/json-schema-draft4') + .map(function(file) { + if (file === 'definitions.json') return null + if (file === 'refRemote.json') return null + return require('./json-schema-draft4/'+file) + }) + .filter(Boolean) + +files.forEach(function(file) { + file.forEach(function(f) { + tape('json-schema-test-suite '+f.description, function(t) { + var validate = validator(f.schema) + f.tests.forEach(function(test) { + t.same(validate(test.data), test.valid, test.description) + }) + t.end() + }) + }) +}) diff --git a/node_modules/is-my-json-valid/test/misc.js b/node_modules/is-my-json-valid/test/misc.js new file mode 100644 index 0000000..4ea36d5 --- /dev/null +++ b/node_modules/is-my-json-valid/test/misc.js @@ -0,0 +1,471 @@ +var tape = require('tape') +var cosmic = require('./fixtures/cosmic') +var validator = require('../') +var validatorRequire = require('../require') + +tape('simple', function(t) { + var schema = { + required: true, + type: 'object', + properties: { + hello: {type:'string', required:true} + } + } + + var validate = validator(schema) + + t.ok(validate({hello: 'world'}), 'should be valid') + t.notOk(validate(), 'should be invalid') + t.notOk(validate({}), 'should be invalid') + t.end() +}) + +tape('data is undefined', function (t) { + var validate = validator({type: 'string'}) + + t.notOk(validate(null)) + t.notOk(validate(undefined)) + t.end() +}) + +tape('advanced', function(t) { + var validate = validator(cosmic.schema) + + t.ok(validate(cosmic.valid), 'should be valid') + t.notOk(validate(cosmic.invalid), 'should be invalid') + t.end() +}) + +tape('greedy/false', function(t) { + var validate = validator({ + type: 'object', + properties: { + x: { + type: 'number' + } + }, + required: ['x', 'y'] + }); + t.notOk(validate({}), 'should be invalid') + t.strictEqual(validate.errors.length, 2); + t.strictEqual(validate.errors[0].field, 'data.x') + 
t.strictEqual(validate.errors[0].message, 'is required') + t.strictEqual(validate.errors[1].field, 'data.y') + t.strictEqual(validate.errors[1].message, 'is required') + t.notOk(validate({x: 'string'}), 'should be invalid') + t.strictEqual(validate.errors.length, 1); + t.strictEqual(validate.errors[0].field, 'data.y') + t.strictEqual(validate.errors[0].message, 'is required') + t.notOk(validate({x: 'string', y: 'value'}), 'should be invalid') + t.strictEqual(validate.errors.length, 1); + t.strictEqual(validate.errors[0].field, 'data.x') + t.strictEqual(validate.errors[0].message, 'is the wrong type') + t.end(); +}); + +tape('greedy/true', function(t) { + var validate = validator({ + type: 'object', + properties: { + x: { + type: 'number' + } + }, + required: ['x', 'y'] + }, { + greedy: true + }); + t.notOk(validate({}), 'should be invalid') + t.strictEqual(validate.errors.length, 2); + t.strictEqual(validate.errors[0].field, 'data.x') + t.strictEqual(validate.errors[0].message, 'is required') + t.strictEqual(validate.errors[1].field, 'data.y') + t.strictEqual(validate.errors[1].message, 'is required') + t.notOk(validate({x: 'string'}), 'should be invalid') + t.strictEqual(validate.errors.length, 2); + t.strictEqual(validate.errors[0].field, 'data.y') + t.strictEqual(validate.errors[0].message, 'is required') + t.strictEqual(validate.errors[1].field, 'data.x') + t.strictEqual(validate.errors[1].message, 'is the wrong type') + t.notOk(validate({x: 'string', y: 'value'}), 'should be invalid') + t.strictEqual(validate.errors.length, 1); + t.strictEqual(validate.errors[0].field, 'data.x') + t.strictEqual(validate.errors[0].message, 'is the wrong type') + t.ok(validate({x: 1, y: 'value'}), 'should be invalid') + t.end(); +}); + +tape('additional props', function(t) { + var validate = validator({ + type: 'object', + additionalProperties: false + }, { + verbose: true + }) + + t.ok(validate({})) + t.notOk(validate({foo:'bar'})) + t.ok(validate.errors[0].value === 'data.foo', 'should output the property not allowed in verbose mode') + t.strictEqual(validate.errors[0].type, 'object', 'error object should contain the type') + t.end() +}) + +tape('array', function(t) { + var validate = validator({ + type: 'array', + required: true, + items: { + type: 'string' + } + }) + + t.notOk(validate({}), 'wrong type') + t.notOk(validate(), 'is required') + t.ok(validate(['test'])) + t.end() +}) + +tape('nested array', function(t) { + var validate = validator({ + type: 'object', + properties: { + list: { + type: 'array', + required: true, + items: { + type: 'string' + } + } + } + }) + + t.notOk(validate({}), 'is required') + t.ok(validate({list:['test']})) + t.notOk(validate({list:[1]})) + t.end() +}) + +tape('enum', function(t) { + var validate = validator({ + type: 'object', + properties: { + foo: { + type: 'number', + required: true, + enum: [42] + } + } + }) + + t.notOk(validate({}), 'is required') + t.ok(validate({foo:42})) + t.notOk(validate({foo:43})) + t.end() +}) + +tape('minimum/maximum', function(t) { + var validate = validator({ + type: 'object', + properties: { + foo: { + type: 'number', + minimum: 0, + maximum: 0 + } + } + }) + + t.notOk(validate({foo:-42})) + t.ok(validate({foo:0})) + t.notOk(validate({foo:42})) + t.end() +}) + +tape('exclusiveMinimum/exclusiveMaximum', function(t) { + var validate = validator({ + type: 'object', + properties: { + foo: { + type: 'number', + minimum: 10, + maximum: 20, + exclusiveMinimum: true, + exclusiveMaximum: true + } + } + }) + + t.notOk(validate({foo:10})) + 
t.ok(validate({foo:11})) + t.notOk(validate({foo:20})) + t.ok(validate({foo:19})) + t.end() +}) + +tape('minimum/maximum number type', function(t) { + var validate = validator({ + type: ['integer', 'null'], + minimum: 1, + maximum: 100 + }) + + t.notOk(validate(-1)) + t.notOk(validate(0)) + t.ok(validate(null)) + t.ok(validate(1)) + t.ok(validate(100)) + t.notOk(validate(101)) + t.end() +}) + +tape('custom format', function(t) { + var validate = validator({ + type: 'object', + properties: { + foo: { + type: 'string', + format: 'as' + } + } + }, {formats: {as:/^a+$/}}) + + t.notOk(validate({foo:''}), 'not as') + t.notOk(validate({foo:'b'}), 'not as') + t.notOk(validate({foo:'aaab'}), 'not as') + t.ok(validate({foo:'a'}), 'as') + t.ok(validate({foo:'aaaaaa'}), 'as') + t.end() +}) + +tape('custom format function', function(t) { + var validate = validator({ + type: 'object', + properties: { + foo: { + type: 'string', + format: 'as' + } + } + }, {formats: {as:function(s) { return /^a+$/.test(s) } }}) + + t.notOk(validate({foo:''}), 'not as') + t.notOk(validate({foo:'b'}), 'not as') + t.notOk(validate({foo:'aaab'}), 'not as') + t.ok(validate({foo:'a'}), 'as') + t.ok(validate({foo:'aaaaaa'}), 'as') + t.end() +}) + +tape('do not mutate schema', function(t) { + var sch = { + items: [ + {} + ], + additionalItems: { + type: 'integer' + } + } + + var copy = JSON.parse(JSON.stringify(sch)) + + validator(sch) + + t.same(sch, copy, 'did not mutate') + t.end() +}) + +tape('#toJSON()', function(t) { + var schema = { + required: true, + type: 'object', + properties: { + hello: {type:'string', required:true} + } + } + + var validate = validator(schema) + + t.deepEqual(validate.toJSON(), schema, 'should return original schema') + t.end() +}) + +tape('external schemas', function(t) { + var ext = {type: 'string'} + var schema = { + required: true, + $ref: '#ext' + } + + var validate = validator(schema, {schemas: {ext:ext}}) + + t.ok(validate('hello string'), 'is a string') + t.notOk(validate(42), 'not a string') + t.end() +}) + +tape('external schema URIs', function(t) { + var ext = {type: 'string'} + var schema = { + required: true, + $ref: 'http://example.com/schemas/schemaURIs' + } + + var opts = {schemas:{}}; + opts.schemas['http://example.com/schemas/schemaURIs'] = ext; + var validate = validator(schema, opts) + + t.ok(validate('hello string'), 'is a string') + t.notOk(validate(42), 'not a string') + t.end() +}) + +tape('top-level external schema', function(t) { + var defs = { + "string": { + type: "string" + }, + "sex": { + type: "string", + enum: ["male", "female", "other"] + } + } + var schema = { + type: "object", + properties: { + "name": { $ref: "definitions.json#/string" }, + "sex": { $ref: "definitions.json#/sex" } + }, + required: ["name", "sex"] + } + + var validate = validator(schema, { + schemas: { + "definitions.json": defs + } + }) + t.ok(validate({name:"alice", sex:"female"}), 'is an object') + t.notOk(validate({name:"alice", sex: "bob"}), 'recognizes external schema') + t.notOk(validate({name:2, sex: "female"}), 'recognizes external schema') + t.end() +}) + +tape('nested required array decl', function(t) { + var schema = { + properties: { + x: { + type: 'object', + properties: { + y: { + type: 'object', + properties: { + z: { + type: 'string' + } + }, + required: ['z'] + } + } + } + }, + required: ['x'] + } + + var validate = validator(schema) + + t.ok(validate({x: {}}), 'should be valid') + t.notOk(validate({}), 'should not be valid') + t.strictEqual(validate.errors[0].field, 'data.x', 
'should output the missing field') + t.end() +}) + +tape('verbose mode', function(t) { + var schema = { + required: true, + type: 'object', + properties: { + hello: { + required: true, + type: 'string' + } + } + }; + + var validate = validator(schema, {verbose: true}) + + t.ok(validate({hello: 'string'}), 'should be valid') + t.notOk(validate({hello: 100}), 'should not be valid') + t.strictEqual(validate.errors[0].value, 100, 'error object should contain the invalid value') + t.strictEqual(validate.errors[0].type, 'string', 'error object should contain the type') + t.end() +}) + +tape('additional props in verbose mode', function(t) { + var schema = { + type: 'object', + required: true, + additionalProperties: false, + properties: { + foo: { + type: 'string' + }, + 'hello world': { + type: 'object', + required: true, + additionalProperties: false, + properties: { + foo: { + type: 'string' + } + } + } + } + }; + + var validate = validator(schema, {verbose: true}) + + validate({'hello world': {bar: 'string'}}); + + t.strictEqual(validate.errors[0].value, 'data["hello world"].bar', 'should output the path to the additional prop in the error') + t.end() +}) + +tape('Date.now() is an integer', function(t) { + var schema = {type: 'integer'} + var validate = validator(schema) + + t.ok(validate(Date.now()), 'is integer') + t.end() +}) + +tape('field shows item index in arrays', function(t) { + var schema = { + type: 'array', + items: { + type: 'array', + items: { + properties: { + foo: { + type: 'string', + required: true + } + } + } + } + } + + var validate = validator(schema) + + validate([ + [ + { foo: 'test' }, + { foo: 'test' } + ], + [ + { foo: 'test' }, + { baz: 'test' } + ] + ]) + + t.strictEqual(validate.errors[0].field, 'data.1.1.foo', 'should output the field with specific index of failing item in the error') + t.end() +}) diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md new file mode 100644 index 0000000..8395f91 --- /dev/null +++ b/node_modules/is-number/README.md @@ -0,0 +1,103 @@ +# is-number [![NPM version](https://badge.fury.io/js/is-number.svg)](http://badge.fury.io/js/is-number) [![Build Status](https://travis-ci.org/jonschlinkert/is-number.svg)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a number. comprehensive tests. + +To understand some of the rationale behind the decisions made in this library (and to learn about some oddities of number evaluation in JavaScript), [see this gist](https://gist.github.com/jonschlinkert/e30c70c713da325d0e81). + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i is-number --save +``` + +## Usage + +```js +var isNumber = require('is-number'); +``` + +### true + +See the [tests](./test.js) for more examples. + +```js +isNumber(5e3) //=> 'true' +isNumber(0xff) //=> 'true' +isNumber(-1.1) //=> 'true' +isNumber(0) //=> 'true' +isNumber(1) //=> 'true' +isNumber(1.1) //=> 'true' +isNumber(10) //=> 'true' +isNumber(10.10) //=> 'true' +isNumber(100) //=> 'true' +isNumber('-1.1') //=> 'true' +isNumber('0') //=> 'true' +isNumber('012') //=> 'true' +isNumber('0xff') //=> 'true' +isNumber('1') //=> 'true' +isNumber('1.1') //=> 'true' +isNumber('10') //=> 'true' +isNumber('10.10') //=> 'true' +isNumber('100') //=> 'true' +isNumber('5e3') //=> 'true' +isNumber(parseInt('012')) //=> 'true' +isNumber(parseFloat('012')) //=> 'true' +``` + +### False + +See the [tests](./test.js) for more examples. + +```js +isNumber('foo') //=> 'false' +isNumber([1]) //=> 'false' +isNumber([]) //=> 'false' +isNumber(function () {}) //=> 'false' +isNumber(Infinity) //=> 'false' +isNumber(NaN) //=> 'false' +isNumber(new Array('abc')) //=> 'false' +isNumber(new Array(2)) //=> 'false' +isNumber(new Buffer('abc')) //=> 'false' +isNumber(null) //=> 'false' +isNumber(undefined) //=> 'false' +isNumber({abc: 'abc'}) //=> 'false' +``` + +## Other projects + +* [even](https://www.npmjs.com/package/even): Get the even numbered items from an array. | [homepage](https://github.com/jonschlinkert/even) +* [is-even](https://www.npmjs.com/package/is-even): Return true if the given number is even. | [homepage](https://github.com/jonschlinkert/is-even) +* [is-odd](https://www.npmjs.com/package/is-odd): Returns true if the given number is odd. | [homepage](https://github.com/jonschlinkert/is-odd) +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive) +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of) +* [odd](https://www.npmjs.com/package/odd): Get the odd numbered items from an array. | [homepage](https://github.com/jonschlinkert/odd) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-number/issues/new). + +## Run tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. 
+ +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on November 22, 2015._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js new file mode 100644 index 0000000..96ec66d --- /dev/null +++ b/node_modules/is-number/index.js @@ -0,0 +1,19 @@ +/*! + * is-number + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var typeOf = require('kind-of'); + +module.exports = function isNumber(num) { + var type = typeOf(num); + if (type !== 'number' && type !== 'string') { + return false; + } + var n = +num; + return (n - n + 1) >= 0 && num !== ''; +}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json new file mode 100644 index 0000000..8e30b13 --- /dev/null +++ b/node_modules/is-number/package.json @@ -0,0 +1,59 @@ +{ + "name": "is-number", + "description": "Returns true if the value is a number. comprehensive tests.", + "version": "2.1.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "kind-of": "^3.0.2" + }, + "devDependencies": { + "benchmarked": "^0.1.3", + "chalk": "^0.5.1", + "mocha": "*" + }, + "keywords": [ + "check", + "coerce", + "coercion", + "integer", + "is", + "is number", + "is-number", + "istype", + "kind of", + "math", + "number", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "related": { + "list": [ + "kind-of", + "is-primitive", + "even", + "odd", + "is-even", + "is-odd" + ] + } + } +} diff --git a/node_modules/is-posix-bracket/LICENSE b/node_modules/is-posix-bracket/LICENSE new file mode 100644 index 0000000..1e49edf --- /dev/null +++ b/node_modules/is-posix-bracket/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-posix-bracket/README.md b/node_modules/is-posix-bracket/README.md new file mode 100644 index 0000000..1d06a4d --- /dev/null +++ b/node_modules/is-posix-bracket/README.md @@ -0,0 +1,88 @@ +# is-posix-bracket [![NPM version](https://img.shields.io/npm/v/is-posix-bracket.svg?style=flat)](https://www.npmjs.com/package/is-posix-bracket) [![NPM downloads](https://img.shields.io/npm/dm/is-posix-bracket.svg?style=flat)](https://npmjs.org/package/is-posix-bracket) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-posix-bracket.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-posix-bracket) + +> Returns true if the given string is a POSIX bracket expression (POSIX character class). + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install is-posix-bracket --save +``` + +## Usage + +```js +var isPosixBracket = require('is-posix-bracket'); + +isPosixBracket('[foo:]]'); +//=> false +isPosixBracket('[xdigit:]]'); +//=> false +isPosixBracket('[[:xdigit:]]'); +//=> true +isPosixBracket('[[:xdigit:]]'); +//=> true +isPosixBracket('[[:alpha:]123]'); +//=> true +isPosixBracket('[[:alpha:]123]'); +//=> true +isPosixBracket('[a-c[:digit:]x-z]'); +//=> true +isPosixBracket('[:al:]'); +//=> true +isPosixBracket('[abc[:punct:][0-9]'); +//=> true +``` + +## Related projects + +You might also be interested in these projects: + +* [braces](https://www.npmjs.com/package/braces): Fastest brace expansion for node.js, with the most complete support for the Bash 4.3 braces… [more](https://www.npmjs.com/package/braces) | [homepage](https://github.com/jonschlinkert/braces) +* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/jonschlinkert/expand-brackets) +* [is-extglob](https://www.npmjs.com/package/is-extglob): Returns true if a string has an extglob. | [homepage](https://github.com/jonschlinkert/is-extglob) +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern.… [more](https://www.npmjs.com/package/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob) +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just… [more](https://www.npmjs.com/package/micromatch) | [homepage](https://github.com/jonschlinkert/micromatch) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/is-posix-bracket/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/is-posix-bracket/blob/master/LICENSE). 
+ +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on April 05, 2016._ \ No newline at end of file diff --git a/node_modules/is-posix-bracket/index.js b/node_modules/is-posix-bracket/index.js new file mode 100644 index 0000000..7cf1de5 --- /dev/null +++ b/node_modules/is-posix-bracket/index.js @@ -0,0 +1,10 @@ +/*! + * is-posix-bracket + * + * Copyright (c) 2015-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isPosixBracket(str) { + return typeof str === 'string' && /\[([:.=+])(?:[^\[\]]|)+\1\]/.test(str); +}; diff --git a/node_modules/is-posix-bracket/package.json b/node_modules/is-posix-bracket/package.json new file mode 100644 index 0000000..f6374c6 --- /dev/null +++ b/node_modules/is-posix-bracket/package.json @@ -0,0 +1,64 @@ +{ + "name": "is-posix-bracket", + "description": "Returns true if the given string is a POSIX bracket expression (POSIX character class).", + "version": "0.1.1", + "homepage": "https://github.com/jonschlinkert/is-posix-bracket", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-posix-bracket", + "bugs": { + "url": "https://github.com/jonschlinkert/is-posix-bracket/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.7", + "mocha": "^2.4.5" + }, + "keywords": [ + "braces", + "brackets", + "character", + "character-class", + "class", + "expression", + "posix", + "regex", + "regexp", + "regular" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "braces", + "expand-brackets", + "is-extglob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-primitive/LICENSE b/node_modules/is-primitive/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/is-primitive/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/is-primitive/README.md b/node_modules/is-primitive/README.md new file mode 100644 index 0000000..e1c3064 --- /dev/null +++ b/node_modules/is-primitive/README.md @@ -0,0 +1,57 @@ +# is-primitive [![NPM version](https://badge.fury.io/js/is-primitive.svg)](http://badge.fury.io/js/is-primitive) [![Build Status](https://travis-ci.org/jonschlinkert/is-primitive.svg)](https://travis-ci.org/jonschlinkert/is-primitive) + +> Returns `true` if the value is a primitive. + +## Install with [npm](npmjs.org) + +```bash +npm i is-primitive --save +``` + +## Running tests +Install dev dependencies. + +```bash +npm i -d && npm test +``` + +## Usage + +```js +var isPrimitive = require('is-primitive'); +isPrimitive('abc'); +//=> true + +isPrimitive(42); +//=> true + +isPrimitive(false); +//=> true + +isPrimitive(true); +//=> true + +isPrimitive({}); +//=> false + +isPrimitive([]); +//=> false + +isPrimitive(function(){}); +//=> false +``` + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2014-2015 Jon Schlinkert +Released under the MIT license + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on March 16, 2015._ \ No newline at end of file diff --git a/node_modules/is-primitive/index.js b/node_modules/is-primitive/index.js new file mode 100644 index 0000000..55f11cf --- /dev/null +++ b/node_modules/is-primitive/index.js @@ -0,0 +1,13 @@ +/*! + * is-primitive + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +// see http://jsperf.com/testing-value-is-primitive/7 +module.exports = function isPrimitive(value) { + return value == null || (typeof value !== 'function' && typeof value !== 'object'); +}; diff --git a/node_modules/is-primitive/package.json b/node_modules/is-primitive/package.json new file mode 100644 index 0000000..b716f4e --- /dev/null +++ b/node_modules/is-primitive/package.json @@ -0,0 +1,46 @@ +{ + "name": "is-primitive", + "description": "Returns `true` if the value is a primitive. 
", + "version": "2.0.0", + "homepage": "https://github.com/jonschlinkert/is-primitive", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/is-primitive.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/is-primitive/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/is-primitive/blob/master/LICENSE" + }, + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "*", + "should": "^4.0.4" + }, + "keywords": [ + "boolean", + "check", + "number", + "primitive", + "string", + "symbol", + "type", + "typeof", + "util" + ] +} diff --git a/node_modules/is-property/.npmignore b/node_modules/is-property/.npmignore new file mode 100644 index 0000000..8ecfa25 --- /dev/null +++ b/node_modules/is-property/.npmignore @@ -0,0 +1,17 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +npm-debug.log +node_modules/* +*.DS_Store +test/* \ No newline at end of file diff --git a/node_modules/is-property/LICENSE b/node_modules/is-property/LICENSE new file mode 100644 index 0000000..8ce206a --- /dev/null +++ b/node_modules/is-property/LICENSE @@ -0,0 +1,22 @@ + +The MIT License (MIT) + +Copyright (c) 2013 Mikola Lysenko + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-property/README.md b/node_modules/is-property/README.md new file mode 100644 index 0000000..ef1d00b --- /dev/null +++ b/node_modules/is-property/README.md @@ -0,0 +1,28 @@ +is-property +=========== +Tests if a property of a JavaScript object can be accessed using the dot (.) notation or if it must be enclosed in brackets, (ie use x[" ... "]) + +Example +------- + +```javascript +var isProperty = require("is-property") + +console.log(isProperty("foo")) //Prints true +console.log(isProperty("0")) //Prints false +``` + +Install +------- + + npm install is-property + +### `require("is-property")(str)` +Checks if str is a property + +* `str` is a string which we will test if it is a property or not + +**Returns** true or false depending if str is a property + +## Credits +(c) 2013 Mikola Lysenko. 
MIT License \ No newline at end of file diff --git a/node_modules/is-property/is-property.js b/node_modules/is-property/is-property.js new file mode 100644 index 0000000..db58b47 --- /dev/null +++ b/node_modules/is-property/is-property.js @@ -0,0 +1,5 @@ +"use strict" +function isProperty(str) { + return /^[$A-Z\_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\
u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua697\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc][$A-Z\_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f
5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua697\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc0-9\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c01-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c82\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d02\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19b0-\u19c0\u19c8\u19c9\u19d0-\u19d9\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u200
c\u200d\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c4\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f]*$/.test(str) +} +module.exports = isProperty \ No newline at end of file diff --git a/node_modules/is-property/package.json b/node_modules/is-property/package.json new file mode 100644 index 0000000..2105f7b --- /dev/null +++ b/node_modules/is-property/package.json @@ -0,0 +1,36 @@ +{ + "name": "is-property", + "version": "1.0.2", + "description": "Tests if a JSON property can be accessed using . syntax", + "main": "is-property.js", + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tape": "~1.0.4" + }, + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/mikolalysenko/is-property.git" + }, + "keywords": [ + "is", + "property", + "json", + "dot", + "bracket", + ".", + "[]" + ], + "author": "Mikola Lysenko", + "license": "MIT", + "readmeFilename": "README.md", + "gitHead": "0a85ea5b6b1264ea1cdecc6e5cf186adbb3ffc50", + "bugs": { + "url": "https://github.com/mikolalysenko/is-property/issues" + } +} diff --git a/node_modules/is-typedarray/LICENSE.md b/node_modules/is-typedarray/LICENSE.md new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/is-typedarray/LICENSE.md @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-typedarray/README.md b/node_modules/is-typedarray/README.md new file mode 100644 index 0000000..2752863 --- /dev/null +++ b/node_modules/is-typedarray/README.md @@ -0,0 +1,16 @@ +# is-typedarray [![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Detect whether or not an object is a +[Typed Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays). + +## Usage + +[![NPM](https://nodei.co/npm/is-typedarray.png)](https://nodei.co/npm/is-typedarray/) + +### isTypedArray(array) + +Returns `true` when array is a Typed Array, and `false` when it is not. + +## License + +MIT. 
See [LICENSE.md](http://github.com/hughsk/is-typedarray/blob/master/LICENSE.md) for details. diff --git a/node_modules/is-typedarray/index.js b/node_modules/is-typedarray/index.js new file mode 100644 index 0000000..5859603 --- /dev/null +++ b/node_modules/is-typedarray/index.js @@ -0,0 +1,41 @@ +module.exports = isTypedArray +isTypedArray.strict = isStrictTypedArray +isTypedArray.loose = isLooseTypedArray + +var toString = Object.prototype.toString +var names = { + '[object Int8Array]': true + , '[object Int16Array]': true + , '[object Int32Array]': true + , '[object Uint8Array]': true + , '[object Uint8ClampedArray]': true + , '[object Uint16Array]': true + , '[object Uint32Array]': true + , '[object Float32Array]': true + , '[object Float64Array]': true +} + +function isTypedArray(arr) { + return ( + isStrictTypedArray(arr) + || isLooseTypedArray(arr) + ) +} + +function isStrictTypedArray(arr) { + return ( + arr instanceof Int8Array + || arr instanceof Int16Array + || arr instanceof Int32Array + || arr instanceof Uint8Array + || arr instanceof Uint8ClampedArray + || arr instanceof Uint16Array + || arr instanceof Uint32Array + || arr instanceof Float32Array + || arr instanceof Float64Array + ) +} + +function isLooseTypedArray(arr) { + return names[toString.call(arr)] +} diff --git a/node_modules/is-typedarray/package.json b/node_modules/is-typedarray/package.json new file mode 100644 index 0000000..37f7ae3 --- /dev/null +++ b/node_modules/is-typedarray/package.json @@ -0,0 +1,30 @@ +{ + "name": "is-typedarray", + "version": "1.0.0", + "description": "Detect whether or not an object is a Typed Array", + "main": "index.js", + "scripts": { + "test": "node test" + }, + "author": "Hugh Kennedy (http://hughsk.io/)", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "tape": "^2.13.1" + }, + "repository": { + "type": "git", + "url": "git://github.com/hughsk/is-typedarray.git" + }, + "keywords": [ + "typed", + "array", + "detect", + "is", + "util" + ], + "bugs": { + "url": "https://github.com/hughsk/is-typedarray/issues" + }, + "homepage": "https://github.com/hughsk/is-typedarray" +} diff --git a/node_modules/is-typedarray/test.js b/node_modules/is-typedarray/test.js new file mode 100644 index 0000000..b0c176f --- /dev/null +++ b/node_modules/is-typedarray/test.js @@ -0,0 +1,34 @@ +var test = require('tape') +var ista = require('./') + +test('strict', function(t) { + t.ok(ista.strict(new Int8Array), 'Int8Array') + t.ok(ista.strict(new Int16Array), 'Int16Array') + t.ok(ista.strict(new Int32Array), 'Int32Array') + t.ok(ista.strict(new Uint8Array), 'Uint8Array') + t.ok(ista.strict(new Uint16Array), 'Uint16Array') + t.ok(ista.strict(new Uint32Array), 'Uint32Array') + t.ok(ista.strict(new Float32Array), 'Float32Array') + t.ok(ista.strict(new Float64Array), 'Float64Array') + + t.ok(!ista.strict(new Array), 'Array') + t.ok(!ista.strict([]), '[]') + + t.end() +}) + +test('loose', function(t) { + t.ok(ista.loose(new Int8Array), 'Int8Array') + t.ok(ista.loose(new Int16Array), 'Int16Array') + t.ok(ista.loose(new Int32Array), 'Int32Array') + t.ok(ista.loose(new Uint8Array), 'Uint8Array') + t.ok(ista.loose(new Uint16Array), 'Uint16Array') + t.ok(ista.loose(new Uint32Array), 'Uint32Array') + t.ok(ista.loose(new Float32Array), 'Float32Array') + t.ok(ista.loose(new Float64Array), 'Float64Array') + + t.ok(!ista.loose(new Array), 'Array') + t.ok(!ista.loose([]), '[]') + + t.end() +}) diff --git a/node_modules/isarray/.npmignore b/node_modules/isarray/.npmignore new file mode 100644 index 
0000000..3c3629e --- /dev/null +++ b/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/isarray/.travis.yml b/node_modules/isarray/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/isarray/Makefile b/node_modules/isarray/Makefile new file mode 100644 index 0000000..787d56e --- /dev/null +++ b/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md new file mode 100644 index 0000000..16d2c59 --- /dev/null +++ b/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. + +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/isarray/component.json b/node_modules/isarray/component.json new file mode 100644 index 0000000..9e31b68 --- /dev/null +++ b/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/isarray/index.js b/node_modules/isarray/index.js new file mode 100644 index 0000000..a57f634 --- /dev/null +++ b/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/isarray/package.json b/node_modules/isarray/package.json new file mode 100644 index 0000000..1a4317a --- /dev/null +++ b/node_modules/isarray/package.json @@ -0,0 +1,45 @@ +{ + "name": "isarray", + "description": "Array#isArray for older browsers", + "version": "1.0.0", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~2.13.4" + }, + "keywords": [ + "browser", + "isarray", + "array" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "scripts": { + "test": "tape test.js" + } +} diff --git a/node_modules/isarray/test.js b/node_modules/isarray/test.js new file mode 100644 index 0000000..e0c3444 --- /dev/null +++ b/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/node_modules/isobject/LICENSE b/node_modules/isobject/LICENSE new file mode 100644 index 0000000..39245ac --- /dev/null +++ b/node_modules/isobject/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/isobject/README.md b/node_modules/isobject/README.md new file mode 100644 index 0000000..9dd897a --- /dev/null +++ b/node_modules/isobject/README.md @@ -0,0 +1,112 @@ +# isobject [![NPM version](https://img.shields.io/npm/v/isobject.svg?style=flat)](https://www.npmjs.com/package/isobject) [![NPM downloads](https://img.shields.io/npm/dm/isobject.svg?style=flat)](https://npmjs.org/package/isobject) [![Build Status](https://img.shields.io/travis/jonschlinkert/isobject.svg?style=flat)](https://travis-ci.org/jonschlinkert/isobject) + +Returns true if the value is an object and not an array or null. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install isobject --save +``` + +Use [is-plain-object](https://github.com/jonschlinkert/is-plain-object) if you want only objects that are created by the `Object` constructor. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install isobject +``` + +Install with [bower](http://bower.io/) + +```sh +$ bower install isobject +``` + +## Usage + +```js +var isObject = require('isobject'); +``` + +**True** + +All of the following return `true`: + +```js +isObject({}); +isObject(Object.create({})); +isObject(Object.create(Object.prototype)); +isObject(Object.create(null)); +isObject({}); +isObject(new Foo); +isObject(/foo/); +``` + +**False** + +All of the following return `false`: + +```js +isObject(); +isObject(function () {}); +isObject(1); +isObject([]); +isObject(undefined); +isObject(null); +``` + +## Related projects + +You might also be interested in these projects: + +[merge-deep](https://www.npmjs.com/package/merge-deep): Recursively merge values in a javascript object. | [homepage](https://github.com/jonschlinkert/merge-deep) + +* [extend-shallow](https://www.npmjs.com/package/extend-shallow): Extend an object with the properties of additional objects. node.js/javascript util. | [homepage](https://github.com/jonschlinkert/extend-shallow) +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object) +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/isobject/issues/new). 
+ +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/isobject/blob/master/LICENSE). + +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on April 25, 2016._ \ No newline at end of file diff --git a/node_modules/isobject/index.js b/node_modules/isobject/index.js new file mode 100644 index 0000000..aa0dce0 --- /dev/null +++ b/node_modules/isobject/index.js @@ -0,0 +1,14 @@ +/*! + * isobject + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var isArray = require('isarray'); + +module.exports = function isObject(val) { + return val != null && typeof val === 'object' && isArray(val) === false; +}; diff --git a/node_modules/isobject/package.json b/node_modules/isobject/package.json new file mode 100644 index 0000000..954f411 --- /dev/null +++ b/node_modules/isobject/package.json @@ -0,0 +1,67 @@ +{ + "name": "isobject", + "description": "Returns true if the value is an object and not an array or null.", + "version": "2.1.0", + "homepage": "https://github.com/jonschlinkert/isobject", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/isobject", + "bugs": { + "url": "https://github.com/jonschlinkert/isobject/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "isarray": "1.0.0" + }, + "devDependencies": { + "gulp-format-md": "^0.1.9", + "mocha": "^2.4.5" + }, + "keywords": [ + "check", + "is", + "is-object", + "isobject", + "kind", + "kind-of", + "kindof", + "native", + "object", + "type", + "typeof", + "value" + ], + "verb": { + "related": { + "list": [ + "merge-deep", + "extend-shallow", + "is-plain-object", + "kind-of" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/isstream/.jshintrc b/node_modules/isstream/.jshintrc new file mode 100644 index 0000000..c8ef3ca --- /dev/null +++ b/node_modules/isstream/.jshintrc @@ -0,0 +1,59 @@ +{ + "predef": [ ] + , "bitwise": false + , "camelcase": false + , "curly": false + , "eqeqeq": false + , "forin": false + , "immed": false + , "latedef": false + , "noarg": true + , "noempty": true + , "nonew": true + , "plusplus": false + , "quotmark": true + , "regexp": false + , "undef": true + , "unused": true + , "strict": false + , "trailing": true + , "maxlen": 120 + , "asi": true + , "boss": true + , "debug": true + , "eqnull": true + , "esnext": true + , "evil": true + , "expr": true + , "funcscope": false + , "globalstrict": false + , "iterator": false + , "lastsemic": true + , "laxbreak": true + , "laxcomma": true + , "loopfunc": true + , "multistr": false + , "onecase": false + , "proto": false + , "regexdash": false + , "scripturl": 
true + , "smarttabs": false + , "shadow": false + , "sub": true + , "supernew": false + , "validthis": true + , "browser": true + , "couch": false + , "devel": false + , "dojo": false + , "mootools": false + , "node": true + , "nonstandard": true + , "prototypejs": false + , "rhino": false + , "worker": true + , "wsh": false + , "nomen": false + , "onevar": false + , "passfail": false +} \ No newline at end of file diff --git a/node_modules/isstream/.npmignore b/node_modules/isstream/.npmignore new file mode 100644 index 0000000..aa1ec1e --- /dev/null +++ b/node_modules/isstream/.npmignore @@ -0,0 +1 @@ +*.tgz diff --git a/node_modules/isstream/.travis.yml b/node_modules/isstream/.travis.yml new file mode 100644 index 0000000..1fec2ab --- /dev/null +++ b/node_modules/isstream/.travis.yml @@ -0,0 +1,12 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.11" +branches: + only: + - master +notifications: + email: + - rod@vagg.org +script: npm test diff --git a/node_modules/isstream/LICENSE.md b/node_modules/isstream/LICENSE.md new file mode 100644 index 0000000..43f7153 --- /dev/null +++ b/node_modules/isstream/LICENSE.md @@ -0,0 +1,11 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2015 Rod Vagg +--------------------------- + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/isstream/README.md b/node_modules/isstream/README.md new file mode 100644 index 0000000..06770e8 --- /dev/null +++ b/node_modules/isstream/README.md @@ -0,0 +1,66 @@ +# isStream + +[![Build Status](https://secure.travis-ci.org/rvagg/isstream.png)](http://travis-ci.org/rvagg/isstream) + +**Test if an object is a `Stream`** + +[![NPM](https://nodei.co/npm/isstream.svg)](https://nodei.co/npm/isstream/) + +The missing `Stream.isStream(obj)`: determine if an object is standard Node.js `Stream`. Works for Node-core `Stream` objects (for 0.8, 0.10, 0.11, and in theory, older and newer versions) and all versions of **[readable-stream](https://github.com/isaacs/readable-stream)**. + +## Usage: + +```js +var isStream = require('isstream') +var Stream = require('stream') + +isStream(new Stream()) // true + +isStream({}) // false + +isStream(new Stream.Readable()) // true +isStream(new Stream.Writable()) // true +isStream(new Stream.Duplex()) // true +isStream(new Stream.Transform()) // true +isStream(new Stream.PassThrough()) // true +``` + +## But wait! There's more! + +You can also test for `isReadable(obj)`, `isWritable(obj)` and `isDuplex(obj)` to test for implementations of Streams2 (and Streams3) base classes. 
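Beyond the truth table shown in the next block, these predicates are handy when a function accepts heterogeneous input, for example either a plain value or a readable stream. A minimal sketch (the `send` helper and the `res` response object are hypothetical, not part of isstream):

```js
var isReadable = require('isstream').isReadable
var fs = require('fs')

// Hypothetical helper: accept either a readable stream or a plain value.
function send (res, body) {
  if (isReadable(body)) {
    body.pipe(res)        // stream it out
  } else {
    res.end(String(body)) // write it in one go
  }
}

// send(res, fs.createReadStream('report.csv'))
// send(res, 'hello world')
```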
+ +```js +var isReadable = require('isstream').isReadable +var isWritable = require('isstream').isWritable +var isDuplex = require('isstream').isDuplex +var Stream = require('stream') + +isReadable(new Stream()) // false +isWritable(new Stream()) // false +isDuplex(new Stream()) // false + +isReadable(new Stream.Readable()) // true +isReadable(new Stream.Writable()) // false +isReadable(new Stream.Duplex()) // true +isReadable(new Stream.Transform()) // true +isReadable(new Stream.PassThrough()) // true + +isWritable(new Stream.Readable()) // false +isWritable(new Stream.Writable()) // true +isWritable(new Stream.Duplex()) // true +isWritable(new Stream.Transform()) // true +isWritable(new Stream.PassThrough()) // true + +isDuplex(new Stream.Readable()) // false +isDuplex(new Stream.Writable()) // false +isDuplex(new Stream.Duplex()) // true +isDuplex(new Stream.Transform()) // true +isDuplex(new Stream.PassThrough()) // true +``` + +*Reminder: when implementing your own streams, please [use **readable-stream** rather than core streams](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).* + + +## License + +**isStream** is Copyright (c) 2015 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. diff --git a/node_modules/isstream/isstream.js b/node_modules/isstream/isstream.js new file mode 100644 index 0000000..a1d104a --- /dev/null +++ b/node_modules/isstream/isstream.js @@ -0,0 +1,27 @@ +var stream = require('stream') + + +function isStream (obj) { + return obj instanceof stream.Stream +} + + +function isReadable (obj) { + return isStream(obj) && typeof obj._read == 'function' && typeof obj._readableState == 'object' +} + + +function isWritable (obj) { + return isStream(obj) && typeof obj._write == 'function' && typeof obj._writableState == 'object' +} + + +function isDuplex (obj) { + return isReadable(obj) && isWritable(obj) +} + + +module.exports = isStream +module.exports.isReadable = isReadable +module.exports.isWritable = isWritable +module.exports.isDuplex = isDuplex diff --git a/node_modules/isstream/package.json b/node_modules/isstream/package.json new file mode 100644 index 0000000..9ee8bf8 --- /dev/null +++ b/node_modules/isstream/package.json @@ -0,0 +1,33 @@ +{ + "name": "isstream", + "version": "0.1.2", + "description": "Determine if an object is a Stream", + "main": "isstream.js", + "scripts": { + "test": "tar --xform 's/^package/readable-stream-1.0/' -zxf readable-stream-1.0.*.tgz && tar --xform 's/^package/readable-stream-1.1/' -zxf readable-stream-1.1.*.tgz && node test.js; rm -rf readable-stream-1.?/" + }, + "repository": { + "type": "git", + "url": "https://github.com/rvagg/isstream.git" + }, + "keywords": [ + "stream", + "type", + "streams", + "readable-stream", + "hippo" + ], + "devDependencies": { + "tape": "~2.12.3", + "core-util-is": "~1.0.0", + "isarray": "0.0.1", + "string_decoder": "~0.10.x", + "inherits": "~2.0.1" + }, + "author": "Rod Vagg ", + "license": "MIT", + "bugs": { + "url": "https://github.com/rvagg/isstream/issues" + }, + "homepage": "https://github.com/rvagg/isstream" +} diff --git a/node_modules/isstream/test.js b/node_modules/isstream/test.js new file mode 100644 index 0000000..8c950c5 --- /dev/null +++ b/node_modules/isstream/test.js @@ -0,0 +1,168 @@ +var tape = require('tape') + , EE = require('events').EventEmitter + , util = require('util') + + + , isStream = require('./') + , 
isReadable = require('./').isReadable + , isWritable = require('./').isWritable + , isDuplex = require('./').isDuplex + + , CoreStreams = require('stream') + , ReadableStream10 = require('./readable-stream-1.0/') + , ReadableStream11 = require('./readable-stream-1.1/') + + +function test (pass, type, stream) { + tape('isStream(' + type + ')', function (t) { + t.plan(1) + t.ok(pass === isStream(stream), type) + }) +} + + +function testReadable (pass, type, stream) { + tape('isReadable(' + type + ')', function (t) { + t.plan(1) + t.ok(pass === isReadable(stream), type) + }) +} + + +function testWritable (pass, type, stream) { + tape('isWritable(' + type + ')', function (t) { + t.plan(1) + t.ok(pass === isWritable(stream), type) + }) +} + + +function testDuplex (pass, type, stream) { + tape('isDuplex(' + type + ')', function (t) { + t.plan(1) + t.ok(pass === isDuplex(stream), type) + }) +} + + +[ undefined, null, '', true, false, 0, 1, 1.0, 'string', {}, function foo () {} ].forEach(function (o) { + test(false, 'non-stream / primitive: ' + (JSON.stringify(o) || (o && o.toString()) || o), o) +}) + + +test(false, 'fake stream obj', { pipe: function () {} }) + + +;(function () { + + // looks like a stream! + + function Stream () { + EE.call(this) + } + util.inherits(Stream, EE) + Stream.prototype.pipe = function () {} + Stream.Stream = Stream + + test(false, 'fake stream "new Stream()"', new Stream()) + +}()) + + +test(true, 'CoreStreams.Stream', new (CoreStreams.Stream)()) +test(true, 'CoreStreams.Readable', new (CoreStreams.Readable)()) +test(true, 'CoreStreams.Writable', new (CoreStreams.Writable)()) +test(true, 'CoreStreams.Duplex', new (CoreStreams.Duplex)()) +test(true, 'CoreStreams.Transform', new (CoreStreams.Transform)()) +test(true, 'CoreStreams.PassThrough', new (CoreStreams.PassThrough)()) + +test(true, 'ReadableStream10.Readable', new (ReadableStream10.Readable)()) +test(true, 'ReadableStream10.Writable', new (ReadableStream10.Writable)()) +test(true, 'ReadableStream10.Duplex', new (ReadableStream10.Duplex)()) +test(true, 'ReadableStream10.Transform', new (ReadableStream10.Transform)()) +test(true, 'ReadableStream10.PassThrough', new (ReadableStream10.PassThrough)()) + +test(true, 'ReadableStream11.Readable', new (ReadableStream11.Readable)()) +test(true, 'ReadableStream11.Writable', new (ReadableStream11.Writable)()) +test(true, 'ReadableStream11.Duplex', new (ReadableStream11.Duplex)()) +test(true, 'ReadableStream11.Transform', new (ReadableStream11.Transform)()) +test(true, 'ReadableStream11.PassThrough', new (ReadableStream11.PassThrough)()) + + +testReadable(false, 'CoreStreams.Stream', new (CoreStreams.Stream)()) +testReadable(true, 'CoreStreams.Readable', new (CoreStreams.Readable)()) +testReadable(false, 'CoreStreams.Writable', new (CoreStreams.Writable)()) +testReadable(true, 'CoreStreams.Duplex', new (CoreStreams.Duplex)()) +testReadable(true, 'CoreStreams.Transform', new (CoreStreams.Transform)()) +testReadable(true, 'CoreStreams.PassThrough', new (CoreStreams.PassThrough)()) + +testReadable(true, 'ReadableStream10.Readable', new (ReadableStream10.Readable)()) +testReadable(false, 'ReadableStream10.Writable', new (ReadableStream10.Writable)()) +testReadable(true, 'ReadableStream10.Duplex', new (ReadableStream10.Duplex)()) +testReadable(true, 'ReadableStream10.Transform', new (ReadableStream10.Transform)()) +testReadable(true, 'ReadableStream10.PassThrough', new (ReadableStream10.PassThrough)()) + +testReadable(true, 'ReadableStream11.Readable', new 
(ReadableStream11.Readable)()) +testReadable(false, 'ReadableStream11.Writable', new (ReadableStream11.Writable)()) +testReadable(true, 'ReadableStream11.Duplex', new (ReadableStream11.Duplex)()) +testReadable(true, 'ReadableStream11.Transform', new (ReadableStream11.Transform)()) +testReadable(true, 'ReadableStream11.PassThrough', new (ReadableStream11.PassThrough)()) + + +testWritable(false, 'CoreStreams.Stream', new (CoreStreams.Stream)()) +testWritable(false, 'CoreStreams.Readable', new (CoreStreams.Readable)()) +testWritable(true, 'CoreStreams.Writable', new (CoreStreams.Writable)()) +testWritable(true, 'CoreStreams.Duplex', new (CoreStreams.Duplex)()) +testWritable(true, 'CoreStreams.Transform', new (CoreStreams.Transform)()) +testWritable(true, 'CoreStreams.PassThrough', new (CoreStreams.PassThrough)()) + +testWritable(false, 'ReadableStream10.Readable', new (ReadableStream10.Readable)()) +testWritable(true, 'ReadableStream10.Writable', new (ReadableStream10.Writable)()) +testWritable(true, 'ReadableStream10.Duplex', new (ReadableStream10.Duplex)()) +testWritable(true, 'ReadableStream10.Transform', new (ReadableStream10.Transform)()) +testWritable(true, 'ReadableStream10.PassThrough', new (ReadableStream10.PassThrough)()) + +testWritable(false, 'ReadableStream11.Readable', new (ReadableStream11.Readable)()) +testWritable(true, 'ReadableStream11.Writable', new (ReadableStream11.Writable)()) +testWritable(true, 'ReadableStream11.Duplex', new (ReadableStream11.Duplex)()) +testWritable(true, 'ReadableStream11.Transform', new (ReadableStream11.Transform)()) +testWritable(true, 'ReadableStream11.PassThrough', new (ReadableStream11.PassThrough)()) + + +testDuplex(false, 'CoreStreams.Stream', new (CoreStreams.Stream)()) +testDuplex(false, 'CoreStreams.Readable', new (CoreStreams.Readable)()) +testDuplex(false, 'CoreStreams.Writable', new (CoreStreams.Writable)()) +testDuplex(true, 'CoreStreams.Duplex', new (CoreStreams.Duplex)()) +testDuplex(true, 'CoreStreams.Transform', new (CoreStreams.Transform)()) +testDuplex(true, 'CoreStreams.PassThrough', new (CoreStreams.PassThrough)()) + +testDuplex(false, 'ReadableStream10.Readable', new (ReadableStream10.Readable)()) +testDuplex(false, 'ReadableStream10.Writable', new (ReadableStream10.Writable)()) +testDuplex(true, 'ReadableStream10.Duplex', new (ReadableStream10.Duplex)()) +testDuplex(true, 'ReadableStream10.Transform', new (ReadableStream10.Transform)()) +testDuplex(true, 'ReadableStream10.PassThrough', new (ReadableStream10.PassThrough)()) + +testDuplex(false, 'ReadableStream11.Readable', new (ReadableStream11.Readable)()) +testDuplex(false, 'ReadableStream11.Writable', new (ReadableStream11.Writable)()) +testDuplex(true, 'ReadableStream11.Duplex', new (ReadableStream11.Duplex)()) +testDuplex(true, 'ReadableStream11.Transform', new (ReadableStream11.Transform)()) +testDuplex(true, 'ReadableStream11.PassThrough', new (ReadableStream11.PassThrough)()) + + +;[ CoreStreams, ReadableStream10, ReadableStream11 ].forEach(function (p) { + [ 'Stream', 'Readable', 'Writable', 'Duplex', 'Transform', 'PassThrough' ].forEach(function (k) { + if (!p[k]) + return + + function SubStream () { + p[k].call(this) + } + util.inherits(SubStream, p[k]) + + test(true, 'Stream subclass: ' + p.name + '.' 
+ k, new SubStream()) + + }) +}) + + + diff --git a/node_modules/jodid25519/.npmignore b/node_modules/jodid25519/.npmignore new file mode 100644 index 0000000..877830c --- /dev/null +++ b/node_modules/jodid25519/.npmignore @@ -0,0 +1,11 @@ +# Editor, IDE and dev environment stuff +*~ +.project +.settings + +# Build files and directories +/coverage +/doc/api +/build/ +/test/ +/jodid25519-*.tgz diff --git a/node_modules/jodid25519/.travis.yml b/node_modules/jodid25519/.travis.yml new file mode 100644 index 0000000..92a990f --- /dev/null +++ b/node_modules/jodid25519/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - "0.10" + - "0.11" +branches: + only: + - master diff --git a/node_modules/jodid25519/AUTHORS.md b/node_modules/jodid25519/AUTHORS.md new file mode 100644 index 0000000..0c17097 --- /dev/null +++ b/node_modules/jodid25519/AUTHORS.md @@ -0,0 +1,3 @@ +* Michele Bini (original Curve25519 core code: curve25519.js) +* Ron Garret (original Ed25519 code: fast-djbec.js) +* Guy Kloss (package refactoring, unit testing) diff --git a/node_modules/jodid25519/LICENSE b/node_modules/jodid25519/LICENSE new file mode 100644 index 0000000..c722113 --- /dev/null +++ b/node_modules/jodid25519/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2012 Ron Garret +Copyright (c) 2007, 2013, 2014 Michele Bini +Copyright (c) 2014 Mega Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/jodid25519/README.md b/node_modules/jodid25519/README.md new file mode 100644 index 0000000..5335b2d --- /dev/null +++ b/node_modules/jodid25519/README.md @@ -0,0 +1,51 @@ +Jodid25519 [![Build Status](https://secure.travis-ci.org/meganz/jodid25519.png)](https://travis-ci.org/meganz/jodid25519) +=================================================================================================================================== + +Javascript implementation of the Curve25519 and Ed25519 elliptic cryptography functions by Daniel J. Bernstein. + +For the API, please consult the generated documentation under doc/ (you can run `make` to generate it). + +To run the tests do the following on the console from the project's root directory: + + $ npm install + $ make test + + +Contributors +------------ + +If you are one of the contributors and want to add yourself or change the information here, please do submit a pull request. Contributors appear in no particular order. 
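As a concrete picture of what the package provides, the `dh` sub-module (see `lib/dh.js` further down in this diff) exposes `generateKey()`, `publicKey()` and `computeKey()`. A minimal sketch of a two-party key agreement built only on those three documented calls; the variable names are illustrative:

```js
var dh = require('jodid25519').dh;

// Each party generates a random 32-byte private key and derives its public key.
var alicePriv = dh.generateKey();
var alicePub  = dh.publicKey(alicePriv);
var bobPriv   = dh.generateKey();
var bobPub    = dh.publicKey(bobPriv);

// Scalar-multiplying the peer's public key by one's own private key
// yields the same shared secret on both sides.
var aliceShared = dh.computeKey(alicePriv, bobPub);
var bobShared   = dh.computeKey(bobPriv, alicePub);

console.log(aliceShared.toString('hex') === bobShared.toString('hex')); // true
```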
+ +### For the Curve25519 submodule + +* [Graydon Hoare](https://github.com/graydon): suggested clamping the private key by default for increased safety and uniformity with other implementations. +* [liliakai](https://github.com/liliakai): spotted an unused argument in some of the functions +* [RyanC](https://github.com/ryancdotorg): removed dependency of a function to the Javascript Math library +* [Guy Kloss](https://github.com/pohutukawa): performance improvements through bit-shift operations, performance and conformance testing, documentation, compatibility with the npm package ecosystem, and more +* [Michele Bini](https://github.com/rev22): originally wrote the Javascript implementation + + +Copyright and MIT licensing +--------------------------- + +* Copyright (c) 2012 Ron Garret +* Copyright (c) 2007, 2013, 2014 Michele Bini +* Copyright (c) 2014 Mega Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is furnished +to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/jodid25519/almond.0 b/node_modules/jodid25519/almond.0 new file mode 100644 index 0000000..55ffcc4 --- /dev/null +++ b/node_modules/jodid25519/almond.0 @@ -0,0 +1,42 @@ +/* + * Curve 25519-based cryptography collection. + * + * EC Diffie-Hellman (ECDH) based on Curve25519 and digital signatures (EdDSA) + * based on Ed25519. + * + * Copyright (c) 2012 Ron Garret + * Copyright (c) 2007, 2013, 2014 Michele Bini + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * You should have received a copy of the license along with this program. + */ +// See https://github.com/jrburke/almond#exporting-a-public-api +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // Allow using this built library as an AMD module + // in another project. That other project will only + // see this AMD call, not the internal modules in + // the closure below. + define([], factory); + } else if (typeof module === 'object' && module.exports) { + // Allow using this built library as a CommonJS module + module.exports = factory(); + } else { + // Browser globals case. Just assign the + // result to a property on the global. + root.jodid25519 = factory(); + } +}(this, function () { + if (typeof module === 'object' && module.exports) { + // If we're running under CommonJS, our dependencies get confused and + // each clobber module.exports which leads to bad behaviour because + // almond does asynchronous loading. So just pretend we're in the + // browser globals case, and make them write to those values instead. 
+ // TODO: ditch requirejs/almond and use browserify or something. + var __oldModule = module; + var __oldExports = exports; + var window = global; + module = undefined; + exports = undefined; + } diff --git a/node_modules/jodid25519/almond.1 b/node_modules/jodid25519/almond.1 new file mode 100644 index 0000000..cdb5e67 --- /dev/null +++ b/node_modules/jodid25519/almond.1 @@ -0,0 +1,13 @@ + if (typeof module === 'object' && module.exports) { + // Restore CommonJS exports once our dependencies have all finished + // loading. + module = __oldModule; + exports = __oldExports; + } + // The modules for your project will be inlined above + // this snippet. Ask almond to synchronously require the + // module value for 'main' here and return it as the + // value to use for the public API for the built file. + return require('jodid25519'); +})); +// See https://github.com/jrburke/almond#exporting-a-public-api diff --git a/node_modules/jodid25519/index.js b/node_modules/jodid25519/index.js new file mode 100644 index 0000000..8709839 --- /dev/null +++ b/node_modules/jodid25519/index.js @@ -0,0 +1,35 @@ +"use strict"; + +/* + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss + * + * You should have received a copy of the license along with this program. + */ + +var dh = require('./lib/dh'); +var eddsa = require('./lib/eddsa'); +var curve255 = require('./lib/curve255'); +var utils = require('./lib/utils'); + + /** + * @exports jodid25519 + * Curve 25519-based cryptography collection. + * + * @description + * EC Diffie-Hellman (ECDH) based on Curve25519 and digital signatures + * (EdDSA) based on Ed25519. + */ + var ns = {}; + + /** Module version indicator as string (format: [major.minor.patch]). */ + ns.VERSION = '0.7.1'; + + ns.dh = dh; + ns.eddsa = eddsa; + ns.curve255 = curve255; + ns.utils = utils; + +module.exports = ns; diff --git a/node_modules/jodid25519/jsdoc.json b/node_modules/jodid25519/jsdoc.json new file mode 100644 index 0000000..21eba9b --- /dev/null +++ b/node_modules/jodid25519/jsdoc.json @@ -0,0 +1,19 @@ +{ + "templates": { + "applicationName": "jodid25519 Library", + "disqus": "", + "googleAnalytics": "", + "openGraph": { + "title": "jodid25519 Library", + "type": "website", + "image": "", + "site_name": "", + "url": "" + }, + "meta": { + "title": "jodid25519 Library", + "description": "", + "keyword": "" + } + } +} diff --git a/node_modules/jodid25519/lib/core.js b/node_modules/jodid25519/lib/core.js new file mode 100644 index 0000000..f78fd74 --- /dev/null +++ b/node_modules/jodid25519/lib/core.js @@ -0,0 +1,481 @@ +"use strict"; +/** + * @fileOverview + * Core operations on curve 25519 required for the higher level modules. + */ + +/* + * Copyright (c) 2007, 2013, 2014 Michele Bini + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss, Michele Bini + * + * You should have received a copy of the license along with this program. + */ + +var crypto = require('crypto'); + + /** + * @exports jodid25519/core + * Core operations on curve 25519 required for the higher level modules. + * + * @description + * Core operations on curve 25519 required for the higher level modules. + * + *
+ * This core code is extracted from Michele Bini's curve255.js implementation, + * which is used as a base for Curve25519 ECDH and Ed25519 EdDSA operations. + *
+ */ + var ns = {}; + + function _setbit(n, c, v) { + var i = c >> 4; + var a = n[i]; + a = a + (1 << (c & 0xf)) * v; + n[i] = a; + } + + function _getbit(n, c) { + return (n[c >> 4] >> (c & 0xf)) & 1; + } + + function _ZERO() { + return [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + } + + function _ONE() { + return [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + } + + // Basepoint. + function _BASE() { + return [9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + } + + // return -1, 0, +1 when a is less than, equal, or greater than b + function _bigintcmp(a, b) { + // The following code is a bit tricky to avoid code branching + var c, abs_r, mask; + var r = 0; + for (c = 15; c >= 0; c--) { + var x = a[c]; + var y = b[c]; + r = r + (x - y) * (1 - r * r); + // http://graphics.stanford.edu/~seander/bithacks.html#IntegerAbs + // correct for [-294967295, 294967295] + mask = r >> 31; + abs_r = (r + mask) ^ mask; + // http://stackoverflow.com/questions/596467/how-do-i-convert-a-number-to-an-integer-in-javascript + // this rounds towards zero + r = ~~((r << 1) / (abs_r + 1)); + } + return r; + } + + function _bigintadd(a, b) { + var r = []; + var v; + r[0] = (v = a[0] + b[0]) & 0xffff; + r[1] = (v = (v >>> 16) + a[1] + b[1]) & 0xffff; + r[2] = (v = (v >>> 16) + a[2] + b[2]) & 0xffff; + r[3] = (v = (v >>> 16) + a[3] + b[3]) & 0xffff; + r[4] = (v = (v >>> 16) + a[4] + b[4]) & 0xffff; + r[5] = (v = (v >>> 16) + a[5] + b[5]) & 0xffff; + r[6] = (v = (v >>> 16) + a[6] + b[6]) & 0xffff; + r[7] = (v = (v >>> 16) + a[7] + b[7]) & 0xffff; + r[8] = (v = (v >>> 16) + a[8] + b[8]) & 0xffff; + r[9] = (v = (v >>> 16) + a[9] + b[9]) & 0xffff; + r[10] = (v = (v >>> 16) + a[10] + b[10]) & 0xffff; + r[11] = (v = (v >>> 16) + a[11] + b[11]) & 0xffff; + r[12] = (v = (v >>> 16) + a[12] + b[12]) & 0xffff; + r[13] = (v = (v >>> 16) + a[13] + b[13]) & 0xffff; + r[14] = (v = (v >>> 16) + a[14] + b[14]) & 0xffff; + r[15] = (v >>> 16) + a[15] + b[15]; + return r; + } + + function _bigintsub(a, b) { + var r = []; + var v; + r[0] = (v = 0x80000 + a[0] - b[0]) & 0xffff; + r[1] = (v = (v >>> 16) + 0x7fff8 + a[1] - b[1]) & 0xffff; + r[2] = (v = (v >>> 16) + 0x7fff8 + a[2] - b[2]) & 0xffff; + r[3] = (v = (v >>> 16) + 0x7fff8 + a[3] - b[3]) & 0xffff; + r[4] = (v = (v >>> 16) + 0x7fff8 + a[4] - b[4]) & 0xffff; + r[5] = (v = (v >>> 16) + 0x7fff8 + a[5] - b[5]) & 0xffff; + r[6] = (v = (v >>> 16) + 0x7fff8 + a[6] - b[6]) & 0xffff; + r[7] = (v = (v >>> 16) + 0x7fff8 + a[7] - b[7]) & 0xffff; + r[8] = (v = (v >>> 16) + 0x7fff8 + a[8] - b[8]) & 0xffff; + r[9] = (v = (v >>> 16) + 0x7fff8 + a[9] - b[9]) & 0xffff; + r[10] = (v = (v >>> 16) + 0x7fff8 + a[10] - b[10]) & 0xffff; + r[11] = (v = (v >>> 16) + 0x7fff8 + a[11] - b[11]) & 0xffff; + r[12] = (v = (v >>> 16) + 0x7fff8 + a[12] - b[12]) & 0xffff; + r[13] = (v = (v >>> 16) + 0x7fff8 + a[13] - b[13]) & 0xffff; + r[14] = (v = (v >>> 16) + 0x7fff8 + a[14] - b[14]) & 0xffff; + r[15] = (v >>> 16) - 8 + a[15] - b[15]; + return r; + } + + function _sqr8h(a7, a6, a5, a4, a3, a2, a1, a0) { + // 'division by 0x10000' can not be replaced by '>> 16' because + // more than 32 bits of precision are needed similarly + // 'multiplication by 2' cannot be replaced by '<< 1' + var r = []; + var v; + r[0] = (v = a0 * a0) & 0xffff; + r[1] = (v = (0 | (v / 0x10000)) + 2 * a0 * a1) & 0xffff; + r[2] = (v = (0 | (v / 0x10000)) + 2 * a0 * a2 + a1 * a1) & 0xffff; + r[3] = (v = (0 | (v / 0x10000)) + 2 * a0 * a3 + 2 * a1 * a2) & 0xffff; + r[4] = (v = (0 | (v / 0x10000)) + 2 * a0 * a4 + 2 * a1 * a3 + a2 + * a2) 
& 0xffff; + r[5] = (v = (0 | (v / 0x10000)) + 2 * a0 * a5 + 2 * a1 * a4 + 2 + * a2 * a3) & 0xffff; + r[6] = (v = (0 | (v / 0x10000)) + 2 * a0 * a6 + 2 * a1 * a5 + 2 + * a2 * a4 + a3 * a3) & 0xffff; + r[7] = (v = (0 | (v / 0x10000)) + 2 * a0 * a7 + 2 * a1 * a6 + 2 + * a2 * a5 + 2 * a3 * a4) & 0xffff; + r[8] = (v = (0 | (v / 0x10000)) + 2 * a1 * a7 + 2 * a2 * a6 + 2 + * a3 * a5 + a4 * a4) & 0xffff; + r[9] = (v = (0 | (v / 0x10000)) + 2 * a2 * a7 + 2 * a3 * a6 + 2 + * a4 * a5) & 0xffff; + r[10] = (v = (0 | (v / 0x10000)) + 2 * a3 * a7 + 2 * a4 * a6 + + a5 * a5) & 0xffff; + r[11] = (v = (0 | (v / 0x10000)) + 2 * a4 * a7 + 2 * a5 * a6) & 0xffff; + r[12] = (v = (0 | (v / 0x10000)) + 2 * a5 * a7 + a6 * a6) & 0xffff; + r[13] = (v = (0 | (v / 0x10000)) + 2 * a6 * a7) & 0xffff; + r[14] = (v = (0 | (v / 0x10000)) + a7 * a7) & 0xffff; + r[15] = 0 | (v / 0x10000); + return r; + } + + function _sqrmodp(a) { + var x = _sqr8h(a[15], a[14], a[13], a[12], a[11], a[10], a[9], + a[8]); + var z = _sqr8h(a[7], a[6], a[5], a[4], a[3], a[2], a[1], a[0]); + var y = _sqr8h(a[15] + a[7], a[14] + a[6], a[13] + a[5], a[12] + + a[4], + a[11] + a[3], a[10] + a[2], a[9] + a[1], a[8] + + a[0]); + var r = []; + var v; + r[0] = (v = 0x800000 + z[0] + (y[8] - x[8] - z[8] + x[0] - 0x80) + * 38) & 0xffff; + r[1] = (v = 0x7fff80 + (v >>> 16) + z[1] + + (y[9] - x[9] - z[9] + x[1]) * 38) & 0xffff; + r[2] = (v = 0x7fff80 + (v >>> 16) + z[2] + + (y[10] - x[10] - z[10] + x[2]) * 38) & 0xffff; + r[3] = (v = 0x7fff80 + (v >>> 16) + z[3] + + (y[11] - x[11] - z[11] + x[3]) * 38) & 0xffff; + r[4] = (v = 0x7fff80 + (v >>> 16) + z[4] + + (y[12] - x[12] - z[12] + x[4]) * 38) & 0xffff; + r[5] = (v = 0x7fff80 + (v >>> 16) + z[5] + + (y[13] - x[13] - z[13] + x[5]) * 38) & 0xffff; + r[6] = (v = 0x7fff80 + (v >>> 16) + z[6] + + (y[14] - x[14] - z[14] + x[6]) * 38) & 0xffff; + r[7] = (v = 0x7fff80 + (v >>> 16) + z[7] + + (y[15] - x[15] - z[15] + x[7]) * 38) & 0xffff; + r[8] = (v = 0x7fff80 + (v >>> 16) + z[8] + y[0] - x[0] - z[0] + + x[8] * 38) & 0xffff; + r[9] = (v = 0x7fff80 + (v >>> 16) + z[9] + y[1] - x[1] - z[1] + + x[9] * 38) & 0xffff; + r[10] = (v = 0x7fff80 + (v >>> 16) + z[10] + y[2] - x[2] - z[2] + + x[10] * 38) & 0xffff; + r[11] = (v = 0x7fff80 + (v >>> 16) + z[11] + y[3] - x[3] - z[3] + + x[11] * 38) & 0xffff; + r[12] = (v = 0x7fff80 + (v >>> 16) + z[12] + y[4] - x[4] - z[4] + + x[12] * 38) & 0xffff; + r[13] = (v = 0x7fff80 + (v >>> 16) + z[13] + y[5] - x[5] - z[5] + + x[13] * 38) & 0xffff; + r[14] = (v = 0x7fff80 + (v >>> 16) + z[14] + y[6] - x[6] - z[6] + + x[14] * 38) & 0xffff; + r[15] = 0x7fff80 + (v >>> 16) + z[15] + y[7] - x[7] - z[7] + + x[15] * 38; + _reduce(r); + return r; + } + + function _mul8h(a7, a6, a5, a4, a3, a2, a1, a0, b7, b6, b5, b4, b3, + b2, b1, b0) { + // 'division by 0x10000' can not be replaced by '>> 16' because + // more than 32 bits of precision are needed + var r = []; + var v; + r[0] = (v = a0 * b0) & 0xffff; + r[1] = (v = (0 | (v / 0x10000)) + a0 * b1 + a1 * b0) & 0xffff; + r[2] = (v = (0 | (v / 0x10000)) + a0 * b2 + a1 * b1 + a2 * b0) & 0xffff; + r[3] = (v = (0 | (v / 0x10000)) + a0 * b3 + a1 * b2 + a2 * b1 + + a3 * b0) & 0xffff; + r[4] = (v = (0 | (v / 0x10000)) + a0 * b4 + a1 * b3 + a2 * b2 + + a3 * b1 + a4 * b0) & 0xffff; + r[5] = (v = (0 | (v / 0x10000)) + a0 * b5 + a1 * b4 + a2 * b3 + + a3 * b2 + a4 * b1 + a5 * b0) & 0xffff; + r[6] = (v = (0 | (v / 0x10000)) + a0 * b6 + a1 * b5 + a2 * b4 + + a3 * b3 + a4 * b2 + a5 * b1 + a6 * b0) & 0xffff; + r[7] = (v = (0 | (v / 0x10000)) + a0 * b7 + a1 * b6 + a2 * 
b5 + + a3 * b4 + a4 * b3 + a5 * b2 + a6 * b1 + a7 * b0) & 0xffff; + r[8] = (v = (0 | (v / 0x10000)) + a1 * b7 + a2 * b6 + a3 * b5 + + a4 * b4 + a5 * b3 + a6 * b2 + a7 * b1) & 0xffff; + r[9] = (v = (0 | (v / 0x10000)) + a2 * b7 + a3 * b6 + a4 * b5 + + a5 * b4 + a6 * b3 + a7 * b2) & 0xffff; + r[10] = (v = (0 | (v / 0x10000)) + a3 * b7 + a4 * b6 + a5 * b5 + + a6 * b4 + a7 * b3) & 0xffff; + r[11] = (v = (0 | (v / 0x10000)) + a4 * b7 + a5 * b6 + a6 * b5 + + a7 * b4) & 0xffff; + r[12] = (v = (0 | (v / 0x10000)) + a5 * b7 + a6 * b6 + a7 * b5) & 0xffff; + r[13] = (v = (0 | (v / 0x10000)) + a6 * b7 + a7 * b6) & 0xffff; + r[14] = (v = (0 | (v / 0x10000)) + a7 * b7) & 0xffff; + r[15] = (0 | (v / 0x10000)); + return r; + } + + function _mulmodp(a, b) { + // Karatsuba multiplication scheme: x*y = (b^2+b)*x1*y1 - + // b*(x1-x0)*(y1-y0) + (b+1)*x0*y0 + var x = _mul8h(a[15], a[14], a[13], a[12], a[11], a[10], a[9], + a[8], b[15], b[14], b[13], b[12], b[11], b[10], + b[9], b[8]); + var z = _mul8h(a[7], a[6], a[5], a[4], a[3], a[2], a[1], a[0], + b[7], b[6], b[5], b[4], b[3], b[2], b[1], b[0]); + var y = _mul8h(a[15] + a[7], a[14] + a[6], a[13] + a[5], a[12] + + a[4], + a[11] + a[3], a[10] + a[2], a[9] + a[1], a[8] + + a[0], + b[15] + b[7], b[14] + b[6], b[13] + b[5], b[12] + + b[4], + b[11] + b[3], b[10] + b[2], b[9] + b[1], b[8] + + b[0]); + var r = []; + var v; + r[0] = (v = 0x800000 + z[0] + (y[8] - x[8] - z[8] + x[0] - 0x80) + * 38) & 0xffff; + r[1] = (v = 0x7fff80 + (v >>> 16) + z[1] + + (y[9] - x[9] - z[9] + x[1]) * 38) & 0xffff; + r[2] = (v = 0x7fff80 + (v >>> 16) + z[2] + + (y[10] - x[10] - z[10] + x[2]) * 38) & 0xffff; + r[3] = (v = 0x7fff80 + (v >>> 16) + z[3] + + (y[11] - x[11] - z[11] + x[3]) * 38) & 0xffff; + r[4] = (v = 0x7fff80 + (v >>> 16) + z[4] + + (y[12] - x[12] - z[12] + x[4]) * 38) & 0xffff; + r[5] = (v = 0x7fff80 + (v >>> 16) + z[5] + + (y[13] - x[13] - z[13] + x[5]) * 38) & 0xffff; + r[6] = (v = 0x7fff80 + (v >>> 16) + z[6] + + (y[14] - x[14] - z[14] + x[6]) * 38) & 0xffff; + r[7] = (v = 0x7fff80 + (v >>> 16) + z[7] + + (y[15] - x[15] - z[15] + x[7]) * 38) & 0xffff; + r[8] = (v = 0x7fff80 + (v >>> 16) + z[8] + y[0] - x[0] - z[0] + + x[8] * 38) & 0xffff; + r[9] = (v = 0x7fff80 + (v >>> 16) + z[9] + y[1] - x[1] - z[1] + + x[9] * 38) & 0xffff; + r[10] = (v = 0x7fff80 + (v >>> 16) + z[10] + y[2] - x[2] - z[2] + + x[10] * 38) & 0xffff; + r[11] = (v = 0x7fff80 + (v >>> 16) + z[11] + y[3] - x[3] - z[3] + + x[11] * 38) & 0xffff; + r[12] = (v = 0x7fff80 + (v >>> 16) + z[12] + y[4] - x[4] - z[4] + + x[12] * 38) & 0xffff; + r[13] = (v = 0x7fff80 + (v >>> 16) + z[13] + y[5] - x[5] - z[5] + + x[13] * 38) & 0xffff; + r[14] = (v = 0x7fff80 + (v >>> 16) + z[14] + y[6] - x[6] - z[6] + + x[14] * 38) & 0xffff; + r[15] = 0x7fff80 + (v >>> 16) + z[15] + y[7] - x[7] - z[7] + + x[15] * 38; + _reduce(r); + return r; + } + + function _reduce(arr) { + var aCopy = arr.slice(0); + var choice = [arr, aCopy]; + var v = arr[15]; + // Use the dummy copy instead of just returning to be more constant time. 
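+        // (Added note) The field prime is p = 2^255 - 19, so 2^255 is congruent to 19 (mod p):
+        // the part of limb 15 above its low 15 bits is stripped off, multiplied by 19
+        // and folded back into the lowest limb, after which the carry is propagated
+        // through the remaining 16-bit limbs.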
+ var a = choice[(v < 0x8000) & 1]; + a[15] = v & 0x7fff; + // >32-bits of precision are required here so '/ 0x8000' can not be + // replaced by the arithmetic equivalent '>>> 15' + v = (0 | (v / 0x8000)) * 19; + a[0] = (v += a[0]) & 0xffff; + v = v >>> 16; + a[1] = (v += a[1]) & 0xffff; + v = v >>> 16; + a[2] = (v += a[2]) & 0xffff; + v = v >>> 16; + a[3] = (v += a[3]) & 0xffff; + v = v >>> 16; + a[4] = (v += a[4]) & 0xffff; + v = v >>> 16; + a[5] = (v += a[5]) & 0xffff; + v = v >>> 16; + a[6] = (v += a[6]) & 0xffff; + v = v >>> 16; + a[7] = (v += a[7]) & 0xffff; + v = v >>> 16; + a[8] = (v += a[8]) & 0xffff; + v = v >>> 16; + a[9] = (v += a[9]) & 0xffff; + v = v >>> 16; + a[10] = (v += a[10]) & 0xffff; + v = v >>> 16; + a[11] = (v += a[11]) & 0xffff; + v = v >>> 16; + a[12] = (v += a[12]) & 0xffff; + v = v >>> 16; + a[13] = (v += a[13]) & 0xffff; + v = v >>> 16; + a[14] = (v += a[14]) & 0xffff; + v = v >>> 16; + a[15] += v; + } + + function _addmodp(a, b) { + var r = []; + var v; + r[0] = (v = ((0 | (a[15] >>> 15)) + (0 | (b[15] >>> 15))) * 19 + + a[0] + b[0]) & 0xffff; + r[1] = (v = (v >>> 16) + a[1] + b[1]) & 0xffff; + r[2] = (v = (v >>> 16) + a[2] + b[2]) & 0xffff; + r[3] = (v = (v >>> 16) + a[3] + b[3]) & 0xffff; + r[4] = (v = (v >>> 16) + a[4] + b[4]) & 0xffff; + r[5] = (v = (v >>> 16) + a[5] + b[5]) & 0xffff; + r[6] = (v = (v >>> 16) + a[6] + b[6]) & 0xffff; + r[7] = (v = (v >>> 16) + a[7] + b[7]) & 0xffff; + r[8] = (v = (v >>> 16) + a[8] + b[8]) & 0xffff; + r[9] = (v = (v >>> 16) + a[9] + b[9]) & 0xffff; + r[10] = (v = (v >>> 16) + a[10] + b[10]) & 0xffff; + r[11] = (v = (v >>> 16) + a[11] + b[11]) & 0xffff; + r[12] = (v = (v >>> 16) + a[12] + b[12]) & 0xffff; + r[13] = (v = (v >>> 16) + a[13] + b[13]) & 0xffff; + r[14] = (v = (v >>> 16) + a[14] + b[14]) & 0xffff; + r[15] = (v >>> 16) + (a[15] & 0x7fff) + (b[15] & 0x7fff); + return r; + } + + function _submodp(a, b) { + var r = []; + var v; + r[0] = (v = 0x80000 + + ((0 | (a[15] >>> 15)) - (0 | (b[15] >>> 15)) - 1) + * 19 + a[0] - b[0]) & 0xffff; + r[1] = (v = (v >>> 16) + 0x7fff8 + a[1] - b[1]) & 0xffff; + r[2] = (v = (v >>> 16) + 0x7fff8 + a[2] - b[2]) & 0xffff; + r[3] = (v = (v >>> 16) + 0x7fff8 + a[3] - b[3]) & 0xffff; + r[4] = (v = (v >>> 16) + 0x7fff8 + a[4] - b[4]) & 0xffff; + r[5] = (v = (v >>> 16) + 0x7fff8 + a[5] - b[5]) & 0xffff; + r[6] = (v = (v >>> 16) + 0x7fff8 + a[6] - b[6]) & 0xffff; + r[7] = (v = (v >>> 16) + 0x7fff8 + a[7] - b[7]) & 0xffff; + r[8] = (v = (v >>> 16) + 0x7fff8 + a[8] - b[8]) & 0xffff; + r[9] = (v = (v >>> 16) + 0x7fff8 + a[9] - b[9]) & 0xffff; + r[10] = (v = (v >>> 16) + 0x7fff8 + a[10] - b[10]) & 0xffff; + r[11] = (v = (v >>> 16) + 0x7fff8 + a[11] - b[11]) & 0xffff; + r[12] = (v = (v >>> 16) + 0x7fff8 + a[12] - b[12]) & 0xffff; + r[13] = (v = (v >>> 16) + 0x7fff8 + a[13] - b[13]) & 0xffff; + r[14] = (v = (v >>> 16) + 0x7fff8 + a[14] - b[14]) & 0xffff; + r[15] = (v >>> 16) + 0x7ff8 + (a[15] & 0x7fff) + - (b[15] & 0x7fff); + return r; + } + + function _invmodp(a) { + var c = a; + var i = 250; + while (--i) { + a = _sqrmodp(a); + a = _mulmodp(a, c); + } + a = _sqrmodp(a); + a = _sqrmodp(a); + a = _mulmodp(a, c); + a = _sqrmodp(a); + a = _sqrmodp(a); + a = _mulmodp(a, c); + a = _sqrmodp(a); + a = _mulmodp(a, c); + return a; + } + + function _mulasmall(a) { + // 'division by 0x10000' can not be replaced by '>> 16' because + // more than 32 bits of precision are needed + var m = 121665; + var r = []; + var v; + r[0] = (v = a[0] * m) & 0xffff; + r[1] = (v = (0 | (v / 0x10000)) + a[1] * m) & 0xffff; + 
r[2] = (v = (0 | (v / 0x10000)) + a[2] * m) & 0xffff; + r[3] = (v = (0 | (v / 0x10000)) + a[3] * m) & 0xffff; + r[4] = (v = (0 | (v / 0x10000)) + a[4] * m) & 0xffff; + r[5] = (v = (0 | (v / 0x10000)) + a[5] * m) & 0xffff; + r[6] = (v = (0 | (v / 0x10000)) + a[6] * m) & 0xffff; + r[7] = (v = (0 | (v / 0x10000)) + a[7] * m) & 0xffff; + r[8] = (v = (0 | (v / 0x10000)) + a[8] * m) & 0xffff; + r[9] = (v = (0 | (v / 0x10000)) + a[9] * m) & 0xffff; + r[10] = (v = (0 | (v / 0x10000)) + a[10] * m) & 0xffff; + r[11] = (v = (0 | (v / 0x10000)) + a[11] * m) & 0xffff; + r[12] = (v = (0 | (v / 0x10000)) + a[12] * m) & 0xffff; + r[13] = (v = (0 | (v / 0x10000)) + a[13] * m) & 0xffff; + r[14] = (v = (0 | (v / 0x10000)) + a[14] * m) & 0xffff; + r[15] = (0 | (v / 0x10000)) + a[15] * m; + _reduce(r); + return r; + } + + function _dbl(x, z) { + var x_2, z_2, m, n, o; + m = _sqrmodp(_addmodp(x, z)); + n = _sqrmodp(_submodp(x, z)); + o = _submodp(m, n); + x_2 = _mulmodp(n, m); + z_2 = _mulmodp(_addmodp(_mulasmall(o), m), o); + return [x_2, z_2]; + } + + function _sum(x, z, x_p, z_p, x_1) { + var x_3, z_3, p, q; + p = _mulmodp(_submodp(x, z), _addmodp(x_p, z_p)); + q = _mulmodp(_addmodp(x, z), _submodp(x_p, z_p)); + x_3 = _sqrmodp(_addmodp(p, q)); + z_3 = _mulmodp(_sqrmodp(_submodp(p, q)), x_1); + return [x_3, z_3]; + } + + function _generateKey(curve25519) { + var buffer = crypto.randomBytes(32); + + // For Curve25519 DH keys, we need to apply some bit mask on generated + // keys: + // * clear bit 0, 1, 2 of first byte + // * clear bit 7 of last byte + // * set bit 6 of last byte + if (curve25519 === true) { + buffer[0] &= 0xf8; + buffer[31] = (buffer[31] & 0x7f) | 0x40; + } + var result = []; + for (var i = 0; i < buffer.length; i++) { + result.push(String.fromCharCode(buffer[i])); + } + return result.join(''); + } + + // Expose some functions to the outside through this name space. + // Note: This is not part of the public API. + ns.getbit = _getbit; + ns.setbit = _setbit; + ns.addmodp = _addmodp; + ns.invmodp = _invmodp; + ns.mulmodp = _mulmodp; + ns.reduce = _reduce; + ns.dbl = _dbl; + ns.sum = _sum; + ns.ZERO = _ZERO; + ns.ONE = _ONE; + ns.BASE = _BASE; + ns.bigintadd = _bigintadd; + ns.bigintsub = _bigintsub; + ns.bigintcmp = _bigintcmp; + ns.mulmodp = _mulmodp; + ns.sqrmodp = _sqrmodp; + ns.generateKey = _generateKey; + + +module.exports = ns; diff --git a/node_modules/jodid25519/lib/curve255.js b/node_modules/jodid25519/lib/curve255.js new file mode 100644 index 0000000..3978b46 --- /dev/null +++ b/node_modules/jodid25519/lib/curve255.js @@ -0,0 +1,221 @@ +"use strict"; +/** + * @fileOverview + * Core operations on curve 25519 required for the higher level modules. + */ + +/* + * Copyright (c) 2007, 2013, 2014 Michele Bini + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss, Michele Bini + * + * You should have received a copy of the license along with this program. + */ + +var core = require('./core'); +var utils = require('./utils'); + + /** + * @exports jodid25519/curve255 + * Legacy compatibility module for Michele Bini's previous curve255.js. + * + * @description + * Legacy compatibility module for Michele Bini's previous curve255.js. + * + *
+ * This code presents an API with all key formats as previously available + * from Michele Bini's curve255.js implementation. + *
+ */ + var ns = {}; + + function curve25519_raw(f, c) { + var a, x_1, q; + + x_1 = c; + a = core.dbl(x_1, core.ONE()); + q = [x_1, core.ONE()]; + + var n = 255; + + while (core.getbit(f, n) == 0) { + n--; + // For correct constant-time operation, bit 255 should always be + // set to 1 so the following 'while' loop is never entered. + if (n < 0) { + return core.ZERO(); + } + } + n--; + + var aq = [a, q]; + + while (n >= 0) { + var r, s; + var b = core.getbit(f, n); + r = core.sum(aq[0][0], aq[0][1], aq[1][0], aq[1][1], x_1); + s = core.dbl(aq[1 - b][0], aq[1 - b][1]); + aq[1 - b] = s; + aq[b] = r; + n--; + } + q = aq[1]; + + q[1] = core.invmodp(q[1]); + q[0] = core.mulmodp(q[0], q[1]); + core.reduce(q[0]); + return q[0]; + } + + function curve25519b32(a, b) { + return _base32encode(curve25519(_base32decode(a), + _base32decode(b))); + } + + function curve25519(f, c) { + if (!c) { + c = core.BASE(); + } + f[0] &= 0xFFF8; + f[15] = (f[15] & 0x7FFF) | 0x4000; + return curve25519_raw(f, c); + } + + function _hexEncodeVector(k) { + var hexKey = utils.hexEncode(k); + // Pad with '0' at the front. + hexKey = new Array(64 + 1 - hexKey.length).join('0') + hexKey; + // Invert bytes. + return hexKey.split(/(..)/).reverse().join(''); + } + + function _hexDecodeVector(v) { + // assert(length(x) == 64); + // Invert bytes. + var hexKey = v.split(/(..)/).reverse().join(''); + return utils.hexDecode(hexKey); + } + + + // Expose some functions to the outside through this name space. + + /** + * Computes the scalar product of a point on the curve 25519. + * + * This function is used for the DH key-exchange protocol. + * + * Before multiplication, some bit operations are applied to the + * private key to ensure it is a valid Curve25519 secret key. + * It is the user's responsibility to make sure that the private + * key is a uniformly random, secret value. + * + * @function + * @param f {array} + * Private key. + * @param c {array} + * Public point on the curve. If not given, the curve's base point is used. + * @returns {array} + * Key point resulting from scalar product. + */ + ns.curve25519 = curve25519; + + /** + * Computes the scalar product of a point on the curve 25519. + * + * This variant does not make sure that the private key is valid. + * The user has the responsibility to ensure the private key is + * valid or that this results in a safe protocol. Unless you know + * exactly what you are doing, you should not use this variant, + * please use 'curve25519' instead. + * + * @function + * @param f {array} + * Private key. + * @param c {array} + * Public point on the curve. If not given, the curve's base point is used. + * @returns {array} + * Key point resulting from scalar product. + */ + ns.curve25519_raw = curve25519_raw; + + /** + * Encodes the internal representation of a key to a canonical hex + * representation. + * + * This is the format commonly used in other libraries and for + * test vectors, and is equivalent to the hex dump of the key in + * little-endian binary format. + * + * @function + * @param n {array} + * Array representation of key. + * @returns {string} + * Hexadecimal string representation of key. + */ + ns.hexEncodeVector = _hexEncodeVector; + + /** + * Decodes a canonical hex representation of a key + * to an internally compatible array representation. + * + * @function + * @param n {string} + * Hexadecimal string representation of key. + * @returns {array} + * Array representation of key. 
+ */ + ns.hexDecodeVector = _hexDecodeVector; + + /** + * Encodes the internal representation of a key into a + * hexadecimal representation. + * + * This is a strict positional notation, most significant digit first. + * + * @function + * @param n {array} + * Array representation of key. + * @returns {string} + * Hexadecimal string representation of key. + */ + ns.hexencode = utils.hexEncode; + + /** + * Decodes a hex representation of a key to an internally + * compatible array representation. + * + * @function + * @param n {string} + * Hexadecimal string representation of key. + * @returns {array} + * Array representation of key. + */ + ns.hexdecode = utils.hexDecode; + + /** + * Encodes the internal representation of a key to a base32 + * representation. + * + * @function + * @param n {array} + * Array representation of key. + * @returns {string} + * Base32 string representation of key. + */ + ns.base32encode = utils.base32encode; + + /** + * Decodes a base32 representation of a key to an internally + * compatible array representation. + * + * @function + * @param n {string} + * Base32 string representation of key. + * @returns {array} + * Array representation of key. + */ + ns.base32decode = utils.base32decode; + +module.exports = ns; diff --git a/node_modules/jodid25519/lib/dh.js b/node_modules/jodid25519/lib/dh.js new file mode 100644 index 0000000..2f75494 --- /dev/null +++ b/node_modules/jodid25519/lib/dh.js @@ -0,0 +1,111 @@ +"use strict"; +/** + * @fileOverview + * EC Diffie-Hellman operations on Curve25519. + */ + +/* + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss + * + * You should have received a copy of the license along with this program. + */ + +var core = require('./core'); +var utils = require('./utils'); +var curve255 = require('./curve255'); + + + /** + * @exports jodid25519/dh + * EC Diffie-Hellman operations on Curve25519. + * + * @description + * EC Diffie-Hellman operations on Curve25519. + */ + var ns = {}; + + + function _toString(vector) { + var u = new Uint16Array(vector); + return (new Buffer(new Uint8Array(u.buffer))); + } + + function _fromString(vector) { + if (Buffer.isBuffer(vector)) { + var u = new Uint8Array(vector); + return (new Uint16Array(u.buffer)); + } + + var result = new Array(16); + for (var i = 0, l = 0; i < vector.length; i += 2) { + result[l] = (vector.charCodeAt(i + 1) << 8) | vector.charCodeAt(i); + l++; + } + return result; + } + + + /** + * Computes a key through scalar multiplication of a point on the curve 25519. + * + * This function is used for the DH key-exchange protocol. It computes a + * key based on a secret key with a public component (opponent's public key + * or curve base point if not given) by using scalar multiplication. + * + * Before multiplication, some bit operations are applied to the + * private key to ensure it is a valid Curve25519 secret key. + * It is the user's responsibility to make sure that the private + * key is a uniformly random, secret value. + * + * @function + * @param privateComponent {string} + * Private point as byte string on the curve. + * @param publicComponent {string} + * Public point as byte string on the curve. If not given, the curve's + * base point is used. + * @returns {string} + * Key point as byte string resulting from scalar product. 
+ */ + ns.computeKey = function(privateComponent, publicComponent) { + if (publicComponent) { + return _toString(curve255.curve25519(_fromString(privateComponent), + _fromString(publicComponent))); + } else { + return _toString(curve255.curve25519(_fromString(privateComponent))); + } + }; + + /** + * Computes the public key to a private key on the curve 25519. + * + * Before multiplication, some bit operations are applied to the + * private key to ensure it is a valid Curve25519 secret key. + * It is the user's responsibility to make sure that the private + * key is a uniformly random, secret value. + * + * @function + * @param privateKey {string} + * Private point as byte string on the curve. + * @returns {string} + * Public key point as byte string resulting from scalar product. + */ + ns.publicKey = function(privateKey) { + return _toString(curve255.curve25519(_fromString(privateKey))); + }; + + + /** + * Generates a new random private key of 32 bytes length (256 bit). + * + * @function + * @returns {string} + * Byte string containing a new random private key seed. + */ + ns.generateKey = function() { + return core.generateKey(true); + }; + +module.exports = ns; diff --git a/node_modules/jodid25519/lib/eddsa.js b/node_modules/jodid25519/lib/eddsa.js new file mode 100644 index 0000000..c384f32 --- /dev/null +++ b/node_modules/jodid25519/lib/eddsa.js @@ -0,0 +1,573 @@ +"use strict"; +/** + * @fileOverview + * Digital signature scheme based on Curve25519 (Ed25519 or EdDSA). + */ + +/* + * Copyright (c) 2011, 2012, 2014 Ron Garret + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss, Ron Garret + * + * You should have received a copy of the license along with this program. + */ + +var core = require('./core'); +var curve255 = require('./curve255'); +var utils = require('./utils'); +var BigInteger = require('jsbn').BigInteger; +var crypto = require('crypto'); + + /** + * @exports jodid25519/eddsa + * Digital signature scheme based on Curve25519 (Ed25519 or EdDSA). + * + * @description + * Digital signature scheme based on Curve25519 (Ed25519 or EdDSA). + * + *
+ * This code is adapted from fast-djbec.js, a faster but more complicated + * version of the Ed25519 encryption scheme (as compared to djbec.js). + * It uses two different representations for big integers: The jsbn + * BigInteger class, which can represent arbitrary-length numbers, and a + * special fixed-length representation optimised for 256-bit integers. + * The reason both are needed is that the Ed25519 algorithm requires some + * 512-bit numbers.
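+ * (Added note) In the code below the fixed-length representation is the _bi255
+ * wrapper (sixteen 16-bit limbs, enough for field elements), while _bi wraps a
+ * jsbn BigInteger and is used where 512-bit values occur, e.g. the SHA-512 based
+ * _inthash and the reduction modulo the group order _L_BI.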
+ */ + var ns = {}; + + function _bi255(value) { + if (!(this instanceof _bi255)) { + return new _bi255(value); + } + if (typeof value === 'undefined') { + return _ZERO; + } + var c = value.constructor; + if ((c === Array || c === Uint16Array || c === Uint32Array) && (value.length === 16)) { + this.n = value; + } else if ((c === Array) && (value.length === 32)) { + this.n = _bytes2bi255(value).n; + } else if (c === String) { + this.n = utils.hexDecode(value); + } else if (c === Number) { + this.n = [value & 0xffff, + value >> 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + } else if (value instanceof _bi255) { + this.n = value.n.slice(0); // Copy constructor + } else { + throw "Bad argument for bignum: " + value; + } + } + + _bi255.prototype = { + 'toString' : function() { + return utils.hexEncode(this.n); + }, + 'toSource' : function() { + return '_' + utils.hexEncode(this.n); + }, + 'plus' : function(n1) { + return _bi255(core.bigintadd(this.n, n1.n)); + }, + 'minus' : function(n1) { + return _bi255(core.bigintsub(this.n, n1.n)).modq(); + }, + 'times' : function(n1) { + return _bi255(core.mulmodp(this.n, n1.n)); + }, + 'divide' : function(n1) { + return this.times(n1.inv()); + }, + 'sqr' : function() { + return _bi255(core.sqrmodp(this.n)); + }, + 'cmp' : function(n1) { + return core.bigintcmp(this.n, n1.n); + }, + 'equals' : function(n1) { + return this.cmp(n1) === 0; + }, + 'isOdd' : function() { + return (this.n[0] & 1) === 1; + }, + 'shiftLeft' : function(cnt) { + _shiftL(this.n, cnt); + return this; + }, + 'shiftRight' : function(cnt) { + _shiftR(this.n, cnt); + return this; + }, + 'inv' : function() { + return _bi255(core.invmodp(this.n)); + }, + 'pow' : function(e) { + return _bi255(_pow(this.n, e.n)); + }, + 'modq' : function() { + return _modq(this); + }, + 'bytes' : function() { + return _bi255_bytes(this); + } + }; + + function _shiftL(n, cnt) { + var lastcarry = 0; + for (var i = 0; i < 16; i++) { + var carry = n[i] >> (16 - cnt); + n[i] = (n[i] << cnt) & 0xffff | lastcarry; + lastcarry = carry; + } + return n; + } + + function _shiftR(n, cnt) { + var lastcarry = 0; + for (var i = 15; i >= 0; i--) { + var carry = n[i] << (16 - cnt) & 0xffff; + n[i] = (n[i] >> cnt) | lastcarry; + lastcarry = carry; + } + return n; + } + + function _bi255_bytes(n) { + n = _bi255(n); // Make a copy because shiftRight is destructive + var a = new Array(32); + for (var i = 31; i >= 0; i--) { + a[i] = n.n[0] & 0xff; + n.shiftRight(8); + } + return a; + } + + function _bytes2bi255(a) { + var n = _ZERO; + for (var i = 0; i < 32; i++) { + n.shiftLeft(8); + n = n.plus(_bi255(a[i])); + } + return n; + } + + function _pow(n, e) { + var result = core.ONE(); + for (var i = 0; i < 256; i++) { + if (core.getbit(e, i) === 1) { + result = core.mulmodp(result, n); + } + n = core.sqrmodp(n); + } + return result; + } + + var _ZERO = _bi255(0); + var _ONE = _bi255(1); + var _TWO = _bi255(2); + // This is the core prime. 
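+    // (Added note) _Q is that prime, 2^255 - 19, stored as sixteen little-endian 16-bit
+    // limbs: the lowest limb is 0xffff - 18 = 0xffed and the top limb is 0x7fff.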
+ var _Q = _bi255([0xffff - 18, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, + 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, + 0xffff, 0xffff, 0x7fff]); + + function _modq(n) { + core.reduce(n.n); + if (n.cmp(_Q) >= 0) { + return _modq(n.minus(_Q)); + } + if (n.cmp(_ZERO) === -1) { + return _modq(n.plus(_Q)); + } else { + return n; + } + } + + // _RECOVERY_EXPONENT = _Q.plus(_bi255(3)).divide(_bi255(8)); + var _RECOVERY_EXPONENT = _bi255('0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe'); + // _D = _Q.minus(_bi255(121665)).divide(_bi255(121666)); + var _D = _bi255('52036cee2b6ffe738cc740797779e89800700a4d4141d8ab75eb4dca135978a3'); + // _I = _TWO.pow(_Q.minus(_ONE).divide(_bi255(4))); + var _I = _bi255('2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0'); + // _L = _TWO.pow(_bi255(252)).plus(_bi255('14def9dea2f79cd65812631a5cf5d3ed')); + var _L = _bi255('1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed'); + var _L_BI = _bi('1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed', 16); + + + // //////////////////////////////////////////////////////////// + + function _isoncurve(p) { + var x = p[0]; + var y = p[1]; + var xsqr = x.sqr(); + var ysqr = y.sqr(); + var v = _D.times(xsqr).times(ysqr); + return ysqr.minus(xsqr).minus(_ONE).minus(v).modq().equals(_ZERO); + } + + function _xrecover(y) { + var ysquared = y.sqr(); + var xx = ysquared.minus(_ONE).divide(_ONE.plus(_D.times(ysquared))); + var x = xx.pow(_RECOVERY_EXPONENT); + if (!(x.times(x).minus(xx).equals(_ZERO))) { + x = x.times(_I); + } + if (x.isOdd()) { + x = _Q.minus(x); + } + return x; + } + + function _x_pt_add(pt1, pt2) { + var x1 = pt1[0]; + var y1 = pt1[1]; + var z1 = pt1[2]; + var t1 = pt1[3]; + var x2 = pt2[0]; + var y2 = pt2[1]; + var z2 = pt2[2]; + var t2 = pt2[3]; + var A = y1.minus(x1).times(y2.plus(x2)); + var B = y1.plus(x1).times(y2.minus(x2)); + var C = z1.times(_TWO).times(t2); + var D = t1.times(_TWO).times(z2); + var E = D.plus(C); + var F = B.minus(A); + var G = B.plus(A); + var H = D.minus(C); + return [E.times(F), G.times(H), F.times(G), E.times(H)]; + } + + function _xpt_double(pt1) { + var x1 = pt1[0]; + var y1 = pt1[1]; + var z1 = pt1[2]; + var A = x1.times(x1); + var B = y1.times(y1); + var C = _TWO.times(z1).times(z1); + var D = _Q.minus(A); + var J = x1.plus(y1); + var E = J.times(J).minus(A).minus(B); + var G = D.plus(B); + var F = G.minus(C); + var H = D.minus(B); + return [E.times(F), G.times(H), F.times(G), E.times(H)]; + } + + function _xpt_mult(pt, n) { + if (n.equals(_ZERO)) { + return [_ZERO, _ONE, _ONE, _ZERO]; + } + var odd = n.isOdd(); + n.shiftRight(1); + var value = _xpt_double(_xpt_mult(pt, n)); + return odd ? 
_x_pt_add(value, pt) : value; + } + + function _pt_xform(pt) { + var x = pt[0]; + var y = pt[1]; + return [x, y, _ONE, x.times(y)]; + } + + function _pt_unxform(pt) { + var x = pt[0]; + var y = pt[1]; + var z = pt[2]; + var invz = z.inv(); + return [x.times(invz), y.times(invz)]; + } + + function _scalarmult(pt, n) { + return _pt_unxform(_xpt_mult(_pt_xform(pt), n)); + } + + function _bytesgetbit(bytes, n) { + return (bytes[bytes.length - (n >>> 3) - 1] >> (n & 7)) & 1; + } + + function _xpt_mult_bytes(pt, bytes) { + var r = [_ZERO, _ONE, _ONE, _ZERO]; + for (var i = (bytes.length << 3) - 1; i >= 0; i--) { + r = _xpt_double(r); + if (_bytesgetbit(bytes, i) === 1) { + r = _x_pt_add(r, pt); + } + } + return r; + } + + function _scalarmultBytes(pt, bytes) { + return _pt_unxform(_xpt_mult_bytes(_pt_xform(pt), bytes)); + } + + var _by = _bi255(4).divide(_bi255(5)); + var _bx = _xrecover(_by); + var _bp = [_bx, _by]; + + function _encodeint(n) { + return n.bytes(32).reverse(); + } + function _decodeint(b) { + return _bi255(b.slice(0).reverse()); + } + + function _encodepoint(p) { + var v = _encodeint(p[1]); + if (p[0].isOdd()) { + v[31] |= 0x80; + } + return v; + } + + function _decodepoint(v) { + v = v.slice(0); + var signbit = v[31] >> 7; + v[31] &= 127; + var y = _decodeint(v); + var x = _xrecover(y); + if ((x.n[0] & 1) !== signbit) { + x = _Q.minus(x); + } + var p = [x, y]; + if (!_isoncurve(p)) { + throw ('Point is not on curve'); + } + return p; + } + + // ////////////////////////////////////////////////// + + /** + * Factory function to create a suitable BigInteger. + * + * @param value + * The value for the big integer. + * @param base {integer} + * Base of the conversion of elements in ``value``. + * @returns + * A BigInteger object. + */ + function _bi(value, base) { + if (base !== undefined) { + if (base === 256) { + return _bi(utils.string2bytes(value)); + } + return new BigInteger(value, base); + } else if (typeof value === 'string') { + return new BigInteger(value, 10); + } else if ((value instanceof Array) || (value instanceof Uint8Array) + || Buffer.isBuffer(value)) { + return new BigInteger(value); + } else if (typeof value === 'number') { + return new BigInteger(value.toString(), 10); + } else { + throw "Can't convert " + value + " to BigInteger"; + } + } + + function _bi2bytes(n, cnt) { + if (cnt === undefined) { + cnt = (n.bitLength() + 7) >>> 3; + } + var bytes = new Array(cnt); + for (var i = cnt - 1; i >= 0; i--) { + bytes[i] = n[0] & 255; // n.and(0xff); + n = n.shiftRight(8); + } + return bytes; + } + + BigInteger.prototype.bytes = function(n) { + return _bi2bytes(this, n); + }; + + // ///////////////////////////////////////////////////////// + + function _bytehash(s) { + var sha = crypto.createHash('sha512').update(s).digest(); + return _bi2bytes(_bi(sha), 64).reverse(); + } + + function _stringhash(s) { + var sha = crypto.createHash('sha512').update(s).digest(); + return _map(_chr, _bi2bytes(_bi(sha), 64)).join(''); + } + + function _inthash(s) { + // Need a leading 0 to prevent sign extension + return _bi([0].concat(_bytehash(s))); + } + + function _inthash_lo(s) { + return _bi255(_bytehash(s).slice(32, 64)); + } + + function _inthash_mod_l(s) { + return _inthash(s).mod(_L_BI); + } + + function _get_a(sk) { + var a = _inthash_lo(sk); + a.n[0] &= 0xfff8; + a.n[15] &= 0x3fff; + a.n[15] |= 0x4000; + return a; + } + + function _publickey(sk) { + return _encodepoint(_scalarmult(_bp, _get_a(sk))); + } + + function _map(f, l) { + var result = new Array(l.length); + for (var 
i = 0; i < l.length; i++) { + result[i] = f(l[i]); + } + return result; + } + + function _chr(n) { + return String.fromCharCode(n); + } + + function _ord(c) { + return c.charCodeAt(0); + } + + function _pt_add(p1, p2) { + return _pt_unxform(_x_pt_add(_pt_xform(p1), _pt_xform(p2))); + } + + + // Exports for the API. + + /** + * Checks whether a point is on the curve. + * + * @function + * @param point {string} + * The point to check for in a byte string representation. + * @returns {boolean} + * true if the point is on the curve, false otherwise. + */ + ns.isOnCurve = function(point) { + try { + _isoncurve(_decodepoint(utils.string2bytes(point))); + } catch(e) { + if (e === 'Point is not on curve') { + return false; + } else { + throw e; + } + } + return true; + }; + + + /** + * Computes the EdDSA public key. + * + *

Note: Seeds should be a byte string, not a unicode string containing + * multi-byte characters.

+ * + * @function + * @param keySeed {string} + * Private key seed in the form of a byte string. + * @returns {string} + * Public key as byte string computed from the private key seed + * (32 bytes). + */ + ns.publicKey = function(keySeed) { + return utils.bytes2string(_publickey(keySeed)); + }; + + + /** + * Computes an EdDSA signature of a message. + * + *

Notes:

+ * - Unicode messages need to be converted to a byte representation
+ *   (e. g. UTF-8).
+ * - If `publicKey` is given, and it is *not* a point of the curve,
+ *   the signature will be faulty, but no error will be thrown.
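+ *
+ * A minimal usage sketch (illustrative only; `message` is assumed to be a
+ * plain byte string):
+ *
+ *     var eddsa = require('jodid25519/lib/eddsa');
+ *     var message = 'hello';
+ *     var seed = eddsa.generateKeySeed();
+ *     var signature = eddsa.sign(message, seed);
+ *     var ok = eddsa.verify(signature, message, eddsa.publicKey(seed));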
+ * + * @function + * @param message {string} + * Message in the form of a byte string. + * @param keySeed {string} + * Private key seed in the form of a byte string. + * @param publicKey {string} + * Public key as byte string (if not present, it will be computed from + * the private key seed). + * @returns {string} + * Detached message signature in the form of a byte string (64 bytes). + */ + ns.sign = function(message, keySeed, publicKey) { + if (publicKey === undefined) { + publicKey = _publickey(keySeed); + } else { + publicKey = utils.string2bytes(publicKey); + } + var a = _bi(_get_a(keySeed).toString(), 16); + var hs = _stringhash(keySeed); + var r = _bytehash(hs.slice(32, 64) + message); + var rp = _scalarmultBytes(_bp, r); + var erp = _encodepoint(rp); + r = _bi(r).mod(_bi(1, 10).shiftLeft(512)); + var s = _map(_chr, erp).join('') + _map(_chr, publicKey).join('') + message; + s = _inthash_mod_l(s).multiply(a).add(r).mod(_L_BI); + return utils.bytes2string(erp.concat(_encodeint(s))); + }; + + + /** + * Verifies an EdDSA signature of a message with the public key. + * + *

Note: Unicode messages need to be converted to a byte representation + * (e. g. UTF-8).

+ * + * @function + * @param signature {string} + * Message signature in the form of a byte string. Can be detached + * (64 bytes), or attached to be sliced off. + * @param message {string} + * Message in the form of a byte string. + * @param publicKey {string} + * Public key as byte string (if not present, it will be computed from + * the private key seed). + * @returns {boolean} + * true, if the signature verifies. + */ + ns.verify = function(signature, message, publicKey) { + signature = utils.string2bytes(signature.slice(0, 64)); + publicKey = utils.string2bytes(publicKey); + var rpe = signature.slice(0, 32); + var rp = _decodepoint(rpe); + var a = _decodepoint(publicKey); + var s = _decodeint(signature.slice(32, 64)); + var h = _inthash(utils.bytes2string(rpe.concat(publicKey)) + message); + var v1 = _scalarmult(_bp, s); + var value = _scalarmultBytes(a, _bi2bytes(h)); + var v2 = _pt_add(rp, value); + return v1[0].equals(v2[0]) && v1[1].equals(v2[1]); + }; + + + /** + * Generates a new random private key seed of 32 bytes length (256 bit). + * + * @function + * @returns {string} + * Byte string containing a new random private key seed. + */ + ns.generateKeySeed = function() { + return core.generateKey(false); + }; + +module.exports = ns; diff --git a/node_modules/jodid25519/lib/utils.js b/node_modules/jodid25519/lib/utils.js new file mode 100644 index 0000000..c795231 --- /dev/null +++ b/node_modules/jodid25519/lib/utils.js @@ -0,0 +1,198 @@ +"use strict"; +/** + * @fileOverview + * A collection of general utility functions.. + */ + +/* + * Copyright (c) 2011, 2012, 2014 Ron Garret + * Copyright (c) 2007, 2013, 2014 Michele Bini + * Copyright (c) 2014 Mega Limited + * under the MIT License. + * + * Authors: Guy K. Kloss, Michele Bini, Ron Garret + * + * You should have received a copy of the license along with this program. + */ + +var core = require('./core'); + + /** + * @exports jodid25519/utils + * A collection of general utility functions.. + * + * @description + * A collection of general utility functions.. 
+ */ + var ns = {}; + + var _HEXCHARS = "0123456789abcdef"; + + function _hexencode(vector) { + var result = []; + for (var i = vector.length - 1; i >= 0; i--) { + var value = vector[i]; + result.push(_HEXCHARS.substr((value >>> 12) & 0x0f, 1)); + result.push(_HEXCHARS.substr((value >>> 8) & 0x0f, 1)); + result.push(_HEXCHARS.substr((value >>> 4) & 0x0f, 1)); + result.push(_HEXCHARS.substr(value & 0x0f, 1)); + } + return result.join(''); + } + + function _hexdecode(vector) { + var result = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + for (var i = vector.length - 1, l = 0; i >= 0; i -= 4) { + result[l] = (_HEXCHARS.indexOf(vector.charAt(i))) + | (_HEXCHARS.indexOf(vector.charAt(i - 1)) << 4) + | (_HEXCHARS.indexOf(vector.charAt(i - 2)) << 8) + | (_HEXCHARS.indexOf(vector.charAt(i - 3)) << 12); + l++; + } + return result; + } + + var _BASE32CHARS = "abcdefghijklmnopqrstuvwxyz234567"; + + var _BASE32VALUES = (function () { + var result = {}; + for (var i = 0; i < _BASE32CHARS.length; i++) { + result[_BASE32CHARS.charAt(i)] = i; + } + return result; + })(); + + function _base32encode(n) { + var c; + var r = ""; + for (c = 0; c < 255; c += 5) { + r = _BASE32CHARS.substr(core.getbit(n, c) + + (core.getbit(n, c + 1) << 1) + + (core.getbit(n, c + 2) << 2) + + (core.getbit(n, c + 3) << 3) + + (core.getbit(n, c + 4) << 4), 1) + + r; + } + return r; + } + + function _base32decode(n) { + var c = 0; + var r = core.ZERO(); + var l = n.length; + for (c = 0; (l > 0) && (c < 255); c += 5) { + l--; + var v = _BASE32VALUES[n.substr(l, 1)]; + core.setbit(r, c, v & 1); + v >>= 1; + core.setbit(r, c + 1, v & 1); + v >>= 1; + core.setbit(r, c + 2, v & 1); + v >>= 1; + core.setbit(r, c + 3, v & 1); + v >>= 1; + core.setbit(r, c + 4, v & 1); + } + return r; + } + + function _map(f, l) { + var result = new Array(l.length); + for (var i = 0; i < l.length; i++) { + result[i] = f(l[i]); + } + return result; + } + + function _chr(n) { + return String.fromCharCode(n); + } + + function _ord(c) { + return c.charCodeAt(0); + } + + function _bytes2string(bytes) { + return _map(_chr, bytes).join(''); + } + + function _string2bytes(s) { + return _map(_ord, s); + } + + + // Expose some functions to the outside through this name space. + + /** + * Encodes an array of unsigned 8-bit integers to a hex string. + * + * @function + * @param vector {array} + * Array containing the byte values. + * @returns {string} + * String containing vector in a hexadecimal representation. + */ + ns.hexEncode = _hexencode; + + + /** + * Decodes a hex string to an array of unsigned 8-bit integers. + * + * @function + * @param vector {string} + * String containing vector in a hexadecimal representation. + * @returns {array} + * Array containing the byte values. + */ + ns.hexDecode = _hexdecode; + + + /** + * Encodes an array of unsigned 8-bit integers using base32 encoding. + * + * @function + * @param vector {array} + * Array containing the byte values. + * @returns {string} + * String containing vector in a hexadecimal representation. + */ + ns.base32encode = _base32encode; + + + /** + * Decodes a base32 encoded string to an array of unsigned 8-bit integers. + * + * @function + * @param vector {string} + * String containing vector in a hexadecimal representation. + * @returns {array} + * Array containing the byte values. + */ + ns.base32decode = _base32decode; + + + /** + * Converts an unsigned 8-bit integer array representation to a byte string. + * + * @function + * @param vector {array} + * Array containing the byte values. 
+ * @returns {string} + * Byte string representation of vector. + */ + ns.bytes2string = _bytes2string; + + + /** + * Converts a byte string representation to an array of unsigned + * 8-bit integers. + * + * @function + * @param vector {array} + * Array containing the byte values. + * @returns {string} + * Byte string representation of vector. + */ + ns.string2bytes = _string2bytes; + +module.exports = ns; diff --git a/node_modules/jodid25519/package.json b/node_modules/jodid25519/package.json new file mode 100644 index 0000000..87e5a42 --- /dev/null +++ b/node_modules/jodid25519/package.json @@ -0,0 +1,48 @@ +{ + "name": "jodid25519", + "version": "1.0.2", + "description": "jodid25519 - Curve 25519-based cryptography", + "main": "index.js", + "directories": { + "src": "src", + "test": "test", + "doc": "doc" + }, + "scripts": { + "test": "mocha test/*_test.js" + }, + "homepage": "https://github.com/meganz/jodid25519", + "repository": { + "type": "git", + "url": "https://github.com/meganz/jodid25519.git" + }, + "bugs": { + "url": "https://github.com/meganz/jodid25519/issues" + }, + "keywords": [ + "Curve25519", + "Ed25519", + "ECDH", + "EdDSA", + "ECDSA", + "encryption", + "signing" + ], + "author": "Michele Bini, Ron Garret, Guy K. Kloss", + "license": "MIT", + "readmeFilename": "README.md", + "dependencies": { + "jsbn": "~0.1.0" + }, + "devDependencies": { + "almond": "~0.3.1", + "chai": "^3.0.0", + "mocha": "~2.0.1", + "istanbul": "~0.3.5", + "ibrik": "~2.0.0", + "dateformat": "~1.0.7-1.2.3", + "sinon": "~1.10.3", + "sinon-chai": "^2.8.0", + "jsdoc": "<=3.3.0" + } +} diff --git a/node_modules/js-tokens/LICENSE b/node_modules/js-tokens/LICENSE new file mode 100644 index 0000000..c9a4e1b --- /dev/null +++ b/node_modules/js-tokens/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014, 2015, 2016 Simon Lydell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/js-tokens/changelog.md b/node_modules/js-tokens/changelog.md new file mode 100644 index 0000000..b789fdd --- /dev/null +++ b/node_modules/js-tokens/changelog.md @@ -0,0 +1,82 @@ +### Version 2.0.0 (2016-06-19) ### + +- Added: Support for ES2016. In other words, support for the `**` exponentiation + operator. + +These are the breaking changes: + +- `'**'.match(jsTokens)` no longer returns `['*', '*']`, but `['**']`. +- `'**='.match(jsTokens)` no longer returns `['*', '*=']`, but `['**=']`. + + +### Version 1.0.3 (2016-03-27) ### + +- Improved: Made the regex ever so slightly smaller. +- Updated: The readme. 
+ + +### Version 1.0.2 (2015-10-18) ### + +- Improved: Limited npm package contents for a smaller download. Thanks to + @zertosh! + + +### Version 1.0.1 (2015-06-20) ### + +- Fixed: Declared an undeclared variable. + + +### Version 1.0.0 (2015-02-26) ### + +- Changed: Merged the 'operator' and 'punctuation' types into 'punctuator'. That + type is now equivalent to the Punctuator token in the ECMAScript + specification. (Backwards-incompatible change.) +- Fixed: A `-` followed by a number is now correctly matched as a punctuator + followed by a number. It used to be matched as just a number, but there is no + such thing as negative number literals. (Possibly backwards-incompatible + change.) + + +### Version 0.4.1 (2015-02-21) ### + +- Added: Support for the regex `u` flag. + + +### Version 0.4.0 (2015-02-21) ### + +- Improved: `jsTokens.matchToToken` performance. +- Added: Support for octal and binary number literals. +- Added: Support for template strings. + + +### Version 0.3.1 (2015-01-06) ### + +- Fixed: Support for unicode spaces. They used to be allowed in names (which is + very confusing), and some unicode newlines were wrongly allowed in strings and + regexes. + + +### Version 0.3.0 (2014-12-19) ### + +- Changed: The `jsTokens.names` array has been replaced with the + `jsTokens.matchToToken` function. The capturing groups of `jsTokens` are no + longer part of the public API; instead use said function. See this [gist] for + an example. (Backwards-incompatible change.) +- Changed: The empty string is now considered an “invalid” token, instead an + “empty” token (its own group). (Backwards-incompatible change.) +- Removed: component support. (Backwards-incompatible change.) + +[gist]: https://gist.github.com/lydell/be49dbf80c382c473004 + + +### Version 0.2.0 (2014-06-19) ### + +- Changed: Match ES6 function arrows (`=>`) as an operator, instead of its own + category (“functionArrow”), for simplicity. (Backwards-incompatible change.) +- Added: ES6 splats (`...`) are now matched as an operator (instead of three + punctuations). (Backwards-incompatible change.) + + +### Version 0.1.0 (2014-03-08) ### + +- Initial release. diff --git a/node_modules/js-tokens/index.js b/node_modules/js-tokens/index.js new file mode 100644 index 0000000..2e07040 --- /dev/null +++ b/node_modules/js-tokens/index.js @@ -0,0 +1,19 @@ +// Copyright 2014, 2015, 2016 Simon Lydell +// X11 (“MIT”) Licensed. (See LICENSE.) + +// This regex comes from regex.coffee, and is inserted here by generate-index.js +// (run `npm run build`). 
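+// Capturing groups, as consumed by matchToToken below: 1 string, 5 single-line
+// comment, 6 multi-line comment, 8 regex, 9 number, 10 name, 11 punctuator,
+// 12 whitespace; a match in no group is reported as "invalid".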
+module.exports = /((['"])(?:(?!\2|\\).|\\(?:\r\n|[\s\S]))*(\2)?|`(?:[^`\\$]|\\[\s\S]|\$(?!\{)|\$\{(?:[^{}]|\{[^}]*\}?)*\}?)*(`)?)|(\/\/.*)|(\/\*(?:[^*]|\*(?!\/))*(\*\/)?)|(\/(?!\*)(?:\[(?:(?![\]\\]).|\\.)*\]|(?![\/\]\\]).|\\.)+\/(?:(?!\s*(?:\b|[\u0080-\uFFFF$\\'"~({]|[+\-!](?!=)|\.?\d))|[gmiyu]{1,5}\b(?![\u0080-\uFFFF$\\]|\s*(?:[+\-*%&|^<>!=?({]|\/(?![\/*])))))|(0[xX][\da-fA-F]+|0[oO][0-7]+|0[bB][01]+|(?:\d*\.\d+|\d+\.?)(?:[eE][+-]?\d+)?)|((?!\d)(?:(?!\s)[$\w\u0080-\uFFFF]|\\u[\da-fA-F]{4}|\\u\{[\da-fA-F]{1,6}\})+)|(--|\+\+|&&|\|\||=>|\.{3}|(?:[+\-\/%&|^]|\*{1,2}|<{1,2}|>{1,3}|!=?|={1,2})=?|[?~.,:;[\](){}])|(\s+)|(^$|[\s\S])/g + +module.exports.matchToToken = function(match) { + var token = {type: "invalid", value: match[0]} + if (match[ 1]) token.type = "string" , token.closed = !!(match[3] || match[4]) + else if (match[ 5]) token.type = "comment" + else if (match[ 6]) token.type = "comment", token.closed = !!match[7] + else if (match[ 8]) token.type = "regex" + else if (match[ 9]) token.type = "number" + else if (match[10]) token.type = "name" + else if (match[11]) token.type = "punctuator" + else if (match[12]) token.type = "whitespace" + return token +} diff --git a/node_modules/js-tokens/package.json b/node_modules/js-tokens/package.json new file mode 100644 index 0000000..35b0d86 --- /dev/null +++ b/node_modules/js-tokens/package.json @@ -0,0 +1,30 @@ +{ + "name": "js-tokens", + "version": "2.0.0", + "author": "Simon Lydell", + "license": "MIT", + "description": "A regex that tokenizes JavaScript.", + "keywords": [ + "JavaScript", + "js", + "token", + "tokenize", + "regex" + ], + "files": [ + "index.js" + ], + "repository": "lydell/js-tokens", + "scripts": { + "test": "mocha --ui tdd", + "esprima-compare": "node esprima-compare ./index.js everything.js/es5.js", + "build": "node generate-index.js", + "dev": "npm run build && npm test" + }, + "devDependencies": { + "coffee-script": "~1.10.0", + "esprima": "^2.7.2", + "everything.js": "^1.0.3", + "mocha": "^2.5.3" + } +} diff --git a/node_modules/js-tokens/readme.md b/node_modules/js-tokens/readme.md new file mode 100644 index 0000000..68e470f --- /dev/null +++ b/node_modules/js-tokens/readme.md @@ -0,0 +1,217 @@ +Overview [![Build Status](https://travis-ci.org/lydell/js-tokens.png?branch=master)](https://travis-ci.org/lydell/js-tokens) +======== + +A regex that tokenizes JavaScript. + +```js +var jsTokens = require("js-tokens") + +var jsString = "var foo=opts.foo;\n..." + +jsString.match(jsTokens) +// ["var", " ", "foo", "=", "opts", ".", "foo", ";", "\n", ...] +``` + + +Installation +============ + +`npm install js-tokens` + +```js +var jsTokens = require("js-tokens") +``` + + +Usage +===== + +### `jsTokens` ### + +A regex with the `g` flag that matches JavaScript tokens. + +The regex _always_ matches, even invalid JavaScript and the empty string. + +The next match is always directly after the previous. + +### `var token = jsTokens.matchToToken(match)` ### + +Takes a `match` returned by `jsTokens.exec(string)`, and returns a `{type: +String, value: String}` object. The following types are available: + +- string +- comment +- regex +- number +- name +- punctuator +- whitespace +- invalid + +Multi-line comments and strings also have a `closed` property indicating if the +token was closed or not (see below). + +Comments and strings both come in several flavors. To distinguish them, check if +the token starts with `//`, `/*`, `'`, `"` or `` ` ``. + +Names are ECMAScript IdentifierNames, that is, including both identifiers and +keywords. 
You may use [is-keyword-js] to tell them apart. + +Whitespace includes both line terminators and other whitespace. + +For example usage, please see this [gist]. + +[is-keyword-js]: https://github.com/crissdev/is-keyword-js +[gist]: https://gist.github.com/lydell/be49dbf80c382c473004 + + +ECMAScript support +================== + +The intention is to always support the latest stable ECMAScript version. + +If adding support for a newer version requires changes, a new version with a +major verion bump will be released. + +Currently, [ECMAScript 2016] is supported. + +[ECMAScript 2016]: http://www.ecma-international.org/ecma-262/7.0/index.html + + +Invalid code handling +===================== + +Unterminated strings are still matched as strings. JavaScript strings cannot +contain (unescaped) newlines, so unterminated strings simply end at the end of +the line. Unterminated template strings can contain unescaped newlines, though, +so they go on to the end of input. + +Unterminated multi-line comments are also still matched as comments. They +simply go on to the end of the input. + +Unterminated regex literals are likely matched as division and whatever is +inside the regex. + +Invalid ASCII characters have their own capturing group. + +Invalid non-ASCII characters are treated as names, to simplify the matching of +names (except unicode spaces which are treated as whitespace). + +Regex literals may contain invalid regex syntax. They are still matched as +regex literals. They may also contain repeated regex flags, to keep the regex +simple. + +Strings may contain invalid escape sequences. + + +Limitations +=========== + +Tokenizing JavaScript using regexes—in fact, _one single regex_—won’t be +perfect. But that’s not the point either. + +You may compare jsTokens with [esprima] by using `esprima-compare.js`. +See `npm run esprima-compare`! + +[esprima]: http://esprima.org/ + +### Template string interpolation ### + +Template strings are matched as single tokens, from the starting `` ` `` to the +ending `` ` ``, including interpolations (whose tokens are not matched +individually). + +Matching template string interpolations requires recursive balancing of `{` and +`}`—something that JavaScript regexes cannot do. Only one level of nesting is +supported. + +### Division and regex literals collision ### + +Consider this example: + +```js +var g = 9.82 +var number = bar / 2/g + +var regex = / 2/g +``` + +A human can easily understand that in the `number` line we’re dealing with +division, and in the `regex` line we’re dealing with a regex literal. How come? +Because humans can look at the whole code to put the `/` characters in context. +A JavaScript regex cannot. It only sees forwards. + +When the `jsTokens` regex scans throught the above, it will see the following +at the end of both the `number` and `regex` rows: + +```js +/ 2/g +``` + +It is then impossible to know if that is a regex literal, or part of an +expression dealing with division. + +Here is a similar case: + +```js +foo /= 2/g +foo(/= 2/g) +``` + +The first line divides the `foo` variable with `2/g`. The second line calls the +`foo` function with the regex literal `/= 2/g`. Again, since `jsTokens` only +sees forwards, it cannot tell the two cases apart. + +There are some cases where we _can_ tell division and regex literals apart, +though. 
+ +First off, we have the simple cases where there’s only one slash in the line: + +```js +var foo = 2/g +foo /= 2 +``` + +Regex literals cannot contain newlines, so the above cases are correctly +identified as division. Things are only problematic when there are more than +one non-comment slash in a single line. + +Secondly, not every character is a valid regex flag. + +```js +var number = bar / 2/e +``` + +The above example is also correctly identified as division, because `e` is not a +valid regex flag. I initially wanted to future-proof by allowing `[a-zA-Z]*` +(any letter) as flags, but it is not worth it since it increases the amount of +ambigous cases. So only the standard `g`, `m`, `i`, `y` and `u` flags are +allowed. This means that the above example will be identified as division as +long as you don’t rename the `e` variable to some permutation of `gmiyu` 1 to 5 +characters long. + +Lastly, we can look _forward_ for information. + +- If the token following what looks like a regex literal is not valid after a + regex literal, but is valid in a division expression, then the regex literal + is treated as division instead. For example, a flagless regex cannot be + followed by a string, number or name, but all of those three can be the + denominator of a division. +- Generally, if what looks like a regex literal is followed by an operator, the + regex literal is treated as division instead. This is because regexes are + seldomly used with operators (such as `+`, `*`, `&&` and `==`), but division + could likely be part of such an expression. + +Please consult the regex source and the test cases for precise information on +when regex or division is matched (should you need to know). In short, you +could sum it up as: + +If the end of a statement looks like a regex literal (even if it isn’t), it +will be treated as one. Otherwise it should work as expected (if you write sane +code). + + +License +======= + +[The X11 (“MIT”) License](LICENSE). diff --git a/node_modules/jsbn/.npmignore b/node_modules/jsbn/.npmignore new file mode 100644 index 0000000..28f1ba7 --- /dev/null +++ b/node_modules/jsbn/.npmignore @@ -0,0 +1,2 @@ +node_modules +.DS_Store \ No newline at end of file diff --git a/node_modules/jsbn/LICENSE b/node_modules/jsbn/LICENSE new file mode 100644 index 0000000..2a6457e --- /dev/null +++ b/node_modules/jsbn/LICENSE @@ -0,0 +1,40 @@ +Licensing +--------- + +This software is covered under the following copyright: + +/* + * Copyright (c) 2003-2005 Tom Wu + * All Rights Reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, + * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY + * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. 
+ * + * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL, + * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER + * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF + * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT + * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * In addition, the following condition applies: + * + * All redistributions must retain an intact copy of this copyright notice + * and disclaimer. + */ + +Address all questions regarding this license to: + + Tom Wu + tjw@cs.Stanford.EDU \ No newline at end of file diff --git a/node_modules/jsbn/README.md b/node_modules/jsbn/README.md new file mode 100644 index 0000000..7aac67f --- /dev/null +++ b/node_modules/jsbn/README.md @@ -0,0 +1,175 @@ +# jsbn: javascript big number + +[Tom Wu's Original Website](http://www-cs-students.stanford.edu/~tjw/jsbn/) + +I felt compelled to put this on github and publish to npm. I haven't tested every other big integer library out there, but the few that I have tested in comparison to this one have not even come close in performance. I am aware of the `bi` module on npm, however it has been modified and I wanted to publish the original without modifications. This is jsbn and jsbn2 from Tom Wu's original website above, with the modular pattern applied to prevent global leaks and to allow for use with node.js on the server side. + +## usage + + var BigInteger = require('jsbn'); + + var a = new BigInteger('91823918239182398123'); + alert(a.bitLength()); // 67 + + +## API + +### bi.toString() + +returns the base-10 number as a string + +### bi.negate() + +returns a new BigInteger equal to the negation of `bi` + +### bi.abs + +returns new BI of absolute value + +### bi.compareTo + + + +### bi.bitLength + + + +### bi.mod + + + +### bi.modPowInt + + + +### bi.clone + + + +### bi.intValue + + + +### bi.byteValue + + + +### bi.shortValue + + + +### bi.signum + + + +### bi.toByteArray + + + +### bi.equals + + + +### bi.min + + + +### bi.max + + + +### bi.and + + + +### bi.or + + + +### bi.xor + + + +### bi.andNot + + + +### bi.not + + + +### bi.shiftLeft + + + +### bi.shiftRight + + + +### bi.getLowestSetBit + + + +### bi.bitCount + + + +### bi.testBit + + + +### bi.setBit + + + +### bi.clearBit + + + +### bi.flipBit + + + +### bi.add + + + +### bi.subtract + + + +### bi.multiply + + + +### bi.divide + + + +### bi.remainder + + + +### bi.divideAndRemainder + + + +### bi.modPow + + + +### bi.modInverse + + + +### bi.pow + + + +### bi.gcd + + + +### bi.isProbablePrime + + diff --git a/node_modules/jsbn/example.html b/node_modules/jsbn/example.html new file mode 100644 index 0000000..7c26a56 --- /dev/null +++ b/node_modules/jsbn/example.html @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/node_modules/jsbn/example.js b/node_modules/jsbn/example.js new file mode 100644 index 0000000..664c1b4 --- /dev/null +++ b/node_modules/jsbn/example.js @@ -0,0 +1,3 @@ +var BigInteger = require('./'); +var a = new BigInteger('91823918239182398123'); +console.log(a.bitLength()); \ No newline at end of file diff --git a/node_modules/jsbn/index.js b/node_modules/jsbn/index.js new file mode 100644 index 0000000..e32fe13 --- /dev/null +++ b/node_modules/jsbn/index.js @@ -0,0 +1,1358 @@ +(function(){ + + // Copyright (c) 2005 Tom Wu + // All Rights Reserved. + // See "LICENSE" for details. + + // Basic JavaScript BN library - subset useful for RSA encryption. 
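+ // Internally a BigInteger is a little-endian array of DB-bit "digits"
+ // (DB is 26, 28 or 30 depending on the engine, selected below), with this.t
+ // holding the digit count and this.s the sign: 0 for non-negative, -1 for negative.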
+ + // Bits per digit + var dbits; + + // JavaScript engine analysis + var canary = 0xdeadbeefcafe; + var j_lm = ((canary&0xffffff)==0xefcafe); + + // (public) Constructor + function BigInteger(a,b,c) { + if(a != null) + if("number" == typeof a) this.fromNumber(a,b,c); + else if(b == null && "string" != typeof a) this.fromString(a,256); + else this.fromString(a,b); + } + + // return new, unset BigInteger + function nbi() { return new BigInteger(null); } + + // am: Compute w_j += (x*this_i), propagate carries, + // c is initial carry, returns final carry. + // c < 3*dvalue, x < 2*dvalue, this_i < dvalue + // We need to select the fastest one that works in this environment. + + // am1: use a single mult and divide to get the high bits, + // max digit bits should be 26 because + // max internal value = 2*dvalue^2-2*dvalue (< 2^53) + function am1(i,x,w,j,c,n) { + while(--n >= 0) { + var v = x*this[i++]+w[j]+c; + c = Math.floor(v/0x4000000); + w[j++] = v&0x3ffffff; + } + return c; + } + // am2 avoids a big mult-and-extract completely. + // Max digit bits should be <= 30 because we do bitwise ops + // on values up to 2*hdvalue^2-hdvalue-1 (< 2^31) + function am2(i,x,w,j,c,n) { + var xl = x&0x7fff, xh = x>>15; + while(--n >= 0) { + var l = this[i]&0x7fff; + var h = this[i++]>>15; + var m = xh*l+h*xl; + l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff); + c = (l>>>30)+(m>>>15)+xh*h+(c>>>30); + w[j++] = l&0x3fffffff; + } + return c; + } + // Alternately, set max digit bits to 28 since some + // browsers slow down when dealing with 32-bit numbers. + function am3(i,x,w,j,c,n) { + var xl = x&0x3fff, xh = x>>14; + while(--n >= 0) { + var l = this[i]&0x3fff; + var h = this[i++]>>14; + var m = xh*l+h*xl; + l = xl*l+((m&0x3fff)<<14)+w[j]+c; + c = (l>>28)+(m>>14)+xh*h; + w[j++] = l&0xfffffff; + } + return c; + } + var inBrowser = typeof navigator !== "undefined"; + if(inBrowser && j_lm && (navigator.appName == "Microsoft Internet Explorer")) { + BigInteger.prototype.am = am2; + dbits = 30; + } + else if(inBrowser && j_lm && (navigator.appName != "Netscape")) { + BigInteger.prototype.am = am1; + dbits = 26; + } + else { // Mozilla/Netscape seems to prefer am3 + BigInteger.prototype.am = am3; + dbits = 28; + } + + BigInteger.prototype.DB = dbits; + BigInteger.prototype.DM = ((1<= 0; --i) r[i] = this[i]; + r.t = this.t; + r.s = this.s; + } + + // (protected) set from integer value x, -DV <= x < DV + function bnpFromInt(x) { + this.t = 1; + this.s = (x<0)?-1:0; + if(x > 0) this[0] = x; + else if(x < -1) this[0] = x+this.DV; + else this.t = 0; + } + + // return bigint initialized to value + function nbv(i) { var r = nbi(); r.fromInt(i); return r; } + + // (protected) set from string and radix + function bnpFromString(s,b) { + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 256) k = 8; // byte array + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else { this.fromRadix(s,b); return; } + this.t = 0; + this.s = 0; + var i = s.length, mi = false, sh = 0; + while(--i >= 0) { + var x = (k==8)?s[i]&0xff:intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-") mi = true; + continue; + } + mi = false; + if(sh == 0) + this[this.t++] = x; + else if(sh+k > this.DB) { + this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); + } + else + this[this.t-1] |= x<= this.DB) sh -= this.DB; + } + if(k == 8 && (s[0]&0x80) != 0) { + this.s = -1; + if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this[this.t-1] == c) --this.t; + } + + // (public) return string representation in given 
radix + function bnToString(b) { + if(this.s < 0) return "-"+this.negate().toString(b); + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else return this.toRadix(b); + var km = (1< 0) { + if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); } + while(i >= 0) { + if(p < k) { + d = (this[i]&((1<>(p+=this.DB-k); + } + else { + d = (this[i]>>(p-=k))&km; + if(p <= 0) { p += this.DB; --i; } + } + if(d > 0) m = true; + if(m) r += int2char(d); + } + } + return m?r:"0"; + } + + // (public) -this + function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + + // (public) |this| + function bnAbs() { return (this.s<0)?this.negate():this; } + + // (public) return + if this > a, - if this < a, 0 if equal + function bnCompareTo(a) { + var r = this.s-a.s; + if(r != 0) return r; + var i = this.t; + r = i-a.t; + if(r != 0) return (this.s<0)?-r:r; + while(--i >= 0) if((r=this[i]-a[i]) != 0) return r; + return 0; + } + + // returns bit length of the integer x + function nbits(x) { + var r = 1, t; + if((t=x>>>16) != 0) { x = t; r += 16; } + if((t=x>>8) != 0) { x = t; r += 8; } + if((t=x>>4) != 0) { x = t; r += 4; } + if((t=x>>2) != 0) { x = t; r += 2; } + if((t=x>>1) != 0) { x = t; r += 1; } + return r; + } + + // (public) return the number of bits in "this" + function bnBitLength() { + if(this.t <= 0) return 0; + return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM)); + } + + // (protected) r = this << n*DB + function bnpDLShiftTo(n,r) { + var i; + for(i = this.t-1; i >= 0; --i) r[i+n] = this[i]; + for(i = n-1; i >= 0; --i) r[i] = 0; + r.t = this.t+n; + r.s = this.s; + } + + // (protected) r = this >> n*DB + function bnpDRShiftTo(n,r) { + for(var i = n; i < this.t; ++i) r[i-n] = this[i]; + r.t = Math.max(this.t-n,0); + r.s = this.s; + } + + // (protected) r = this << n + function bnpLShiftTo(n,r) { + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<= 0; --i) { + r[i+ds+1] = (this[i]>>cbs)|c; + c = (this[i]&bm)<= 0; --i) r[i] = 0; + r[ds] = c; + r.t = this.t+ds+1; + r.s = this.s; + r.clamp(); + } + + // (protected) r = this >> n + function bnpRShiftTo(n,r) { + r.s = this.s; + var ds = Math.floor(n/this.DB); + if(ds >= this.t) { r.t = 0; return; } + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<>bs; + for(var i = ds+1; i < this.t; ++i) { + r[i-ds-1] |= (this[i]&bm)<>bs; + } + if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB; + } + if(a.t < this.t) { + c -= a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c -= a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c -= a.s; + } + r.s = (c<0)?-1:0; + if(c < -1) r[i++] = this.DV+c; + else if(c > 0) r[i++] = c; + r.t = i; + r.clamp(); + } + + // (protected) r = this * a, r != this,a (HAC 14.12) + // "this" should be the larger one if appropriate. 
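+ // (Schoolbook O(n^2) multiplication: each x.am() call below multiplies all of
+ // x by a single digit of y and accumulates that row into r.)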
+ function bnpMultiplyTo(a,r) { + var x = this.abs(), y = a.abs(); + var i = x.t; + r.t = i+y.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t); + r.s = 0; + r.clamp(); + if(this.s != a.s) BigInteger.ZERO.subTo(r,r); + } + + // (protected) r = this^2, r != this (HAC 14.16) + function bnpSquareTo(r) { + var x = this.abs(); + var i = r.t = 2*x.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < x.t-1; ++i) { + var c = x.am(i,x[i],r,2*i,0,1); + if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { + r[i+x.t] -= x.DV; + r[i+x.t+1] = 1; + } + } + if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1); + r.s = 0; + r.clamp(); + } + + // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) + // r != q, this != m. q or r may be null. + function bnpDivRemTo(m,q,r) { + var pm = m.abs(); + if(pm.t <= 0) return; + var pt = this.abs(); + if(pt.t < pm.t) { + if(q != null) q.fromInt(0); + if(r != null) this.copyTo(r); + return; + } + if(r == null) r = nbi(); + var y = nbi(), ts = this.s, ms = m.s; + var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus + if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } + else { pm.copyTo(y); pt.copyTo(r); } + var ys = y.t; + var y0 = y[ys-1]; + if(y0 == 0) return; + var yt = y0*(1<1)?y[ys-2]>>this.F2:0); + var d1 = this.FV/yt, d2 = (1<= 0) { + r[r.t++] = 1; + r.subTo(t,r); + } + BigInteger.ONE.dlShiftTo(ys,t); + t.subTo(y,y); // "negative" y so we can replace sub with am later + while(y.t < ys) y[y.t++] = 0; + while(--j >= 0) { + // Estimate quotient digit + var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2); + if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out + y.dlShiftTo(j,t); + r.subTo(t,r); + while(r[i] < --qd) r.subTo(t,r); + } + } + if(q != null) { + r.drShiftTo(ys,q); + if(ts != ms) BigInteger.ZERO.subTo(q,q); + } + r.t = ys; + r.clamp(); + if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder + if(ts < 0) BigInteger.ZERO.subTo(r,r); + } + + // (public) this mod a + function bnMod(a) { + var r = nbi(); + this.abs().divRemTo(a,null,r); + if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r); + return r; + } + + // Modular reduction using "classic" algorithm + function Classic(m) { this.m = m; } + function cConvert(x) { + if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m); + else return x; + } + function cRevert(x) { return x; } + function cReduce(x) { x.divRemTo(this.m,null,x); } + function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + Classic.prototype.convert = cConvert; + Classic.prototype.revert = cRevert; + Classic.prototype.reduce = cReduce; + Classic.prototype.mulTo = cMulTo; + Classic.prototype.sqrTo = cSqrTo; + + // (protected) return "-1/this % 2^DB"; useful for Mont. reduction + // justification: + // xy == 1 (mod m) + // xy = 1+km + // xy(2-xy) = (1+km)(1-km) + // x[y(2-xy)] = 1-k^2m^2 + // x[y(2-xy)] == 1 (mod m^2) + // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2 + // should reduce x and y(2-xy) by m^2 at each step to keep size bounded. + // JS multiply "overflows" differently from C/C++, so care is needed here. 
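+ // Each y = y*(2-x*y) step below doubles the number of correct low-order bits
+ // of the inverse (Hensel lifting): starting from the inverse mod 2^2, the
+ // steps yield the inverse mod 2^4, 2^8, 2^16 and finally mod 2^dbits.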
+ function bnpInvDigit() { + if(this.t < 1) return 0; + var x = this[0]; + if((x&1) == 0) return 0; + var y = x&3; // y == 1/x mod 2^2 + y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 + y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 + y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 + // last step - calculate inverse mod DV directly; + // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints + y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits + // we really want the negative inverse, and -DV < y < DV + return (y>0)?this.DV-y:-y; + } + + // Montgomery reduction + function Montgomery(m) { + this.m = m; + this.mp = m.invDigit(); + this.mpl = this.mp&0x7fff; + this.mph = this.mp>>15; + this.um = (1<<(m.DB-15))-1; + this.mt2 = 2*m.t; + } + + // xR mod m + function montConvert(x) { + var r = nbi(); + x.abs().dlShiftTo(this.m.t,r); + r.divRemTo(this.m,null,r); + if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); + return r; + } + + // x/R mod m + function montRevert(x) { + var r = nbi(); + x.copyTo(r); + this.reduce(r); + return r; + } + + // x = x/R mod m (HAC 14.32) + function montReduce(x) { + while(x.t <= this.mt2) // pad x so am has enough room later + x[x.t++] = 0; + for(var i = 0; i < this.m.t; ++i) { + // faster way of calculating u0 = x[i]*mp mod DV + var j = x[i]&0x7fff; + var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM; + // use am to combine the multiply-shift-add into one call + j = i+this.m.t; + x[j] += this.m.am(0,u0,x,i,0,this.m.t); + // propagate carry + while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; } + } + x.clamp(); + x.drShiftTo(this.m.t,x); + if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); + } + + // r = "x^2/R mod m"; x != r + function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + // r = "xy/R mod m"; x,y != r + function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + + Montgomery.prototype.convert = montConvert; + Montgomery.prototype.revert = montRevert; + Montgomery.prototype.reduce = montReduce; + Montgomery.prototype.mulTo = montMulTo; + Montgomery.prototype.sqrTo = montSqrTo; + + // (protected) true iff this is even + function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; } + + // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) + function bnpExp(e,z) { + if(e > 0xffffffff || e < 1) return BigInteger.ONE; + var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; + g.copyTo(r); + while(--i >= 0) { + z.sqrTo(r,r2); + if((e&(1< 0) z.mulTo(r2,g,r); + else { var t = r; r = r2; r2 = t; } + } + return z.revert(r); + } + + // (public) this^e % m, 0 <= e < 2^32 + function bnModPowInt(e,m) { + var z; + if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); + return this.exp(e,z); + } + + // protected + BigInteger.prototype.copyTo = bnpCopyTo; + BigInteger.prototype.fromInt = bnpFromInt; + BigInteger.prototype.fromString = bnpFromString; + BigInteger.prototype.clamp = bnpClamp; + BigInteger.prototype.dlShiftTo = bnpDLShiftTo; + BigInteger.prototype.drShiftTo = bnpDRShiftTo; + BigInteger.prototype.lShiftTo = bnpLShiftTo; + BigInteger.prototype.rShiftTo = bnpRShiftTo; + BigInteger.prototype.subTo = bnpSubTo; + BigInteger.prototype.multiplyTo = bnpMultiplyTo; + BigInteger.prototype.squareTo = bnpSquareTo; + BigInteger.prototype.divRemTo = bnpDivRemTo; + BigInteger.prototype.invDigit = bnpInvDigit; + BigInteger.prototype.isEven = bnpIsEven; + BigInteger.prototype.exp = bnpExp; + + // public + BigInteger.prototype.toString = 
bnToString; + BigInteger.prototype.negate = bnNegate; + BigInteger.prototype.abs = bnAbs; + BigInteger.prototype.compareTo = bnCompareTo; + BigInteger.prototype.bitLength = bnBitLength; + BigInteger.prototype.mod = bnMod; + BigInteger.prototype.modPowInt = bnModPowInt; + + // "constants" + BigInteger.ZERO = nbv(0); + BigInteger.ONE = nbv(1); + + // Copyright (c) 2005-2009 Tom Wu + // All Rights Reserved. + // See "LICENSE" for details. + + // Extended JavaScript BN functions, required for RSA private ops. + + // Version 1.1: new BigInteger("0", 10) returns "proper" zero + // Version 1.2: square() API, isProbablePrime fix + + // (public) + function bnClone() { var r = nbi(); this.copyTo(r); return r; } + + // (public) return value as integer + function bnIntValue() { + if(this.s < 0) { + if(this.t == 1) return this[0]-this.DV; + else if(this.t == 0) return -1; + } + else if(this.t == 1) return this[0]; + else if(this.t == 0) return 0; + // assumes 16 < DB < 32 + return ((this[1]&((1<<(32-this.DB))-1))<>24; } + + // (public) return value as short (assumes DB>=16) + function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; } + + // (protected) return x s.t. r^x < DV + function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } + + // (public) 0 if this == 0, 1 if this > 0 + function bnSigNum() { + if(this.s < 0) return -1; + else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0; + else return 1; + } + + // (protected) convert to radix string + function bnpToRadix(b) { + if(b == null) b = 10; + if(this.signum() == 0 || b < 2 || b > 36) return "0"; + var cs = this.chunkSize(b); + var a = Math.pow(b,cs); + var d = nbv(a), y = nbi(), z = nbi(), r = ""; + this.divRemTo(d,y,z); + while(y.signum() > 0) { + r = (a+z.intValue()).toString(b).substr(1) + r; + y.divRemTo(d,y,z); + } + return z.intValue().toString(b) + r; + } + + // (protected) convert from radix string + function bnpFromRadix(s,b) { + this.fromInt(0); + if(b == null) b = 10; + var cs = this.chunkSize(b); + var d = Math.pow(b,cs), mi = false, j = 0, w = 0; + for(var i = 0; i < s.length; ++i) { + var x = intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-" && this.signum() == 0) mi = true; + continue; + } + w = b*w+x; + if(++j >= cs) { + this.dMultiply(d); + this.dAddOffset(w,0); + j = 0; + w = 0; + } + } + if(j > 0) { + this.dMultiply(Math.pow(b,j)); + this.dAddOffset(w,0); + } + if(mi) BigInteger.ZERO.subTo(this,this); + } + + // (protected) alternate constructor + function bnpFromNumber(a,b,c) { + if("number" == typeof b) { + // new BigInteger(int,int,RNG) + if(a < 2) this.fromInt(1); + else { + this.fromNumber(a,c); + if(!this.testBit(a-1)) // force MSB set + this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); + if(this.isEven()) this.dAddOffset(1,0); // force odd + while(!this.isProbablePrime(b)) { + this.dAddOffset(2,0); + if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); + } + } + } + else { + // new BigInteger(int,RNG) + var x = new Array(), t = a&7; + x.length = (a>>3)+1; + b.nextBytes(x); + if(t > 0) x[0] &= ((1< 0) { + if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p) + r[k++] = d|(this.s<<(this.DB-p)); + while(i >= 0) { + if(p < 8) { + d = (this[i]&((1<>(p+=this.DB-8); + } + else { + d = (this[i]>>(p-=8))&0xff; + if(p <= 0) { p += this.DB; --i; } + } + if((d&0x80) != 0) d |= -256; + if(k == 0 && (this.s&0x80) != (d&0x80)) ++k; + if(k > 0 || d != this.s) r[k++] = d; + } + } + return r; + } + + function bnEquals(a) { return(this.compareTo(a)==0); } + 
function bnMin(a) { return(this.compareTo(a)<0)?this:a; } + function bnMax(a) { return(this.compareTo(a)>0)?this:a; } + + // (protected) r = this op a (bitwise) + function bnpBitwiseTo(a,op,r) { + var i, f, m = Math.min(a.t,this.t); + for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]); + if(a.t < this.t) { + f = a.s&this.DM; + for(i = m; i < this.t; ++i) r[i] = op(this[i],f); + r.t = this.t; + } + else { + f = this.s&this.DM; + for(i = m; i < a.t; ++i) r[i] = op(f,a[i]); + r.t = a.t; + } + r.s = op(this.s,a.s); + r.clamp(); + } + + // (public) this & a + function op_and(x,y) { return x&y; } + function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + + // (public) this | a + function op_or(x,y) { return x|y; } + function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + + // (public) this ^ a + function op_xor(x,y) { return x^y; } + function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } + + // (public) this & ~a + function op_andnot(x,y) { return x&~y; } + function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } + + // (public) ~this + function bnNot() { + var r = nbi(); + for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i]; + r.t = this.t; + r.s = ~this.s; + return r; + } + + // (public) this << n + function bnShiftLeft(n) { + var r = nbi(); + if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); + return r; + } + + // (public) this >> n + function bnShiftRight(n) { + var r = nbi(); + if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); + return r; + } + + // return index of lowest 1-bit in x, x < 2^31 + function lbit(x) { + if(x == 0) return -1; + var r = 0; + if((x&0xffff) == 0) { x >>= 16; r += 16; } + if((x&0xff) == 0) { x >>= 8; r += 8; } + if((x&0xf) == 0) { x >>= 4; r += 4; } + if((x&3) == 0) { x >>= 2; r += 2; } + if((x&1) == 0) ++r; + return r; + } + + // (public) returns index of lowest 1-bit (or -1 if none) + function bnGetLowestSetBit() { + for(var i = 0; i < this.t; ++i) + if(this[i] != 0) return i*this.DB+lbit(this[i]); + if(this.s < 0) return this.t*this.DB; + return -1; + } + + // return number of 1 bits in x + function cbit(x) { + var r = 0; + while(x != 0) { x &= x-1; ++r; } + return r; + } + + // (public) return number of set bits + function bnBitCount() { + var r = 0, x = this.s&this.DM; + for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x); + return r; + } + + // (public) true iff nth bit is set + function bnTestBit(n) { + var j = Math.floor(n/this.DB); + if(j >= this.t) return(this.s!=0); + return((this[j]&(1<<(n%this.DB)))!=0); + } + + // (protected) this op (1<>= this.DB; + } + if(a.t < this.t) { + c += a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c += a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += a.s; + } + r.s = (c<0)?-1:0; + if(c > 0) r[i++] = c; + else if(c < -1) r[i++] = this.DV+c; + r.t = i; + r.clamp(); + } + + // (public) this + a + function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } + + // (public) this - a + function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } + + // (public) this * a + function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } + + // (public) this^2 + function bnSquare() { var r = nbi(); this.squareTo(r); return r; } + + // (public) this / a + function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } + + // (public) this % a + function bnRemainder(a) { var r = nbi(); 
this.divRemTo(a,null,r); return r; } + + // (public) [this/a,this%a] + function bnDivideAndRemainder(a) { + var q = nbi(), r = nbi(); + this.divRemTo(a,q,r); + return new Array(q,r); + } + + // (protected) this *= n, this >= 0, 1 < n < DV + function bnpDMultiply(n) { + this[this.t] = this.am(0,n-1,this,0,0,this.t); + ++this.t; + this.clamp(); + } + + // (protected) this += n << w words, this >= 0 + function bnpDAddOffset(n,w) { + if(n == 0) return; + while(this.t <= w) this[this.t++] = 0; + this[w] += n; + while(this[w] >= this.DV) { + this[w] -= this.DV; + if(++w >= this.t) this[this.t++] = 0; + ++this[w]; + } + } + + // A "null" reducer + function NullExp() {} + function nNop(x) { return x; } + function nMulTo(x,y,r) { x.multiplyTo(y,r); } + function nSqrTo(x,r) { x.squareTo(r); } + + NullExp.prototype.convert = nNop; + NullExp.prototype.revert = nNop; + NullExp.prototype.mulTo = nMulTo; + NullExp.prototype.sqrTo = nSqrTo; + + // (public) this^e + function bnPow(e) { return this.exp(e,new NullExp()); } + + // (protected) r = lower n words of "this * a", a.t <= n + // "this" should be the larger one if appropriate. + function bnpMultiplyLowerTo(a,n,r) { + var i = Math.min(this.t+a.t,n); + r.s = 0; // assumes a,this >= 0 + r.t = i; + while(i > 0) r[--i] = 0; + var j; + for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t); + for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i); + r.clamp(); + } + + // (protected) r = "this * a" without lower n words, n > 0 + // "this" should be the larger one if appropriate. + function bnpMultiplyUpperTo(a,n,r) { + --n; + var i = r.t = this.t+a.t-n; + r.s = 0; // assumes a,this >= 0 + while(--i >= 0) r[i] = 0; + for(i = Math.max(n-this.t,0); i < a.t; ++i) + r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n); + r.clamp(); + r.drShiftTo(1,r); + } + + // Barrett modular reduction + function Barrett(m) { + // setup Barrett + this.r2 = nbi(); + this.q3 = nbi(); + BigInteger.ONE.dlShiftTo(2*m.t,this.r2); + this.mu = this.r2.divide(m); + this.m = m; + } + + function barrettConvert(x) { + if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); + else if(x.compareTo(this.m) < 0) return x; + else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } + } + + function barrettRevert(x) { return x; } + + // x = x mod m (HAC 14.42) + function barrettReduce(x) { + x.drShiftTo(this.m.t-1,this.r2); + if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } + this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); + this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); + while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); + x.subTo(this.r2,x); + while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); + } + + // r = x^2 mod m; x != r + function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + + // r = x*y mod m; x,y != r + function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + + Barrett.prototype.convert = barrettConvert; + Barrett.prototype.revert = barrettRevert; + Barrett.prototype.reduce = barrettReduce; + Barrett.prototype.mulTo = barrettMulTo; + Barrett.prototype.sqrTo = barrettSqrTo; + + // (public) this^e % m (HAC 14.85) + function bnModPow(e,m) { + var i = e.bitLength(), k, r = nbv(1), z; + if(i <= 0) return r; + else if(i < 18) k = 1; + else if(i < 48) k = 3; + else if(i < 144) k = 4; + else if(i < 768) k = 5; + else k = 6; + if(i < 8) + z = new Classic(m); + else if(m.isEven()) + z = new Barrett(m); + else + z = new Montgomery(m); + + // precomputation + var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { + var g2 = 
nbi(); + z.sqrTo(g[1],g2); + while(n <= km) { + g[n] = nbi(); + z.mulTo(g2,g[n-2],g[n]); + n += 2; + } + } + + var j = e.t-1, w, is1 = true, r2 = nbi(), t; + i = nbits(e[j])-1; + while(j >= 0) { + if(i >= k1) w = (e[j]>>(i-k1))&km; + else { + w = (e[j]&((1<<(i+1))-1))<<(k1-i); + if(j > 0) w |= e[j-1]>>(this.DB+i-k1); + } + + n = k; + while((w&1) == 0) { w >>= 1; --n; } + if((i -= n) < 0) { i += this.DB; --j; } + if(is1) { // ret == 1, don't bother squaring or multiplying it + g[w].copyTo(r); + is1 = false; + } + else { + while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } + if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } + z.mulTo(r2,g[w],r); + } + + while(j >= 0 && (e[j]&(1<<i)) == 0) { + z.sqrTo(r,r2); t = r; r = r2; r2 = t; + if(--i < 0) { i = this.DB-1; --j; } + } + } + return z.revert(r); + } + + // (public) gcd(this,a) (HAC 14.54) + function bnGCD(a) { + var x = (this.s<0)?this.negate():this.clone(); + var y = (a.s<0)?a.negate():a.clone(); + if(x.compareTo(y) < 0) { var t = x; x = y; y = t; } + var i = x.getLowestSetBit(), g = y.getLowestSetBit(); + if(g < 0) return x; + if(i < g) g = i; + if(g > 0) { + x.rShiftTo(g,x); + y.rShiftTo(g,y); + } + while(x.signum() > 0) { + if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); + if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); + if(x.compareTo(y) >= 0) { + x.subTo(y,x); + x.rShiftTo(1,x); + } + else { + y.subTo(x,y); + y.rShiftTo(1,y); + } + } + if(g > 0) y.lShiftTo(g,y); + return y; + } + + // (protected) this % n, n < 2^26 + function bnpModInt(n) { + if(n <= 0) return 0; + var d = this.DV%n, r = (this.s<0)?n-1:0; + if(this.t > 0) + if(d == 0) r = this[0]%n; + else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n; + return r; + } + + // (public) 1/this % m (HAC 14.61) + function bnModInverse(m) { + var ac = m.isEven(); + if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; + var u = m.clone(), v = this.clone(); + var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); + while(u.signum() != 0) { + while(u.isEven()) { + u.rShiftTo(1,u); + if(ac) { + if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } + a.rShiftTo(1,a); + } + else if(!b.isEven()) b.subTo(m,b); + b.rShiftTo(1,b); + } + while(v.isEven()) { + v.rShiftTo(1,v); + if(ac) { + if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } + c.rShiftTo(1,c); + } + else if(!d.isEven()) d.subTo(m,d); + d.rShiftTo(1,d); + } + if(u.compareTo(v) >= 0) { + u.subTo(v,u); + if(ac) a.subTo(c,a); + b.subTo(d,b); + } + else { + v.subTo(u,v); + if(ac) c.subTo(a,c); + d.subTo(b,d); + } + } + if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; + if(d.compareTo(m) >= 0) return d.subtract(m); + if(d.signum() < 0) d.addTo(m,d); else return d; + if(d.signum() < 0) return d.add(m); else return d; + } + + var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997]; + var lplim = (1<<26)/lowprimes[lowprimes.length-1]; + + // (public) test primality with certainty >= 1-.5^t + function bnIsProbablePrime(t) { + var i, x = this.abs(); + if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) { + for(i = 0; i < lowprimes.length; ++i) + if(x[0] == lowprimes[i]) return true; + return false; + } + if(x.isEven()) return false; + i = 1; + while(i < lowprimes.length) { + var m = lowprimes[i], j = i+1; + while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; + m = x.modInt(m); + while(i < j)
if(m%lowprimes[i++] == 0) return false; + } + return x.millerRabin(t); + } + + // (protected) true if probably prime (HAC 4.24, Miller-Rabin) + function bnpMillerRabin(t) { + var n1 = this.subtract(BigInteger.ONE); + var k = n1.getLowestSetBit(); + if(k <= 0) return false; + var r = n1.shiftRight(k); + t = (t+1)>>1; + if(t > lowprimes.length) t = lowprimes.length; + var a = nbi(); + for(var i = 0; i < t; ++i) { + //Pick bases at random, instead of starting at 2 + a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]); + var y = a.modPow(r,this); + if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { + var j = 1; + while(j++ < k && y.compareTo(n1) != 0) { + y = y.modPowInt(2,this); + if(y.compareTo(BigInteger.ONE) == 0) return false; + } + if(y.compareTo(n1) != 0) return false; + } + } + return true; + } + + // protected + BigInteger.prototype.chunkSize = bnpChunkSize; + BigInteger.prototype.toRadix = bnpToRadix; + BigInteger.prototype.fromRadix = bnpFromRadix; + BigInteger.prototype.fromNumber = bnpFromNumber; + BigInteger.prototype.bitwiseTo = bnpBitwiseTo; + BigInteger.prototype.changeBit = bnpChangeBit; + BigInteger.prototype.addTo = bnpAddTo; + BigInteger.prototype.dMultiply = bnpDMultiply; + BigInteger.prototype.dAddOffset = bnpDAddOffset; + BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; + BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; + BigInteger.prototype.modInt = bnpModInt; + BigInteger.prototype.millerRabin = bnpMillerRabin; + + // public + BigInteger.prototype.clone = bnClone; + BigInteger.prototype.intValue = bnIntValue; + BigInteger.prototype.byteValue = bnByteValue; + BigInteger.prototype.shortValue = bnShortValue; + BigInteger.prototype.signum = bnSigNum; + BigInteger.prototype.toByteArray = bnToByteArray; + BigInteger.prototype.equals = bnEquals; + BigInteger.prototype.min = bnMin; + BigInteger.prototype.max = bnMax; + BigInteger.prototype.and = bnAnd; + BigInteger.prototype.or = bnOr; + BigInteger.prototype.xor = bnXor; + BigInteger.prototype.andNot = bnAndNot; + BigInteger.prototype.not = bnNot; + BigInteger.prototype.shiftLeft = bnShiftLeft; + BigInteger.prototype.shiftRight = bnShiftRight; + BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; + BigInteger.prototype.bitCount = bnBitCount; + BigInteger.prototype.testBit = bnTestBit; + BigInteger.prototype.setBit = bnSetBit; + BigInteger.prototype.clearBit = bnClearBit; + BigInteger.prototype.flipBit = bnFlipBit; + BigInteger.prototype.add = bnAdd; + BigInteger.prototype.subtract = bnSubtract; + BigInteger.prototype.multiply = bnMultiply; + BigInteger.prototype.divide = bnDivide; + BigInteger.prototype.remainder = bnRemainder; + BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; + BigInteger.prototype.modPow = bnModPow; + BigInteger.prototype.modInverse = bnModInverse; + BigInteger.prototype.pow = bnPow; + BigInteger.prototype.gcd = bnGCD; + BigInteger.prototype.isProbablePrime = bnIsProbablePrime; + + // JSBN-specific extension + BigInteger.prototype.square = bnSquare; + + // Expose the Barrett function + BigInteger.prototype.Barrett = Barrett + + // BigInteger interfaces not implemented in jsbn: + + // BigInteger(int signum, byte[] magnitude) + // double doubleValue() + // float floatValue() + // int hashCode() + // long longValue() + // static BigInteger valueOf(long val) + + // Random number generator - requires a PRNG backend, e.g. prng4.js + + // For best results, put code like + // + // in your main HTML document. 
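+   // Illustrative sketch (not part of the upstream jsbn sources): the helper
+   // below is never called by the library; it only shows how the public
+   // methods wired onto BigInteger.prototype above compose for textbook
+   // RSA-style arithmetic, using the standard small demo values p=61, q=53, e=17.
+   function exampleRsaRoundTrip() {
+     var p = new BigInteger("61"), q = new BigInteger("53");
+     var n = p.multiply(q);                                   // 3233
+     var phi = p.subtract(BigInteger.ONE).multiply(q.subtract(BigInteger.ONE)); // 3120
+     var e = new BigInteger("17");
+     var d = e.modInverse(phi);                               // 2753
+     var msg = new BigInteger("65");
+     var cipher = msg.modPow(e, n);                           // 2790
+     return cipher.modPow(d, n).equals(msg);                  // true
+   }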
+ + var rng_state; + var rng_pool; + var rng_pptr; + + // Mix in a 32-bit integer into the pool + function rng_seed_int(x) { + rng_pool[rng_pptr++] ^= x & 255; + rng_pool[rng_pptr++] ^= (x >> 8) & 255; + rng_pool[rng_pptr++] ^= (x >> 16) & 255; + rng_pool[rng_pptr++] ^= (x >> 24) & 255; + if(rng_pptr >= rng_psize) rng_pptr -= rng_psize; + } + + // Mix in the current time (w/milliseconds) into the pool + function rng_seed_time() { + rng_seed_int(new Date().getTime()); + } + + // Initialize the pool with junk if needed. + if(rng_pool == null) { + rng_pool = new Array(); + rng_pptr = 0; + var t; + if(typeof window !== "undefined" && window.crypto) { + if (window.crypto.getRandomValues) { + // Use webcrypto if available + var ua = new Uint8Array(32); + window.crypto.getRandomValues(ua); + for(t = 0; t < 32; ++t) + rng_pool[rng_pptr++] = ua[t]; + } + else if(navigator.appName == "Netscape" && navigator.appVersion < "5") { + // Extract entropy (256 bits) from NS4 RNG if available + var z = window.crypto.random(32); + for(t = 0; t < z.length; ++t) + rng_pool[rng_pptr++] = z.charCodeAt(t) & 255; + } + } + while(rng_pptr < rng_psize) { // extract some randomness from Math.random() + t = Math.floor(65536 * Math.random()); + rng_pool[rng_pptr++] = t >>> 8; + rng_pool[rng_pptr++] = t & 255; + } + rng_pptr = 0; + rng_seed_time(); + //rng_seed_int(window.screenX); + //rng_seed_int(window.screenY); + } + + function rng_get_byte() { + if(rng_state == null) { + rng_seed_time(); + rng_state = prng_newstate(); + rng_state.init(rng_pool); + for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr) + rng_pool[rng_pptr] = 0; + rng_pptr = 0; + //rng_pool = null; + } + // TODO: allow reseeding after first request + return rng_state.next(); + } + + function rng_get_bytes(ba) { + var i; + for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte(); + } + + function SecureRandom() {} + + SecureRandom.prototype.nextBytes = rng_get_bytes; + + // prng4.js - uses Arcfour as a PRNG + + function Arcfour() { + this.i = 0; + this.j = 0; + this.S = new Array(); + } + + // Initialize arcfour context from key, an array of ints, each from [0..255] + function ARC4init(key) { + var i, j, t; + for(i = 0; i < 256; ++i) + this.S[i] = i; + j = 0; + for(i = 0; i < 256; ++i) { + j = (j + this.S[i] + key[i % key.length]) & 255; + t = this.S[i]; + this.S[i] = this.S[j]; + this.S[j] = t; + } + this.i = 0; + this.j = 0; + } + + function ARC4next() { + var t; + this.i = (this.i + 1) & 255; + this.j = (this.j + this.S[this.i]) & 255; + t = this.S[this.i]; + this.S[this.i] = this.S[this.j]; + this.S[this.j] = t; + return this.S[(t + this.S[this.i]) & 255]; + } + + Arcfour.prototype.init = ARC4init; + Arcfour.prototype.next = ARC4next; + + // Plug in your RNG constructor here + function prng_newstate() { + return new Arcfour(); + } + + // Pool size must be a multiple of 4 and greater than 32. 
+ // An array of bytes the size of the pool will be passed to init() + var rng_psize = 256; + + if (typeof exports !== 'undefined') { + exports = module.exports = { + BigInteger: BigInteger, + SecureRandom: SecureRandom, + }; + } else { + this.BigInteger = BigInteger; + this.SecureRandom = SecureRandom; + } + +}).call(this); diff --git a/node_modules/jsbn/package.json b/node_modules/jsbn/package.json new file mode 100644 index 0000000..4c4943e --- /dev/null +++ b/node_modules/jsbn/package.json @@ -0,0 +1,21 @@ +{ + "name": "jsbn", + "version": "0.1.0", + "description": "The jsbn library is a fast, portable implementation of large-number math in pure JavaScript, enabling public-key crypto and other applications on desktop and mobile browsers.", + "main": "index.js", + "scripts": { + "test": "mocha test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/andyperlitch/jsbn.git" + }, + "keywords": [ + "biginteger", + "bignumber", + "big", + "integer" + ], + "author": "Tom Wu", + "license": "BSD" +} diff --git a/node_modules/jsesc/LICENSE-MIT.txt b/node_modules/jsesc/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/jsesc/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
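For reference, here is a minimal usage sketch of the two constructors this vendored jsbn build exports above (`BigInteger` and `SecureRandom`). It assumes the package resolves as `require('jsbn')` per its `package.json`; the numeric values are arbitrary demonstration inputs and are not part of the upstream sources.

```js
// Minimal sketch: exercise the vendored jsbn exports (assumed reachable via require('jsbn')).
var jsbn = require('jsbn');
var BigInteger = jsbn.BigInteger;
var SecureRandom = jsbn.SecureRandom;

// Fill a byte array from the Arcfour-backed PRNG defined in index.js.
var rng = new SecureRandom();
var seed = new Array(16);
rng.nextBytes(seed);                 // each entry is now an integer in 0..255

// Decimal-string construction, modular exponentiation and a primality check.
var n = new BigInteger('561');       // 3 * 11 * 17, a Carmichael number
var a = new BigInteger('7');
console.log(a.modPow(new BigInteger('560'), n).toString()); // '1'
console.log(n.isProbablePrime(10));  // false: 561 is composite
```

Because 561 is a Carmichael number, the Fermat-style congruence a^560 ≡ 1 (mod 561) holds for every base coprime to 561, yet `isProbablePrime` still reports it as composite (here via the small-prime table, and via Miller-Rabin for larger inputs).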
diff --git a/node_modules/jsesc/bin/jsesc b/node_modules/jsesc/bin/jsesc new file mode 100755 index 0000000..e9a541d --- /dev/null +++ b/node_modules/jsesc/bin/jsesc @@ -0,0 +1,148 @@ +#!/usr/bin/env node +(function() { + + var fs = require('fs'); + var stringEscape = require('../jsesc.js'); + var strings = process.argv.splice(2); + var stdin = process.stdin; + var data; + var timeout; + var isObject = false; + var options = {}; + var log = console.log; + + var main = function() { + var option = strings[0]; + + if (/^(?:-h|--help|undefined)$/.test(option)) { + log( + 'jsesc v%s - https://mths.be/jsesc', + stringEscape.version + ); + log([ + '\nUsage:\n', + '\tjsesc [string]', + '\tjsesc [-s | --single-quotes] [string]', + '\tjsesc [-d | --double-quotes] [string]', + '\tjsesc [-w | --wrap] [string]', + '\tjsesc [-e | --escape-everything] [string]', + '\tjsesc [-t | --escape-etago] [string]', + '\tjsesc [-6 | --es6] [string]', + '\tjsesc [-l | --lowercase-hex] [string]', + '\tjsesc [-j | --json] [string]', + '\tjsesc [-o | --object] [stringified_object]', // `JSON.parse()` the argument + '\tjsesc [-p | --pretty] [string]', // `compact: false` + '\tjsesc [-v | --version]', + '\tjsesc [-h | --help]', + '\nExamples:\n', + '\tjsesc \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --json \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --json --escape-everything \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --double-quotes --wrap \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\techo \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\' | jsesc' + ].join('\n')); + return process.exit(1); + } + + if (/^(?:-v|--version)$/.test(option)) { + log('v%s', stringEscape.version); + return process.exit(1); + } + + strings.forEach(function(string) { + // Process options + if (/^(?:-s|--single-quotes)$/.test(string)) { + options.quotes = 'single'; + return; + } + if (/^(?:-d|--double-quotes)$/.test(string)) { + options.quotes = 'double'; + return; + } + if (/^(?:-w|--wrap)$/.test(string)) { + options.wrap = true; + return; + } + if (/^(?:-e|--escape-everything)$/.test(string)) { + options.escapeEverything = true; + return; + } + if (/^(?:-t|--escape-etago)$/.test(string)) { + options.escapeEtago = true; + return; + } + if (/^(?:-6|--es6)$/.test(string)) { + options.es6 = true; + return; + } + if (/^(?:-l|--lowercase-hex)$/.test(string)) { + options.lowercaseHex = true; + return; + } + if (/^(?:-j|--json)$/.test(string)) { + options.json = true; + return; + } + if (/^(?:-o|--object)$/.test(string)) { + isObject = true; + return; + } + if (/^(?:-p|--pretty)$/.test(string)) { + isObject = true; + options.compact = false; + return; + } + + // Process string(s) + var result; + try { + if (isObject) { + string = JSON.parse(string); + } + result = stringEscape(string, options); + log(result); + } catch(error) { + log(error.message + '\n'); + log('Error: failed to escape.'); + log('If you think this is a bug in jsesc, please report it:'); + log('https://github.com/mathiasbynens/jsesc/issues/new'); + log( + '\nStack trace using jsesc@%s:\n', + stringEscape.version + ); + log(error.stack); + return process.exit(1); + } + }); + // Return with exit status 0 outside of the `forEach` loop, in case + // multiple strings were passed in. + return process.exit(0); + + }; + + if (stdin.isTTY) { + // handle shell arguments + main(); + } else { + // Either the script is called from within a non-TTY context, + // or `stdin` content is being piped in. 
+ if (!process.stdout.isTTY) { // called from a non-TTY context + timeout = setTimeout(function() { + // if no piped data arrived after a while, handle shell arguments + main(); + }, 250); + } + + data = ''; + stdin.on('data', function(chunk) { + clearTimeout(timeout); + data += chunk; + }); + stdin.on('end', function() { + strings.push(data.trim()); + main(); + }); + stdin.resume(); + } + +}()); diff --git a/node_modules/jsesc/jsesc.js b/node_modules/jsesc/jsesc.js new file mode 100644 index 0000000..460353f --- /dev/null +++ b/node_modules/jsesc/jsesc.js @@ -0,0 +1,345 @@ +/*! https://mths.be/jsesc v1.3.0 by @mathias */ +;(function(root) { + + // Detect free variables `exports` + var freeExports = typeof exports == 'object' && exports; + + // Detect free variable `module` + var freeModule = typeof module == 'object' && module && + module.exports == freeExports && module; + + // Detect free variable `global`, from Node.js or Browserified code, + // and use it as `root` + var freeGlobal = typeof global == 'object' && global; + if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) { + root = freeGlobal; + } + + /*--------------------------------------------------------------------------*/ + + var object = {}; + var hasOwnProperty = object.hasOwnProperty; + var forOwn = function(object, callback) { + var key; + for (key in object) { + if (hasOwnProperty.call(object, key)) { + callback(key, object[key]); + } + } + }; + + var extend = function(destination, source) { + if (!source) { + return destination; + } + forOwn(source, function(key, value) { + destination[key] = value; + }); + return destination; + }; + + var forEach = function(array, callback) { + var length = array.length; + var index = -1; + while (++index < length) { + callback(array[index]); + } + }; + + var toString = object.toString; + var isArray = function(value) { + return toString.call(value) == '[object Array]'; + }; + var isObject = function(value) { + // This is a very simple check, but it’s good enough for what we need. + return toString.call(value) == '[object Object]'; + }; + var isString = function(value) { + return typeof value == 'string' || + toString.call(value) == '[object String]'; + }; + var isNumber = function(value) { + return typeof value == 'number' || + toString.call(value) == '[object Number]'; + }; + var isFunction = function(value) { + // In a perfect world, the `typeof` check would be sufficient. However, + // in Chrome 1–12, `typeof /x/ == 'object'`, and in IE 6–8 + // `typeof alert == 'object'` and similar for other host objects. + return typeof value == 'function' || + toString.call(value) == '[object Function]'; + }; + var isMap = function(value) { + return toString.call(value) == '[object Map]'; + }; + var isSet = function(value) { + return toString.call(value) == '[object Set]'; + }; + + /*--------------------------------------------------------------------------*/ + + // https://mathiasbynens.be/notes/javascript-escapes#single + var singleEscapes = { + '"': '\\"', + '\'': '\\\'', + '\\': '\\\\', + '\b': '\\b', + '\f': '\\f', + '\n': '\\n', + '\r': '\\r', + '\t': '\\t' + // `\v` is omitted intentionally, because in IE < 9, '\v' == 'v'. 
+ // '\v': '\\x0B' + }; + var regexSingleEscape = /["'\\\b\f\n\r\t]/; + + var regexDigit = /[0-9]/; + var regexWhitelist = /[ !#-&\(-\[\]-~]/; + + var jsesc = function(argument, options) { + // Handle options + var defaults = { + 'escapeEverything': false, + 'escapeEtago': false, + 'quotes': 'single', + 'wrap': false, + 'es6': false, + 'json': false, + 'compact': true, + 'lowercaseHex': false, + 'numbers': 'decimal', + 'indent': '\t', + '__indent__': '', + '__inline1__': false, + '__inline2__': false + }; + var json = options && options.json; + if (json) { + defaults.quotes = 'double'; + defaults.wrap = true; + } + options = extend(defaults, options); + if (options.quotes != 'single' && options.quotes != 'double') { + options.quotes = 'single'; + } + var quote = options.quotes == 'double' ? '"' : '\''; + var compact = options.compact; + var indent = options.indent; + var lowercaseHex = options.lowercaseHex; + var oldIndent = ''; + var inline1 = options.__inline1__; + var inline2 = options.__inline2__; + var newLine = compact ? '' : '\n'; + var result; + var isEmpty = true; + var useBinNumbers = options.numbers == 'binary'; + var useOctNumbers = options.numbers == 'octal'; + var useDecNumbers = options.numbers == 'decimal'; + var useHexNumbers = options.numbers == 'hexadecimal'; + + if (json && argument && isFunction(argument.toJSON)) { + argument = argument.toJSON(); + } + + if (!isString(argument)) { + if (isMap(argument)) { + if (argument.size == 0) { + return 'new Map()'; + } + if (!compact) { + options.__inline1__ = true; + } + return 'new Map(' + jsesc(Array.from(argument), options) + ')'; + } + if (isSet(argument)) { + if (argument.size == 0) { + return 'new Set()'; + } + return 'new Set(' + jsesc(Array.from(argument), options) + ')'; + } + if (isArray(argument)) { + result = []; + options.wrap = true; + if (inline1) { + options.__inline1__ = false; + options.__inline2__ = true; + } else { + oldIndent = options.__indent__; + indent += oldIndent; + options.__indent__ = indent; + } + forEach(argument, function(value) { + isEmpty = false; + if (inline2) { + options.__inline2__ = false; + } + result.push( + (compact || inline2 ? '' : indent) + + jsesc(value, options) + ); + }); + if (isEmpty) { + return '[]'; + } + if (inline2) { + return '[' + result.join(', ') + ']'; + } + return '[' + newLine + result.join(',' + newLine) + newLine + + (compact ? '' : oldIndent) + ']'; + } else if (isNumber(argument)) { + if (json) { + // Some number values (e.g. `Infinity`) cannot be represented in JSON. + return JSON.stringify(argument); + } + if (useDecNumbers) { + return String(argument); + } + if (useHexNumbers) { + var tmp = argument.toString(16); + if (!lowercaseHex) { + tmp = tmp.toUpperCase(); + } + return '0x' + tmp; + } + if (useBinNumbers) { + return '0b' + argument.toString(2); + } + if (useOctNumbers) { + return '0o' + argument.toString(8); + } + } else if (!isObject(argument)) { + if (json) { + // For some values (e.g. `undefined`, `function` objects), + // `JSON.stringify(value)` returns `undefined` (which isn’t valid + // JSON) instead of `'null'`. + return JSON.stringify(argument) || 'null'; + } + return String(argument); + } else { // it’s an object + result = []; + options.wrap = true; + oldIndent = options.__indent__; + indent += oldIndent; + options.__indent__ = indent; + forOwn(argument, function(key, value) { + isEmpty = false; + result.push( + (compact ? '' : indent) + + jsesc(key, options) + ':' + + (compact ? 
'' : ' ') + + jsesc(value, options) + ); + }); + if (isEmpty) { + return '{}'; + } + return '{' + newLine + result.join(',' + newLine) + newLine + + (compact ? '' : oldIndent) + '}'; + } + } + + var string = argument; + // Loop over each code unit in the string and escape it + var index = -1; + var length = string.length; + var first; + var second; + var codePoint; + result = ''; + while (++index < length) { + var character = string.charAt(index); + if (options.es6) { + first = string.charCodeAt(index); + if ( // check if it’s the start of a surrogate pair + first >= 0xD800 && first <= 0xDBFF && // high surrogate + length > index + 1 // there is a next code unit + ) { + second = string.charCodeAt(index + 1); + if (second >= 0xDC00 && second <= 0xDFFF) { // low surrogate + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint = (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + var hexadecimal = codePoint.toString(16); + if (!lowercaseHex) { + hexadecimal = hexadecimal.toUpperCase(); + } + result += '\\u{' + hexadecimal + '}'; + index++; + continue; + } + } + } + if (!options.escapeEverything) { + if (regexWhitelist.test(character)) { + // It’s a printable ASCII character that is not `"`, `'` or `\`, + // so don’t escape it. + result += character; + continue; + } + if (character == '"') { + result += quote == character ? '\\"' : character; + continue; + } + if (character == '\'') { + result += quote == character ? '\\\'' : character; + continue; + } + } + if ( + character == '\0' && + !json && + !regexDigit.test(string.charAt(index + 1)) + ) { + result += '\\0'; + continue; + } + if (regexSingleEscape.test(character)) { + // no need for a `hasOwnProperty` check here + result += singleEscapes[character]; + continue; + } + var charCode = character.charCodeAt(0); + var hexadecimal = charCode.toString(16); + if (!lowercaseHex) { + hexadecimal = hexadecimal.toUpperCase(); + } + var longhand = hexadecimal.length > 2 || json; + var escaped = '\\' + (longhand ? 'u' : 'x') + + ('0000' + hexadecimal).slice(longhand ? 
-4 : -2); + result += escaped; + continue; + } + if (options.wrap) { + result = quote + result + quote; + } + if (options.escapeEtago) { + // https://mathiasbynens.be/notes/etago + return result.replace(/<\/(script|style)/gi, '<\\/$1'); + } + return result; + }; + + jsesc.version = '1.3.0'; + + /*--------------------------------------------------------------------------*/ + + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define(function() { + return jsesc; + }); + } else if (freeExports && !freeExports.nodeType) { + if (freeModule) { // in Node.js or RingoJS v0.8.0+ + freeModule.exports = jsesc; + } else { // in Narwhal or RingoJS v0.7.0- + freeExports.jsesc = jsesc; + } + } else { // in Rhino or a web browser + root.jsesc = jsesc; + } + +}(this)); diff --git a/node_modules/jsesc/man/jsesc.1 b/node_modules/jsesc/man/jsesc.1 new file mode 100644 index 0000000..2655ee7 --- /dev/null +++ b/node_modules/jsesc/man/jsesc.1 @@ -0,0 +1,94 @@ +.Dd May 13, 2016 +.Dt jsesc 1 +.Sh NAME +.Nm jsesc +.Nd escape strings for use in JavaScript string literals +.Sh SYNOPSIS +.Nm +.Op Fl s | -single-quotes Ar string +.br +.Op Fl d | -double-quotes Ar string +.br +.Op Fl w | -wrap Ar string +.br +.Op Fl e | -escape-everything Ar string +.br +.Op Fl 6 | -es6 Ar string +.br +.Op Fl l | -lowercase-hex Ar string +.br +.Op Fl j | -json Ar string +.br +.Op Fl p | -object Ar string +.br +.Op Fl p | -pretty Ar string +.br +.Op Fl v | -version +.br +.Op Fl h | -help +.Sh DESCRIPTION +.Nm +escapes strings for use in JavaScript string literals while generating the shortest possible valid ASCII-only output. +.Sh OPTIONS +.Bl -ohang -offset +.It Sy "-s, --single-quotes" +Escape any occurrences of ' in the input string as \\', so that the output can be used in a JavaScript string literal wrapped in single quotes. +.It Sy "-d, --double-quotes" +Escape any occurrences of " in the input string as \\", so that the output can be used in a JavaScript string literal wrapped in double quotes. +.It Sy "-w, --wrap" +Make sure the output is a valid JavaScript string literal wrapped in quotes. The type of quotes can be specified using the +.Ar -s | --single-quotes +or +.Ar -d | --double-quotes +settings. +.It Sy "-6, --es6" +Escape any astral Unicode symbols using ECMAScript 6 Unicode code point escape sequences. +.It Sy "-e, --escape-everything" +Escape all the symbols in the output, even printable ASCII symbols. +.It Sy "-j, --json" +Make sure the output is valid JSON. Hexadecimal character escape sequences and the \\v or \\0 escape sequences will not be used. Setting this flag enables the +.Ar -d | --double-quotes +and +.Ar -w | --wrap +settings. +.It Sy "-o, --object" +Treat the input as a JavaScript object rather than a string. Accepted values are flat arrays containing only string values, and flat objects containing only string values. +.It Sy "-p, --pretty" +Pretty-print the output for objects, using whitespace to make it more readable. Setting this flag enables the +.It Sy "-l, --lowercase-hex" +Use lowercase for alphabetical hexadecimal digits in escape sequences. +.Ar -o | --object +setting. +.It Sy "-v, --version" +Print jsesc's version. +.It Sy "-h, --help" +Show the help screen. 
+.El +.Sh EXIT STATUS +The +.Nm jsesc +utility exits with one of the following values: +.Pp +.Bl -tag -width flag -compact +.It Li 0 +.Nm +successfully escaped the given string and printed the result. +.It Li 1 +.Nm +wasn't instructed to escape anything (for example, the +.Ar --help +flag was set); or, an error occurred. +.El +.Sh EXAMPLES +.Bl -ohang -offset +.It Sy "jsesc 'foo bar baz'" +Print an escaped version of the given string. +.It Sy echo\ 'foo bar baz'\ |\ jsesc +Print an escaped version of the string that gets piped in. +.El +.Sh BUGS +jsesc's bug tracker is located at <https://github.com/mathiasbynens/jsesc/issues>. +.Sh AUTHOR +Mathias Bynens <https://mathiasbynens.be/> +.Sh WWW +<https://mths.be/jsesc> diff --git a/node_modules/jsesc/package.json b/node_modules/jsesc/package.json new file mode 100644 index 0000000..1ae439f --- /dev/null +++ b/node_modules/jsesc/package.json @@ -0,0 +1,46 @@ +{ + "name": "jsesc", + "version": "1.3.0", + "description": "A JavaScript library for escaping JavaScript strings while generating the shortest possible valid output.", + "homepage": "https://mths.be/jsesc", + "main": "jsesc.js", + "bin": "bin/jsesc", + "man": "man/jsesc.1", + "keywords": [ + "string", + "escape", + "javascript", + "tool" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/jsesc.git" + }, + "bugs": "https://github.com/mathiasbynens/jsesc/issues", + "files": [ + "LICENSE-MIT.txt", + "jsesc.js", + "bin/", + "man/" + ], + "scripts": { + "test": "node tests/tests.js", + "build": "grunt template" + }, + "devDependencies": { + "coveralls": "^2.11.6", + "grunt": "^0.4.5", + "grunt-shell": "^1.1.2", + "grunt-template": "^0.2.3", + "istanbul": "^0.4.2", + "qunit-extras": "^1.4.5", + "qunitjs": "~1.11.0", + "regenerate": "^1.2.1", + "requirejs": "^2.1.22" + } +} diff --git a/node_modules/json-loader/README.md b/node_modules/json-loader/README.md new file mode 100644 index 0000000..45da09f --- /dev/null +++ b/node_modules/json-loader/README.md @@ -0,0 +1,19 @@ +# json loader for webpack + +## Installation + +`npm install json-loader` + +## Usage + +``` javascript +var json = require("json!./file.json"); +// => returns file.json content as json parsed object +``` + +Don't forget to polyfill `require` if you want to use it in node. +See `webpack` documentation. + +## License + +MIT (http://www.opensource.org/licenses/mit-license.php) diff --git a/node_modules/json-loader/index.js b/node_modules/json-loader/index.js new file mode 100644 index 0000000..a51ba3a --- /dev/null +++ b/node_modules/json-loader/index.js @@ -0,0 +1,10 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +module.exports = function(source) { + this.cacheable && this.cacheable(); + var value = typeof source === "string" ?
JSON.parse(source) : source; + this.value = [value]; + return "module.exports = " + JSON.stringify(value, undefined, "\t") + ";"; +} diff --git a/node_modules/json-loader/package.json b/node_modules/json-loader/package.json new file mode 100644 index 0000000..71422c5 --- /dev/null +++ b/node_modules/json-loader/package.json @@ -0,0 +1,11 @@ +{ + "name": "json-loader", + "version": "0.5.4", + "author": "Tobias Koppers @sokra", + "description": "json loader module for webpack", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/webpack/json-loader.git" + } +} diff --git a/node_modules/json-schema/README.md b/node_modules/json-schema/README.md new file mode 100644 index 0000000..ccc591b --- /dev/null +++ b/node_modules/json-schema/README.md @@ -0,0 +1,5 @@ +JSON Schema is a repository for the JSON Schema specification, reference schemas and a CommonJS implementation of JSON Schema (not the only JavaScript implementation of JSON Schema, JSV is another excellent JavaScript validator). + +Code is licensed under the AFL or BSD license as part of the Persevere +project which is administered under the Dojo foundation, +and all contributions require a Dojo CLA. \ No newline at end of file diff --git a/node_modules/json-schema/draft-00/hyper-schema b/node_modules/json-schema/draft-00/hyper-schema new file mode 100644 index 0000000..12fe26b --- /dev/null +++ b/node_modules/json-schema/draft-00/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-00/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "dot-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-00/schema#"} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-00/json-ref b/node_modules/json-schema/draft-00/json-ref new file mode 100644 index 0000000..0c825bc --- /dev/null +++ b/node_modules/json-schema/draft-00/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-00/links b/node_modules/json-schema/draft-00/links new file mode 100644 index 0000000..c9b5517 --- /dev/null +++ b/node_modules/json-schema/draft-00/links @@ -0,0 +1,33 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + 
"id" : "http://json-schema.org/draft-00/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-00/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-00/schema b/node_modules/json-schema/draft-00/schema new file mode 100644 index 0000000..a3a2144 --- /dev/null +++ b/node_modules/json-schema/draft-00/schema @@ -0,0 +1,155 @@ +{ + "$schema" : "http://json-schema.org/draft-00/hyper-schema#", + "id" : "http://json-schema.org/draft-00/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : ["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1 + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "maxDecimal" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-01/hyper-schema b/node_modules/json-schema/draft-01/hyper-schema new file mode 100644 index 0000000..66e835b --- /dev/null +++ b/node_modules/json-schema/draft-01/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : 
"http://json-schema.org/draft-01/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-01/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "dot-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-01/schema#"} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-01/json-ref b/node_modules/json-schema/draft-01/json-ref new file mode 100644 index 0000000..f2ad55b --- /dev/null +++ b/node_modules/json-schema/draft-01/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-01/links b/node_modules/json-schema/draft-01/links new file mode 100644 index 0000000..cb183c4 --- /dev/null +++ b/node_modules/json-schema/draft-01/links @@ -0,0 +1,33 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-01/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-01/schema b/node_modules/json-schema/draft-01/schema new file mode 100644 index 0000000..e6b6aea --- /dev/null +++ b/node_modules/json-schema/draft-01/schema @@ -0,0 +1,155 @@ +{ + "$schema" : "http://json-schema.org/draft-01/hyper-schema#", + "id" : "http://json-schema.org/draft-01/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : 
["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1 + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "maxDecimal" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-02/hyper-schema b/node_modules/json-schema/draft-02/hyper-schema new file mode 100644 index 0000000..2d2bc68 --- /dev/null +++ b/node_modules/json-schema/draft-02/hyper-schema @@ -0,0 +1,68 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-02/links#"}, + "optional" : true + }, + + "fragmentResolution" : { + "type" : "string", + "optional" : true, + "default" : "slash-delimited" + }, + + "root" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pathStart" : { + "type" : "string", + "optional" : true, + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "optional" : true, + "format" : "media-type" + }, + + "alternate" : { + "type" : "array", + "items" : {"$ref" : "#"}, + "optional" : true + } + }, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "slash-delimited", + "extends" : {"$ref" : "http://json-schema.org/draft-02/schema#"} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-02/json-ref b/node_modules/json-schema/draft-02/json-ref new file mode 100644 index 0000000..2b23fcd --- /dev/null +++ b/node_modules/json-schema/draft-02/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/json-ref#", + + "items" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : 
"describedby" + }, + + { + "href" : "{id}", + "rel" : "self" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-02/links b/node_modules/json-schema/draft-02/links new file mode 100644 index 0000000..ab971b7 --- /dev/null +++ b/node_modules/json-schema/draft-02/links @@ -0,0 +1,35 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string" + }, + + "rel" : { + "type" : "string" + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-02/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET", + "optional" : true + }, + + "enctype" : { + "type" : "string", + "requires" : "method", + "optional" : true + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-02/hyper-schema#"}, + "optional" : true + } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-02/schema b/node_modules/json-schema/draft-02/schema new file mode 100644 index 0000000..cc2b669 --- /dev/null +++ b/node_modules/json-schema/draft-02/schema @@ -0,0 +1,166 @@ +{ + "$schema" : "http://json-schema.org/draft-02/hyper-schema#", + "id" : "http://json-schema.org/draft-02/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "optional" : true, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + }, + + "optional" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "optional" : true, + "default" : {} + }, + + "requires" : { + "type" : ["string", {"$ref" : "#"}], + "optional" : true + }, + + "minimum" : { + "type" : "number", + "optional" : true + }, + + "maximum" : { + "type" : "number", + "optional" : true + }, + + "minimumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "minimum", + "default" : true + }, + + "maximumCanEqual" : { + "type" : "boolean", + "optional" : true, + "requires" : "maximum", + "default" : true + }, + + "minItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "optional" : true, + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "optional" : true, + "default" : false + }, + + "pattern" : { + "type" : "string", + "optional" : true, + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "optional" : true, + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer", + "optional" : true + }, + + "enum" : { + "type" : "array", + "optional" : true, + "minItems" : 1, + "uniqueItems" : true + }, + + "title" : { + "type" : "string", + "optional" : true + }, + + "description" : { + "type" : "string", + "optional" : true + }, + + "format" : { + "type" : "string", + "optional" : true + }, + + "contentEncoding" : { + "type" : "string", + "optional" : true + }, + + "default" : { + "type" : "any", + "optional" : true + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "minimumCanEqual" : false, + "optional" : 
true, + "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : {"type" : "string"}, + "optional" : true, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "optional" : true, + "default" : {} + } + }, + + "optional" : true, + "default" : {} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/examples/address b/node_modules/json-schema/draft-03/examples/address new file mode 100644 index 0000000..401f20f --- /dev/null +++ b/node_modules/json-schema/draft-03/examples/address @@ -0,0 +1,20 @@ +{ + "description" : "An Address following the convention of http://microformats.org/wiki/hcard", + "type" : "object", + "properties" : { + "post-office-box" : { "type" : "string" }, + "extended-address" : { "type" : "string" }, + "street-address" : { "type":"string" }, + "locality" : { "type" : "string", "required" : true }, + "region" : { "type" : "string", "required" : true }, + "postal-code" : { "type" : "string" }, + "country-name" : { "type" : "string", "required" : true } + }, + "dependencies" : { + "post-office-box" : "street-address", + "extended-address" : "street-address", + "street-address" : "region", + "locality" : "region", + "region" : "country-name" + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/examples/calendar b/node_modules/json-schema/draft-03/examples/calendar new file mode 100644 index 0000000..0ec47c2 --- /dev/null +++ b/node_modules/json-schema/draft-03/examples/calendar @@ -0,0 +1,53 @@ +{ + "description" : "A representation of an event", + "type" : "object", + "properties" : { + "dtstart" : { + "format" : "date-time", + "type" : "string", + "description" : "Event starting time", + "required":true + }, + "summary" : { + "type":"string", + "required":true + }, + "location" : { + "type" : "string" + }, + "url" : { + "type" : "string", + "format" : "url" + }, + "dtend" : { + "format" : "date-time", + "type" : "string", + "description" : "Event ending time" + }, + "duration" : { + "format" : "date", + "type" : "string", + "description" : "Event duration" + }, + "rdate" : { + "format" : "date-time", + "type" : "string", + "description" : "Recurrence date" + }, + "rrule" : { + "type" : "string", + "description" : "Recurrence rule" + }, + "category" : { + "type" : "string" + }, + "description" : { + "type" : "string" + }, + "geo" : { "$ref" : "http://json-schema.org/draft-03/geo" } + } +} + + + + diff --git a/node_modules/json-schema/draft-03/examples/card b/node_modules/json-schema/draft-03/examples/card new file mode 100644 index 0000000..a5667ff --- /dev/null +++ b/node_modules/json-schema/draft-03/examples/card @@ -0,0 +1,105 @@ +{ + "description":"A representation of a person, company, organization, or place", + "type":"object", + "properties":{ + "fn":{ + "description":"Formatted Name", + "type":"string" + }, + "familyName":{ + "type":"string", + "required":true + }, + "givenName":{ + "type":"string", + "required":true + }, + "additionalName":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "honorificPrefix":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "honorificSuffix":{ + "type":"array", + "items":{ + "type":"string" + } + }, + "nickname":{ + "type":"string" + }, + "url":{ + "type":"string", + "format":"url" + }, + "email":{ + "type":"object", + "properties":{ + "type":{ + "type":"string" + }, + "value":{ + "type":"string", + "format":"email" + } + } + }, + "tel":{ + "type":"object", + "properties":{ 
+ "type":{ + "type":"string" + }, + "value":{ + "type":"string", + "format":"phone" + } + } + }, + "adr":{"$ref" : "http://json-schema.org/address"}, + "geo":{"$ref" : "http://json-schema.org/geo"}, + "tz":{ + "type":"string" + }, + "photo":{ + "format":"image", + "type":"string" + }, + "logo":{ + "format":"image", + "type":"string" + }, + "sound":{ + "format":"attachment", + "type":"string" + }, + "bday":{ + "type":"string", + "format":"date" + }, + "title":{ + "type":"string" + }, + "role":{ + "type":"string" + }, + "org":{ + "type":"object", + "properties":{ + "organizationName":{ + "type":"string" + }, + "organizationUnit":{ + "type":"string" + } + } + } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/examples/geo b/node_modules/json-schema/draft-03/examples/geo new file mode 100644 index 0000000..4357a90 --- /dev/null +++ b/node_modules/json-schema/draft-03/examples/geo @@ -0,0 +1,8 @@ +{ + "description" : "A geographical coordinate", + "type" : "object", + "properties" : { + "latitude" : { "type" : "number" }, + "longitude" : { "type" : "number" } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/examples/interfaces b/node_modules/json-schema/draft-03/examples/interfaces new file mode 100644 index 0000000..b8532f2 --- /dev/null +++ b/node_modules/json-schema/draft-03/examples/interfaces @@ -0,0 +1,23 @@ +{ + "extends":"http://json-schema.org/hyper-schema", + "description":"A schema for schema interface definitions that describe programmatic class structures using JSON schema syntax", + "properties":{ + "methods":{ + "type":"object", + "description":"This defines the set of methods available to the class instances", + "additionalProperties":{ + "type":"object", + "description":"The definition of the method", + "properties":{ + "parameters":{ + "type":"array", + "description":"The set of parameters that should be passed to the method when it is called", + "items":{"$ref":"#"}, + "required": true + }, + "returns":{"$ref":"#"} + } + } + } + } +} diff --git a/node_modules/json-schema/draft-03/hyper-schema b/node_modules/json-schema/draft-03/hyper-schema new file mode 100644 index 0000000..38ca2e1 --- /dev/null +++ b/node_modules/json-schema/draft-03/hyper-schema @@ -0,0 +1,60 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "extends" : {"$ref" : "http://json-schema.org/draft-03/schema#"}, + "id" : "http://json-schema.org/draft-03/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-03/links#"} + }, + + "fragmentResolution" : { + "type" : "string", + "default" : "slash-delimited" + }, + + "root" : { + "type" : "boolean", + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "default" : false + }, + + "contentEncoding" : { + "type" : "string" + }, + + "pathStart" : { + "type" : "string", + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "format" : "media-type" + } + }, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "slash-delimited" +} diff --git a/node_modules/json-schema/draft-03/json-ref b/node_modules/json-schema/draft-03/json-ref new file mode 100644 index 0000000..66e08f2 --- /dev/null +++ b/node_modules/json-schema/draft-03/json-ref @@ -0,0 +1,26 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "id" : 
"http://json-schema.org/draft-03/json-ref#", + + "additionalItems" : {"$ref" : "#"}, + "additionalProperties" : {"$ref" : "#"}, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "dot-delimited" +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/links b/node_modules/json-schema/draft-03/links new file mode 100644 index 0000000..9fa63f9 --- /dev/null +++ b/node_modules/json-schema/draft-03/links @@ -0,0 +1,35 @@ +{ + "$schema" : "http://json-schema.org/draft-03/hyper-schema#", + "id" : "http://json-schema.org/draft-03/links#", + "type" : "object", + + "properties" : { + "href" : { + "type" : "string", + "required" : true, + "format" : "link-description-object-template" + }, + + "rel" : { + "type" : "string", + "required" : true + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET" + }, + + "enctype" : { + "type" : "string", + "requires" : "method" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-03/hyper-schema#"} + } + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-03/schema b/node_modules/json-schema/draft-03/schema new file mode 100644 index 0000000..29d9469 --- /dev/null +++ b/node_modules/json-schema/draft-03/schema @@ -0,0 +1,174 @@ +{ + "$schema" : "http://json-schema.org/draft-03/schema#", + "id" : "http://json-schema.org/draft-03/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "required" : { + "type" : "boolean", + "default" : false + }, + + "dependencies" : { + "type" : "object", + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { + "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "pattern" : { + "type" : "string", + "format" : "regex" + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "enum" : { + "type" : "array", + "minItems" : 1, + "uniqueItems" : true + }, + + "default" : { + "type" : "any" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + }, + + "format" : { + "type" : "string" + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, 
+ "default" : 1 + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "id" : { + "type" : "string", + "format" : "uri" + }, + + "$ref" : { + "type" : "string", + "format" : "uri" + }, + + "$schema" : { + "type" : "string", + "format" : "uri" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-04/hyper-schema b/node_modules/json-schema/draft-04/hyper-schema new file mode 100644 index 0000000..63fb34d --- /dev/null +++ b/node_modules/json-schema/draft-04/hyper-schema @@ -0,0 +1,60 @@ +{ + "$schema" : "http://json-schema.org/draft-04/hyper-schema#", + "extends" : {"$ref" : "http://json-schema.org/draft-04/schema#"}, + "id" : "http://json-schema.org/draft-04/hyper-schema#", + + "properties" : { + "links" : { + "type" : "array", + "items" : {"$ref" : "http://json-schema.org/draft-04/links#"} + }, + + "fragmentResolution" : { + "type" : "string", + "default" : "json-pointer" + }, + + "root" : { + "type" : "boolean", + "default" : false + }, + + "readonly" : { + "type" : "boolean", + "default" : false + }, + + "contentEncoding" : { + "type" : "string" + }, + + "pathStart" : { + "type" : "string", + "format" : "uri" + }, + + "mediaType" : { + "type" : "string", + "format" : "media-type" + } + }, + + "links" : [ + { + "href" : "{id}", + "rel" : "self" + }, + + { + "href" : "{$ref}", + "rel" : "full" + }, + + { + "href" : "{$schema}", + "rel" : "describedby" + } + ], + + "fragmentResolution" : "json-pointer" +} diff --git a/node_modules/json-schema/draft-04/links b/node_modules/json-schema/draft-04/links new file mode 100644 index 0000000..6c06d29 --- /dev/null +++ b/node_modules/json-schema/draft-04/links @@ -0,0 +1,41 @@ +{ + "$schema" : "http://json-schema.org/draft-04/hyper-schema#", + "id" : "http://json-schema.org/draft-04/links#", + "type" : "object", + + "properties" : { + "rel" : { + "type" : "string" + }, + + "href" : { + "type" : "string" + }, + + "template" : { + "type" : "string" + }, + + "targetSchema" : {"$ref" : "http://json-schema.org/draft-04/hyper-schema#"}, + + "method" : { + "type" : "string", + "default" : "GET" + }, + + "enctype" : { + "type" : "string" + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "http://json-schema.org/draft-04/hyper-schema#"} + } + }, + + "required" : ["rel", "href"], + + "dependencies" : { + "enctype" : "method" + } +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-04/schema b/node_modules/json-schema/draft-04/schema new file mode 100644 index 0000000..4231b16 --- /dev/null +++ b/node_modules/json-schema/draft-04/schema @@ -0,0 +1,189 @@ +{ + "$schema" : "http://json-schema.org/draft-04/schema#", + "id" : "http://json-schema.org/draft-04/schema#", + "type" : "object", + + "properties" : { + "type" : { + "type" : [ + { + "id" : "#simple-type", + "type" : "string", + "enum" : ["object", "array", "string", "number", "boolean", "null", "any"] + }, + "array" + ], + "items" : { + "type" : [ + {"$ref" : "#simple-type"}, + {"$ref" : "#"} + ] + }, + "uniqueItems" : true, + "default" : "any" + }, + + "disallow" : { + "type" : ["string", "array"], + "items" : { + "type" : ["string", {"$ref" : "#"}] + }, + "uniqueItems" : true + }, + + "extends" : { + "type" : [{"$ref" 
: "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "enum" : { + "type" : "array", + "minItems" : 1, + "uniqueItems" : true + }, + + "minimum" : { + "type" : "number" + }, + + "maximum" : { + "type" : "number" + }, + + "exclusiveMinimum" : { + "type" : "boolean", + "default" : false + }, + + "exclusiveMaximum" : { + "type" : "boolean", + "default" : false + }, + + "divisibleBy" : { + "type" : "number", + "minimum" : 0, + "exclusiveMinimum" : true, + "default" : 1 + }, + + "minLength" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxLength" : { + "type" : "integer" + }, + + "pattern" : { + "type" : "string" + }, + + "items" : { + "type" : [{"$ref" : "#"}, "array"], + "items" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalItems" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "minItems" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxItems" : { + "type" : "integer", + "minimum" : 0 + }, + + "uniqueItems" : { + "type" : "boolean", + "default" : false + }, + + "properties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "patternProperties" : { + "type" : "object", + "additionalProperties" : {"$ref" : "#"}, + "default" : {} + }, + + "additionalProperties" : { + "type" : [{"$ref" : "#"}, "boolean"], + "default" : {} + }, + + "minProperties" : { + "type" : "integer", + "minimum" : 0, + "default" : 0 + }, + + "maxProperties" : { + "type" : "integer", + "minimum" : 0 + }, + + "required" : { + "type" : "array", + "items" : { + "type" : "string" + } + }, + + "dependencies" : { + "type" : "object", + "additionalProperties" : { + "type" : ["string", "array", {"$ref" : "#"}], + "items" : { + "type" : "string" + } + }, + "default" : {} + }, + + "id" : { + "type" : "string" + }, + + "$ref" : { + "type" : "string" + }, + + "$schema" : { + "type" : "string" + }, + + "title" : { + "type" : "string" + }, + + "description" : { + "type" : "string" + }, + + "default" : { + "type" : "any" + } + }, + + "dependencies" : { + "exclusiveMinimum" : "minimum", + "exclusiveMaximum" : "maximum" + }, + + "default" : {} +} \ No newline at end of file diff --git a/node_modules/json-schema/draft-zyp-json-schema-03.xml b/node_modules/json-schema/draft-zyp-json-schema-03.xml new file mode 100644 index 0000000..cf60620 --- /dev/null +++ b/node_modules/json-schema/draft-zyp-json-schema-03.xml @@ -0,0 +1,1120 @@ + + + + + + + + + + + + + + + +]> + + + + + + + + + A JSON Media Type for Describing the Structure and Meaning of JSON Documents + + + SitePen (USA) +
+ + 530 Lytton Avenue + Palo Alto, CA 94301 + USA + + +1 650 968 8787 + kris@sitepen.com +
+
+ + +
+ + + Calgary, AB + Canada + + gary.court@gmail.com +
+
+ + + Internet Engineering Task Force + JSON + Schema + JavaScript + Object + Notation + Hyper Schema + Hypermedia + + + + JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json", + a JSON based format for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + + +
+ + +
+ + JSON (JavaScript Object Notation) Schema is a JSON media type for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + +
+ +
+ + + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", + "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be + interpreted as described in RFC 2119. + +
+ + + +
+ JSON Schema defines the media type "application/schema+json" for describing the structure of other
+ JSON documents. JSON Schema is JSON-based and includes facilities for describing the structure of
+ JSON documents in terms of allowable values, descriptions, and interpreting relations with other resources.
+
+ The JSON Schema format is organized into several separate definitions. The first definition is the
+ core schema specification. This definition is primarily concerned with describing a JSON structure and
+ specifying valid elements in the structure. The second definition is the Hyper Schema specification,
+ which is intended to define elements in a structure that can be interpreted as hyperlinks.
+ Hyper Schema builds on JSON Schema to describe the hyperlink structure of other JSON documents and
+ elements of interaction. This allows user agents to successfully navigate JSON documents based on their schemas.
+
+ Cumulatively, JSON Schema acts as a meta-document that can be used to define the required type and
+ constraints on property values, as well as define the meaning of the property values for the purpose
+ of describing a resource and determining hyperlinks within the representation.
+ An example JSON Schema that describes products might look like: + + + + + This schema defines the properties of the instance JSON documents, + the required properties (id, name, and price), as well as an optional + property (tags). This also defines the link relations of the instance + JSON documents. + +
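+ An illustrative, non-normative sketch of such a schema (the exact property types and the link target
+ here are assumptions, not part of this draft) could be:
+
+ {
+   "name": "Product",
+   "type": "object",
+   "properties": {
+     "id": {"type": "number", "required": true},
+     "name": {"type": "string", "required": true},
+     "price": {"type": "number", "minimum": 0, "required": true},
+     "tags": {"type": "array", "items": {"type": "string"}}
+   },
+   "links": [
+     {"rel": "self", "href": "product/{id}"}
+   ]
+ }
+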
+ +
+ + For this specification, schema will be used to denote a JSON Schema + definition, and an instance refers to a JSON value that the schema + will be describing and validating. + +
+ +
+ + The JSON Schema media type does not attempt to dictate the structure of JSON + representations that contain data, but rather provides a separate format + for flexibly communicating how a JSON representation should be + interpreted and validated, such that user agents can properly understand + acceptable structures and extrapolate hyperlink information + with the JSON document. It is acknowledged that JSON documents come + in a variety of structures, and JSON is unique in that the structure + of stored data structures often prescribes a non-ambiguous definite + JSON representation. Attempting to force a specific structure is generally + not viable, and therefore JSON Schema allows for a great flexibility + in the structure of the JSON data that it describes. + + + This specification is protocol agnostic. + The underlying protocol (such as HTTP) should sufficiently define the + semantics of the client-server interface, the retrieval of resource + representations linked to by JSON representations, and modification of + those resources. The goal of this + format is to sufficiently describe JSON structures such that one can + utilize existing information available in existing JSON + representations from a large variety of services that leverage a representational state transfer + architecture using existing protocols. + +
+
+ +
+ + JSON Schema instances are correlated to their schema by the "describedby" + relation, where the schema is defined to be the target of the relation. + Instance representations may be of the "application/json" media type or + any other subtype. Consequently, dictating how an instance + representation should specify the relation to the schema is beyond the normative scope + of this document (since this document specifically defines the JSON + Schema media type, and no other), but it is recommended that instances + specify their schema so that user agents can interpret the instance + representation and messages may retain the self-descriptive + characteristic, avoiding the need for out-of-band information about + instance data. Two approaches are recommended for declaring the + relation to the schema that describes the meaning of a JSON instance's (or collection + of instances) structure. A MIME type parameter named + "profile" or a relation of "describedby" (which could be defined by a Link header) may be used: + +
+ + + +
+ + or if the content is being transferred by a protocol (such as HTTP) that + provides headers, a Link header can be used: + +
+ +; rel="describedby" +]]> + +
+ + Instances MAY specify multiple schemas, to indicate all the schemas that + are applicable to the data, and the data SHOULD be valid by all the schemas. + The instance data MAY have multiple schemas + that it is defined by (the instance data SHOULD be valid for those schemas). + Or if the document is a collection of instances, the collection MAY contain + instances from different schemas. When collections contain heterogeneous + instances, the "pathStart" attribute MAY be specified in the + schema to disambiguate which schema should be applied for each item in the + collection. However, ultimately, the mechanism for referencing a schema is up to the + media type of the instance documents (if they choose to specify that schemas + can be referenced). +
+ +
+ + JSON Schemas can themselves be described using JSON Schemas. + A self-describing JSON Schema for the core JSON Schema can + be found at http://json-schema.org/schema for the latest version or + http://json-schema.org/draft-03/schema for the draft-03 version. The hyper schema + self-description can be found at http://json-schema.org/hyper-schema + or http://json-schema.org/draft-03/hyper-schema. All schemas + used within a protocol with media type definitions + SHOULD include a MIME parameter that refers to the self-descriptive + hyper schema or another schema that extends this hyper schema: + +
+ + + +
+
+
+
+ +
+ + A JSON Schema is a JSON Object that defines various attributes + (including usage and valid values) of a JSON value. JSON + Schema has recursive capabilities; there are a number of elements + in the structure that allow for nested JSON Schemas. + + +
+ An example JSON Schema definition could look like: + + + +
+ + + A JSON Schema object may have any of the following properties, called schema + attributes (all attributes are optional): + + +
+ + This attribute defines what the primitive type or the schema of the instance MUST be in order to validate. + This attribute can take one of two forms: + + + + A string indicating a primitive or simple type. The following are acceptable string values: + + + Value MUST be a string. + Value MUST be a number, floating point numbers are allowed. + Value MUST be an integer, no floating point numbers are allowed. This is a subset of the number type. + Value MUST be a boolean. + Value MUST be an object. + Value MUST be an array. + Value MUST be null. Note this is mainly for purpose of being able use union types to define nullability. If this type is not included in a union, null values are not allowed (the primitives listed above do not allow nulls on their own). + Value MAY be of any type including null. + + + If the property is not defined or is not in this list, then any type of value is acceptable. + Other type values MAY be used for custom purposes, but minimal validators of the specification + implementation can allow any instance value on unknown type values. + + + + An array of two or more simple type definitions. Each item in the array MUST be a simple type definition or a schema. + The instance value is valid if it is of the same type as one of the simple type definitions, or valid by one of the schemas, in the array. + + + + +
+ For example, a schema that defines if an instance can be a string or a number would be: + + +
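+ An illustrative sketch of such a schema:
+
+ {
+   "type": ["string", "number"]
+ }
+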
+
+ +
+ This attribute is an object with property definitions that define the valid values of instance object property values. When the instance value is an object, the property values of the instance object MUST conform to the property definitions in this object. In this object, each property definition's value MUST be a schema, and the property's name MUST be the name of the instance property that it defines. The instance property value MUST be valid according to the schema from the property definition. Properties are considered unordered; the instance properties MAY appear in any order.
+ +
+ This attribute is an object that defines the schema for a set of property names of an object instance. The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema. If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value. +
+ +
+ This attribute defines a schema for all properties that are not explicitly defined in an object type definition. If specified, the value MUST be a schema or a boolean. If false is provided, no additional properties are allowed beyond the properties defined in the schema. The default value is an empty schema which allows any value for additional properties. +
+ +
+ This attribute defines the allowed items in an instance array, and MUST be a schema or an array of schemas. The default value is an empty schema which allows any value for items in the instance array.
+ When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema.
+ When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST conform to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute using the same rules as "additionalProperties" for objects.
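+ An illustrative, non-normative sketch of both forms (the item types are hypothetical): first a
+ homogeneous array schema, then a tuple-typed array schema that forbids additional items.
+
+ {
+   "type": "array",
+   "items": {"type": "string"}
+ }
+
+ {
+   "type": "array",
+   "items": [{"type": "string"}, {"type": "number"}],
+   "additionalItems": false
+ }
+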
+ +
+ This provides a definition for additional items in an array instance when tuple definitions of the items is provided. This can be false to indicate additional items in the array are not allowed, or it can be a schema that defines the schema of the additional items. +
+ +
+ This attribute indicates if the instance must have a value, and not be undefined. This is false by default, making the instance optional. +
+ +
+ This attribute is an object that defines the requirements of a property on an instance object. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value"). + + The dependency value can take one of two forms: + + + + If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value. + If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array. + + + If the dependency value is a schema, then the instance object MUST be valid against the schema. + + + +
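+ An illustrative, non-normative sketch of the three dependency forms (all property names here are hypothetical):
+
+ {
+   "dependencies": {
+     "creditCard": "billingAddress",
+     "shippingDiscount": ["memberId", "memberLevel"],
+     "couponCode": {"properties": {"total": {"minimum": 10}}}
+   }
+ }
+
+ An instance with a "creditCard" property must also have "billingAddress"; one with "shippingDiscount" must have both "memberId" and "memberLevel"; and one with "couponCode" must additionally be valid against the schema given as the dependency value.
+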
+ +
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+ +
+ This attribute defines the minimum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the maximum number of values in an array when the array is the instance value. +
+ +
+ This attribute indicates that all items in an array instance MUST be unique (that is, the array contains no two identical values).
+
+ Two instances are considered equal if they are both of the same type and:
+
+ are null; or
+ are booleans/numbers/strings and have the same value; or
+ are arrays, contain the same number of items, and each item in the array is equal to the corresponding item in the other array; or
+ are objects, contain the same property names, and each property in the object is equal to the corresponding property in the other object.
+
+ +
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5 +
+ +
+ When the instance value is a string, this defines the minimum length of the string. +
+ +
+ When the instance value is a string, this defines the maximum length of the string. +
+ +
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems". +
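+ An illustrative sketch (the values are hypothetical):
+
+ {
+   "enum": ["red", "green", "blue"]
+ }
+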
+ +
+ This attribute defines the default value of the instance when the instance is undefined. +
+ +
+ This attribute is a string that provides a short description of the instance property. +
+ +
+ This attribute is a string that provides a full description of the purpose of the instance property.
+ +
+ This property defines the type of data, content type, or microformat to be expected in the instance property values. A format attribute MAY be one of the values listed below, and if so, SHOULD adhere to the semantics describing for the format. A format SHOULD only be used to give meaning to primitive types (string, integer, number, or boolean). Validators MAY (but are not required to) validate that the instance values conform to a format. + + + The following formats are predefined: + + + This SHOULD be a date in ISO 8601 format of YYYY-MM-DDThh:mm:ssZ in UTC time. This is the recommended form of date/timestamp. + This SHOULD be a date in the format of YYYY-MM-DD. It is recommended that you use the "date-time" format instead of "date" unless you need to transfer only the date part. + This SHOULD be a time in the format of hh:mm:ss. It is recommended that you use the "date-time" format instead of "time" unless you need to transfer only the time part. + This SHOULD be the difference, measured in milliseconds, between the specified time and midnight, 00:00 of January 1, 1970 UTC. The value SHOULD be a number (integer or float). + A regular expression, following the regular expression specification from ECMA 262/Perl 5. + This is a CSS color (like "#FF0000" or "red"), based on CSS 2.1. + This is a CSS style definition (like "color: red; background-color:#FFF"), based on CSS 2.1. + This SHOULD be a phone number (format MAY follow E.123). + This value SHOULD be a URI. + This SHOULD be an email address. + This SHOULD be an ip version 4 address. + This SHOULD be an ip version 6 address. + This SHOULD be a host-name. + + + + Additional custom formats MAY be created. These custom formats MAY be expressed as an URI, and this URI MAY reference a schema of that format. +
+ +
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0. +
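+ An illustrative sketch: the following schema accepts 10 or 15 but rejects 7, since 7 divided by 5 is not an integer.
+
+ {
+   "type": "number",
+   "divisibleBy": 5
+ }
+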
+ +
+ This attribute takes the same values as the "type" attribute, however if the instance matches the type or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid. +
+ +
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints. + + Conceptually, the behavior of extends can be seen as validating an + instance against all constraints in the extending schema as well as + the extended schema(s). More optimized implementations that merge + schemas are possible, but are not required. Some examples of using "extends": + +
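+ An illustrative, non-normative sketch (the "person" schema reference and the property names are
+ hypothetical): any instance valid against the extending schema below must also be valid against the
+ base schema it references.
+
+ {
+   "description": "A person",
+   "type": "object",
+   "properties": {
+     "name": {"type": "string"},
+     "age": {"type": "integer", "maximum": 125}
+   }
+ }
+
+ {
+   "description": "An adult",
+   "extends": {"$ref": "person"},
+   "properties": {
+     "age": {"minimum": 21}
+   }
+ }
+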
+ + + +
+ +
+ + + +
+
+
+ +
+ + This attribute defines the current URI of this schema (this attribute is + effectively a "self" link). This URI MAY be relative or absolute. If + the URI is relative it is resolved against the current URI of the parent + schema it is contained in. If this schema is not contained in any + parent schema, the current URI of the parent schema is held to be the + URI under which this schema was addressed. If id is missing, the current URI of a schema is + defined to be that of the parent schema. The current URI of the schema + is also used to construct relative references such as for $ref. + +
+ +
+ + This attribute defines a URI of a schema that contains the full representation of this schema. + When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance. + This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema. + +
+ +
+
+ This attribute defines a URI of a JSON Schema that is the schema of the current schema.
+ When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links.
+
+
+ A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior.
+ Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes.
+
+
+ +
+ + The following attributes are specified in addition to those + attributes that already provided by the core schema with the specific + purpose of informing user agents of relations between resources based + on JSON data. Just as with JSON + schema attributes, all the attributes in hyper schemas are optional. + Therefore, an empty object is a valid (non-informative) schema, and + essentially describes plain JSON (no constraints on the structures). + Addition of attributes provides additive information for user agents. + + +
+ + The value of the links property MUST be an array, where each item + in the array is a link description object which describes the link + relations of the instances. + + +
+
+ A link description object is used to describe link relations. In the context of a schema, it defines the link relations of the instances of the schema, and can be parameterized by the instance values. The link description format can be used on its own in regular (non-schema) documents, and use of this format can be declared by referencing the normative link description schema as the schema for the data structure that uses the links. The URI of the normative link description schema is:
+ http://json-schema.org/links (latest version) or
+ http://json-schema.org/draft-03/links (draft-03 version).
+
+ + The value of the "href" link description property + indicates the target URI of the related resource. The value + of the instance property SHOULD be resolved as a URI-Reference per RFC 3986 + and MAY be a relative URI. The base URI to be used for relative resolution + SHOULD be the URI used to retrieve the instance object (not the schema) + when used within a schema. Also, when links are used within a schema, the URI + SHOULD be parametrized by the property values of the instance + object, if property values exist for the corresponding variables + in the template (otherwise they MAY be provided from alternate sources, like user input). + + + + Instance property values SHOULD be substituted into the URIs where + matching braces ('{', '}') are found surrounding zero or more characters, + creating an expanded URI. Instance property value substitutions are resolved + by using the text between the braces to denote the property name + from the instance to get the value to substitute. + +
+ For example, if an href value is defined (as sketched below):
+
+ Then it would be resolved by replacing it with the value of the "id" property from the instance object.
+ +
+ If the value of the "id" property was "45", the expanded URI would be: + + + +
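+ An illustrative sketch (the "/Resource/{id}" template is an assumption): a link defined as
+
+ {
+   "rel": "self",
+   "href": "/Resource/{id}"
+ }
+
+ would, for an instance whose "id" property is "45", expand to the URI /Resource/45.
+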
+ + If matching braces are found with the string "@" (no quotes) between the braces, then the + actual instance value SHOULD be used to replace the braces, rather than a property value. + This should only be used in situations where the instance is a scalar (string, + boolean, or number), and not for objects or arrays. +
+
+ +
+ + The value of the "rel" property indicates the name of the + relation to the target resource. The relation to the target SHOULD be interpreted as specifically from the instance object that the schema (or sub-schema) applies to, not just the top level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to target from the top level resource MUST be indicated with the schema describing the top level JSON representation. + + + + Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define these relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations: + + + + If the relation value is "self", when this property is encountered in + the instance object, the object represents a resource and the instance object is + treated as a full representation of the target resource identified by + the specified URI. + + + + This indicates that the target of the link is the full representation for the instance object. The object that contains this link possibly may not be the full representation. + + + + This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures. + + + + This relation indicates that the target of the link + SHOULD be treated as the root or the body of the representation for the + purposes of user agent interaction or fragment resolution. All other + properties of the instance objects can be regarded as meta-data + descriptions for the data. + + + + + + The following relations are applicable for schemas (the schema as the "from" resource in the relation): + + + This indicates the target resource that represents collection of instances of a schema. + This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST). + + + + +
+ For example, if a schema is defined: + + + +
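+ An illustrative, non-normative sketch consistent with the resolution described below (the href
+ templates here are assumptions; the relation names and resolved URIs come from the surrounding text):
+
+ {
+   "links": [
+     {"rel": "self", "href": "{id}"},
+     {"rel": "up", "href": "{upId}"},
+     {"rel": "children", "href": "?upId={id}"}
+   ]
+ }
+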
+ +
+ And if a collection of instance resource's JSON representation was retrieved: + + + +
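+ Continuing the sketch above, a collection retrieved from "/Resource/" might be:
+
+ [
+   {"id": "thing", "upId": "parent"},
+   {"id": "other", "upId": "parent"}
+ ]
+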
+ + This would indicate that for the first item in the collection, its own + (self) URI would resolve to "/Resource/thing" and the first item's "up" + relation SHOULD be resolved to the resource at "/Resource/parent". + The "children" collection would be located at "/Resource/?upId=thing". +
+
+ +
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link. +
+ +
+ + The following properties also apply to link definition objects, and + provide functionality analogous to HTML forms, in providing a + means for submitting extra (often user supplied) information to send to a server. + + +
+ + This attribute defines which method can be used to access the target resource. + In an HTTP environment, this would be "GET" or "POST" (other HTTP methods + such as "PUT" and "DELETE" have semantics that are clearly implied by + accessed resources, and do not need to be defined here). + This defaults to "GET". + +
+ +
+ + If present, this property indicates a query media type format that the server + supports for querying or posting to the collection of instances at the target + resource. The query can be + suffixed to the target URI to query the collection with + property-based constraints on the resources that SHOULD be returned from + the server or used to post data to the resource (depending on the method). + +
+ For example, with the following schema: + + + + This indicates that the client can query the server for instances that have a specific name. +
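+ An illustrative, non-normative sketch (the link target "/Product/" and the "name" property are assumptions):
+
+ {
+   "links": [
+     {
+       "rel": "instances",
+       "href": "/Product/",
+       "method": "GET",
+       "enctype": "application/x-www-form-urlencoded",
+       "schema": {
+         "properties": {
+           "name": {"type": "string"}
+         }
+       }
+     }
+   ]
+ }
+
+ A query string such as ?name=Pen could then be suffixed to the target URI, giving /Product/?name=Pen, to constrain the returned instances by name.
+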
+ +
+ For example: + + + +
+ + If no enctype or method is specified, only the single URI specified by + the href property is defined. If the method is POST, "application/json" is + the default media type. +
+
+ +
+ + This attribute contains a schema which defines the acceptable structure of the submitted + request (for a GET request, this schema would define the properties for the query string + and for a POST request, this would define the body). + +
+
+
+
+ +
+ + This property indicates the fragment resolution protocol to use for + resolving fragment identifiers in URIs within the instance + representations. This applies to the instance object URIs and all + children of the instance object's URIs. The default fragment resolution + protocol is "slash-delimited", which is defined below. Other fragment + resolution protocols MAY be used, but are not defined in this document. + + + + The fragment identifier is based on RFC 2396, Sec 5, and defines the + mechanism for resolving references to entities within a document. + + +
+
+ With the slash-delimited fragment resolution protocol, the fragment identifier is interpreted as a series of property reference tokens that start with and are delimited by the "/" character (\x2F). Each property reference token is a series of unreserved or escaped URI characters. Each property reference token SHOULD be interpreted, starting from the beginning of the fragment identifier, as a path reference in the target JSON structure. The final target value of the fragment can be determined by starting with the root of the JSON structure from the representation of the resource identified by the pre-fragment URI. If the target is a JSON object, then the new target is the value of the property with the name identified by the next property reference token in the fragment. If the target is a JSON array, then the target is determined by finding the item in the array with the index defined by the next property reference token (which MUST be a number). The target is successively updated for each property reference token, until the entire fragment has been traversed.
+
+
+ Property names SHOULD be URI-encoded. In particular, any "/" in a property name MUST be encoded to avoid being interpreted as a property delimiter.
+
+ For example, for the following JSON representation: + + + +
+ +
+ The following fragment identifiers would be resolved: + + + +
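+ An illustrative sketch (the data is hypothetical): for the representation
+
+ {
+   "foo": {
+     "bar": [10, 20]
+   }
+ }
+
+ the fragment identifier #/foo would resolve to the inner object, #/foo/bar to the array, and #/foo/bar/1 to the value 20.
+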
+
+
+ +
+
+ The dot-delimited fragment resolution protocol is the same as the slash-delimited fragment resolution protocol, except that the "." character (\x2E) is used as the delimiter between property names (instead of "/") and the path does not need to start with a ".". For example, #.foo and #foo are valid fragment identifiers for referencing the value of the foo property.
+
+
+ +
+ This attribute indicates that the instance property SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server. +
+ +
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property. +
+ +
+
+ This attribute is a URI that defines what the instance's URI MUST start with in order to validate. The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5, and is relative to the instance's URI.
+
+
+ When multiple schemas have been referenced for an instance, the user agent can determine if this schema is applicable for a particular instance by determining if the URI of the instance begins with the value of the "pathStart" attribute. If the URI of the instance does not start with this URI, or if another schema specifies a starting URI that is longer and also matches the instance, this schema SHOULD NOT be applied to the instance. Any schema that does not have a pathStart attribute SHOULD be considered applicable to all the instances for which it is referenced.
+
+ +
+ This attribute defines the media type of the instance representations that this schema is defining. +
+
+ +
+
+ This specification is a sub-type of the JSON format, and consequently the security considerations are generally the same as RFC 4627. However, an additional issue is that when the link relation of "self" is used to denote a full representation of an object, the user agent SHOULD NOT consider the representation to be the authoritative representation of the resource denoted by the target URI if the target URI is not equivalent to or a sub-path of the URI used to request the resource representation which contains the target URI with the "self" link.
+
+ For example, if a hyper schema was defined: + + + +
+ +
+ And a resource was requested from somesite.com: + + + +
+ +
+ With a response of: + + + +
+
+
+ +
+ The proposed MIME media type for JSON Schema is "application/schema+json". + Type name: application + Subtype name: schema+json + Required parameters: profile + + The value of the profile parameter SHOULD be a URI (relative or absolute) that + refers to the schema used to define the structure of this structure (the + meta-schema). Normally the value would be http://json-schema.org/draft-03/hyper-schema, + but it is allowable to use other schemas that extend the hyper schema's meta- + schema. + + Optional parameters: pretty + The value of the pretty parameter MAY be true or false to indicate if additional whitespace has been included to make the JSON representation easier to read. + +
+ + This registry is maintained by IANA per RFC 4287 and this specification adds + four values: "full", "create", "instances", "root". New + assignments are subject to IESG Approval, as outlined in RFC 5226. + Requests should be made by email to IANA, which will then forward the + request to the IESG, requesting approval. + +
+
+
+ + + + + &rfc2045; + &rfc2119; + &rfc2396; + &rfc3339; + &rfc3986; + &rfc4287; + + + &rfc2616; + &rfc4627; + &rfc5226; + &iddiscovery; + &uritemplate; + &linkheader; + &html401; + &css21; + + +
+ + + + + Added example and verbiage to "extends" attribute. + Defined slash-delimited to use a leading slash. + Made "root" a relation instead of an attribute. + Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types). + Added more explanation of nullability. + Removed "alternate" attribute. + Upper cased many normative usages of must, may, and should. + Replaced the link submission "properties" attribute to "schema" attribute. + Replaced "optional" attribute with "required" attribute. + Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute. + Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute. + Replaced "requires" attribute with "dependencies" attribute. + Moved "contentEncoding" attribute to hyper schema. + Added "additionalItems" attribute. + Added "id" attribute. + Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template. + Added "patternProperties" attribute. + Schema URIs are now namespace versioned. + Added "$ref" and "$schema" attributes. + + + + + + Replaced "maxDecimal" attribute with "divisibleBy" attribute. + Added slash-delimited fragment resolution protocol and made it the default. + Added language about using links outside of schemas by referencing its normative URI. + Added "uniqueItems" attribute. + Added "targetSchema" attribute to link description object. + + + + + + Fixed category and updates from template. + + + + + + Initial draft. + + + + +
+ +
+ + + Should we give a preference to MIME headers over Link headers (or only use one)? + Should "root" be a MIME parameter? + Should "format" be renamed to "mediaType" or "contentType" to reflect the usage MIME media types that are allowed? + How should dates be handled? + + +
+
+
diff --git a/node_modules/json-schema/draft-zyp-json-schema-04.xml b/node_modules/json-schema/draft-zyp-json-schema-04.xml new file mode 100644 index 0000000..8ede6bf --- /dev/null +++ b/node_modules/json-schema/draft-zyp-json-schema-04.xml @@ -0,0 +1,1072 @@ + + + + + + + + + + + + + + +]> + + + + + + + + + A JSON Media Type for Describing the Structure and Meaning of JSON Documents + + + SitePen (USA) +
+ + 530 Lytton Avenue + Palo Alto, CA 94301 + USA + + +1 650 968 8787 + kris@sitepen.com +
+
+ + +
+ + + Calgary, AB + Canada + + gary.court@gmail.com +
+
+ + + Internet Engineering Task Force + JSON + Schema + JavaScript + Object + Notation + Hyper Schema + Hypermedia + + + + JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json", + a JSON based format for defining the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + + +
+ + +
+ + JSON (JavaScript Object Notation) Schema is a JSON media type for defining + the structure of JSON data. JSON Schema provides a contract for what JSON + data is required for a given application and how to interact with it. JSON + Schema is intended to define validation, documentation, hyperlink + navigation, and interaction control of JSON data. + +
+ +
+ + + + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", + "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be + interpreted as described in RFC 2119. + + + + The terms "JSON", "JSON text", "JSON value", "member", "element", "object", + "array", "number", "string", "boolean", "true", "false", and "null" in this + document are to be interpreted as defined in RFC 4627. + + + + This specification also uses the following defined terms: + + + A JSON Schema object. + Equivalent to "JSON value" as defined in RFC 4627. + Equivalent to "member" as defined in RFC 4627. + Equivalent to "element" as defined in RFC 4627. + A property of a JSON Schema object. + + +
+ +
+
+ JSON Schema defines the media type "application/schema+json" for describing the structure of JSON text. JSON Schemas are also written in JSON and include facilities for describing the structure of JSON in terms of allowable values, descriptions, and interpreting relations with other resources.
+
+
+ This document is organized into several separate definitions. The first definition is the core schema specification. This definition is primarily concerned with describing a JSON structure and specifying valid elements in the structure. The second definition is the Hyper Schema specification, which is intended to define elements in a structure that can be interpreted as hyperlinks.
+ Hyper Schema builds on JSON Schema to describe the hyperlink structure of JSON values. This allows user agents to successfully navigate documents containing JSON based on their schemas.
+
+
+ Cumulatively, JSON Schema acts as meta-JSON that can be used to define the required type and constraints on JSON values, as well as define the meaning of the JSON values for the purpose of describing a resource and determining hyperlinks within the representation.
+
+ An example JSON Schema that describes products might look like: + + + + + This schema defines the properties of the instance, + the required properties (id, name, and price), as well as an optional + property (tags). This also defines the link relations of the instance. + +
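+ An illustrative, non-normative sketch of such a schema (property types and the link target are
+ assumptions); note that in this draft "required" is an array of property names rather than a
+ per-property boolean:
+
+ {
+   "name": "Product",
+   "type": "object",
+   "properties": {
+     "id": {"type": "number"},
+     "name": {"type": "string"},
+     "price": {"type": "number", "minimum": 0},
+     "tags": {"type": "array", "items": {"type": "string"}}
+   },
+   "required": ["id", "name", "price"],
+   "links": [
+     {"rel": "self", "href": "product/{id}"}
+   ]
+ }
+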
+ +
+ + The JSON Schema media type does not attempt to dictate the structure of JSON + values that contain data, but rather provides a separate format + for flexibly communicating how a JSON value should be + interpreted and validated, such that user agents can properly understand + acceptable structures and extrapolate hyperlink information + from the JSON. It is acknowledged that JSON values come + in a variety of structures, and JSON is unique in that the structure + of stored data structures often prescribes a non-ambiguous definite + JSON representation. Attempting to force a specific structure is generally + not viable, and therefore JSON Schema allows for a great flexibility + in the structure of the JSON data that it describes. + + + This specification is protocol agnostic. + The underlying protocol (such as HTTP) should sufficiently define the + semantics of the client-server interface, the retrieval of resource + representations linked to by JSON representations, and modification of + those resources. The goal of this + format is to sufficiently describe JSON structures such that one can + utilize existing information available in existing JSON + representations from a large variety of services that leverage a representational state transfer + architecture using existing protocols. + +
+
+ +
+
+ JSON values are correlated to their schema by the "describedby" relation, where the schema is the target of the relation. JSON values MUST be of the "application/json" media type or any other subtype. Consequently, dictating how a JSON value should specify the relation to the schema is beyond the normative scope of this document since this document specifically defines the JSON Schema media type, and no other. It is RECOMMENDED that JSON values specify their schema so that user agents can interpret the instance and retain the self-descriptive characteristics. This avoids the need for out-of-band information about instance data. Two approaches are recommended for declaring the relation to the schema that describes the meaning of a JSON instance's (or collection of instances) structure. A MIME type parameter named "profile" or a relation of "describedby" (which could be specified by a Link header) may be used:
+
+ + + +
+ + or if the content is being transferred by a protocol (such as HTTP) that + provides headers, a Link header can be used: + +
+ +; rel="describedby" +]]> + +
+ + Instances MAY specify multiple schemas, to indicate all the schemas that + are applicable to the data, and the data SHOULD be valid by all the schemas. + The instance data MAY have multiple schemas + that it is described by (the instance data SHOULD be valid for those schemas). + Or if the document is a collection of instances, the collection MAY contain + instances from different schemas. The mechanism for referencing a schema is + determined by the media type of the instance (if it provides a method for + referencing schemas). +
+ +
+ + JSON Schemas can themselves be described using JSON Schemas. + A self-describing JSON Schema for the core JSON Schema can + be found at http://json-schema.org/schema for the latest version or + http://json-schema.org/draft-04/schema for the draft-04 version. The hyper schema + self-description can be found at http://json-schema.org/hyper-schema + or http://json-schema.org/draft-04/hyper-schema. All schemas + used within a protocol with a media type specified SHOULD include a MIME parameter that refers to the self-descriptive + hyper schema or another schema that extends this hyper schema: + +
+ + + +
+
+
+
+ +
+ + A JSON Schema is a JSON object that defines various attributes + (including usage and valid values) of a JSON value. JSON + Schema has recursive capabilities; there are a number of elements + in the structure that allow for nested JSON Schemas. + + +
+ An example JSON Schema could look like: + + + +
+ + + A JSON Schema object MAY have any of the following optional properties: + + + + + +
+ + This attribute defines what the primitive type or the schema of the instance MUST be in order to validate. + This attribute can take one of two forms: + + + + A string indicating a primitive or simple type. The string MUST be one of the following values: + + + Instance MUST be an object. + Instance MUST be an array. + Instance MUST be a string. + Instance MUST be a number, including floating point numbers. + Instance MUST be the JSON literal "true" or "false". + Instance MUST be the JSON literal "null". Note that without this type, null values are not allowed. + Instance MAY be of any type, including null. + + + + + An array of one or more simple or schema types. + The instance value is valid if it is of the same type as one of the simple types, or valid by one of the schemas, in the array. + + + + If this attribute is not specified, then all value types are accepted. + + +
+ For example, a schema that defines if an instance can be a string or a number would be: + + +
+
+ +
+ This attribute is an object with properties that specify the schemas for the properties of the instance object.
+ In this attribute's object, each property value MUST be a schema.
+ When the instance value is an object, the value of the instance's properties MUST be valid according to the schemas with the same property names specified in this attribute.
+ Objects are unordered, so the order of the instance properties or attribute properties MUST NOT determine validation success.
+
+ +
+ + This attribute is an object that defines the schema for a set of property names of an object instance. + The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema. + If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value. + +
+ +
+ This attribute specifies how any instance property that is not explicitly defined by either the "properties" or "patternProperties" attributes (hereafter referred to as "additional properties") is handled. If specified, the value MUST be a schema or a boolean. + If a schema is provided, then all additional properties MUST be valid according to the schema. + If false is provided, then no additional properties are allowed. + The default value is an empty schema, which allows any value for additional properties. +
+ +
+ This attribute provides the allowed items in an array instance. If specified, this attribute MUST be a schema or an array of schemas.
+ When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema.
+ When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST be valid according to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute in the same way as "additionalProperties" is for objects.
+ +
+ This attribute specifies how any item in the array instance that is not explicitly defined by "items" (hereafter referred to as "additional items") is handled. If specified, the value MUST be a schema or a boolean. + If a schema is provided: + + If the "items" attribute is unspecified, then all items in the array instance must be valid against this schema. + If the "items" attribute is a schema, then this attribute is ignored. + If the "items" attribute is an array (during tuple typing), then any additional items MUST be valid against this schema. + + + If false is provided, then any additional items in the array are not allowed. + The default value is an empty schema, which allows any value for additional items. +
+ +
+ This attribute is an array of strings that defines all the property names that must exist on the object instance. +
+ +
+ This attribute is an object that specifies the requirements of a property on an object instance. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value"). + + The dependency value can take one of two forms: + + + + If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value. + If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array. + + + If the dependency value is a schema, then the instance object MUST be valid against the schema. + + + +
+ +
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number. +
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+ +
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+ +
+ This attribute defines the minimum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the maximum number of values in an array when the array is the instance value. +
+ +
+ This attribute defines the minimum number of properties required on an object instance. +
+ +
+ This attribute defines the maximum number of properties the object instance can have. +
+ +
+ This attribute indicates that all items in an array instance MUST be unique (that is, the array contains no two identical values).
+
+ Two instances are considered equal if they are both of the same type and:
+
+ are null; or
+ are booleans/numbers/strings and have the same value; or
+ are arrays, contain the same number of items, and each item in the array is equal to the item at the corresponding index in the other array; or
+ are objects, contain the same property names, and each property in the object is equal to the corresponding property in the other object.
+
+ +
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5 +
+ +
+ When the instance value is a string, this defines the minimum length of the string. +
+ +
+ When the instance value is a string, this defines the maximum length of the string. +
+ +
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems". +
+ +
+ This attribute defines the default value of the instance when the instance is undefined. +
+ +
+ This attribute is a string that provides a short description of the instance property. +
+ +
+ This attribute is a string that provides a full description of the purpose of the instance property.
+ +
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0. +
+ +
+ This attribute takes the same values as the "type" attribute, however if the instance matches the type or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid. +
+ +
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints. + + Conceptually, the behavior of extends can be seen as validating an + instance against all constraints in the extending schema as well as + the extended schema(s). More optimized implementations that merge + schemas are possible, but are not required. Some examples of using "extends": + +
+ + + +
+ +
+ + + +
+
+
+ +
+ + This attribute defines the current URI of this schema (this attribute is + effectively a "self" link). This URI MAY be relative or absolute. If + the URI is relative it is resolved against the current URI of the parent + schema it is contained in. If this schema is not contained in any + parent schema, the current URI of the parent schema is held to be the + URI under which this schema was addressed. If id is missing, the current URI of a schema is + defined to be that of the parent schema. The current URI of the schema + is also used to construct relative references such as for $ref. + +
+ +
+ + This attribute defines a URI of a schema that contains the full representation of this schema. + When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance. + This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema. + +
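+
+ A brief, non-normative shape illustration of "$ref" (the URI is made up; note that the validator vendored in this change does not itself dereference "$ref"):
+
+ ```js
+ // Non-normative: a property schema that defers entirely to another schema by URI.
+ var personSchema = {
+   type: 'object',
+   properties: {
+     address: { '$ref': 'http://example.com/schemas/address' }
+   }
+ };
+ ```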
+ +
+ + This attribute defines a URI of a JSON Schema that is the schema of the current schema. + When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links. + + + + A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior. + Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes. + +
+
+ +
+ + The following attributes are specified in addition to those already provided by the core schema, with the specific purpose of informing user agents of relations between resources based on JSON data. Just as with JSON schema attributes, all the attributes in hyper schemas are optional. Therefore, an empty object is a valid (non-informative) schema, and essentially describes plain JSON (no constraints on the structures). Addition of attributes provides additive information for user agents. + +
+ + The value of the links property MUST be an array, where each item + in the array is a link description object which describes the link + relations of the instances. + + + + +
+ + A link description object is used to describe link relations. In the context of a schema, it defines the link relations of the instances of the schema, and can be parameterized by the instance values. The link description format can be used without JSON Schema, and use of this format can be declared by referencing the normative link description schema as the schema for the data structure that uses the links. The URI of the normative link description schema is: + http://json-schema.org/links (latest version) or + http://json-schema.org/draft-04/links (draft-04 version). + +
+ + The value of the "href" link description property + indicates the target URI of the related resource. The value + of the instance property SHOULD be resolved as a URI-Reference per RFC 3986 + and MAY be a relative URI. The base URI to be used for relative resolution + SHOULD be the URI used to retrieve the instance object (not the schema) + when used within a schema. Also, when links are used within a schema, the URI + SHOULD be parametrized by the property values of the instance + object, if property values exist for the corresponding variables + in the template (otherwise they MAY be provided from alternate sources, like user input). + + + + Instance property values SHOULD be substituted into the URIs where + matching braces ('{', '}') are found surrounding zero or more characters, + creating an expanded URI. Instance property value substitutions are resolved + by using the text between the braces to denote the property name + from the instance to get the value to substitute. + +
+ For example, if an href value is defined: + + + + Then it would be resolved by replacing the braced reference with the value of the "id" property from the instance object. +
+ +
+ If the value of the "id" property was "45", the expanded URI would be: + + + +
+ + If matching braces are found with the string "@" (no quotes) between the braces, then the + actual instance value SHOULD be used to replace the braces, rather than a property value. + This should only be used in situations where the instance is a scalar (string, + boolean, or number), and not for objects or arrays. +
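+
+ The substitution behaviour described above is implemented by the small links.js module added in this change. For example, assuming an href template of "/Resource/{id}" (the template value is an assumption for illustration; the "id"/45 values come from the prose above):
+
+ ```js
+ var links = require('json-schema/lib/links');
+
+ var schema = { links: [ { rel: 'self', href: '/Resource/{id}' } ] };
+ links.getLink('self', { id: 45 }, schema);        // -> "/Resource/45"
+
+ // The template expansion can also be invoked directly:
+ links.substitute('/Resource/{id}', { id: 45 });   // -> "/Resource/45"
+ ```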
+
+ +
+ + The value of the "rel" property indicates the name of the relation to the target resource. The relation to the target SHOULD be interpreted as being specifically from the instance object that the schema (or sub-schema) applies to, not just the top-level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub-object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to a target from the top-level resource MUST be indicated with the schema describing the top-level JSON representation. + + + + Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define the following relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations: + + + + "self": If the relation value is "self", when this property is encountered in the instance object, the object represents a resource and the instance object is treated as a full representation of the target resource identified by the specified URI. + + + + "full": This indicates that the target of the link is the full representation for the instance object. The object that contains this link may not itself be the full representation. + + + + "describedby": This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures. + + + + "root": This relation indicates that the target of the link SHOULD be treated as the root or the body of the representation for the purposes of user agent interaction or fragment resolution. All other properties of the instance objects can be regarded as meta-data descriptions for the data. + + + + + + The following relations are applicable for schemas (the schema as the "from" resource in the relation): + + + "instances": This indicates the target resource that represents a collection of instances of a schema. + "create": This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST). + + + + +
+ For example, if a schema is defined: + + + +
+ +
+ And if the JSON representation of a collection of instance resources was retrieved: + + + +
+ + This would indicate that for the first item in the collection, its own + (self) URI would resolve to "/Resource/thing" and the first item's "up" + relation SHOULD be resolved to the resource at "/Resource/parent". + The "children" collection would be located at "/Resource/?upId=thing". +
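+
+ The figures for this example are elided from this diff; a schema and instance consistent with the resolution described above might look roughly like this (a reconstruction for illustration only, not the normative figure):
+
+ ```js
+ // Hypothetical schema for the collection items:
+ var itemSchema = {
+   links: [
+     { rel: 'self',     href: '{id}' },
+     { rel: 'up',       href: '{upId}' },
+     { rel: 'children', href: '?upId={id}' }
+   ]
+ };
+
+ // Hypothetical collection retrieved from "/Resource/":
+ var collection = [ { id: 'thing', upId: 'parent' } ];
+ // The relative hrefs then resolve against "/Resource/" to "/Resource/thing",
+ // "/Resource/parent" and "/Resource/?upId=thing" respectively.
+ ```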
+
+ +
+ This property value is a string that defines the templating language used in the "href" attribute. If no templating language is defined, then the default Link Description Object templating language is used. +
+ +
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link. +
+ +
+ + The following properties also apply to link definition objects, and provide functionality analogous to HTML forms, offering a means for submitting extra (often user-supplied) information to a server. + +
+ + This attribute defines which method can be used to access the target resource. + In an HTTP environment, this would be "GET" or "POST" (other HTTP methods + such as "PUT" and "DELETE" have semantics that are clearly implied by + accessed resources, and do not need to be defined here). + This defaults to "GET". + +
+ +
+ + If present, this property indicates a query media type format that the server + supports for querying or posting to the collection of instances at the target + resource. The query can be + suffixed to the target URI to query the collection with + property-based constraints on the resources that SHOULD be returned from + the server or used to post data to the resource (depending on the method). + +
+ For example, with the following schema: + + + + This indicates that the client can query the server for instances that have a specific name. +
+ +
+ For example: + + + +
+ + If no enctype or method is specified, only the single URI specified by + the href property is defined. If the method is POST, "application/json" is + the default media type. +
+
+ +
+ + This attribute contains a schema which defines the acceptable structure of the submitted + request (for a GET request, this schema would define the properties for the query string + and for a POST request, this would define the body). + +
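+
+ Pulling the "method", "enctype" and "schema" properties together, a link description object for the query example above might look like this (non-normative; the href and property names are invented):
+
+ ```js
+ // Non-normative link description object: query a collection by name.
+ var searchLink = {
+   rel: 'instances',
+   href: '/Product/',
+   method: 'GET',
+   enctype: 'application/x-www-form-urlencoded',
+   schema: {                                   // acceptable structure of the query string
+     properties: { name: { type: 'string' } }
+   }
+ };
+ // A user agent could then request e.g. /Product/?name=Pen
+ ```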
+
+
+
+ +
+ + This property indicates the fragment resolution protocol to use for + resolving fragment identifiers in URIs within the instance + representations. This applies to the instance object URIs and all + children of the instance object's URIs. The default fragment resolution + protocol is "json-pointer", which is defined below. Other fragment + resolution protocols MAY be used, but are not defined in this document. + + + + The fragment identifier is based on RFC 3986, Sec 5, and defines the + mechanism for resolving references to entities within a document. + + +
+ The "json-pointer" fragment resolution protocol uses a JSON Pointer to resolve fragment identifiers in URIs within instance representations. +
+
+ + + +
+ This attribute indicates that the instance value SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server. +
+ +
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property. +
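+
+ For example (non-normative; assumes a Node.js environment new enough to provide Buffer.from):
+
+ ```js
+ // A string instance declared as base64-encoded binary data by its schema.
+ var schema = { type: 'string', contentEncoding: 'base64' };
+ var instance = 'aGVsbG8=';
+ Buffer.from(instance, 'base64').toString('utf8');  // -> "hello"
+ ```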
+ +
+ + This attribute is a URI that defines what the instance's URI MUST start with in order to validate. The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5, and is relative to the instance's URI. + + + + When multiple schemas have been referenced for an instance, the user agent can determine if this schema is applicable for a particular instance by determining if the URI of the instance begins with the value of the "pathStart" attribute. If the URI of the instance does not start with this URI, or if another schema specifies a starting URI that is longer and also matches the instance, this schema SHOULD NOT be applied to the instance. Any schema that does not have a pathStart attribute SHOULD be considered applicable to all the instances for which it is referenced. +
+ +
+ This attribute defines the media type of the instance representations that this schema is defining. +
+
+ +
+ + This specification is a sub-type of the JSON format, and consequently the security considerations are generally the same as RFC 4627. However, an additional issue is that when the link relation "self" is used to denote a full representation of an object, the user agent SHOULD NOT consider the representation to be the authoritative representation of the resource denoted by the target URI if the target URI is not equivalent to or a sub-path of the URI used to request the resource representation which contains the target URI with the "self" link. +
+ For example, if a hyper schema was defined: + + + +
+ +
+ And a resource was requested from somesite.com: + + + +
+ +
+ With a response of: + + + +
+
+
+ +
+ The proposed MIME media type for JSON Schema is "application/schema+json". + Type name: application + Subtype name: schema+json + Required parameters: profile + + The value of the profile parameter SHOULD be a URI (relative or absolute) that refers to the schema used to define the structure of the document (the meta-schema). Normally the value would be http://json-schema.org/draft-04/hyper-schema, but it is allowable to use other schemas that extend the hyper schema's meta-schema. + + Optional parameters: pretty + The value of the pretty parameter MAY be true or false to indicate whether additional whitespace has been included to make the JSON representation easier to read. +
+ + This registry is maintained by IANA per RFC 4287 and this specification adds + four values: "full", "create", "instances", "root". New + assignments are subject to IESG Approval, as outlined in RFC 5226. + Requests should be made by email to IANA, which will then forward the + request to the IESG, requesting approval. + +
+
+
+ + + + + &rfc2045; + &rfc2119; + &rfc3339; + &rfc3986; + &rfc4287; + + + JSON Pointer + + ForgeRock US, Inc. + + + SitePen (USA) + + + + + + + &rfc2616; + &rfc4627; + &rfc5226; + &iddiscovery; + &uritemplate; + &linkheader; + &html401; + &css21; + + +
+ + + + + Changed "required" attribute to an array of strings. + Removed "format" attribute. + Added "minProperties" and "maxProperties" attributes. + Replaced "slash-delimited" fragment resolution with "json-pointer". + Added "template" LDO attribute. + Removed irrelevant "Open Issues" section. + Merged Conventions and Terminology sections. + Defined terms used in specification. + Removed "integer" type in favor of {"type":"number", "divisibleBy":1}. + Restricted "type" to only the core JSON types. + Improved wording of many sections. + + + + + + Added example and verbiage to "extends" attribute. + Defined slash-delimited to use a leading slash. + Made "root" a relation instead of an attribute. + Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types). + Added more explanation of nullability. + Removed "alternate" attribute. + Upper cased many normative usages of must, may, and should. + Replaced the link submission "properties" attribute to "schema" attribute. + Replaced "optional" attribute with "required" attribute. + Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute. + Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute. + Replaced "requires" attribute with "dependencies" attribute. + Moved "contentEncoding" attribute to hyper schema. + Added "additionalItems" attribute. + Added "id" attribute. + Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template. + Added "patternProperties" attribute. + Schema URIs are now namespace versioned. + Added "$ref" and "$schema" attributes. + + + + + + Replaced "maxDecimal" attribute with "divisibleBy" attribute. + Added slash-delimited fragment resolution protocol and made it the default. + Added language about using links outside of schemas by referencing its normative URI. + Added "uniqueItems" attribute. + Added "targetSchema" attribute to link description object. + + + + + + Fixed category and updates from template. + + + + + + Initial draft. + + + + +
+
+
diff --git a/node_modules/json-schema/lib/links.js b/node_modules/json-schema/lib/links.js new file mode 100644 index 0000000..8a87f02 --- /dev/null +++ b/node_modules/json-schema/lib/links.js @@ -0,0 +1,66 @@ +/** + * JSON Schema link handler + * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com) + * Licensed under the MIT (MIT-LICENSE.txt) license. + */ +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define([], function () { + return factory(); + }); + } else if (typeof module === 'object' && module.exports) { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(); + } else { + // Browser globals + root.jsonSchemaLinks = factory(); + } +}(this, function () {// setup primitive classes to be JSON Schema types +var exports = {}; +exports.cacheLinks = true; +exports.getLink = function(relation, instance, schema){ + // gets the URI of the link for the given relation based on the instance and schema + // for example: + // getLink( + // "brother", + // {"brother_id":33}, + // {links:[{rel:"brother", href:"Brother/{brother_id}"}]}) -> + // "Brother/33" + var links = schema.__linkTemplates; + if(!links){ + links = {}; + var schemaLinks = schema.links; + if(schemaLinks && schemaLinks instanceof Array){ + schemaLinks.forEach(function(link){ + /* // TODO: allow for multiple same-name relations + if(links[link.rel]){ + if(!(links[link.rel] instanceof Array)){ + links[link.rel] = [links[link.rel]]; + } + }*/ + links[link.rel] = link.href; + }); + } + if(exports.cacheLinks){ + schema.__linkTemplates = links; + } + } + var linkTemplate = links[relation]; + return linkTemplate && exports.substitute(linkTemplate, instance); +}; + +exports.substitute = function(linkTemplate, instance){ + return linkTemplate.replace(/\{([^\}]*)\}/g, function(t, property){ + var value = instance[decodeURIComponent(property)]; + if(value instanceof Array){ + // the value is an array, it should produce a URI like /Table/(4,5,8) and store.get() should handle that as an array of values + return '(' + value.join(',') + ')'; + } + return value; + }); +}; +return exports; +})); \ No newline at end of file diff --git a/node_modules/json-schema/lib/validate.js b/node_modules/json-schema/lib/validate.js new file mode 100644 index 0000000..e4dc151 --- /dev/null +++ b/node_modules/json-schema/lib/validate.js @@ -0,0 +1,273 @@ +/** + * JSONSchema Validator - Validates JavaScript objects using JSON Schemas + * (http://www.json.com/json-schema-proposal/) + * + * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com) + * Licensed under the MIT (MIT-LICENSE.txt) license. +To use the validator call the validate function with an instance object and an optional schema object. +If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating), +that schema will be used to validate and the schema parameter is not necessary (if both exist, +both validations will occur). +The validate method will return an array of validation errors. If there are no errors, then an +empty list will be returned. A validation error will have two properties: +"property" which indicates which property had the error +"message" which indicates what the error was + */ +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. 
+ define([], function () { + return factory(); + }); + } else if (typeof module === 'object' && module.exports) { + // Node. Does not work with strict CommonJS, but + // only CommonJS-like environments that support module.exports, + // like Node. + module.exports = factory(); + } else { + // Browser globals + root.jsonSchema = factory(); + } +}(this, function () {// setup primitive classes to be JSON Schema types +var exports = validate +exports.Integer = {type:"integer"}; +var primitiveConstructors = { + String: String, + Boolean: Boolean, + Number: Number, + Object: Object, + Array: Array, + Date: Date +} +exports.validate = validate; +function validate(/*Any*/instance,/*Object*/schema) { + // Summary: + // To use the validator call JSONSchema.validate with an instance object and an optional schema object. + // If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating), + // that schema will be used to validate and the schema parameter is not necessary (if both exist, + // both validations will occur). + // The validate method will return an object with two properties: + // valid: A boolean indicating if the instance is valid by the schema + // errors: An array of validation errors. If there are no errors, then an + // empty list will be returned. A validation error will have two properties: + // property: which indicates which property had the error + // message: which indicates what the error was + // + return validate(instance, schema, {changing: false});//, coerce: false, existingOnly: false}); + }; +exports.checkPropertyChange = function(/*Any*/value,/*Object*/schema, /*String*/property) { + // Summary: + // The checkPropertyChange method will check to see if an value can legally be in property with the given schema + // This is slightly different than the validate method in that it will fail if the schema is readonly and it will + // not check for self-validation, it is assumed that the passed in value is already internally valid. + // The checkPropertyChange method will return the same object type as validate, see JSONSchema.validate for + // information. + // + return validate(value, schema, {changing: property || "property"}); + }; +var validate = exports._validate = function(/*Any*/instance,/*Object*/schema,/*Object*/options) { + + if (!options) options = {}; + var _changing = options.changing; + + function getType(schema){ + return schema.type || (primitiveConstructors[schema.name] == schema && schema.name.toLowerCase()); + } + var errors = []; + // validate a value against a property definition + function checkProp(value, schema, path,i){ + + var l; + path += path ? typeof i == 'number' ? '[' + i + ']' : typeof i == 'undefined' ? '' : '.' 
+ i : i; + function addError(message){ + errors.push({property:path,message:message}); + } + + if((typeof schema != 'object' || schema instanceof Array) && (path || typeof schema != 'function') && !(schema && getType(schema))){ + if(typeof schema == 'function'){ + if(!(value instanceof schema)){ + addError("is not an instance of the class/constructor " + schema.name); + } + }else if(schema){ + addError("Invalid schema/property definition " + schema); + } + return null; + } + if(_changing && schema.readonly){ + addError("is a readonly field, it can not be changed"); + } + if(schema['extends']){ // if it extends another schema, it must pass that schema as well + checkProp(value,schema['extends'],path,i); + } + // validate a value against a type definition + function checkType(type,value){ + if(type){ + if(typeof type == 'string' && type != 'any' && + (type == 'null' ? value !== null : typeof value != type) && + !(value instanceof Array && type == 'array') && + !(value instanceof Date && type == 'date') && + !(type == 'integer' && value%1===0)){ + return [{property:path,message:(typeof value) + " value found, but a " + type + " is required"}]; + } + if(type instanceof Array){ + var unionErrors=[]; + for(var j = 0; j < type.length; j++){ // a union type + if(!(unionErrors=checkType(type[j],value)).length){ + break; + } + } + if(unionErrors.length){ + return unionErrors; + } + }else if(typeof type == 'object'){ + var priorErrors = errors; + errors = []; + checkProp(value,type,path); + var theseErrors = errors; + errors = priorErrors; + return theseErrors; + } + } + return []; + } + if(value === undefined){ + if(schema.required){ + addError("is missing and it is required"); + } + }else{ + errors = errors.concat(checkType(getType(schema),value)); + if(schema.disallow && !checkType(schema.disallow,value).length){ + addError(" disallowed value was matched"); + } + if(value !== null){ + if(value instanceof Array){ + if(schema.items){ + var itemsIsArray = schema.items instanceof Array; + var propDef = schema.items; + for (i = 0, l = value.length; i < l; i += 1) { + if (itemsIsArray) + propDef = schema.items[i]; + if (options.coerce) + value[i] = options.coerce(value[i], propDef); + errors.concat(checkProp(value[i],propDef,path,i)); + } + } + if(schema.minItems && value.length < schema.minItems){ + addError("There must be a minimum of " + schema.minItems + " in the array"); + } + if(schema.maxItems && value.length > schema.maxItems){ + addError("There must be a maximum of " + schema.maxItems + " in the array"); + } + }else if(schema.properties || schema.additionalProperties){ + errors.concat(checkObj(value, schema.properties, path, schema.additionalProperties)); + } + if(schema.pattern && typeof value == 'string' && !value.match(schema.pattern)){ + addError("does not match the regex pattern " + schema.pattern); + } + if(schema.maxLength && typeof value == 'string' && value.length > schema.maxLength){ + addError("may only be " + schema.maxLength + " characters long"); + } + if(schema.minLength && typeof value == 'string' && value.length < schema.minLength){ + addError("must be at least " + schema.minLength + " characters long"); + } + if(typeof schema.minimum !== undefined && typeof value == typeof schema.minimum && + schema.minimum > value){ + addError("must have a minimum value of " + schema.minimum); + } + if(typeof schema.maximum !== undefined && typeof value == typeof schema.maximum && + schema.maximum < value){ + addError("must have a maximum value of " + schema.maximum); + } + 
if(schema['enum']){ + var enumer = schema['enum']; + l = enumer.length; + var found; + for(var j = 0; j < l; j++){ + if(enumer[j]===value){ + found=1; + break; + } + } + if(!found){ + addError("does not have a value in the enumeration " + enumer.join(", ")); + } + } + if(typeof schema.maxDecimal == 'number' && + (value.toString().match(new RegExp("\\.[0-9]{" + (schema.maxDecimal + 1) + ",}")))){ + addError("may only have " + schema.maxDecimal + " digits of decimal places"); + } + } + } + return null; + } + // validate an object against a schema + function checkObj(instance,objTypeDef,path,additionalProp){ + + if(typeof objTypeDef =='object'){ + if(typeof instance != 'object' || instance instanceof Array){ + errors.push({property:path,message:"an object is required"}); + } + + for(var i in objTypeDef){ + if(objTypeDef.hasOwnProperty(i)){ + var value = instance[i]; + // skip _not_ specified properties + if (value === undefined && options.existingOnly) continue; + var propDef = objTypeDef[i]; + // set default + if(value === undefined && propDef["default"]){ + value = instance[i] = propDef["default"]; + } + if(options.coerce && i in instance){ + value = instance[i] = options.coerce(value, propDef); + } + checkProp(value,propDef,path,i); + } + } + } + for(i in instance){ + if(instance.hasOwnProperty(i) && !(i.charAt(0) == '_' && i.charAt(1) == '_') && objTypeDef && !objTypeDef[i] && additionalProp===false){ + if (options.filter) { + delete instance[i]; + continue; + } else { + errors.push({property:path,message:(typeof value) + "The property " + i + + " is not defined in the schema and the schema does not allow additional properties"}); + } + } + var requires = objTypeDef && objTypeDef[i] && objTypeDef[i].requires; + if(requires && !(requires in instance)){ + errors.push({property:path,message:"the presence of the property " + i + " requires that " + requires + " also be present"}); + } + value = instance[i]; + if(additionalProp && (!(objTypeDef && typeof objTypeDef == 'object') || !(i in objTypeDef))){ + if(options.coerce){ + value = instance[i] = options.coerce(value, additionalProp); + } + checkProp(value,additionalProp,path,i); + } + if(!_changing && value && value.$schema){ + errors = errors.concat(checkProp(value,value.$schema,path,i)); + } + } + return errors; + } + if(schema){ + checkProp(instance,schema,'',_changing || ''); + } + if(!_changing && instance && instance.$schema){ + checkProp(instance,instance.$schema,'',''); + } + return {valid:!errors.length,errors:errors}; +}; +exports.mustBeValid = function(result){ + // summary: + // This checks to ensure that the result is valid and will throw an appropriate error message if it is not + // result: the result returned from checkPropertyChange or validate + if(!result.valid){ + throw new TypeError(result.errors.map(function(error){return "for property " + error.property + ': ' + error.message;}).join(", \n")); + } +} + +return exports; +})); diff --git a/node_modules/json-schema/package.json b/node_modules/json-schema/package.json new file mode 100644 index 0000000..b9b7bac --- /dev/null +++ b/node_modules/json-schema/package.json @@ -0,0 +1,31 @@ +{ + "name": "json-schema", + "version": "0.2.3", + "author": "Kris Zyp", + "description": "JSON Schema validation and specifications", + "maintainers":[ + {"name": "Kris Zyp", "email": "kriszyp@gmail.com"}], + "keywords": [ + "json", + "schema" + ], + "licenses": [ + { + "type": "AFLv2.1", + "url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L43" + }, + { + "type": "BSD", + 
"url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L13" + } + ], + "repository": { + "type":"git", + "url":"http://github.com/kriszyp/json-schema" + }, + "directories": { "lib": "./lib" }, + "main": "./lib/validate.js", + "devDependencies": { "vows": "*" }, + "scripts": { "test": "echo TESTS DISABLED vows --spec test/*.js" } +} + diff --git a/node_modules/json-schema/test/tests.js b/node_modules/json-schema/test/tests.js new file mode 100644 index 0000000..2938aea --- /dev/null +++ b/node_modules/json-schema/test/tests.js @@ -0,0 +1,95 @@ +var assert = require('assert'); +var vows = require('vows'); +var path = require('path'); +var fs = require('fs'); + +var validate = require('../lib/validate').validate; + + +var revision = 'draft-03'; +var schemaRoot = path.join(__dirname, '..', revision); +var schemaNames = ['schema', 'hyper-schema', 'links', 'json-ref' ]; +var schemas = {}; + +schemaNames.forEach(function(name) { + var file = path.join(schemaRoot, name); + schemas[name] = loadSchema(file); +}); + +schemaNames.forEach(function(name) { + var s, n = name+'-nsd', f = path.join(schemaRoot, name); + schemas[n] = loadSchema(f); + s = schemas[n]; + delete s['$schema']; +}); + +function loadSchema(path) { + var data = fs.readFileSync(path, 'utf-8'); + var schema = JSON.parse(data); + return schema; +} + +function resultIsValid() { + return function(result) { + assert.isObject(result); + //assert.isBoolean(result.valid); + assert.equal(typeof(result.valid), 'boolean'); + assert.isArray(result.errors); + for (var i = 0; i < result.errors.length; i++) { + assert.notEqual(result.errors[i], null, 'errors['+i+'] is null'); + } + } +} + +function assertValidates(doc, schema) { + var context = {}; + + context[': validate('+doc+', '+schema+')'] = { + topic: validate(schemas[doc], schemas[schema]), + 'returns valid result': resultIsValid(), + 'with valid=true': function(result) { assert.equal(result.valid, true); }, + 'and no errors': function(result) { + // XXX work-around for bug in vows: [null] chokes it + if (result.errors[0] == null) assert.fail('(errors contains null)'); + assert.length(result.errors, 0); + } + }; + + return context; +} + +function assertSelfValidates(doc) { + var context = {}; + + context[': validate('+doc+')'] = { + topic: validate(schemas[doc]), + 'returns valid result': resultIsValid(), + 'with valid=true': function(result) { assert.equal(result.valid, true); }, + 'and no errors': function(result) { assert.length(result.errors, 0); } + }; + + return context; +} + +var suite = vows.describe('JSON Schema').addBatch({ + 'Core-NSD self-validates': assertSelfValidates('schema-nsd'), + 'Core-NSD/Core-NSD': assertValidates('schema-nsd', 'schema-nsd'), + 'Core-NSD/Core': assertValidates('schema-nsd', 'schema'), + + 'Core self-validates': assertSelfValidates('schema'), + 'Core/Core': assertValidates('schema', 'schema'), + + 'Hyper-NSD self-validates': assertSelfValidates('hyper-schema-nsd'), + 'Hyper self-validates': assertSelfValidates('hyper-schema'), + 'Hyper/Hyper': assertValidates('hyper-schema', 'hyper-schema'), + 'Hyper/Core': assertValidates('hyper-schema', 'schema'), + + 'Links-NSD self-validates': assertSelfValidates('links-nsd'), + 'Links self-validates': assertSelfValidates('links'), + 'Links/Hyper': assertValidates('links', 'hyper-schema'), + 'Links/Core': assertValidates('links', 'schema'), + + 'Json-Ref self-validates': assertSelfValidates('json-ref'), + 'Json-Ref/Hyper': assertValidates('json-ref', 'hyper-schema'), + 'Json-Ref/Core': 
assertValidates('json-ref', 'schema') +}).export(module); diff --git a/node_modules/json-stringify-safe/.npmignore b/node_modules/json-stringify-safe/.npmignore new file mode 100644 index 0000000..17d6b36 --- /dev/null +++ b/node_modules/json-stringify-safe/.npmignore @@ -0,0 +1 @@ +/*.tgz diff --git a/node_modules/json-stringify-safe/CHANGELOG.md b/node_modules/json-stringify-safe/CHANGELOG.md new file mode 100644 index 0000000..42bcb60 --- /dev/null +++ b/node_modules/json-stringify-safe/CHANGELOG.md @@ -0,0 +1,14 @@ +## Unreleased +- Fixes stringify to only take ancestors into account when checking + circularity. + It previously assumed every visited object was circular which led to [false + positives][issue9]. + Uses the tiny serializer I wrote for [Must.js][must] a year and a half ago. +- Fixes calling the `replacer` function in the proper context (`thisArg`). +- Fixes calling the `cycleReplacer` function in the proper context (`thisArg`). +- Speeds serializing by a factor of + Big-O(h-my-god-it-linearly-searched-every-object) it had ever seen. Searching + only the ancestors for a circular references speeds up things considerably. + +[must]: https://github.com/moll/js-must +[issue9]: https://github.com/isaacs/json-stringify-safe/issues/9 diff --git a/node_modules/json-stringify-safe/LICENSE b/node_modules/json-stringify-safe/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/json-stringify-safe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/json-stringify-safe/Makefile b/node_modules/json-stringify-safe/Makefile new file mode 100644 index 0000000..36088c7 --- /dev/null +++ b/node_modules/json-stringify-safe/Makefile @@ -0,0 +1,35 @@ +NODE_OPTS = +TEST_OPTS = + +love: + @echo "Feel like makin' love." + +test: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot $(TEST_OPTS) + +spec: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec $(TEST_OPTS) + +autotest: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot --watch $(TEST_OPTS) + +autospec: + @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec --watch $(TEST_OPTS) + +pack: + @file=$$(npm pack); echo "$$file"; tar tf "$$file" + +publish: + npm publish + +tag: + git tag "v$$(node -e 'console.log(require("./package").version)')" + +clean: + rm -f *.tgz + npm prune --production + +.PHONY: love +.PHONY: test spec autotest autospec +.PHONY: pack publish tag +.PHONY: clean diff --git a/node_modules/json-stringify-safe/README.md b/node_modules/json-stringify-safe/README.md new file mode 100644 index 0000000..a11f302 --- /dev/null +++ b/node_modules/json-stringify-safe/README.md @@ -0,0 +1,52 @@ +# json-stringify-safe + +Like JSON.stringify, but doesn't throw on circular references. 
+ +## Usage + +Takes the same arguments as `JSON.stringify`. + +```javascript +var stringify = require('json-stringify-safe'); +var circularObj = {}; +circularObj.circularRef = circularObj; +circularObj.list = [ circularObj, circularObj ]; +console.log(stringify(circularObj, null, 2)); +``` + +Output: + +```json +{ + "circularRef": "[Circular]", + "list": [ + "[Circular]", + "[Circular]" + ] +} +``` + +## Details + +``` +stringify(obj, serializer, indent, decycler) +``` + +The first three arguments are the same as to JSON.stringify. The last +is an argument that's only used when the object has been seen already. + +The default `decycler` function returns the string `'[Circular]'`. +If, for example, you pass in `function(k,v){}` (return nothing) then it +will prune cycles. If you pass in `function(k,v){ return {foo: 'bar'}}`, +then cyclical objects will always be represented as `{"foo":"bar"}` in +the result. + +``` +stringify.getSerialize(serializer, decycler) +``` + +Returns a serializer that can be used elsewhere. This is the actual +function that's passed to JSON.stringify. + +**Note** that the function returned from `getSerialize` is stateful for now, so +do **not** use it more than once. diff --git a/node_modules/json-stringify-safe/package.json b/node_modules/json-stringify-safe/package.json new file mode 100644 index 0000000..8e17b12 --- /dev/null +++ b/node_modules/json-stringify-safe/package.json @@ -0,0 +1,31 @@ +{ + "name": "json-stringify-safe", + "version": "5.0.1", + "description": "Like JSON.stringify, but doesn't blow up on circular refs.", + "keywords": [ + "json", + "stringify", + "circular", + "safe" + ], + "homepage": "https://github.com/isaacs/json-stringify-safe", + "bugs": "https://github.com/isaacs/json-stringify-safe/issues", + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "contributors": [ + "Andri Möll (http://themoll.com)" + ], + "license": "ISC", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/json-stringify-safe" + }, + "main": "stringify.js", + "scripts": { + "test": "node test.js" + }, + "devDependencies": { + "mocha": ">= 2.1.0 < 3", + "must": ">= 0.12 < 0.13", + "sinon": ">= 1.12.2 < 2" + } +} diff --git a/node_modules/json-stringify-safe/stringify.js b/node_modules/json-stringify-safe/stringify.js new file mode 100644 index 0000000..124a452 --- /dev/null +++ b/node_modules/json-stringify-safe/stringify.js @@ -0,0 +1,27 @@ +exports = module.exports = stringify +exports.getSerialize = serializer + +function stringify(obj, replacer, spaces, cycleReplacer) { + return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces) +} + +function serializer(replacer, cycleReplacer) { + var stack = [], keys = [] + + if (cycleReplacer == null) cycleReplacer = function(key, value) { + if (stack[0] === value) return "[Circular ~]" + return "[Circular ~." + keys.slice(0, stack.indexOf(value)).join(".") + "]" + } + + return function(key, value) { + if (stack.length > 0) { + var thisPos = stack.indexOf(this) + ~thisPos ? stack.splice(thisPos + 1) : stack.push(this) + ~thisPos ? keys.splice(thisPos, Infinity, key) : keys.push(key) + if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value) + } + else stack.push(value) + + return replacer == null ? 
value : replacer.call(this, key, value) + } +} diff --git a/node_modules/json-stringify-safe/test/mocha.opts b/node_modules/json-stringify-safe/test/mocha.opts new file mode 100644 index 0000000..2544e58 --- /dev/null +++ b/node_modules/json-stringify-safe/test/mocha.opts @@ -0,0 +1,2 @@ +--recursive +--require must diff --git a/node_modules/json-stringify-safe/test/stringify_test.js b/node_modules/json-stringify-safe/test/stringify_test.js new file mode 100644 index 0000000..5b32583 --- /dev/null +++ b/node_modules/json-stringify-safe/test/stringify_test.js @@ -0,0 +1,246 @@ +var Sinon = require("sinon") +var stringify = require("..") +function jsonify(obj) { return JSON.stringify(obj, null, 2) } + +describe("Stringify", function() { + it("must stringify circular objects", function() { + var obj = {name: "Alice"} + obj.self = obj + var json = stringify(obj, null, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + }) + + it("must stringify circular objects with intermediaries", function() { + var obj = {name: "Alice"} + obj.identity = {self: obj} + var json = stringify(obj, null, 2) + json.must.eql(jsonify({name: "Alice", identity: {self: "[Circular ~]"}})) + }) + + it("must stringify circular objects deeper", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + obj.child.self = obj.child + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + child: {name: "Bob", self: "[Circular ~.child]"} + })) + }) + + it("must stringify circular objects deeper with intermediaries", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + obj.child.identity = {self: obj.child} + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + child: {name: "Bob", identity: {self: "[Circular ~.child]"}} + })) + }) + + it("must stringify circular objects in an array", function() { + var obj = {name: "Alice"} + obj.self = [obj, obj] + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", self: ["[Circular ~]", "[Circular ~]"] + })) + }) + + it("must stringify circular objects deeper in an array", function() { + var obj = {name: "Alice", children: [{name: "Bob"}, {name: "Eve"}]} + obj.children[0].self = obj.children[0] + obj.children[1].self = obj.children[1] + + stringify(obj, null, 2).must.eql(jsonify({ + name: "Alice", + children: [ + {name: "Bob", self: "[Circular ~.children.0]"}, + {name: "Eve", self: "[Circular ~.children.1]"} + ] + })) + }) + + it("must stringify circular arrays", function() { + var obj = [] + obj.push(obj) + obj.push(obj) + var json = stringify(obj, null, 2) + json.must.eql(jsonify(["[Circular ~]", "[Circular ~]"])) + }) + + it("must stringify circular arrays with intermediaries", function() { + var obj = [] + obj.push({name: "Alice", self: obj}) + obj.push({name: "Bob", self: obj}) + + stringify(obj, null, 2).must.eql(jsonify([ + {name: "Alice", self: "[Circular ~]"}, + {name: "Bob", self: "[Circular ~]"} + ])) + }) + + it("must stringify repeated objects in objects", function() { + var obj = {} + var alice = {name: "Alice"} + obj.alice1 = alice + obj.alice2 = alice + + stringify(obj, null, 2).must.eql(jsonify({ + alice1: {name: "Alice"}, + alice2: {name: "Alice"} + })) + }) + + it("must stringify repeated objects in arrays", function() { + var alice = {name: "Alice"} + var obj = [alice, alice] + var json = stringify(obj, null, 2) + json.must.eql(jsonify([{name: "Alice"}, {name: "Alice"}])) + }) + + it("must call given decycler and use its output", function() { + var obj = {} + obj.a = obj + obj.b = obj + + var decycle = 
Sinon.spy(function() { return decycle.callCount }) + var json = stringify(obj, null, 2, decycle) + json.must.eql(jsonify({a: 1, b: 2}, null, 2)) + + decycle.callCount.must.equal(2) + decycle.thisValues[0].must.equal(obj) + decycle.args[0][0].must.equal("a") + decycle.args[0][1].must.equal(obj) + decycle.thisValues[1].must.equal(obj) + decycle.args[1][0].must.equal("b") + decycle.args[1][1].must.equal(obj) + }) + + it("must call replacer and use its output", function() { + var obj = {name: "Alice", child: {name: "Bob"}} + + var replacer = Sinon.spy(bangString) + var json = stringify(obj, replacer, 2) + json.must.eql(jsonify({name: "Alice!", child: {name: "Bob!"}})) + + replacer.callCount.must.equal(4) + replacer.args[0][0].must.equal("") + replacer.args[0][1].must.equal(obj) + replacer.thisValues[1].must.equal(obj) + replacer.args[1][0].must.equal("name") + replacer.args[1][1].must.equal("Alice") + replacer.thisValues[2].must.equal(obj) + replacer.args[2][0].must.equal("child") + replacer.args[2][1].must.equal(obj.child) + replacer.thisValues[3].must.equal(obj.child) + replacer.args[3][0].must.equal("name") + replacer.args[3][1].must.equal("Bob") + }) + + it("must call replacer after describing circular references", function() { + var obj = {name: "Alice"} + obj.self = obj + + var replacer = Sinon.spy(bangString) + var json = stringify(obj, replacer, 2) + json.must.eql(jsonify({name: "Alice!", self: "[Circular ~]!"})) + + replacer.callCount.must.equal(3) + replacer.args[0][0].must.equal("") + replacer.args[0][1].must.equal(obj) + replacer.thisValues[1].must.equal(obj) + replacer.args[1][0].must.equal("name") + replacer.args[1][1].must.equal("Alice") + replacer.thisValues[2].must.equal(obj) + replacer.args[2][0].must.equal("self") + replacer.args[2][1].must.equal("[Circular ~]") + }) + + it("must call given decycler and use its output for nested objects", + function() { + var obj = {} + obj.a = obj + obj.b = {self: obj} + + var decycle = Sinon.spy(function() { return decycle.callCount }) + var json = stringify(obj, null, 2, decycle) + json.must.eql(jsonify({a: 1, b: {self: 2}})) + + decycle.callCount.must.equal(2) + decycle.args[0][0].must.equal("a") + decycle.args[0][1].must.equal(obj) + decycle.args[1][0].must.equal("self") + decycle.args[1][1].must.equal(obj) + }) + + it("must use decycler's output when it returned null", function() { + var obj = {a: "b"} + obj.self = obj + obj.selves = [obj, obj] + + function decycle() { return null } + stringify(obj, null, 2, decycle).must.eql(jsonify({ + a: "b", + self: null, + selves: [null, null] + })) + }) + + it("must use decycler's output when it returned undefined", function() { + var obj = {a: "b"} + obj.self = obj + obj.selves = [obj, obj] + + function decycle() {} + stringify(obj, null, 2, decycle).must.eql(jsonify({ + a: "b", + selves: [null, null] + })) + }) + + it("must throw given a decycler that returns a cycle", function() { + var obj = {} + obj.self = obj + var err + function identity(key, value) { return value } + try { stringify(obj, null, 2, identity) } catch (ex) { err = ex } + err.must.be.an.instanceof(TypeError) + }) + + describe(".getSerialize", function() { + it("must stringify circular objects", function() { + var obj = {a: "b"} + obj.circularRef = obj + obj.list = [obj, obj] + + var json = JSON.stringify(obj, stringify.getSerialize(), 2) + json.must.eql(jsonify({ + "a": "b", + "circularRef": "[Circular ~]", + "list": ["[Circular ~]", "[Circular ~]"] + })) + }) + + // This is the behavior as of Mar 3, 2015. 
+ // The serializer function keeps state inside the returned function and + // so far I'm not sure how to not do that. JSON.stringify's replacer is not + // called _after_ serialization. + xit("must return a function that could be called twice", function() { + var obj = {name: "Alice"} + obj.self = obj + + var json + var serializer = stringify.getSerialize() + + json = JSON.stringify(obj, serializer, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + + json = JSON.stringify(obj, serializer, 2) + json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"})) + }) + }) +}) + +function bangString(key, value) { + return typeof value == "string" ? value + "!" : value +} diff --git a/node_modules/json5/CHANGELOG.md b/node_modules/json5/CHANGELOG.md new file mode 100644 index 0000000..af2c608 --- /dev/null +++ b/node_modules/json5/CHANGELOG.md @@ -0,0 +1,158 @@ +### v0.5.0 [[code][c0.5.0], [diff][d0.5.0]] + +[c0.5.0]: https://github.com/aseemk/json5/tree/v0.5.0 +[d0.5.0]: https://github.com/aseemk/json5/compare/v0.4.0...v0.5.0 + +This release includes major internal changes and public API enhancements. + +- **Major:** JSON5 officially supports Node.js v4 LTS and v5. Support for + Node.js v0.6 and v0.8 have been dropped, while support for v0.10 and v0.12 + remain. + +- Fix: YUI Compressor no longer fails when compressing json5.js. ([#97]) + +- New: `parse` and the CLI provide line and column numbers when displaying error + messages. ([#101]; awesome work by [@amb26].) + + +### v0.4.0 [[code][c0.4.0], [diff][d0.4.0]] + +[c0.4.0]: https://github.com/aseemk/json5/tree/v0.4.0 +[d0.4.0]: https://github.com/aseemk/json5/compare/v0.2.0...v0.4.0 + +Note that v0.3.0 was tagged, but never published to npm, so this v0.4.0 +changelog entry includes v0.3.0 features. + +This is a massive release that adds `stringify` support, among other things. + +- **Major:** `JSON5.stringify()` now exists! + This method is analogous to the native `JSON.stringify()`; + it just avoids quoting keys where possible. + See the [usage documentation](./README.md#usage) for more. + ([#32]; huge thanks and props [@aeisenberg]!) + +- New: `NaN` and `-NaN` are now allowed number literals. + ([#30]; thanks [@rowanhill].) + +- New: Duplicate object keys are now allowed; the last value is used. + This is the same behavior as JSON. ([#57]; thanks [@jordanbtucker].) + +- Fix: Properly handle various whitespace and newline cases now. + E.g. JSON5 now properly supports escaped CR and CRLF newlines in strings, + and JSON5 now accepts the same whitespace as JSON (stricter than ES5). + ([#58], [#60], and [#63]; thanks [@jordanbtucker].) + +- New: Negative hexadecimal numbers (e.g. `-0xC8`) are allowed again. + (They were disallowed in v0.2.0; see below.) + It turns out they *are* valid in ES5, so JSON5 supports them now too. + ([#36]; thanks [@jordanbtucker]!) + + +### v0.2.0 [[code][c0.2.0], [diff][d0.2.0]] + +[c0.2.0]: https://github.com/aseemk/json5/tree/v0.2.0 +[d0.2.0]: https://github.com/aseemk/json5/compare/v0.1.0...v0.2.0 + +This release fixes some bugs and adds some more utility features to help you +express data more easily: + +- **Breaking:** Negative hexadecimal numbers (e.g. `-0xC8`) are rejected now. + While V8 (e.g. Chrome and Node) supported them, it turns out they're invalid + in ES5. This has been [fixed in V8][v8-hex-fix] (and by extension, Chrome + and Node), so JSON5 officially rejects them now, too. ([#36]) + +- New: Trailing decimal points in decimal numbers are allowed again. 
+ (They were disallowed in v0.1.0; see below.) + They're allowed by ES5, and differentiating between integers and floats may + make sense on some platforms. ([#16]; thanks [@Midar].) + +- New: `Infinity` and `-Infinity` are now allowed number literals. + ([#30]; thanks [@pepkin88].) + +- New: Plus signs (`+`) in front of numbers are now allowed, since it can + be helpful in some contexts to explicitly mark numbers as positive. + (E.g. when a property represents changes or deltas.) + +- Fix: unescaped newlines in strings are rejected now. + ([#24]; thanks [@Midar].) + + +### v0.1.0 [[code][c0.1.0], [diff][d0.1.0]] + +[c0.1.0]: https://github.com/aseemk/json5/tree/v0.1.0 +[d0.1.0]: https://github.com/aseemk/json5/compare/v0.0.1...v0.1.0 + +This release tightens JSON5 support and adds helpful utility features: + +- New: Support hexadecimal numbers. (Thanks [@MaxNanasy].) + +- Fix: Reject octal numbers properly now. Previously, they were accepted but + improperly parsed as base-10 numbers. (Thanks [@MaxNanasy].) + +- **Breaking:** Reject "noctal" numbers now (base-10 numbers that begin with a + leading zero). These are disallowed by both JSON5 and JSON, as well as by + ES5's strict mode. (Thanks [@MaxNanasy].) + +- New: Support leading decimal points in decimal numbers. + (Thanks [@MaxNanasy].) + +- **Breaking:** Reject trailing decimal points in decimal numbers now. These + are disallowed by both JSON5 and JSON. (Thanks [@MaxNanasy].) + +- **Breaking:** Reject omitted elements in arrays now. These are disallowed by + both JSON5 and JSON. + +- Fix: Throw proper `SyntaxError` instances on errors now. + +- New: Add Node.js `require()` hook. Register via `json5/lib/require`. + +- New: Add Node.js `json5` executable to compile JSON5 files to JSON. + + +### v0.0.1 [[code][c0.0.1], [diff][d0.0.1]] + +[c0.0.1]: https://github.com/aseemk/json5/tree/v0.0.1 +[d0.0.1]: https://github.com/aseemk/json5/compare/v0.0.0...v0.0.1 + +This was the first implementation of this JSON5 parser. + +- Support unquoted object keys, including reserved words. Unicode characters + and escape sequences sequences aren't yet supported. + +- Support single-quoted strings. + +- Support multi-line strings. + +- Support trailing commas in arrays and objects. + +- Support comments, both inline and block. + + +### v0.0.0 [[code](https://github.com/aseemk/json5/tree/v0.0.0)] + +Let's consider this to be Douglas Crockford's original [json_parse.js] — a +parser for the regular JSON format. 
+ + +[json_parse.js]: https://github.com/douglascrockford/JSON-js/blob/master/json_parse.js +[v8-hex-fix]: http://code.google.com/p/v8/issues/detail?id=2240 + +[@MaxNanasy]: https://github.com/MaxNanasy +[@Midar]: https://github.com/Midar +[@pepkin88]: https://github.com/pepkin88 +[@rowanhill]: https://github.com/rowanhill +[@aeisenberg]: https://github.com/aeisenberg +[@jordanbtucker]: https://github.com/jordanbtucker +[@amb26]: https://github.com/amb26 + +[#16]: https://github.com/aseemk/json5/issues/16 +[#24]: https://github.com/aseemk/json5/issues/24 +[#30]: https://github.com/aseemk/json5/issues/30 +[#32]: https://github.com/aseemk/json5/issues/32 +[#36]: https://github.com/aseemk/json5/issues/36 +[#57]: https://github.com/aseemk/json5/issues/57 +[#58]: https://github.com/aseemk/json5/pull/58 +[#60]: https://github.com/aseemk/json5/pull/60 +[#63]: https://github.com/aseemk/json5/pull/63 +[#97]: https://github.com/aseemk/json5/pull/97 +[#101]: https://github.com/aseemk/json5/pull/101 diff --git a/node_modules/json5/README.md b/node_modules/json5/README.md new file mode 100644 index 0000000..7194ed4 --- /dev/null +++ b/node_modules/json5/README.md @@ -0,0 +1,285 @@ +# JSON5 – Modern JSON + +[![Build Status](https://travis-ci.org/aseemk/json5.png)](https://travis-ci.org/aseemk/json5) + +JSON is an excellent data format, but we think it can be better. + +**JSON5 is a proposed extension to JSON** that aims to make it easier for +*humans to write and maintain* by hand. It does this by adding some minimal +syntax features directly from ECMAScript 5. + +JSON5 remains a **strict subset of JavaScript**, adds **no new data types**, +and **works with all existing JSON content**. + +JSON5 is *not* an official successor to JSON, and JSON5 content may *not* +work with existing JSON parsers. For this reason, JSON5 files use a new .json5 +extension. *(TODO: new MIME type needed too.)* + +The code here is a **reference JavaScript implementation** for both Node.js +and all browsers. It’s based directly off of Douglas Crockford’s own [JSON +implementation][json_parse.js], and it’s both robust and secure. + + +## Why + +JSON isn’t the friendliest to *write*. Keys need to be quoted, objects and +arrays can’t have trailing commas, and comments aren’t allowed — even though +none of these are the case with regular JavaScript today. + +That was fine when JSON’s goal was to be a great data format, but JSON’s usage +has expanded beyond *machines*. JSON is now used for writing [configs][ex1], +[manifests][ex2], even [tests][ex3] — all by *humans*. + +[ex1]: http://plovr.com/docs.html +[ex2]: https://www.npmjs.org/doc/files/package.json.html +[ex3]: http://code.google.com/p/fuzztester/wiki/JSONFileFormat + +There are other formats that are human-friendlier, like YAML, but changing +from JSON to a completely different format is undesirable in many cases. +JSON5’s aim is to remain close to JSON and JavaScript. + + +## Features + +The following is the exact list of additions to JSON’s syntax introduced by +JSON5. **All of these are optional**, and **all of these come from ES5**. + +### Objects + +- Object keys can be unquoted if they’re valid [identifiers][mdn_variables]. + Yes, even reserved keywords (like `default`) are valid unquoted keys in ES5 + [[§11.1.5](http://es5.github.com/#x11.1.5), [§7.6](http://es5.github.com/#x7.6)]. 
+ ([More info](https://mathiasbynens.be/notes/javascript-identifiers)) + + *(TODO: Unicode characters and escape sequences aren’t yet supported in this + implementation.)* + +- Objects can have trailing commas. + +[mdn_variables]: https://developer.mozilla.org/en/Core_JavaScript_1.5_Guide/Core_Language_Features#Variables + +### Arrays + +- Arrays can have trailing commas. + +### Strings + +- Strings can be single-quoted. + +- Strings can be split across multiple lines; just prefix each newline with a + backslash. [ES5 [§7.8.4](http://es5.github.com/#x7.8.4)] + +### Numbers + +- Numbers can be hexadecimal (base 16). + +- Numbers can begin or end with a (leading or trailing) decimal point. + +- Numbers can include `Infinity`, `-Infinity`, `NaN`, and `-NaN`. + +- Numbers can begin with an explicit plus sign. + +### Comments + +- Both inline (single-line) and block (multi-line) comments are allowed. + + +## Example + +The following is a contrived example, but it illustrates most of the features: + +```js +{ + foo: 'bar', + while: true, + + this: 'is a \ +multi-line string', + + // this is an inline comment + here: 'is another', // inline comment + + /* this is a block comment + that continues on another line */ + + hex: 0xDEADbeef, + half: .5, + delta: +10, + to: Infinity, // and beyond! + + finally: 'a trailing comma', + oh: [ + "we shouldn't forget", + 'arrays can have', + 'trailing commas too', + ], +} +``` + +This implementation’s own [package.json5](package.json5) is more realistic: + +```js +// This file is written in JSON5 syntax, naturally, but npm needs a regular +// JSON file, so compile via `npm run build`. Be sure to keep both in sync! + +{ + name: 'json5', + version: '0.5.0', + description: 'JSON for the ES5 era.', + keywords: ['json', 'es5'], + author: 'Aseem Kishore ', + contributors: [ + // TODO: Should we remove this section in favor of GitHub's list? + // https://github.com/aseemk/json5/contributors + 'Max Nanasy ', + 'Andrew Eisenberg ', + 'Jordan Tucker ', + ], + main: 'lib/json5.js', + bin: 'lib/cli.js', + files: ["lib/"], + dependencies: {}, + devDependencies: { + gulp: "^3.9.1", + 'gulp-jshint': "^2.0.0", + jshint: "^2.9.1", + 'jshint-stylish': "^2.1.0", + mocha: "^2.4.5" + }, + scripts: { + build: 'node ./lib/cli.js -c package.json5', + test: 'mocha --ui exports --reporter spec', + // TODO: Would it be better to define these in a mocha.opts file? + }, + homepage: 'http://json5.org/', + license: 'MIT', + repository: { + type: 'git', + url: 'https://github.com/aseemk/json5.git', + }, +} +``` + + +## Community + +Join the [Google Group](http://groups.google.com/group/json5) if you’re +interested in JSON5 news, updates, and general discussion. +Don’t worry, it’s very low-traffic. + +The [GitHub wiki](https://github.com/aseemk/json5/wiki) is a good place to track +JSON5 support and usage. Contribute freely there! + +[GitHub Issues](https://github.com/aseemk/json5/issues) is the place to +formally propose feature requests and report bugs. Questions and general +feedback are better directed at the Google Group. + + +## Usage + +This JavaScript implementation of JSON5 simply provides a `JSON5` object just +like the native ES5 `JSON` object. 
+ +To use from Node: + +```sh +npm install json5 +``` + +```js +var JSON5 = require('json5'); +``` + +To use in the browser (adds the `JSON5` object to the global namespace): + +```html + +``` + +Then in both cases, you can simply replace native `JSON` calls with `JSON5`: + +```js +var obj = JSON5.parse('{unquoted:"key",trailing:"comma",}'); +var str = JSON5.stringify(obj); +``` + +`JSON5.parse` supports all of the JSON5 features listed above (*TODO: except +Unicode*), as well as the native [`reviver` argument][json-parse]. + +[json-parse]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse + +`JSON5.stringify` mainly avoids quoting keys where possible, but we hope to +keep expanding it in the future (e.g. to also output trailing commas). +It supports the native [`replacer` and `space` arguments][json-stringify], +as well. *(TODO: Any implemented `toJSON` methods aren’t used today.)* + +[json-stringify]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify + + +### Extras + +If you’re running this on Node, you can also register a JSON5 `require()` hook +to let you `require()` `.json5` files just like you can `.json` files: + +```js +require('json5/lib/require'); +require('./path/to/foo'); // tries foo.json5 after foo.js, foo.json, etc. +require('./path/to/bar.json5'); +``` + +This module also provides a `json5` executable (requires Node) for converting +JSON5 files to JSON: + +```sh +json5 -c path/to/foo.json5 # generates path/to/foo.json +``` + + +## Development + +```sh +git clone git://github.com/aseemk/json5.git +cd json5 +npm install +npm test +``` + +As the `package.json5` file states, be sure to run `npm run build` on changes +to `package.json5`, since npm requires `package.json`. + +Feel free to [file issues](https://github.com/aseemk/json5/issues) and submit +[pull requests](https://github.com/aseemk/json5/pulls) — contributions are +welcome. If you do submit a pull request, please be sure to add or update the +tests, and ensure that `npm test` continues to pass. + + +## License + +MIT License © 2012-2016 Aseem Kishore, and [others]( +https://github.com/aseemk/json5/contributors). + + +## Credits + +[Michael Bolin](http://bolinfest.com/) independently arrived at and published +some of these same ideas with awesome explanations and detail. +Recommended reading: +[Suggested Improvements to JSON](http://bolinfest.com/essays/json.html) + +[Douglas Crockford](http://www.crockford.com/) of course designed and built +JSON, but his state machine diagrams on the [JSON website](http://json.org/), +as cheesy as it may sound, gave me motivation and confidence that building a +new parser to implement these ideas this was within my reach! +This code is also modeled directly off of Doug’s open-source [json_parse.js][] +parser. I’m super grateful for that clean and well-documented code. + +[json_parse.js]: https://github.com/douglascrockford/JSON-js/blob/master/json_parse.js + +[Max Nanasy](https://github.com/MaxNanasy) has been an early and prolific +supporter, contributing multiple patches and ideas. Thanks Max! + +[Andrew Eisenberg](https://github.com/aeisenberg) has contributed the +`stringify` method. + +[Jordan Tucker](https://github.com/jordanbtucker) has aligned JSON5 more closely +with ES5 and is actively maintaining this project. 
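The Usage section above notes that `JSON5.parse` accepts the native `reviver` argument and that `JSON5.stringify` accepts the native `replacer` and `space` arguments, but neither is demonstrated. A minimal sketch of both, assuming the vendored `json5@0.5.0`; the `when` and `id` keys are made up for illustration:

```js
var JSON5 = require('json5');

// parse with a reviver: turn the (hypothetical) 'when' value into a Date
var obj = JSON5.parse(
    "{when: '2016-10-26', id: 1, /* trailing comma is fine */}",
    function (key, value) {
        return key === 'when' ? new Date(value) : value;
    }
);

// stringify with an array replacer and 2-space indentation; keys that are
// valid identifiers (like 'id') are emitted unquoted
var str = JSON5.stringify(obj, ['id'], 2);
console.log(str);
// {
//   id: 1
// }
```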
diff --git a/node_modules/json5/lib/cli.js b/node_modules/json5/lib/cli.js new file mode 100755 index 0000000..9b72f6d --- /dev/null +++ b/node_modules/json5/lib/cli.js @@ -0,0 +1,41 @@ +#!/usr/bin/env node + +// cli.js +// JSON5 command-line interface. +// +// This is pretty minimal for now; just supports compiling files via `-c`. +// TODO More useful functionality, like output path, watch, etc.? + +var FS = require('fs'); +var JSON5 = require('./json5'); +var Path = require('path'); + +var USAGE = [ + 'Usage: json5 -c path/to/file.json5 ...', + 'Compiles JSON5 files into sibling JSON files with the same basenames.', +].join('\n'); + +// if valid, args look like [node, json5, -c, file1, file2, ...] +var args = process.argv; + +if (args.length < 4 || args[2] !== '-c') { + console.error(USAGE); + process.exit(1); +} + +var cwd = process.cwd(); +var files = args.slice(3); + +// iterate over each file and convert JSON5 files to JSON: +files.forEach(function (file) { + var path = Path.resolve(cwd, file); + var basename = Path.basename(path, '.json5'); + var dirname = Path.dirname(path); + + var json5 = FS.readFileSync(path, 'utf8'); + var obj = JSON5.parse(json5); + var json = JSON.stringify(obj, null, 4); // 4 spaces; TODO configurable? + + path = Path.join(dirname, basename + '.json'); + FS.writeFileSync(path, json, 'utf8'); +}); diff --git a/node_modules/json5/lib/json5.js b/node_modules/json5/lib/json5.js new file mode 100644 index 0000000..ead6862 --- /dev/null +++ b/node_modules/json5/lib/json5.js @@ -0,0 +1,767 @@ +// json5.js +// Modern JSON. See README.md for details. +// +// This file is based directly off of Douglas Crockford's json_parse.js: +// https://github.com/douglascrockford/JSON-js/blob/master/json_parse.js + +var JSON5 = (typeof exports === 'object' ? exports : {}); + +JSON5.parse = (function () { + "use strict"; + +// This is a function that can parse a JSON5 text, producing a JavaScript +// data structure. It is a simple, recursive descent parser. It does not use +// eval or regular expressions, so it can be used as a model for implementing +// a JSON5 parser in other languages. + +// We are defining the function inside of another function to avoid creating +// global variables. + + var at, // The index of the current character + lineNumber, // The current line number + columnNumber, // The current column number + ch, // The current character + escapee = { + "'": "'", + '"': '"', + '\\': '\\', + '/': '/', + '\n': '', // Replace escaped newlines in strings w/ empty string + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t' + }, + ws = [ + ' ', + '\t', + '\r', + '\n', + '\v', + '\f', + '\xA0', + '\uFEFF' + ], + text, + + renderChar = function (chr) { + return chr === '' ? 'EOF' : "'" + chr + "'"; + }, + + error = function (m) { + +// Call error when something is wrong. + + var error = new SyntaxError(); + // beginning of message suffix to agree with that provided by Gecko - see https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse + error.message = m + " at line " + lineNumber + " column " + columnNumber + " of the JSON5 data. 
Still to read: " + JSON.stringify(text.substring(at - 1, at + 19)); + error.at = at; + // These two property names have been chosen to agree with the ones in Gecko, the only popular + // environment which seems to supply this info on JSON.parse + error.lineNumber = lineNumber; + error.columnNumber = columnNumber; + throw error; + }, + + next = function (c) { + +// If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected " + renderChar(c) + " instead of " + renderChar(ch)); + } + +// Get the next character. When there are no more characters, +// return the empty string. + + ch = text.charAt(at); + at++; + columnNumber++; + if (ch === '\n' || ch === '\r' && peek() !== '\n') { + lineNumber++; + columnNumber = 0; + } + return ch; + }, + + peek = function () { + +// Get the next character without consuming it or +// assigning it to the ch varaible. + + return text.charAt(at); + }, + + identifier = function () { + +// Parse an identifier. Normally, reserved words are disallowed here, but we +// only use this for unquoted object keys, where reserved words are allowed, +// so we don't check for those here. References: +// - http://es5.github.com/#x7.6 +// - https://developer.mozilla.org/en/Core_JavaScript_1.5_Guide/Core_Language_Features#Variables +// - http://docstore.mik.ua/orelly/webprog/jscript/ch02_07.htm +// TODO Identifiers can have Unicode "letters" in them; add support for those. + + var key = ch; + + // Identifiers must start with a letter, _ or $. + if ((ch !== '_' && ch !== '$') && + (ch < 'a' || ch > 'z') && + (ch < 'A' || ch > 'Z')) { + error("Bad identifier as unquoted key"); + } + + // Subsequent characters can contain digits. + while (next() && ( + ch === '_' || ch === '$' || + (ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z') || + (ch >= '0' && ch <= '9'))) { + key += ch; + } + + return key; + }, + + number = function () { + +// Parse a number value. + + var number, + sign = '', + string = '', + base = 10; + + if (ch === '-' || ch === '+') { + sign = ch; + next(ch); + } + + // support for Infinity (could tweak to allow other words): + if (ch === 'I') { + number = word(); + if (typeof number !== 'number' || isNaN(number)) { + error('Unexpected word for number'); + } + return (sign === '-') ? -number : number; + } + + // support for NaN + if (ch === 'N' ) { + number = word(); + if (!isNaN(number)) { + error('expected word to be NaN'); + } + // ignore sign as -NaN also is NaN + return number; + } + + if (ch === '0') { + string += ch; + next(); + if (ch === 'x' || ch === 'X') { + string += ch; + next(); + base = 16; + } else if (ch >= '0' && ch <= '9') { + error('Octal literal'); + } + } + + switch (base) { + case 10: + while (ch >= '0' && ch <= '9' ) { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + break; + case 16: + while (ch >= '0' && ch <= '9' || ch >= 'A' && ch <= 'F' || ch >= 'a' && ch <= 'f') { + string += ch; + next(); + } + break; + } + + if(sign === '-') { + number = -string; + } else { + number = +string; + } + + if (!isFinite(number)) { + error("Bad number"); + } else { + return number; + } + }, + + string = function () { + +// Parse a string value. 
+ + var hex, + i, + string = '', + delim, // double quote or single quote + uffff; + +// When parsing for string values, we must look for ' or " and \ characters. + + if (ch === '"' || ch === "'") { + delim = ch; + while (next()) { + if (ch === delim) { + next(); + return string; + } else if (ch === '\\') { + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (ch === '\r') { + if (peek() === '\n') { + next(); + } + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + } else if (ch === '\n') { + // unescaped newlines are invalid; see: + // https://github.com/aseemk/json5/issues/24 + // TODO this feels special-cased; are there other + // invalid unescaped chars? + break; + } else { + string += ch; + } + } + } + error("Bad string"); + }, + + inlineComment = function () { + +// Skip an inline comment, assuming this is one. The current character should +// be the second / character in the // pair that begins this inline comment. +// To finish the inline comment, we look for a newline or the end of the text. + + if (ch !== '/') { + error("Not an inline comment"); + } + + do { + next(); + if (ch === '\n' || ch === '\r') { + next(); + return; + } + } while (ch); + }, + + blockComment = function () { + +// Skip a block comment, assuming this is one. The current character should be +// the * character in the /* pair that begins this block comment. +// To finish the block comment, we look for an ending */ pair of characters, +// but we also watch for the end of text before the comment is terminated. + + if (ch !== '*') { + error("Not a block comment"); + } + + do { + next(); + while (ch === '*') { + next('*'); + if (ch === '/') { + next('/'); + return; + } + } + } while (ch); + + error("Unterminated block comment"); + }, + + comment = function () { + +// Skip a comment, whether inline or block-level, assuming this is one. +// Comments always begin with a / character. + + if (ch !== '/') { + error("Not a comment"); + } + + next('/'); + + if (ch === '/') { + inlineComment(); + } else if (ch === '*') { + blockComment(); + } else { + error("Unrecognized comment"); + } + }, + + white = function () { + +// Skip whitespace and comments. +// Note that we're detecting comments by only a single / character. +// This works since regular expressions are not valid JSON(5), but this will +// break if there are other valid values that begin with a / character! + + while (ch) { + if (ch === '/') { + comment(); + } else if (ws.indexOf(ch) >= 0) { + next(); + } else { + return; + } + } + }, + + word = function () { + +// true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + case 'I': + next('I'); + next('n'); + next('f'); + next('i'); + next('n'); + next('i'); + next('t'); + next('y'); + return Infinity; + case 'N': + next( 'N' ); + next( 'a' ); + next( 'N' ); + return NaN; + } + error("Unexpected " + renderChar(ch)); + }, + + value, // Place holder for the value function. + + array = function () { + +// Parse an array value. 
+ + var array = []; + + if (ch === '[') { + next('['); + white(); + while (ch) { + if (ch === ']') { + next(']'); + return array; // Potentially empty array + } + // ES5 allows omitting elements in arrays, e.g. [,] and + // [,null]. We don't allow this in JSON5. + if (ch === ',') { + error("Missing array element"); + } else { + array.push(value()); + } + white(); + // If there's no comma after this value, this needs to + // be the end of the array. + if (ch !== ',') { + next(']'); + return array; + } + next(','); + white(); + } + } + error("Bad array"); + }, + + object = function () { + +// Parse an object value. + + var key, + object = {}; + + if (ch === '{') { + next('{'); + white(); + while (ch) { + if (ch === '}') { + next('}'); + return object; // Potentially empty object + } + + // Keys can be unquoted. If they are, they need to be + // valid JS identifiers. + if (ch === '"' || ch === "'") { + key = string(); + } else { + key = identifier(); + } + + white(); + next(':'); + object[key] = value(); + white(); + // If there's no comma after this pair, this needs to be + // the end of the object. + if (ch !== ',') { + next('}'); + return object; + } + next(','); + white(); + } + } + error("Bad object"); + }; + + value = function () { + +// Parse a JSON value. It could be an object, an array, a string, a number, +// or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + case "'": + return string(); + case '-': + case '+': + case '.': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + +// Return the json_parse function. It will have access to all of the above +// functions and variables. + + return function (source, reviver) { + var result; + + text = String(source); + at = 0; + lineNumber = 1; + columnNumber = 1; + ch = ' '; + result = value(); + white(); + if (ch) { + error("Syntax error"); + } + +// If there is a reviver function, we recursively walk the new structure, +// passing each name/value pair to the reviver function for possible +// transformation, starting with a temporary root object that holds the result +// in an empty key. If there is not a reviver function, we simply return the +// result. + + return typeof reviver === 'function' ? (function walk(holder, key) { + var k, v, value = holder[key]; + if (value && typeof value === 'object') { + for (k in value) { + if (Object.prototype.hasOwnProperty.call(value, k)) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + } + } + } + return reviver.call(holder, key, value); + }({'': result}, '')) : result; + }; +}()); + +// JSON5 stringify will not quote keys where appropriate +JSON5.stringify = function (obj, replacer, space) { + if (replacer && (typeof(replacer) !== "function" && !isArray(replacer))) { + throw new Error('Replacer must be a function or an array'); + } + var getReplacedValueOrUndefined = function(holder, key, isTopLevel) { + var value = holder[key]; + + // Replace the value with its toJSON value first, if possible + if (value && value.toJSON && typeof value.toJSON === "function") { + value = value.toJSON(); + } + + // If the user-supplied replacer if a function, call it. If it's an array, check objects' string keys for + // presence in the array (removing the key/value pair from the resulting JSON if the key is missing). 
+ if (typeof(replacer) === "function") { + return replacer.call(holder, key, value); + } else if(replacer) { + if (isTopLevel || isArray(holder) || replacer.indexOf(key) >= 0) { + return value; + } else { + return undefined; + } + } else { + return value; + } + }; + + function isWordChar(c) { + return (c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + (c >= '0' && c <= '9') || + c === '_' || c === '$'; + } + + function isWordStart(c) { + return (c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + c === '_' || c === '$'; + } + + function isWord(key) { + if (typeof key !== 'string') { + return false; + } + if (!isWordStart(key[0])) { + return false; + } + var i = 1, length = key.length; + while (i < length) { + if (!isWordChar(key[i])) { + return false; + } + i++; + } + return true; + } + + // export for use in tests + JSON5.isWord = isWord; + + // polyfills + function isArray(obj) { + if (Array.isArray) { + return Array.isArray(obj); + } else { + return Object.prototype.toString.call(obj) === '[object Array]'; + } + } + + function isDate(obj) { + return Object.prototype.toString.call(obj) === '[object Date]'; + } + + var objStack = []; + function checkForCircular(obj) { + for (var i = 0; i < objStack.length; i++) { + if (objStack[i] === obj) { + throw new TypeError("Converting circular structure to JSON"); + } + } + } + + function makeIndent(str, num, noNewLine) { + if (!str) { + return ""; + } + // indentation no more than 10 chars + if (str.length > 10) { + str = str.substring(0, 10); + } + + var indent = noNewLine ? "" : "\n"; + for (var i = 0; i < num; i++) { + indent += str; + } + + return indent; + } + + var indentStr; + if (space) { + if (typeof space === "string") { + indentStr = space; + } else if (typeof space === "number" && space >= 0) { + indentStr = makeIndent(" ", space, true); + } else { + // ignore space parameter + } + } + + // Copied from Crokford's implementation of JSON + // See https://github.com/douglascrockford/JSON-js/blob/e39db4b7e6249f04a195e7dd0840e610cc9e941e/json2.js#L195 + // Begin + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }; + function escapeString(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' ? 
+ c : + '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + // End + + function internalStringify(holder, key, isTopLevel) { + var buffer, res; + + // Replace the value, if necessary + var obj_part = getReplacedValueOrUndefined(holder, key, isTopLevel); + + if (obj_part && !isDate(obj_part)) { + // unbox objects + // don't unbox dates, since will turn it into number + obj_part = obj_part.valueOf(); + } + switch(typeof obj_part) { + case "boolean": + return obj_part.toString(); + + case "number": + if (isNaN(obj_part) || !isFinite(obj_part)) { + return "null"; + } + return obj_part.toString(); + + case "string": + return escapeString(obj_part.toString()); + + case "object": + if (obj_part === null) { + return "null"; + } else if (isArray(obj_part)) { + checkForCircular(obj_part); + buffer = "["; + objStack.push(obj_part); + + for (var i = 0; i < obj_part.length; i++) { + res = internalStringify(obj_part, i, false); + buffer += makeIndent(indentStr, objStack.length); + if (res === null || typeof res === "undefined") { + buffer += "null"; + } else { + buffer += res; + } + if (i < obj_part.length-1) { + buffer += ","; + } else if (indentStr) { + buffer += "\n"; + } + } + objStack.pop(); + buffer += makeIndent(indentStr, objStack.length, true) + "]"; + } else { + checkForCircular(obj_part); + buffer = "{"; + var nonEmpty = false; + objStack.push(obj_part); + for (var prop in obj_part) { + if (obj_part.hasOwnProperty(prop)) { + var value = internalStringify(obj_part, prop, false); + isTopLevel = false; + if (typeof value !== "undefined" && value !== null) { + buffer += makeIndent(indentStr, objStack.length); + nonEmpty = true; + key = isWord(prop) ? prop : escapeString(prop); + buffer += key + ":" + (indentStr ? ' ' : '') + value + ","; + } + } + } + objStack.pop(); + if (nonEmpty) { + buffer = buffer.substring(0, buffer.length-1) + makeIndent(indentStr, objStack.length) + "}"; + } else { + buffer = '{}'; + } + } + return buffer; + default: + // functions and undefined should be ignored + return undefined; + } + } + + // special case...when undefined is used inside of + // a compound object/array, return null. + // but when top-level, return undefined + var topLevelHolder = {"":obj}; + if (obj === undefined) { + return getReplacedValueOrUndefined(topLevelHolder, '', true); + } + return internalStringify(topLevelHolder, '', true); +}; diff --git a/node_modules/json5/lib/require.js b/node_modules/json5/lib/require.js new file mode 100644 index 0000000..73e56fe --- /dev/null +++ b/node_modules/json5/lib/require.js @@ -0,0 +1,18 @@ +// require.js +// Node.js only: adds a require() hook for .json5 files, just like the native +// hook for .json files. +// +// Usage: +// require('json5/require'); +// require('./foo'); // will check foo.json5 after foo.js, foo.json, etc. 
+// require('./bar.json5'); + +var FS = require('fs'); +var JSON5 = require('./json5'); + +// Modeled off of (v0.6.18 link; check latest too): +// https://github.com/joyent/node/blob/v0.6.18/lib/module.js#L468-L472 +require.extensions['.json5'] = function (module, filename) { + var content = FS.readFileSync(filename, 'utf8'); + module.exports = JSON5.parse(content); +}; diff --git a/node_modules/json5/package.json b/node_modules/json5/package.json new file mode 100644 index 0000000..4e619cd --- /dev/null +++ b/node_modules/json5/package.json @@ -0,0 +1,38 @@ +{ + "name": "json5", + "version": "0.5.0", + "description": "JSON for the ES5 era.", + "keywords": [ + "json", + "es5" + ], + "author": "Aseem Kishore ", + "contributors": [ + "Max Nanasy ", + "Andrew Eisenberg ", + "Jordan Tucker " + ], + "main": "lib/json5.js", + "bin": "lib/cli.js", + "files": [ + "lib/" + ], + "dependencies": {}, + "devDependencies": { + "gulp": "^3.9.1", + "gulp-jshint": "^2.0.0", + "jshint": "^2.9.1", + "jshint-stylish": "^2.1.0", + "mocha": "^2.4.5" + }, + "scripts": { + "build": "node ./lib/cli.js -c package.json5", + "test": "mocha --ui exports --reporter spec" + }, + "homepage": "http://json5.org/", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/aseemk/json5.git" + } +} \ No newline at end of file diff --git a/node_modules/jsonfile/.npmignore b/node_modules/jsonfile/.npmignore new file mode 100644 index 0000000..cefaa67 --- /dev/null +++ b/node_modules/jsonfile/.npmignore @@ -0,0 +1,2 @@ +test/ +.travis.yml \ No newline at end of file diff --git a/node_modules/jsonfile/CHANGELOG.md b/node_modules/jsonfile/CHANGELOG.md new file mode 100644 index 0000000..66fcbb4 --- /dev/null +++ b/node_modules/jsonfile/CHANGELOG.md @@ -0,0 +1,126 @@ +2.4.0 / 2016-09-15 +------------------ +### Changed +- added optional support for `graceful-fs` [#62] + +2.3.1 / 2016-05-13 +------------------ +- fix to support BOM. [#45][#45] + +2.3.0 / 2016-04-16 +------------------ +- add `throws` to `readFile()`. See [#39][#39] +- add support for any arbitrary `fs` module. Useful with [mock-fs](https://www.npmjs.com/package/mock-fs) + +2.2.3 / 2015-10-14 +------------------ +- include file name in parse error. See: https://github.com/jprichardson/node-jsonfile/pull/34 + +2.2.2 / 2015-09-16 +------------------ +- split out tests into separate files +- fixed `throws` when set to `true` in `readFileSync()`. See: https://github.com/jprichardson/node-jsonfile/pull/33 + +2.2.1 / 2015-06-25 +------------------ +- fixed regression when passing in string as encoding for options in `writeFile()` and `writeFileSync()`. See: https://github.com/jprichardson/node-jsonfile/issues/28 + +2.2.0 / 2015-06-25 +------------------ +- added `options.spaces` to `writeFile()` and `writeFileSync()` + +2.1.2 / 2015-06-22 +------------------ +- fixed if passed `readFileSync(file, 'utf8')`. See: https://github.com/jprichardson/node-jsonfile/issues/25 + +2.1.1 / 2015-06-19 +------------------ +- fixed regressions if `null` is passed for options. See: https://github.com/jprichardson/node-jsonfile/issues/24 + +2.1.0 / 2015-06-19 +------------------ +- cleanup: JavaScript Standard Style, rename files, dropped terst for assert +- methods now support JSON revivers/replacers + +2.0.1 / 2015-05-24 +------------------ +- update license attribute https://github.com/jprichardson/node-jsonfile/pull/21 + +2.0.0 / 2014-07-28 +------------------ +* added `\n` to end of file on write. 
[#14](https://github.com/jprichardson/node-jsonfile/pull/14) +* added `options.throws` to `readFileSync()` +* dropped support for Node v0.8 + +1.2.0 / 2014-06-29 +------------------ +* removed semicolons +* bugfix: passed `options` to `fs.readFile` and `fs.readFileSync`. This technically changes behavior, but +changes it according to docs. [#12][#12] + +1.1.1 / 2013-11-11 +------------------ +* fixed catching of callback bug (ffissore / #5) + +1.1.0 / 2013-10-11 +------------------ +* added `options` param to methods, (seanodell / #4) + +1.0.1 / 2013-09-05 +------------------ +* removed `homepage` field from package.json to remove NPM warning + +1.0.0 / 2013-06-28 +------------------ +* added `.npmignore`, #1 +* changed spacing default from `4` to `2` to follow Node conventions + +0.0.1 / 2012-09-10 +------------------ +* Initial release. + +[#45]: https://github.com/jprichardson/node-jsonfile/issues/45 "Reading of UTF8-encoded (w/ BOM) files fails" +[#44]: https://github.com/jprichardson/node-jsonfile/issues/44 "Extra characters in written file" +[#43]: https://github.com/jprichardson/node-jsonfile/issues/43 "Prettyfy json when written to file" +[#42]: https://github.com/jprichardson/node-jsonfile/pull/42 "Moved fs.readFileSync within the try/catch" +[#41]: https://github.com/jprichardson/node-jsonfile/issues/41 "Linux: Hidden file not working" +[#40]: https://github.com/jprichardson/node-jsonfile/issues/40 "autocreate folder doesnt work from Path-value" +[#39]: https://github.com/jprichardson/node-jsonfile/pull/39 "Add `throws` option for readFile (async)" +[#38]: https://github.com/jprichardson/node-jsonfile/pull/38 "Update README.md writeFile[Sync] signature" +[#37]: https://github.com/jprichardson/node-jsonfile/pull/37 "support append file" +[#36]: https://github.com/jprichardson/node-jsonfile/pull/36 "Add typescript definition file." +[#35]: https://github.com/jprichardson/node-jsonfile/pull/35 "Add typescript definition file." +[#34]: https://github.com/jprichardson/node-jsonfile/pull/34 "readFile JSON parse error includes filename" +[#33]: https://github.com/jprichardson/node-jsonfile/pull/33 "fix throw->throws typo in readFileSync()" +[#32]: https://github.com/jprichardson/node-jsonfile/issues/32 "readFile & readFileSync can possible have strip-comments as an option?" +[#31]: https://github.com/jprichardson/node-jsonfile/pull/31 "[Modify] Support string include is unicode escape string" +[#30]: https://github.com/jprichardson/node-jsonfile/issues/30 "How to use Jsonfile package in Meteor.js App?" +[#29]: https://github.com/jprichardson/node-jsonfile/issues/29 "writefile callback if no error?" 
+[#28]: https://github.com/jprichardson/node-jsonfile/issues/28 "writeFile options argument broken " +[#27]: https://github.com/jprichardson/node-jsonfile/pull/27 "Use svg instead of png to get better image quality" +[#26]: https://github.com/jprichardson/node-jsonfile/issues/26 "Breaking change to fs-extra" +[#25]: https://github.com/jprichardson/node-jsonfile/issues/25 "support string encoding param for read methods" +[#24]: https://github.com/jprichardson/node-jsonfile/issues/24 "readFile: Passing in null options with a callback throws an error" +[#23]: https://github.com/jprichardson/node-jsonfile/pull/23 "Add appendFile and appendFileSync" +[#22]: https://github.com/jprichardson/node-jsonfile/issues/22 "Default value for spaces in readme.md is outdated" +[#21]: https://github.com/jprichardson/node-jsonfile/pull/21 "Update license attribute" +[#20]: https://github.com/jprichardson/node-jsonfile/issues/20 "Add simple caching functionallity" +[#19]: https://github.com/jprichardson/node-jsonfile/pull/19 "Add appendFileSync method" +[#18]: https://github.com/jprichardson/node-jsonfile/issues/18 "Add updateFile and updateFileSync methods" +[#17]: https://github.com/jprichardson/node-jsonfile/issues/17 "seem read & write sync has sequentially problem" +[#16]: https://github.com/jprichardson/node-jsonfile/pull/16 "export spaces defaulted to null" +[#15]: https://github.com/jprichardson/node-jsonfile/issues/15 "`jsonfile.spaces` should default to `null`" +[#14]: https://github.com/jprichardson/node-jsonfile/pull/14 "Add EOL at EOF" +[#13]: https://github.com/jprichardson/node-jsonfile/issues/13 "Add a final newline" +[#12]: https://github.com/jprichardson/node-jsonfile/issues/12 "readFile doesn't accept options" +[#11]: https://github.com/jprichardson/node-jsonfile/pull/11 "Added try,catch to readFileSync" +[#10]: https://github.com/jprichardson/node-jsonfile/issues/10 "No output or error from writeFile" +[#9]: https://github.com/jprichardson/node-jsonfile/pull/9 "Change 'js' to 'jf' in example." +[#8]: https://github.com/jprichardson/node-jsonfile/pull/8 "Updated forgotten module.exports to me." +[#7]: https://github.com/jprichardson/node-jsonfile/pull/7 "Add file name in error message" +[#6]: https://github.com/jprichardson/node-jsonfile/pull/6 "Use graceful-fs when possible" +[#5]: https://github.com/jprichardson/node-jsonfile/pull/5 "Jsonfile doesn't behave nicely when used inside a test suite." +[#4]: https://github.com/jprichardson/node-jsonfile/pull/4 "Added options parameter to writeFile and writeFileSync" +[#3]: https://github.com/jprichardson/node-jsonfile/issues/3 "test2" +[#2]: https://github.com/jprichardson/node-jsonfile/issues/2 "homepage field must be a string url. Deleted." 
+[#1]: https://github.com/jprichardson/node-jsonfile/pull/1 "adding an `.npmignore` file" diff --git a/node_modules/jsonfile/LICENSE b/node_modules/jsonfile/LICENSE new file mode 100644 index 0000000..cb7e807 --- /dev/null +++ b/node_modules/jsonfile/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2012-2015, JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/jsonfile/README.md b/node_modules/jsonfile/README.md new file mode 100644 index 0000000..54bca05 --- /dev/null +++ b/node_modules/jsonfile/README.md @@ -0,0 +1,162 @@ +Node.js - jsonfile +================ + +Easily read/write JSON files. + +[![npm Package](https://img.shields.io/npm/v/jsonfile.svg?style=flat-square)](https://www.npmjs.org/package/jsonfile) +[![build status](https://secure.travis-ci.org/jprichardson/node-jsonfile.svg)](http://travis-ci.org/jprichardson/node-jsonfile) +[![windows Build status](https://img.shields.io/appveyor/ci/jprichardson/node-jsonfile/master.svg?label=windows%20build)](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master) + +Standard JavaScript + +Why? +---- + +Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying. + + + +Installation +------------ + + npm install --save jsonfile + + + +API +--- + +### readFile(filename, [options], callback) + +`options` (`object`, default `undefined`): Pass in any `fs.readFile` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). + - `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback. + If `false`, returns `null` for the object. + + +```js +var jsonfile = require('jsonfile') +var file = '/tmp/data.json' +jsonfile.readFile(file, function(err, obj) { + console.dir(obj) +}) +``` + + +### readFileSync(filename, [options]) + +`options` (`object`, default `undefined`): Pass in any `fs.readFileSync` options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse). +- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, throw the error. +If `false`, returns `null` for the object. 
+ +```js +var jsonfile = require('jsonfile') +var file = '/tmp/data.json' + +console.dir(jsonfile.readFileSync(file)) +``` + + +### writeFile(filename, obj, [options], callback) + +`options`: Pass in any `fs.writeFile` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`. + + +```js +var jsonfile = require('jsonfile') + +var file = '/tmp/data.json' +var obj = {name: 'JP'} + +jsonfile.writeFile(file, obj, function (err) { + console.error(err) +}) +``` + +**formatting with spaces:** + +```js +var jsonfile = require('jsonfile') + +var file = '/tmp/data.json' +var obj = {name: 'JP'} + +jsonfile.writeFile(file, obj, {spaces: 2}, function(err) { + console.error(err) +}) +``` + + +### writeFileSync(filename, obj, [options]) + +`options`: Pass in any `fs.writeFileSync` options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`. + +```js +var jsonfile = require('jsonfile') + +var file = '/tmp/data.json' +var obj = {name: 'JP'} + +jsonfile.writeFileSync(file, obj) +``` + +**formatting with spaces:** + +```js +var jsonfile = require('jsonfile') + +var file = '/tmp/data.json' +var obj = {name: 'JP'} + +jsonfile.writeFileSync(file, obj, {spaces: 2}) +``` + + + +### spaces + +Global configuration to set spaces to indent JSON files. + +**default:** `null` + +```js +var jsonfile = require('jsonfile') + +jsonfile.spaces = 4 + +var file = '/tmp/data.json' +var obj = {name: 'JP'} + +// json file has four space indenting now +jsonfile.writeFile(file, obj, function (err) { + console.error(err) +}) +``` + +Note, it's bound to `this.spaces`. So, if you do this: + +```js +var myObj = {} +myObj.writeJsonSync = jsonfile.writeFileSync +// => this.spaces = null +``` + +Could do the following: + +```js +var jsonfile = require('jsonfile') +jsonfile.spaces = 4 +jsonfile.writeFileSync(file, obj) // will have 4 spaces indentation + +var myCrazyObj = {spaces: 32} +myCrazyObj.writeJsonSync = jsonfile.writeFileSync +myCrazyObj.writeJsonSync(file, obj) // will have 32 space indentation +myCrazyObj.writeJsonSync(file, obj, {spaces: 2}) // will have only 2 +``` + + +License +------- + +(MIT License) + +Copyright 2012-2016, JP Richardson diff --git a/node_modules/jsonfile/appveyor.yml b/node_modules/jsonfile/appveyor.yml new file mode 100644 index 0000000..872af18 --- /dev/null +++ b/node_modules/jsonfile/appveyor.yml @@ -0,0 +1,28 @@ +# Test against this version of Node.js +environment: + matrix: + # node.js + - nodejs_version: "0.10" + - nodejs_version: "0.12" + - nodejs_version: "4" + - nodejs_version: "5" + - nodejs_version: "6" + +# Install scripts. (runs after repo cloning) +install: + # Get the latest stable version of Node.js or io.js + - ps: Install-Product node $env:nodejs_version + # install modules + - npm config set loglevel warn + - npm install --silent + +# Post-install test scripts. +test_script: + # Output useful info for debugging. + - node --version + - npm --version + # run tests + - npm test + +# Don't actually build. 
+build: off diff --git a/node_modules/jsonfile/index.js b/node_modules/jsonfile/index.js new file mode 100644 index 0000000..7111e15 --- /dev/null +++ b/node_modules/jsonfile/index.js @@ -0,0 +1,133 @@ +var _fs +try { + _fs = require('graceful-fs') +} catch (_) { + _fs = require('fs') +} + +function readFile (file, options, callback) { + if (callback == null) { + callback = options + options = {} + } + + if (typeof options === 'string') { + options = {encoding: options} + } + + options = options || {} + var fs = options.fs || _fs + + var shouldThrow = true + // DO NOT USE 'passParsingErrors' THE NAME WILL CHANGE!!!, use 'throws' instead + if ('passParsingErrors' in options) { + shouldThrow = options.passParsingErrors + } else if ('throws' in options) { + shouldThrow = options.throws + } + + fs.readFile(file, options, function (err, data) { + if (err) return callback(err) + + data = stripBom(data) + + var obj + try { + obj = JSON.parse(data, options ? options.reviver : null) + } catch (err2) { + if (shouldThrow) { + err2.message = file + ': ' + err2.message + return callback(err2) + } else { + return callback(null, null) + } + } + + callback(null, obj) + }) +} + +function readFileSync (file, options) { + options = options || {} + if (typeof options === 'string') { + options = {encoding: options} + } + + var fs = options.fs || _fs + + var shouldThrow = true + // DO NOT USE 'passParsingErrors' THE NAME WILL CHANGE!!!, use 'throws' instead + if ('passParsingErrors' in options) { + shouldThrow = options.passParsingErrors + } else if ('throws' in options) { + shouldThrow = options.throws + } + + var content = fs.readFileSync(file, options) + content = stripBom(content) + + try { + return JSON.parse(content, options.reviver) + } catch (err) { + if (shouldThrow) { + err.message = file + ': ' + err.message + throw err + } else { + return null + } + } +} + +function writeFile (file, obj, options, callback) { + if (callback == null) { + callback = options + options = {} + } + options = options || {} + var fs = options.fs || _fs + + var spaces = typeof options === 'object' && options !== null + ? 'spaces' in options + ? options.spaces : this.spaces + : this.spaces + + var str = '' + try { + str = JSON.stringify(obj, options ? options.replacer : null, spaces) + '\n' + } catch (err) { + if (callback) return callback(err, null) + } + + fs.writeFile(file, str, options, callback) +} + +function writeFileSync (file, obj, options) { + options = options || {} + var fs = options.fs || _fs + + var spaces = typeof options === 'object' && options !== null + ? 'spaces' in options + ? 
options.spaces : this.spaces + : this.spaces + + var str = JSON.stringify(obj, options.replacer, spaces) + '\n' + // not sure if fs.writeFileSync returns anything, but just in case + return fs.writeFileSync(file, str, options) +} + +function stripBom (content) { + // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified + if (Buffer.isBuffer(content)) content = content.toString('utf8') + content = content.replace(/^\uFEFF/, '') + return content +} + +var jsonfile = { + spaces: null, + readFile: readFile, + readFileSync: readFileSync, + writeFile: writeFile, + writeFileSync: writeFileSync +} + +module.exports = jsonfile diff --git a/node_modules/jsonfile/package.json b/node_modules/jsonfile/package.json new file mode 100644 index 0000000..ea7ac1e --- /dev/null +++ b/node_modules/jsonfile/package.json @@ -0,0 +1,35 @@ +{ + "name": "jsonfile", + "version": "2.4.0", + "description": "Easily read/write JSON files.", + "repository": { + "type": "git", + "url": "git@github.com:jprichardson/node-jsonfile.git" + }, + "keywords": [ + "read", + "write", + "file", + "json", + "fs", + "fs-extra" + ], + "author": "JP Richardson ", + "license": "MIT", + "dependencies": {}, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + }, + "devDependencies": { + "mocha": "2.x", + "mock-fs": "^3.8.0", + "rimraf": "^2.4.0", + "standard": "^6.0.8" + }, + "main": "index.js", + "scripts": { + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "mocha" + } +} diff --git a/node_modules/jsonpointer/.travis.yml b/node_modules/jsonpointer/.travis.yml new file mode 100644 index 0000000..7f56324 --- /dev/null +++ b/node_modules/jsonpointer/.travis.yml @@ -0,0 +1,7 @@ +language: "node_js" +node_js: + - 0.10 + - 0.11 + - 0.12 + - 4.0 + - node diff --git a/node_modules/jsonpointer/LICENSE.md b/node_modules/jsonpointer/LICENSE.md new file mode 100644 index 0000000..ac32f5d --- /dev/null +++ b/node_modules/jsonpointer/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2011-2015 Jan Lehnardt & Marc Bachmann + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/jsonpointer/README.md b/node_modules/jsonpointer/README.md new file mode 100644 index 0000000..bc7aa15 --- /dev/null +++ b/node_modules/jsonpointer/README.md @@ -0,0 +1,39 @@ +# JSON Pointer for nodejs + +This is an implementation of [JSON Pointer](http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-08). 
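Reference tokens follow the draft's escaping rules: `~1` decodes to `/` and `~0` to `~`. A quick sketch, using a made-up document whose keys contain those characters:

```js
var jsonpointer = require('jsonpointer');

// keys containing '/' or '~' must be escaped in the pointer itself
var doc = { 'a/b': { 'm~n': 42 } };
jsonpointer.get(doc, '/a~1b/m~0n'); // returns 42
```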
+ +## Usage +```javascript +var jsonpointer = require('jsonpointer'); +var obj = { foo: 1, bar: { baz: 2}, qux: [3, 4, 5]}; + +jsonpointer.get(obj, '/foo'); // returns 1 +jsonpointer.get(obj, '/bar/baz'); // returns 2 +jsonpointer.get(obj, '/qux/0'); // returns 3 +jsonpointer.get(obj, '/qux/1'); // returns 4 +jsonpointer.get(obj, '/qux/2'); // returns 5 +jsonpointer.get(obj, '/quo'); // returns null + +jsonpointer.set(obj, '/foo', 6); // sets obj.foo = 6; +jsonpointer.set(obj, '/qux/-', 6) // sets obj.qux = [3, 4, 5, 6] + +var pointer = jsonpointer.compile('/foo') +pointer.get(obj) // returns 1 +pointer.set(obj, 1) // sets obj.foo = 1 +``` + +## Testing + + $ node test.js + All tests pass. + $ + +[![Build Status](https://travis-ci.org/janl/node-jsonpointer.png?branch=master)](https://travis-ci.org/janl/node-jsonpointer) + +## Author + +(c) 2011-2015 Jan Lehnardt & Marc Bachmann + +## License + +MIT License. diff --git a/node_modules/jsonpointer/benchmark.js b/node_modules/jsonpointer/benchmark.js new file mode 100644 index 0000000..8a95636 --- /dev/null +++ b/node_modules/jsonpointer/benchmark.js @@ -0,0 +1,56 @@ +var jsonpointer = require('./') + +var i +var obj = { + a: 1, + b: { + c: 2 + }, + d: { + e: [{ a: 3 }, { b: 4 }, { c: 5 }] + } +} + +// Get +console.time('get first level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.get(obj, '/a') +} +console.timeEnd('get first level property') + +console.time('get second level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.get(obj, '/d/e') +} +console.timeEnd('get second level property') + +console.time('get third level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.get(obj, '/d/e/0') +} +console.timeEnd('get third level property') + +// Set +console.time('set first level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.set(obj, '/a', 'bla') +} +console.timeEnd('set first level property') + +console.time('set second level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.set(obj, '/d/e', 'bla') +} +console.timeEnd('set second level property') + +console.time('set third level property') +for (i = 0; i < 1e6; i++) { + jsonpointer.set(obj, '/d/e/0', 'bla') +} +console.timeEnd('set third level property') + +console.time('push property into array') +for (i = 0; i < 1e6; i++) { + jsonpointer.set(obj, '/d/e/-', 'bla') +} +console.timeEnd('push property into array') diff --git a/node_modules/jsonpointer/jsonpointer.js b/node_modules/jsonpointer/jsonpointer.js new file mode 100644 index 0000000..7cfaec0 --- /dev/null +++ b/node_modules/jsonpointer/jsonpointer.js @@ -0,0 +1,93 @@ +var hasExcape = /~/ +var escapeMatcher = /~[01]/g +function escapeReplacer (m) { + switch (m) { + case '~1': return '/' + case '~0': return '~' + } + throw new Error('Invalid tilde escape: ' + m) +} + +function untilde (str) { + if (!hasExcape.test(str)) return str + return str.replace(escapeMatcher, escapeReplacer) +} + +function setter (obj, pointer, value) { + var part + var hasNextPart + + for (var p = 1, len = pointer.length; p < len;) { + part = untilde(pointer[p++]) + hasNextPart = len > p + + if (typeof obj[part] === 'undefined') { + // support setting of /- + if (Array.isArray(obj) && part === '-') { + part = obj.length + } + + // support nested objects/array when setting values + if (hasNextPart) { + if ((pointer[p] !== '' && pointer[p] < Infinity) || pointer[p] === '-') obj[part] = [] + else obj[part] = {} + } + } + + if (!hasNextPart) break + obj = obj[part] + } + + var oldValue = obj[part] + if (value === undefined) delete obj[part] 
+ else obj[part] = value + return oldValue +} + +function compilePointer (pointer) { + if (typeof pointer === 'string') { + pointer = pointer.split('/') + if (pointer[0] === '') return pointer + throw new Error('Invalid JSON pointer.') + } else if (Array.isArray(pointer)) { + return pointer + } + + throw new Error('Invalid JSON pointer.') +} + +function get (obj, pointer) { + if (typeof obj !== 'object') throw new Error('Invalid input object.') + pointer = compilePointer(pointer) + var len = pointer.length + if (len === 1) return obj + + for (var p = 1; p < len;) { + obj = obj[untilde(pointer[p++])] + if (len === p) return obj + if (typeof obj !== 'object') return undefined + } +} + +function set (obj, pointer, value) { + if (typeof obj !== 'object') throw new Error('Invalid input object.') + pointer = compilePointer(pointer) + if (pointer.length === 0) throw new Error('Invalid JSON pointer for set.') + return setter(obj, pointer, value) +} + +function compile (pointer) { + var compiled = compilePointer(pointer) + return { + get: function (object) { + return get(object, compiled) + }, + set: function (object, value) { + return set(object, compiled, value) + } + } +} + +exports.get = get +exports.set = set +exports.compile = compile diff --git a/node_modules/jsonpointer/package.json b/node_modules/jsonpointer/package.json new file mode 100644 index 0000000..19bdd38 --- /dev/null +++ b/node_modules/jsonpointer/package.json @@ -0,0 +1,34 @@ +{ + "name": "jsonpointer", + "description": "Simple JSON Addressing.", + "tags": [ + "util", + "simple", + "util", + "utility" + ], + "version": "4.0.0", + "author": "Jan Lehnardt ", + "contributors": [ + "Joe Hildebrand ", + "Marc Bachmann " + ], + "repository": { + "type": "git", + "url": "http://github.com/janl/node-jsonpointer.git" + }, + "bugs": { + "url": "http://github.com/janl/node-jsonpointer/issues" + }, + "engines": { + "node": ">=0.10.0" + }, + "main": "./jsonpointer", + "scripts": { + "test": "standard && node test.js" + }, + "license": "MIT", + "devDependencies": { + "standard": "^5.3.1" + } +} diff --git a/node_modules/jsonpointer/test.js b/node_modules/jsonpointer/test.js new file mode 100644 index 0000000..e3d9963 --- /dev/null +++ b/node_modules/jsonpointer/test.js @@ -0,0 +1,131 @@ +var assert = require('assert') +var jsonpointer = require('./jsonpointer') + +var obj = { + a: 1, + b: { + c: 2 + }, + d: { + e: [{ a: 3 }, { b: 4 }, { c: 5 }] + } +} + +assert.equal(jsonpointer.get(obj, '/a'), 1) +assert.equal(jsonpointer.get(obj, '/b/c'), 2) +assert.equal(jsonpointer.get(obj, '/d/e/0/a'), 3) +assert.equal(jsonpointer.get(obj, '/d/e/1/b'), 4) +assert.equal(jsonpointer.get(obj, '/d/e/2/c'), 5) + +// set returns old value +assert.equal(jsonpointer.set(obj, '/a', 2), 1) +assert.equal(jsonpointer.set(obj, '/b/c', 3), 2) +assert.equal(jsonpointer.set(obj, '/d/e/0/a', 4), 3) +assert.equal(jsonpointer.set(obj, '/d/e/1/b', 5), 4) +assert.equal(jsonpointer.set(obj, '/d/e/2/c', 6), 5) + +// set nested properties +assert.equal(jsonpointer.set(obj, '/f/g/h/i', 6), undefined) +assert.equal(jsonpointer.get(obj, '/f/g/h/i'), 6) + +// set an array +assert.equal(jsonpointer.set(obj, '/f/g/h/foo/-', 'test'), undefined) +var arr = jsonpointer.get(obj, '/f/g/h/foo') +assert(Array.isArray(arr), 'set /- creates an array.') +assert.equal(arr[0], 'test') + +assert.equal(jsonpointer.get(obj, '/a'), 2) +assert.equal(jsonpointer.get(obj, '/b/c'), 3) +assert.equal(jsonpointer.get(obj, '/d/e/0/a'), 4) +assert.equal(jsonpointer.get(obj, '/d/e/1/b'), 5) 
+assert.equal(jsonpointer.get(obj, '/d/e/2/c'), 6) + +// can set `null` as a value +assert.equal(jsonpointer.set(obj, '/f/g/h/foo/0', null), 'test') +assert.strictEqual(jsonpointer.get(obj, '/f/g/h/foo/0'), null) +assert.equal(jsonpointer.set(obj, '/b/c', null), 3) +assert.strictEqual(jsonpointer.get(obj, '/b/c'), null) + +assert.equal(jsonpointer.get(obj, ''), obj) +assert.throws(function () { jsonpointer.get(obj, 'a') }, validateError) +assert.throws(function () { jsonpointer.get(obj, 'a/') }, validateError) + +// can unset values with `undefined` +jsonpointer.set(obj, '/a', undefined) +assert.strictEqual(jsonpointer.get(obj, '/a'), undefined) +jsonpointer.set(obj, '/d/e/1', undefined) +assert.strictEqual(jsonpointer.get(obj, '/d/e/1'), undefined) + +// returns `undefined` when path extends beyond any existing objects +assert.strictEqual(jsonpointer.get(obj, '/x/y/z'), undefined) + +function validateError (err) { + if ((err instanceof Error) && /Invalid JSON pointer/.test(err.message)) { + return true + } +} + +var complexKeys = { + 'a/b': { + c: 1 + }, + d: { + 'e/f': 2 + }, + '~1': 3, + '01': 4 +} + +assert.equal(jsonpointer.get(complexKeys, '/a~1b/c'), 1) +assert.equal(jsonpointer.get(complexKeys, '/d/e~1f'), 2) +assert.equal(jsonpointer.get(complexKeys, '/~01'), 3) +assert.equal(jsonpointer.get(complexKeys, '/01'), 4) +assert.equal(jsonpointer.get(complexKeys, '/a/b/c'), null) +assert.equal(jsonpointer.get(complexKeys, '/~1'), null) + +// draft-ietf-appsawg-json-pointer-08 has special array rules +var ary = [ 'zero', 'one', 'two' ] +assert.equal(jsonpointer.get(ary, '/01'), null) + +assert.equal(jsonpointer.set(ary, '/-', 'three'), null) +assert.equal(ary[3], 'three') + +// Examples from the draft: +var example = { + 'foo': ['bar', 'baz'], + '': 0, + 'a/b': 1, + 'c%d': 2, + 'e^f': 3, + 'g|h': 4, + 'i\\j': 5, + 'k\'l': 6, + ' ': 7, + 'm~n': 8 +} + +assert.equal(jsonpointer.get(example, ''), example) +var ans = jsonpointer.get(example, '/foo') +assert.equal(ans.length, 2) +assert.equal(ans[0], 'bar') +assert.equal(ans[1], 'baz') +assert.equal(jsonpointer.get(example, '/foo/0'), 'bar') +assert.equal(jsonpointer.get(example, '/'), 0) +assert.equal(jsonpointer.get(example, '/a~1b'), 1) +assert.equal(jsonpointer.get(example, '/c%d'), 2) +assert.equal(jsonpointer.get(example, '/e^f'), 3) +assert.equal(jsonpointer.get(example, '/g|h'), 4) +assert.equal(jsonpointer.get(example, '/i\\j'), 5) +assert.equal(jsonpointer.get(example, '/k\'l'), 6) +assert.equal(jsonpointer.get(example, '/ '), 7) +assert.equal(jsonpointer.get(example, '/m~0n'), 8) + +// jsonpointer.compile(path) +var a = {foo: 'bar'} +var pointer = jsonpointer.compile('/foo') +assert.equal(pointer.get(a), 'bar') +assert.equal(pointer.set(a, 'test'), 'bar') +assert.equal(pointer.get(a), 'test') +assert.deepEqual(a, {foo: 'test'}) + +console.log('All tests pass.') diff --git a/node_modules/jsprim/CHANGES.md b/node_modules/jsprim/CHANGES.md new file mode 100644 index 0000000..3e152ab --- /dev/null +++ b/node_modules/jsprim/CHANGES.md @@ -0,0 +1,39 @@ +# Changelog + +## not yet released + +None yet. 
+ +## v1.3.1 (2016-09-12) + +* #13 Incompatible with webpack + +## v1.3.0 (2016-06-22) + +* #14 add safer version of hasOwnProperty() +* #15 forEachKey() should ignore inherited properties + +## v1.2.2 (2015-10-15) + +* #11 NPM package shouldn't include any code that does `require('JSV')` +* #12 jsl.node.conf missing definition for "module" + +## v1.2.1 (2015-10-14) + +* #8 odd date parsing behaviour + +## v1.2.0 (2015-10-13) + +* #9 want function for returning RFC1123 dates + +## v1.1.0 (2015-09-02) + +* #6 a new suite of hrtime manipulation routines: `hrtimeAdd()`, + `hrtimeAccum()`, `hrtimeNanosec()`, `hrtimeMicrosec()` and + `hrtimeMillisec()`. + +## v1.0.0 (2015-09-01) + +First tracked release. Includes everything in previous releases, plus: + +* #4 want function for merging objects diff --git a/node_modules/jsprim/LICENSE b/node_modules/jsprim/LICENSE new file mode 100644 index 0000000..cbc0bb3 --- /dev/null +++ b/node_modules/jsprim/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Joyent, Inc. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/jsprim/README.md b/node_modules/jsprim/README.md new file mode 100644 index 0000000..7303642 --- /dev/null +++ b/node_modules/jsprim/README.md @@ -0,0 +1,237 @@ +# jsprim: utilities for primitive JavaScript types + +This module provides miscellaneous facilities for working with strings, +numbers, dates, and objects and arrays of these basic types. + + +### deepCopy(obj) + +Creates a deep copy of a primitive type, object, or array of primitive types. + + +### deepEqual(obj1, obj2) + +Returns whether two objects are equal. + + +### isEmpty(obj) + +Returns true if the given object has no properties and false otherwise. This +is O(1) (unlike `Object.keys(obj).length === 0`, which is O(N)). + +### hasKey(obj, key) + +Returns true if the given object has an enumerable, non-inherited property +called `key`. [For information on enumerability and ownership of properties, see +the MDN +documentation.](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Enumerability_and_ownership_of_properties) + +### forEachKey(obj, callback) + +Like Array.forEach, but iterates enumerable, owned properties of an object +rather than elements of an array. 
Equivalent to: + + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) { + callback(key, obj[key]); + } + } + + +### flattenObject(obj, depth) + +Flattens an object up to a given level of nesting, returning an array of arrays +of length "depth + 1", where the first "depth" elements correspond to flattened +columns and the last element contains the remaining object . For example: + + flattenObject({ + 'I': { + 'A': { + 'i': { + 'datum1': [ 1, 2 ], + 'datum2': [ 3, 4 ] + }, + 'ii': { + 'datum1': [ 3, 4 ] + } + }, + 'B': { + 'i': { + 'datum1': [ 5, 6 ] + }, + 'ii': { + 'datum1': [ 7, 8 ], + 'datum2': [ 3, 4 ], + }, + 'iii': { + } + } + }, + 'II': { + 'A': { + 'i': { + 'datum1': [ 1, 2 ], + 'datum2': [ 3, 4 ] + } + } + } + }, 3) + +becomes: + + [ + [ 'I', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ], + [ 'I', 'A', 'ii', { 'datum1': [ 3, 4 ] } ], + [ 'I', 'B', 'i', { 'datum1': [ 5, 6 ] } ], + [ 'I', 'B', 'ii', { 'datum1': [ 7, 8 ], 'datum2': [ 3, 4 ] } ], + [ 'I', 'B', 'iii', {} ], + [ 'II', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ] + ] + +This function is strict: "depth" must be a non-negative integer and "obj" must +be a non-null object with at least "depth" levels of nesting under all keys. + + +### flattenIter(obj, depth, func) + +This is similar to `flattenObject` except that instead of returning an array, +this function invokes `func(entry)` for each `entry` in the array that +`flattenObject` would return. `flattenIter(obj, depth, func)` is logically +equivalent to `flattenObject(obj, depth).forEach(func)`. Importantly, this +version never constructs the full array. Its memory usage is O(depth) rather +than O(n) (where `n` is the number of flattened elements). + +There's another difference between `flattenObject` and `flattenIter` that's +related to the special case where `depth === 0`. In this case, `flattenObject` +omits the array wrapping `obj` (which is regrettable). + + +### pluck(obj, key) + +Fetch nested property "key" from object "obj", traversing objects as needed. +For example, `pluck(obj, "foo.bar.baz")` is roughly equivalent to +`obj.foo.bar.baz`, except that: + +1. If traversal fails, the resulting value is undefined, and no error is + thrown. For example, `pluck({}, "foo.bar")` is just undefined. +2. If "obj" has property "key" directly (without traversing), the + corresponding property is returned. For example, + `pluck({ 'foo.bar': 1 }, 'foo.bar')` is 1, not undefined. This is also + true recursively, so `pluck({ 'a': { 'foo.bar': 1 } }, 'a.foo.bar')` is + also 1, not undefined. + + +### randElt(array) + +Returns an element from "array" selected uniformly at random. If "array" is +empty, throws an Error. + + +### startsWith(str, prefix) + +Returns true if the given string starts with the given prefix and false +otherwise. + + +### endsWith(str, suffix) + +Returns true if the given string ends with the given suffix and false +otherwise. + + +### iso8601(date) + +Converts a Date object to an ISO8601 date string of the form +"YYYY-MM-DDTHH:MM:SS.sssZ". This format is not customizable. + + +### parseDateTime(str) + +Parses a date expressed as a string, as either a number of milliseconds since +the epoch or any string format that Date accepts, giving preference to the +former where these two sets overlap (e.g., strings containing small numbers). + + +### hrtimeDiff(timeA, timeB) + +Given two hrtime readings (as from Node's `process.hrtime()`), where timeA is +later than timeB, compute the difference and return that as an hrtime. 
It is +illegal to invoke this for a pair of times where timeB is newer than timeA. + +### hrtimeAdd(timeA, timeB) + +Add two hrtime intervals (as from Node's `process.hrtime()`), returning a new +hrtime interval array. This function does not modify either input argument. + + +### hrtimeAccum(timeA, timeB) + +Add two hrtime intervals (as from Node's `process.hrtime()`), storing the +result in `timeA`. This function overwrites (and returns) the first argument +passed in. + + +### hrtimeNanosec(timeA), hrtimeMicrosec(timeA), hrtimeMillisec(timeA) + +This suite of functions converts a hrtime interval (as from Node's +`process.hrtime()`) into a scalar number of nanoseconds, microseconds or +milliseconds. Results are truncated, as with `Math.floor()`. + + +### validateJsonObject(schema, object) + +Uses JSON validation (via JSV) to validate the given object against the given +schema. On success, returns null. On failure, *returns* (does not throw) a +useful Error object. + + +### extraProperties(object, allowed) + +Check an object for unexpected properties. Accepts the object to check, and an +array of allowed property name strings. If extra properties are detected, an +array of extra property names is returned. If no properties other than those +in the allowed list are present on the object, the returned array will be of +zero length. + +### mergeObjects(provided, overrides, defaults) + +Merge properties from objects "provided", "overrides", and "defaults". The +intended use case is for functions that accept named arguments in an "args" +object, but want to provide some default values and override other values. In +that case, "provided" is what the caller specified, "overrides" are what the +function wants to override, and "defaults" contains default values. + +The function starts with the values in "defaults", overrides them with the +values in "provided", and then overrides those with the values in "overrides". +For convenience, any of these objects may be falsey, in which case they will be +ignored. The input objects are never modified, but properties in the returned +object are not deep-copied. + +For example: + + mergeObjects(undefined, { 'objectMode': true }, { 'highWaterMark': 0 }) + +returns: + + { 'objectMode': true, 'highWaterMark': 0 } + +For another example: + + mergeObjects( + { 'highWaterMark': 16, 'objectMode': 7 }, /* from caller */ + { 'objectMode': true }, /* overrides */ + { 'highWaterMark': 0 }); /* default */ + +returns: + + { 'objectMode': true, 'highWaterMark': 16 } + + +# Contributing + +Code should be "make check" clean. This target assumes that +[jsl](http://github.com/davepacheco/javascriptlint) and +[jsstyle](http://github.com/davepacheco/jsstyle) are on your path. + +New tests should generally accompany new functions and bug fixes. The tests +should pass cleanly (run tests/basic.js). 
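As a quick illustration of the helpers described above, the sketch below times a placeholder workload with the hrtime functions and then uses `pluck()` and `extraProperties()` on a nested options object (the object and the loop are assumptions made for the example, not part of jsprim):

```js
var mod_jsprim = require('jsprim');

// Time a placeholder workload with the hrtime helpers.
var start = process.hrtime();
for (var i = 0; i < 1e6; i++) { /* placeholder work */ }
var elapsed = mod_jsprim.hrtimeDiff(process.hrtime(), start);
console.log('elapsed: %d ms', mod_jsprim.hrtimeMillisec(elapsed));

// pluck() traverses nested properties and returns undefined instead of throwing.
var args = { 'agent': { 'options': { 'port': 8080 } } };
console.log(mod_jsprim.pluck(args, 'agent.options.port'));   // 8080
console.log(mod_jsprim.pluck(args, 'agent.options.host'));   // undefined

// extraProperties() reports keys outside an allowed list (empty array here).
console.log(mod_jsprim.extraProperties(args.agent.options, [ 'port', 'host' ]));
```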
diff --git a/node_modules/jsprim/lib/jsprim.js b/node_modules/jsprim/lib/jsprim.js new file mode 100644 index 0000000..26c3ba1 --- /dev/null +++ b/node_modules/jsprim/lib/jsprim.js @@ -0,0 +1,488 @@ +/* + * lib/jsprim.js: utilities for primitive JavaScript types + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_extsprintf = require('extsprintf'); +var mod_verror = require('verror'); +var mod_jsonschema = require('json-schema'); + +/* + * Public interface + */ +exports.deepCopy = deepCopy; +exports.deepEqual = deepEqual; +exports.isEmpty = isEmpty; +exports.hasKey = hasKey; +exports.forEachKey = forEachKey; +exports.pluck = pluck; +exports.flattenObject = flattenObject; +exports.flattenIter = flattenIter; +exports.validateJsonObject = validateJsonObjectJS; +exports.validateJsonObjectJS = validateJsonObjectJS; +exports.randElt = randElt; +exports.extraProperties = extraProperties; +exports.mergeObjects = mergeObjects; + +exports.startsWith = startsWith; +exports.endsWith = endsWith; + +exports.iso8601 = iso8601; +exports.rfc1123 = rfc1123; +exports.parseDateTime = parseDateTime; + +exports.hrtimediff = hrtimeDiff; +exports.hrtimeDiff = hrtimeDiff; +exports.hrtimeAccum = hrtimeAccum; +exports.hrtimeAdd = hrtimeAdd; +exports.hrtimeNanosec = hrtimeNanosec; +exports.hrtimeMicrosec = hrtimeMicrosec; +exports.hrtimeMillisec = hrtimeMillisec; + + +/* + * Deep copy an acyclic *basic* Javascript object. This only handles basic + * scalars (strings, numbers, booleans) and arbitrarily deep arrays and objects + * containing these. This does *not* handle instances of other classes. + */ +function deepCopy(obj) +{ + var ret, key; + var marker = '__deepCopy'; + + if (obj && obj[marker]) + throw (new Error('attempted deep copy of cyclic object')); + + if (obj && obj.constructor == Object) { + ret = {}; + obj[marker] = true; + + for (key in obj) { + if (key == marker) + continue; + + ret[key] = deepCopy(obj[key]); + } + + delete (obj[marker]); + return (ret); + } + + if (obj && obj.constructor == Array) { + ret = []; + obj[marker] = true; + + for (key = 0; key < obj.length; key++) + ret.push(deepCopy(obj[key])); + + delete (obj[marker]); + return (ret); + } + + /* + * It must be a primitive type -- just return it. 
+ */ + return (obj); +} + +function deepEqual(obj1, obj2) +{ + if (typeof (obj1) != typeof (obj2)) + return (false); + + if (obj1 === null || obj2 === null || typeof (obj1) != 'object') + return (obj1 === obj2); + + if (obj1.constructor != obj2.constructor) + return (false); + + var k; + for (k in obj1) { + if (!obj2.hasOwnProperty(k)) + return (false); + + if (!deepEqual(obj1[k], obj2[k])) + return (false); + } + + for (k in obj2) { + if (!obj1.hasOwnProperty(k)) + return (false); + } + + return (true); +} + +function isEmpty(obj) +{ + var key; + for (key in obj) + return (false); + return (true); +} + +function hasKey(obj, key) +{ + mod_assert.equal(typeof (key), 'string'); + return (Object.prototype.hasOwnProperty.call(obj, key)); +} + +function forEachKey(obj, callback) +{ + for (var key in obj) { + if (hasKey(obj, key)) { + callback(key, obj[key]); + } + } +} + +function pluck(obj, key) +{ + mod_assert.equal(typeof (key), 'string'); + return (pluckv(obj, key)); +} + +function pluckv(obj, key) +{ + if (obj === null || typeof (obj) !== 'object') + return (undefined); + + if (obj.hasOwnProperty(key)) + return (obj[key]); + + var i = key.indexOf('.'); + if (i == -1) + return (undefined); + + var key1 = key.substr(0, i); + if (!obj.hasOwnProperty(key1)) + return (undefined); + + return (pluckv(obj[key1], key.substr(i + 1))); +} + +/* + * Invoke callback(row) for each entry in the array that would be returned by + * flattenObject(data, depth). This is just like flattenObject(data, + * depth).forEach(callback), except that the intermediate array is never + * created. + */ +function flattenIter(data, depth, callback) +{ + doFlattenIter(data, depth, [], callback); +} + +function doFlattenIter(data, depth, accum, callback) +{ + var each; + var key; + + if (depth === 0) { + each = accum.slice(0); + each.push(data); + callback(each); + return; + } + + mod_assert.ok(data !== null); + mod_assert.equal(typeof (data), 'object'); + mod_assert.equal(typeof (depth), 'number'); + mod_assert.ok(depth >= 0); + + for (key in data) { + each = accum.slice(0); + each.push(key); + doFlattenIter(data[key], depth - 1, each, callback); + } +} + +function flattenObject(data, depth) +{ + if (depth === 0) + return ([ data ]); + + mod_assert.ok(data !== null); + mod_assert.equal(typeof (data), 'object'); + mod_assert.equal(typeof (depth), 'number'); + mod_assert.ok(depth >= 0); + + var rv = []; + var key; + + for (key in data) { + flattenObject(data[key], depth - 1).forEach(function (p) { + rv.push([ key ].concat(p)); + }); + } + + return (rv); +} + +function startsWith(str, prefix) +{ + return (str.substr(0, prefix.length) == prefix); +} + +function endsWith(str, suffix) +{ + return (str.substr( + str.length - suffix.length, suffix.length) == suffix); +} + +function iso8601(d) +{ + if (typeof (d) == 'number') + d = new Date(d); + mod_assert.ok(d.constructor === Date); + return (mod_extsprintf.sprintf('%4d-%02d-%02dT%02d:%02d:%02d.%03dZ', + d.getUTCFullYear(), d.getUTCMonth() + 1, d.getUTCDate(), + d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(), + d.getUTCMilliseconds())); +} + +var RFC1123_MONTHS = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']; +var RFC1123_DAYS = [ + 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; + +function rfc1123(date) { + return (mod_extsprintf.sprintf('%s, %02d %s %04d %02d:%02d:%02d GMT', + RFC1123_DAYS[date.getUTCDay()], date.getUTCDate(), + RFC1123_MONTHS[date.getUTCMonth()], date.getUTCFullYear(), + date.getUTCHours(), 
date.getUTCMinutes(), + date.getUTCSeconds())); +} + +/* + * Parses a date expressed as a string, as either a number of milliseconds since + * the epoch or any string format that Date accepts, giving preference to the + * former where these two sets overlap (e.g., small numbers). + */ +function parseDateTime(str) +{ + /* + * This is irritatingly implicit, but significantly more concise than + * alternatives. The "+str" will convert a string containing only a + * number directly to a Number, or NaN for other strings. Thus, if the + * conversion succeeds, we use it (this is the milliseconds-since-epoch + * case). Otherwise, we pass the string directly to the Date + * constructor to parse. + */ + var numeric = +str; + if (!isNaN(numeric)) { + return (new Date(numeric)); + } else { + return (new Date(str)); + } +} + +function validateJsonObjectJS(schema, input) +{ + var report = mod_jsonschema.validate(input, schema); + + if (report.errors.length === 0) + return (null); + + /* Currently, we only do anything useful with the first error. */ + var error = report.errors[0]; + + /* The failed property is given by a URI with an irrelevant prefix. */ + var propname = error['property']; + var reason = error['message'].toLowerCase(); + var i, j; + + /* + * There's at least one case where the property error message is + * confusing at best. We work around this here. + */ + if ((i = reason.indexOf('the property ')) != -1 && + (j = reason.indexOf(' is not defined in the schema and the ' + + 'schema does not allow additional properties')) != -1) { + i += 'the property '.length; + if (propname === '') + propname = reason.substr(i, j - i); + else + propname = propname + '.' + reason.substr(i, j - i); + + reason = 'unsupported property'; + } + + var rv = new mod_verror.VError('property "%s": %s', propname, reason); + rv.jsv_details = error; + return (rv); +} + +function randElt(arr) +{ + mod_assert.ok(Array.isArray(arr) && arr.length > 0, + 'randElt argument must be a non-empty array'); + + return (arr[Math.floor(Math.random() * arr.length)]); +} + +function assertHrtime(a) +{ + mod_assert.ok(a[0] >= 0 && a[1] >= 0, + 'negative numbers not allowed in hrtimes'); + mod_assert.ok(a[1] < 1e9, 'nanoseconds column overflow'); +} + +/* + * Compute the time elapsed between hrtime readings A and B, where A is later + * than B. hrtime readings come from Node's process.hrtime(). There is no + * defined way to represent negative deltas, so it's illegal to diff B from A + * where the time denoted by B is later than the time denoted by A. If this + * becomes valuable, we can define a representation and extend the + * implementation to support it. + */ +function hrtimeDiff(a, b) +{ + assertHrtime(a); + assertHrtime(b); + mod_assert.ok(a[0] > b[0] || (a[0] == b[0] && a[1] >= b[1]), + 'negative differences not allowed'); + + var rv = [ a[0] - b[0], 0 ]; + + if (a[1] >= b[1]) { + rv[1] = a[1] - b[1]; + } else { + rv[0]--; + rv[1] = 1e9 - (b[1] - a[1]); + } + + return (rv); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of nanoseconds. + */ +function hrtimeNanosec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e9 + a[1])); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of microseconds. 
+ */ +function hrtimeMicrosec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e6 + a[1] / 1e3)); +} + +/* + * Convert a hrtime reading from the array format returned by Node's + * process.hrtime() into a scalar number of milliseconds. + */ +function hrtimeMillisec(a) +{ + assertHrtime(a); + + return (Math.floor(a[0] * 1e3 + a[1] / 1e6)); +} + +/* + * Add two hrtime readings A and B, overwriting A with the result of the + * addition. This function is useful for accumulating several hrtime intervals + * into a counter. Returns A. + */ +function hrtimeAccum(a, b) +{ + assertHrtime(a); + assertHrtime(b); + + /* + * Accumulate the nanosecond component. + */ + a[1] += b[1]; + if (a[1] >= 1e9) { + /* + * The nanosecond component overflowed, so carry to the seconds + * field. + */ + a[0]++; + a[1] -= 1e9; + } + + /* + * Accumulate the seconds component. + */ + a[0] += b[0]; + + return (a); +} + +/* + * Add two hrtime readings A and B, returning the result as a new hrtime array. + * Does not modify either input argument. + */ +function hrtimeAdd(a, b) +{ + assertHrtime(a); + + var rv = [ a[0], a[1] ]; + + return (hrtimeAccum(rv, b)); +} + + +/* + * Check an object for unexpected properties. Accepts the object to check, and + * an array of allowed property names (strings). Returns an array of key names + * that were found on the object, but did not appear in the list of allowed + * properties. If no properties were found, the returned array will be of + * zero length. + */ +function extraProperties(obj, allowed) +{ + mod_assert.ok(typeof (obj) === 'object' && obj !== null, + 'obj argument must be a non-null object'); + mod_assert.ok(Array.isArray(allowed), + 'allowed argument must be an array of strings'); + for (var i = 0; i < allowed.length; i++) { + mod_assert.ok(typeof (allowed[i]) === 'string', + 'allowed argument must be an array of strings'); + } + + return (Object.keys(obj).filter(function (key) { + return (allowed.indexOf(key) === -1); + })); +} + +/* + * Given three sets of properties "provided" (may be undefined), "overrides" + * (required), and "defaults" (may be undefined), construct an object containing + * the union of these sets with "overrides" overriding "provided", and + * "provided" overriding "defaults". None of the input objects are modified. + */ +function mergeObjects(provided, overrides, defaults) +{ + var rv, k; + + rv = {}; + if (defaults) { + for (k in defaults) + rv[k] = defaults[k]; + } + + if (provided) { + for (k in provided) + rv[k] = provided[k]; + } + + if (overrides) { + for (k in overrides) + rv[k] = overrides[k]; + } + + return (rv); +} diff --git a/node_modules/jsprim/package.json b/node_modules/jsprim/package.json new file mode 100644 index 0000000..8874a87 --- /dev/null +++ b/node_modules/jsprim/package.json @@ -0,0 +1,19 @@ +{ + "name": "jsprim", + "version": "1.3.1", + "description": "utilities for primitive JavaScript types", + "main": "./lib/jsprim.js", + "repository": { + "type": "git", + "url": "git://github.com/davepacheco/node-jsprim.git" + }, + "dependencies": { + "extsprintf": "1.0.2", + "json-schema": "0.2.3", + "verror": "1.3.6" + }, + "engines": [ + "node >=0.6.0" + ], + "license": "MIT" +} diff --git a/node_modules/kind-of/LICENSE b/node_modules/kind-of/LICENSE new file mode 100644 index 0000000..39245ac --- /dev/null +++ b/node_modules/kind-of/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/kind-of/README.md b/node_modules/kind-of/README.md new file mode 100644 index 0000000..4ac4f18 --- /dev/null +++ b/node_modules/kind-of/README.md @@ -0,0 +1,247 @@ +# kind-of [![NPM version](https://img.shields.io/npm/v/kind-of.svg?style=flat)](https://www.npmjs.com/package/kind-of) [![NPM downloads](https://img.shields.io/npm/dm/kind-of.svg?style=flat)](https://npmjs.org/package/kind-of) [![Build Status](https://img.shields.io/travis/jonschlinkert/kind-of.svg?style=flat)](https://travis-ci.org/jonschlinkert/kind-of) + +Get the native type of a value. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save kind-of +``` + +Install with [bower](http://bower.io/) + +```sh +$ bower install kind-of --save +``` + +## Usage + +> es5, browser and es6 ready + +```js +var kindOf = require('kind-of'); + +kindOf(undefined); +//=> 'undefined' + +kindOf(null); +//=> 'null' + +kindOf(true); +//=> 'boolean' + +kindOf(false); +//=> 'boolean' + +kindOf(new Boolean(true)); +//=> 'boolean' + +kindOf(new Buffer('')); +//=> 'buffer' + +kindOf(42); +//=> 'number' + +kindOf(new Number(42)); +//=> 'number' + +kindOf('str'); +//=> 'string' + +kindOf(new String('str')); +//=> 'string' + +kindOf(arguments); +//=> 'arguments' + +kindOf({}); +//=> 'object' + +kindOf(Object.create(null)); +//=> 'object' + +kindOf(new Test()); +//=> 'object' + +kindOf(new Date()); +//=> 'date' + +kindOf([]); +//=> 'array' + +kindOf([1, 2, 3]); +//=> 'array' + +kindOf(new Array()); +//=> 'array' + +kindOf(/foo/); +//=> 'regexp' + +kindOf(new RegExp('foo')); +//=> 'regexp' + +kindOf(function () {}); +//=> 'function' + +kindOf(function * () {}); +//=> 'function' + +kindOf(new Function()); +//=> 'function' + +kindOf(new Map()); +//=> 'map' + +kindOf(new WeakMap()); +//=> 'weakmap' + +kindOf(new Set()); +//=> 'set' + +kindOf(new WeakSet()); +//=> 'weakset' + +kindOf(Symbol('str')); +//=> 'symbol' + +kindOf(new Int8Array()); +//=> 'int8array' + +kindOf(new Uint8Array()); +//=> 'uint8array' + +kindOf(new Uint8ClampedArray()); +//=> 'uint8clampedarray' + +kindOf(new Int16Array()); +//=> 'int16array' + +kindOf(new Uint16Array()); +//=> 'uint16array' + +kindOf(new Int32Array()); +//=> 'int32array' + +kindOf(new Uint32Array()); +//=> 'uint32array' + +kindOf(new Float32Array()); +//=> 'float32array' + +kindOf(new Float64Array()); +//=> 'float64array' +``` + +## Benchmarks + +Benchmarked against [typeof](http://github.com/CodingFu/typeof) and 
[type-of](https://github.com/ForbesLindesay/type-of). +Note that performaces is slower for es6 features `Map`, `WeakMap`, `Set` and `WeakSet`. + +```bash +#1: array + current x 23,329,397 ops/sec ±0.82% (94 runs sampled) + lib-type-of x 4,170,273 ops/sec ±0.55% (94 runs sampled) + lib-typeof x 9,686,935 ops/sec ±0.59% (98 runs sampled) + +#2: boolean + current x 27,197,115 ops/sec ±0.85% (94 runs sampled) + lib-type-of x 3,145,791 ops/sec ±0.73% (97 runs sampled) + lib-typeof x 9,199,562 ops/sec ±0.44% (99 runs sampled) + +#3: date + current x 20,190,117 ops/sec ±0.86% (92 runs sampled) + lib-type-of x 5,166,970 ops/sec ±0.74% (94 runs sampled) + lib-typeof x 9,610,821 ops/sec ±0.50% (96 runs sampled) + +#4: function + current x 23,855,460 ops/sec ±0.60% (97 runs sampled) + lib-type-of x 5,667,740 ops/sec ±0.54% (100 runs sampled) + lib-typeof x 10,010,644 ops/sec ±0.44% (100 runs sampled) + +#5: null + current x 27,061,047 ops/sec ±0.97% (96 runs sampled) + lib-type-of x 13,965,573 ops/sec ±0.62% (97 runs sampled) + lib-typeof x 8,460,194 ops/sec ±0.61% (97 runs sampled) + +#6: number + current x 25,075,682 ops/sec ±0.53% (99 runs sampled) + lib-type-of x 2,266,405 ops/sec ±0.41% (98 runs sampled) + lib-typeof x 9,821,481 ops/sec ±0.45% (99 runs sampled) + +#7: object + current x 3,348,980 ops/sec ±0.49% (99 runs sampled) + lib-type-of x 3,245,138 ops/sec ±0.60% (94 runs sampled) + lib-typeof x 9,262,952 ops/sec ±0.59% (99 runs sampled) + +#8: regex + current x 21,284,827 ops/sec ±0.72% (96 runs sampled) + lib-type-of x 4,689,241 ops/sec ±0.43% (100 runs sampled) + lib-typeof x 8,957,593 ops/sec ±0.62% (98 runs sampled) + +#9: string + current x 25,379,234 ops/sec ±0.58% (96 runs sampled) + lib-type-of x 3,635,148 ops/sec ±0.76% (93 runs sampled) + lib-typeof x 9,494,134 ops/sec ±0.49% (98 runs sampled) + +#10: undef + current x 27,459,221 ops/sec ±1.01% (93 runs sampled) + lib-type-of x 14,360,433 ops/sec ±0.52% (99 runs sampled) + lib-typeof x 23,202,868 ops/sec ±0.59% (94 runs sampled) +``` + +## Optimizations + +In 7 out of 8 cases, this library is 2x-10x faster than other top libraries included in the benchmarks. There are a few things that lead to this performance advantage, none of them hard and fast rules, but all of them simple and repeatable in almost any code library: + +1. Optimize around the fastest and most common use cases first. Of course, this will change from project-to-project, but I took some time to understand how and why `typeof` checks were being used in my own libraries and other libraries I use a lot. +2. Optimize around bottlenecks - In other words, the order in which conditionals are implemented is significant, because each check is only as fast as the failing checks that came before it. Here, the biggest bottleneck by far is checking for plain objects (an object that was created by the `Object` constructor). I opted to make this check happen by process of elimination rather than brute force up front (e.g. by using something like `val.constructor.name`), so that every other type check would not be penalized it. +3. Don't do uneccessary processing - why do `.slice(8, -1).toLowerCase();` just to get the word `regex`? 
It's much faster to do `if (type === '[object RegExp]') return 'regex'` + +## About + +### Related projects + +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [is-number](https://www.npmjs.com/package/is-number): Returns true if the value is a number. comprehensive tests. | [homepage](https://github.com/jonschlinkert/is-number "Returns true if the value is a number. comprehensive tests.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/kind-of/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.28, on July 29, 2016._ \ No newline at end of file diff --git a/node_modules/kind-of/index.js b/node_modules/kind-of/index.js new file mode 100644 index 0000000..87938c9 --- /dev/null +++ b/node_modules/kind-of/index.js @@ -0,0 +1,113 @@ +var isBuffer = require('is-buffer'); +var toString = Object.prototype.toString; + +/** + * Get the native `typeof` a value. 
+ * + * @param {*} `val` + * @return {*} Native javascript type + */ + +module.exports = function kindOf(val) { + // primitivies + if (typeof val === 'undefined') { + return 'undefined'; + } + if (val === null) { + return 'null'; + } + if (val === true || val === false || val instanceof Boolean) { + return 'boolean'; + } + if (typeof val === 'string' || val instanceof String) { + return 'string'; + } + if (typeof val === 'number' || val instanceof Number) { + return 'number'; + } + + // functions + if (typeof val === 'function' || val instanceof Function) { + return 'function'; + } + + // array + if (typeof Array.isArray !== 'undefined' && Array.isArray(val)) { + return 'array'; + } + + // check for instances of RegExp and Date before calling `toString` + if (val instanceof RegExp) { + return 'regexp'; + } + if (val instanceof Date) { + return 'date'; + } + + // other objects + var type = toString.call(val); + + if (type === '[object RegExp]') { + return 'regexp'; + } + if (type === '[object Date]') { + return 'date'; + } + if (type === '[object Arguments]') { + return 'arguments'; + } + + // buffer + if (typeof Buffer !== 'undefined' && isBuffer(val)) { + return 'buffer'; + } + + // es6: Map, WeakMap, Set, WeakSet + if (type === '[object Set]') { + return 'set'; + } + if (type === '[object WeakSet]') { + return 'weakset'; + } + if (type === '[object Map]') { + return 'map'; + } + if (type === '[object WeakMap]') { + return 'weakmap'; + } + if (type === '[object Symbol]') { + return 'symbol'; + } + + // typed arrays + if (type === '[object Int8Array]') { + return 'int8array'; + } + if (type === '[object Uint8Array]') { + return 'uint8array'; + } + if (type === '[object Uint8ClampedArray]') { + return 'uint8clampedarray'; + } + if (type === '[object Int16Array]') { + return 'int16array'; + } + if (type === '[object Uint16Array]') { + return 'uint16array'; + } + if (type === '[object Int32Array]') { + return 'int32array'; + } + if (type === '[object Uint32Array]') { + return 'uint32array'; + } + if (type === '[object Float32Array]') { + return 'float32array'; + } + if (type === '[object Float64Array]') { + return 'float64array'; + } + + // must be a plain object + return 'object'; +}; diff --git a/node_modules/kind-of/package.json b/node_modules/kind-of/package.json new file mode 100644 index 0000000..e649719 --- /dev/null +++ b/node_modules/kind-of/package.json @@ -0,0 +1,84 @@ +{ + "name": "kind-of", + "description": "Get the native type of a value.", + "version": "3.0.4", + "homepage": "https://github.com/jonschlinkert/kind-of", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/kind-of", + "bugs": { + "url": "https://github.com/jonschlinkert/kind-of/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "LICENSE", + "README.md" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js -s index --bare" + }, + "dependencies": { + "is-buffer": "^1.0.2" + }, + "devDependencies": { + "ansi-bold": "^0.1.1", + "benchmarked": "^0.2.5", + "browserify": "^13.1.0", + "glob": "^7.0.5", + "gulp-format-md": "^0.1.9", + "mocha": "^2.5.3", + "type-of": "^2.0.1", + "typeof": "^1.0.0" + }, + "keywords": [ + "arguments", + "array", + "boolean", + "check", + "date", + "function", + "is", + "is-type", + "is-type-of", + "kind", + "kind-of", + "number", + "object", + "of", + "regexp", + "string", + "test", + "type", + "type-of", + "typeof", + "types" + ], + 
"verb": { + "related": { + "list": [ + "is-glob", + "is-number", + "is-primitive" + ] + }, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/klaw/.npmignore b/node_modules/klaw/.npmignore new file mode 100644 index 0000000..2db813b --- /dev/null +++ b/node_modules/klaw/.npmignore @@ -0,0 +1,3 @@ +tests/ +appveyor.yml +.travis.yml diff --git a/node_modules/klaw/CHANGELOG.md b/node_modules/klaw/CHANGELOG.md new file mode 100644 index 0000000..83410c7 --- /dev/null +++ b/node_modules/klaw/CHANGELOG.md @@ -0,0 +1,42 @@ +1.3.1 / 2016-10-25 +------------------ +### Added +- `graceful-fs` added as an `optionalDependencies`. Thanks [ryanzim]! + +1.3.0 / 2016-06-09 +------------------ +### Added +- `filter` option to pre-filter and not walk directories. + +1.2.0 / 2016-04-16 +------------------ +- added support for custom `fs` implementation. Useful for https://github.com/tschaub/mock-fs + +1.1.3 / 2015-12-23 +------------------ +- bugfix: if `readdir` error, got hung up. See: https://github.com/jprichardson/node-klaw/issues/1 + +1.1.2 / 2015-11-12 +------------------ +- assert that param `dir` is a `string` + +1.1.1 / 2015-10-25 +------------------ +- bug fix, options not being passed + +1.1.0 / 2015-10-25 +------------------ +- added `queueMethod` and `pathSorter` to `options` to affect searching strategy. + +1.0.0 / 2015-10-25 +------------------ +- removed unused `filter` param +- bugfix: always set `streamOptions` to `objectMode` +- simplified, converted from push mode (streams 1) to proper pull mode (streams 3) + +0.1.0 / 2015-10-25 +------------------ +- initial release + + +[ryanzim]: https://github.com/ryanzim diff --git a/node_modules/klaw/LICENSE b/node_modules/klaw/LICENSE new file mode 100644 index 0000000..ddb217c --- /dev/null +++ b/node_modules/klaw/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2015-2016 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/klaw/README.md b/node_modules/klaw/README.md new file mode 100644 index 0000000..78386e4 --- /dev/null +++ b/node_modules/klaw/README.md @@ -0,0 +1,270 @@ +Node.js - klaw +============== + +A Node.js file system walker extracted from [fs-extra](https://github.com/jprichardson/node-fs-extra). 
+ +[![npm Package](https://img.shields.io/npm/v/klaw.svg?style=flat-square)](https://www.npmjs.org/package/klaw) +[![build status](https://api.travis-ci.org/jprichardson/node-klaw.svg)](http://travis-ci.org/jprichardson/node-klaw) +[![windows build status](https://ci.appveyor.com/api/projects/status/github/jprichardson/node-klaw?branch=master&svg=true)](https://ci.appveyor.com/project/jprichardson/node-klaw/branch/master) + + +Standard + +Install +------- + + npm i --save klaw + + +Name +---- + +`klaw` is `walk` backwards :p + + +Usage +----- + +### klaw(directory, [options]) + +Returns a [Readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) that iterates +through every file and directory starting with `dir` as the root. Every `read()` or `data` event +returns an object with two properties: `path` and `stats`. `path` is the full path of the file and +`stats` is an instance of [fs.Stats](https://nodejs.org/api/fs.html#fs_class_fs_stats). + +- `directory`: The directory to recursively walk. Type `string`. +- `options`: [Readable stream options](https://nodejs.org/api/stream.html#stream_new_stream_readable_options) and +the following: + - `queueMethod` (`string`, default: `'shift'`): Either `'shift'` or `'pop'`. On `readdir()` array, call either `shift()` or `pop()`. + - `pathSorter` (`function`, default: `undefined`): Sorting [function for Arrays](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort). + - `fs` (`object`, default: `require('fs')`): Use this to hook into the `fs` methods or to use [`mock-fs`](https://github.com/tschaub/mock-fs) + - `filter` (`function`, default: `undefined`): Filtering [function for Arrays](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter) + +**Streams 1 (push) example:** + +```js +var klaw = require('klaw') + +var items = [] // files, directories, symlinks, etc +klaw('/some/dir') + .on('data', function (item) { + items.push(item.path) + }) + .on('end', function () { + console.dir(items) // => [ ... array of files] + }) +``` + +**Streams 2 & 3 (pull) example:** + +```js +var klaw = require('klaw') + +var items = [] // files, directories, symlinks, etc +klaw('/some/dir') + .on('readable', function () { + var item + while ((item = this.read())) { + items.push(item.path) + } + }) + .on('end', function () { + console.dir(items) // => [ ... array of files] + }) +``` + +If you're not sure of the differences on Node.js streams 1, 2, 3 then I'd +recommend this resource as a good starting point: https://strongloop.com/strongblog/whats-new-io-js-beta-streams3/. + + +### Error Handling + +Listen for the `error` event. + +Example: + +```js +var klaw = require('klaw') +klaw('/some/dir') + .on('readable', function () { + var item + while ((item = this.read())) { + // do something with the file + } + }) + .on('error', function (err, item) { + console.log(err.message) + console.log(item.path) // the file the error occurred on + }) + .on('end', function () { + console.dir(items) // => [ ... array of files] + }) + +``` + + +### Aggregation / Filtering / Executing Actions (Through Streams) + +On many occasions you may want to filter files based upon size, extension, etc. +Or you may want to aggregate stats on certain file types. Or maybe you want to +perform an action on certain file types. + +You should use the module [`through2`](https://www.npmjs.com/package/through2) to easily +accomplish this. 
+ +Install `through2`: + + npm i --save through2 + + +**Example (skipping directories):** + +```js +var klaw = require('klaw') +var through2 = require('through2') + +var excludeDirFilter = through2.obj(function (item, enc, next) { + if (!item.stats.isDirectory()) this.push(item) + next() +}) + +var items = [] // files, directories, symlinks, etc +klaw('/some/dir') + .pipe(excludeDirFilter) + .on('data', function (item) { + items.push(item.path) + }) + .on('end', function () { + console.dir(items) // => [ ... array of files without directories] + }) + +``` +**Example (ignore hidden directories):** +```js +var klaw = require('klaw') +var path = require('path') + +var filterFunc = function(item){ + var basename = path.basename(item) + return basename === '.' || basename[0] !== '.' +} + +klaw('/some/dir', { filter : filterFunc }) + .on('data', function(item){ + // only items of none hidden folders will reach here + }) + +``` + +**Example (totaling size of PNG files):** + +```js +var klaw = require('klaw') +var path = require('path') +var through2 = require('through2') + +var totalPngsInBytes = 0 +var aggregatePngSize = through2.obj(function (item, enc, next) { + if (path.extname(item.path) === '.png') { + totalPngsInBytes += item.stats.size + } + this.push(item) + next() +}) + +klaw('/some/dir') + .pipe(aggregatePngSize) + .on('data', function (item) { + items.push(item.path) + }) + .on('end', function () { + console.dir(totalPngsInBytes) // => total of all pngs (bytes) + }) +``` + + +**Example (deleting all .tmp files):** + +```js +var fs = require('fs') +var klaw = require('klaw') +var through2 = require('through2') + +var deleteAction = through2.obj(function (item, enc, next) { + this.push(item) + + if (path.extname(item.path) === '.tmp') { + item.deleted = true + fs.unklink(item.path, next) + } else { + item.deleted = false + next() + } +}) + +var deletedFiles = [] +klaw('/some/dir') + .pipe(deleteAction) + .on('data', function (item) { + if (!item.deleted) return + deletedFiles.push(item.path) + }) + .on('end', function () { + console.dir(deletedFiles) // => all deleted files + }) +``` + +You can even chain a bunch of these filters and aggregators together. By using +multiple pipes. + +**Example (using multiple filters / aggregators):** + +```js +klaw('/some/dir') + .pipe(filterCertainFiles) + .pipe(deleteSomeOtherFiles) + .on('end', function () { + console.log('all done!') + }) +``` + +**Example passing (piping) through errors:** + +Node.js does not `pipe()` errors. This means that the error on one stream, like +`klaw` will not pipe through to the next. If you want to do this, do the following: + +```js +var klaw = require('klaw') +var through2 = require('through2') + +var excludeDirFilter = through2.obj(function (item, enc, next) { + if (!item.stats.isDirectory()) this.push(item) + next() +}) + +var items = [] // files, directories, symlinks, etc +klaw('/some/dir') + .on('error', function (err) { excludeDirFilter.emit('error', err) }) // forward the error on + .pipe(excludeDirFilter) + .on('data', function (item) { + items.push(item.path) + }) + .on('end', function () { + console.dir(items) // => [ ... array of files without directories] + }) +``` + + +### Searching Strategy + +Pass in options for `queueMethod` and `pathSorter` to affect how the file system +is recursively iterated. 
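For illustration, a walk that takes the most recently queued path first and sorts each directory's entries before queueing them might look like this (a sketch; the directory and the sort order are arbitrary choices, not defaults):

```js
var klaw = require('klaw')

klaw('/some/dir', {
  queueMethod: 'pop', // take the most recently queued path first
  pathSorter: function (a, b) { return b.localeCompare(a) }
})
  .on('data', function (item) {
    console.log(item.path)
  })
  .on('end', function () {
    console.log('walk complete')
  })
```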
See the code for more details, it's less than 50 lines :) + + + +License +------- + +MIT + +Copyright (c) 2015 [JP Richardson](https://github.com/jprichardson) diff --git a/node_modules/klaw/package.json b/node_modules/klaw/package.json new file mode 100644 index 0000000..6a2346a --- /dev/null +++ b/node_modules/klaw/package.json @@ -0,0 +1,40 @@ +{ + "name": "klaw", + "version": "1.3.1", + "description": "File system walker with Readable stream interface.", + "main": "./src/index.js", + "scripts": { + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape tests/**/*.js | tap-spec" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/jprichardson/node-klaw.git" + }, + "keywords": [ + "walk", + "walker", + "fs", + "fs-extra", + "readable", + "streams" + ], + "author": "JP Richardson", + "license": "MIT", + "bugs": { + "url": "https://github.com/jprichardson/node-klaw/issues" + }, + "homepage": "https://github.com/jprichardson/node-klaw#readme", + "devDependencies": { + "mkdirp": "^0.5.1", + "mock-fs": "^3.8.0", + "rimraf": "^2.4.3", + "standard": "^8.4.0", + "tap-spec": "^4.1.1", + "tape": "^4.2.2" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.9" + } +} diff --git a/node_modules/klaw/src/assign.js b/node_modules/klaw/src/assign.js new file mode 100644 index 0000000..69740f6 --- /dev/null +++ b/node_modules/klaw/src/assign.js @@ -0,0 +1,16 @@ +// simple mutable assign (extracted from fs-extra) +// I really like object-assign package, but I wanted a lean package with zero deps +function _assign () { + var args = [].slice.call(arguments).filter(function (i) { return i }) + var dest = args.shift() + args.forEach(function (src) { + Object.keys(src).forEach(function (key) { + dest[key] = src[key] + }) + }) + + return dest +} + +// thank you baby Jesus for Node v4 and Object.assign +module.exports = Object.assign || _assign diff --git a/node_modules/klaw/src/index.js b/node_modules/klaw/src/index.js new file mode 100644 index 0000000..ae4a20e --- /dev/null +++ b/node_modules/klaw/src/index.js @@ -0,0 +1,57 @@ +var assert = require('assert') +var fs +try { + fs = require('graceful-fs') +} catch (e) { + fs = require('fs') +} +var path = require('path') +var Readable = require('stream').Readable +var util = require('util') +var assign = require('./assign') + +function Walker (dir, options) { + assert.strictEqual(typeof dir, 'string', '`dir` parameter should be of type string. 
Got type: ' + typeof dir) + var defaultStreamOptions = { objectMode: true } + var defaultOpts = { queueMethod: 'shift', pathSorter: undefined, filter: undefined } + options = assign(defaultOpts, options, defaultStreamOptions) + + Readable.call(this, options) + this.root = path.resolve(dir) + this.paths = [this.root] + this.options = options + this.fs = options.fs || fs // mock-fs +} +util.inherits(Walker, Readable) + +Walker.prototype._read = function () { + if (this.paths.length === 0) return this.push(null) + var self = this + var pathItem = this.paths[this.options.queueMethod]() + + self.fs.lstat(pathItem, function (err, stats) { + var item = { path: pathItem, stats: stats } + if (err) return self.emit('error', err, item) + if (!stats.isDirectory()) return self.push(item) + + self.fs.readdir(pathItem, function (err, pathItems) { + if (err) { + self.push(item) + return self.emit('error', err, item) + } + + pathItems = pathItems.map(function (part) { return path.join(pathItem, part) }) + if (self.options.filter) pathItems = pathItems.filter(self.options.filter) + if (self.options.pathSorter) pathItems.sort(self.options.pathSorter) + pathItems.forEach(function (pi) { self.paths.push(pi) }) + + self.push(item) + }) + }) +} + +function walk (root, options) { + return new Walker(root, options) +} + +module.exports = walk diff --git a/node_modules/lazy-cache/LICENSE b/node_modules/lazy-cache/LICENSE new file mode 100644 index 0000000..1e49edf --- /dev/null +++ b/node_modules/lazy-cache/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/lazy-cache/README.md b/node_modules/lazy-cache/README.md new file mode 100644 index 0000000..33b5a4d --- /dev/null +++ b/node_modules/lazy-cache/README.md @@ -0,0 +1,147 @@ +# lazy-cache [![NPM version](https://img.shields.io/npm/v/lazy-cache.svg?style=flat)](https://www.npmjs.com/package/lazy-cache) [![NPM downloads](https://img.shields.io/npm/dm/lazy-cache.svg?style=flat)](https://npmjs.org/package/lazy-cache) [![Build Status](https://img.shields.io/travis/jonschlinkert/lazy-cache.svg?style=flat)](https://travis-ci.org/jonschlinkert/lazy-cache) + +> Cache requires to be lazy-loaded when needed. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install lazy-cache --save +``` + +If you use webpack and are experiencing issues, try using [unlazy-loader](https://github.com/doowb/unlazy-loader), a webpack loader that fixes the bug that prevents webpack from working with native javascript getters. + +## Usage + +```js +var utils = require('lazy-cache')(require); +``` + +**Use as a property on `lazy`** + +The module is also added as a property to the `lazy` function +so it can be called without having to call a function first. + +```js +var utils = require('lazy-cache')(require); + +// `npm install glob` +utils('glob'); + +// glob sync +console.log(utils.glob.sync('*.js')); + +// glob async +utils.glob('*.js', function (err, files) { + console.log(files); +}); +``` + +**Use as a function** + +```js +var utils = require('lazy-cache')(require); +var glob = utils('glob'); + +// `glob` is a now a function that may be called when needed +glob().sync('foo/*.js'); +``` + +## Aliases + +An alias may be passed as the second argument if you don't want to use the automatically camel-cased variable name. + +**Example** + +```js +var utils = require('lazy-cache')(require); + +// alias `ansi-yellow` as `yellow` +utils('ansi-yellow', 'yellow'); +console.log(utils.yellow('foo')); +``` + +## Browserify usage + +**Example** + +```js +var utils = require('lazy-cache')(require); +// temporarily re-assign `require` to trick browserify +var fn = require; +require = utils; +// list module dependencies (here, `require` is actually `lazy-cache`) +require('glob'); +require = fn; // restore the native `require` function + +/** + * Now you can use glob with the `utils.glob` variable + */ + +// sync +console.log(utils.glob.sync('*.js')); + +// async +utils.glob('*.js', function (err, files) { + console.log(files.join('\n')); +}); +``` + +## Kill switch + +In certain rare edge cases it may be necessary to unlazy all lazy-cached dependencies (5 reported cases after ~30 million downloads). + +To force lazy-cache to immediately invoke all dependencies, do: + +```js +process.env.UNLAZY = true; +``` + +## Related projects + +You might also be interested in these projects: + +[lint-deps](https://www.npmjs.com/package/lint-deps): CLI tool that tells you when dependencies are missing from package.json and offers you a… [more](https://www.npmjs.com/package/lint-deps) | [homepage](https://github.com/jonschlinkert/lint-deps) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/lazy-cache/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/lazy-cache/blob/master/LICENSE). 
+ +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v0.9.0, on April 22, 2016._ \ No newline at end of file diff --git a/node_modules/lazy-cache/index.js b/node_modules/lazy-cache/index.js new file mode 100644 index 0000000..da7897d --- /dev/null +++ b/node_modules/lazy-cache/index.js @@ -0,0 +1,67 @@ +'use strict'; + +/** + * Cache results of the first function call to ensure only calling once. + * + * ```js + * var utils = require('lazy-cache')(require); + * // cache the call to `require('ansi-yellow')` + * utils('ansi-yellow', 'yellow'); + * // use `ansi-yellow` + * console.log(utils.yellow('this is yellow')); + * ``` + * + * @param {Function} `fn` Function that will be called only once. + * @return {Function} Function that can be called to get the cached function + * @api public + */ + +function lazyCache(fn) { + var cache = {}; + var proxy = function(mod, name) { + name = name || camelcase(mod); + + // check both boolean and string in case `process.env` cases to string + if (process.env.UNLAZY === 'true' || process.env.UNLAZY === true || process.env.TRAVIS) { + cache[name] = fn(mod); + } + + Object.defineProperty(proxy, name, { + enumerable: true, + configurable: true, + get: getter + }); + + function getter() { + if (cache.hasOwnProperty(name)) { + return cache[name]; + } + return (cache[name] = fn(mod)); + } + return getter; + }; + return proxy; +} + +/** + * Used to camelcase the name to be stored on the `lazy` object. + * + * @param {String} `str` String containing `_`, `.`, `-` or whitespace that will be camelcased. + * @return {String} camelcased string. + */ + +function camelcase(str) { + if (str.length === 1) { + return str.toLowerCase(); + } + str = str.replace(/^[\W_]+|[\W_]+$/g, '').toLowerCase(); + return str.replace(/[\W_]+(\w|$)/g, function(_, ch) { + return ch.toUpperCase(); + }); +} + +/** + * Expose `lazyCache` + */ + +module.exports = lazyCache; diff --git a/node_modules/lazy-cache/package.json b/node_modules/lazy-cache/package.json new file mode 100644 index 0000000..e635e98 --- /dev/null +++ b/node_modules/lazy-cache/package.json @@ -0,0 +1,58 @@ +{ + "name": "lazy-cache", + "description": "Cache requires to be lazy-loaded when needed.", + "version": "1.0.4", + "homepage": "https://github.com/jonschlinkert/lazy-cache", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/lazy-cache", + "bugs": { + "url": "https://github.com/jonschlinkert/lazy-cache/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi-yellow": "^0.1.1", + "glob": "^7.0.3", + "gulp-format-md": "^0.1.8", + "mocha": "^2.4.5" + }, + "keywords": [ + "cache", + "caching", + "dependencies", + "dependency", + "lazy", + "require", + "requires" + ], + "verb": { + "related": { + "list": [ + "lint-deps" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/loader-utils/LICENSE b/node_modules/loader-utils/LICENSE new file mode 100644 index 0000000..4919c47 --- /dev/null +++ b/node_modules/loader-utils/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2012 - 2015 Tobias Koppers + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal 
in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/loader-utils/index.js b/node_modules/loader-utils/index.js new file mode 100644 index 0000000..94bb22a --- /dev/null +++ b/node_modules/loader-utils/index.js @@ -0,0 +1,305 @@ +var JSON5 = require("json5"); +var path = require("path"); +var assign = require("object-assign"); +var emojiRegex = /[\uD800-\uDFFF]./; +var emojiList = require("emojis-list").filter(function(emoji) { + return emojiRegex.test(emoji) +}); + +var baseEncodeTables = { + 26: "abcdefghijklmnopqrstuvwxyz", + 32: "123456789abcdefghjkmnpqrstuvwxyz", // no 0lio + 36: "0123456789abcdefghijklmnopqrstuvwxyz", + 49: "abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ", // no lIO + 52: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ", + 58: "123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ", // no 0lIO + 62: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ", + 64: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_" +}; +var emojiCache = {}; + +function encodeStringToEmoji(content, length) { + if (emojiCache[content]) return emojiCache[content]; + length = length || 1; + var emojis = []; + do { + var index = Math.floor(Math.random() * emojiList.length); + emojis.push(emojiList[index]); + emojiList.splice(index, 1); + } while (--length > 0); + var emojiEncoding = emojis.join(''); + emojiCache[content] = emojiEncoding; + return emojiEncoding; +} + +function encodeBufferToBase(buffer, base) { + var encodeTable = baseEncodeTables[base]; + if (!encodeTable) throw new Error("Unknown encoding base" + base); + + var readLength = buffer.length; + + var Big = require('big.js'); + Big.RM = Big.DP = 0; + var b = new Big(0); + for (var i = readLength - 1; i >= 0; i--) { + b = b.times(256).plus(buffer[i]); + } + + var output = ""; + while (b.gt(0)) { + output = encodeTable[b.mod(base)] + output; + b = b.div(base); + } + + Big.DP = 20; + Big.RM = 1; + + return output; +} + +exports.parseQuery = function parseQuery(query) { + var specialValues = { + 'null': null, + 'true': true, + 'false': false + }; + if(!query) return {}; + if(typeof query !== "string") + return query; + if(query.substr(0, 1) !== "?") + throw new Error("a valid query string passed to parseQuery should begin with '?'"); + query = query.substr(1); + var queryLength = query.length; + if(query.substr(0, 1) === "{" && query.substr(-1) === "}") { + return JSON5.parse(query); + } + var queryArgs = query.split(/[,\&]/g); + var result = {}; + queryArgs.forEach(function(arg) { + var idx = arg.indexOf("="); + if(idx >= 0) { + var name = arg.substr(0, idx); + var value = decodeURIComponent(arg.substr(idx+1)); + if (specialValues.hasOwnProperty(value)) { 
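+            // e.g. '?foo=true' becomes { foo: true }: the strings 'null', 'true'
+            // and 'false' are coerced to the corresponding JavaScript values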
+ value = specialValues[value]; + } + if(name.substr(-2) === "[]") { + name = decodeURIComponent(name.substr(0, name.length-2)); + if(!Array.isArray(result[name])) + result[name] = []; + result[name].push(value); + } else { + name = decodeURIComponent(name); + result[name] = value; + } + } else { + if(arg.substr(0, 1) === "-") { + result[decodeURIComponent(arg.substr(1))] = false; + } else if(arg.substr(0, 1) === "+") { + result[decodeURIComponent(arg.substr(1))] = true; + } else { + result[decodeURIComponent(arg)] = true; + } + } + }); + return result; +}; + +exports.getLoaderConfig = function(loaderContext, defaultConfigKey) { + var query = exports.parseQuery(loaderContext.query); + var configKey = query.config || defaultConfigKey; + if (configKey) { + var config = loaderContext.options[configKey] || {}; + delete query.config; + return assign({}, config, query); + } + + return query; +}; + +exports.stringifyRequest = function(loaderContext, request) { + var splitted = request.split("!"); + var context = loaderContext.context || (loaderContext.options && loaderContext.options.context); + return JSON.stringify(splitted.map(function(part) { + if(/^\/|^[A-Z]:/i.test(part) && context) { + part = path.relative(context, part); + if(/^[A-Z]:/i.test(part)) { + return part; + } else { + return "./" + part.replace(/\\/g, "/"); + } + } + return part; + }).join("!")); +}; + +function dotRequest(obj) { + return obj.request; +} + +exports.getRemainingRequest = function(loaderContext) { + if(loaderContext.remainingRequest) + return loaderContext.remainingRequest; + var request = loaderContext.loaders.slice(loaderContext.loaderIndex+1).map(dotRequest).concat([loaderContext.resource]); + return request.join("!"); +}; + +exports.getCurrentRequest = function(loaderContext) { + if(loaderContext.currentRequest) + return loaderContext.currentRequest; + var request = loaderContext.loaders.slice(loaderContext.loaderIndex).map(dotRequest).concat([loaderContext.resource]); + return request.join("!"); +}; + +exports.isUrlRequest = function(url, root) { + // An URL is not an request if + // 1. it's a Data Url + // 2. it's an absolute url or and protocol-relative + // 3. it's some kind of url for a template + if(/^data:|^chrome-extension:|^(https?:)?\/\/|^[\{\}\[\]#*;,'§\$%&\(=?`´\^°<>]/.test(url)) return false; + // 4. It's also not an request if root isn't set and it's a root-relative url + if((root === undefined || root === false) && /^\//.test(url)) return false; + return true; +}; + +exports.urlToRequest = function(url, root) { + var moduleRequestRegex = /^[^?]*~/; + var request; + + if(/^[a-zA-Z]:\\|^\\\\/.test(url)) { + // absolute windows path, keep it + request = url; + } else if(root !== undefined && root !== false && /^\//.test(url)) { + // if root is set and the url is root-relative + switch(typeof root) { + // 1. root is a string: root is prefixed to the url + case "string": + // special case: `~` roots convert to module request + if (moduleRequestRegex.test(root)) { + request = root.replace(/([^~\/])$/, "$1/") + url.slice(1); + } else { + request = root + url; + } + break; + // 2. 
root is `true`: absolute paths are allowed + // *nix only, windows-style absolute paths are always allowed as they doesn't start with a `/` + case "boolean": + request = url; + break; + default: + throw new Error("Unexpected parameters to loader-utils 'urlToRequest': url = " + url + ", root = " + root + "."); + } + } else if(/^\.\.?\//.test(url)) { + // A relative url stays + request = url; + } else { + // every other url is threaded like a relative url + request = "./" + url; + } + + // A `~` makes the url an module + if (moduleRequestRegex.test(request)) { + request = request.replace(moduleRequestRegex, ""); + } + + return request; +}; + +exports.parseString = function parseString(str) { + try { + if(str[0] === '"') return JSON.parse(str); + if(str[0] === "'" && str.substr(str.length - 1) === "'") { + return parseString(str.replace(/\\.|"/g, function(x) { + if(x === '"') return '\\"'; + return x; + }).replace(/^'|'$/g, '"')); + } + return JSON.parse('"' + str + '"'); + } catch(e) { + return str; + } +}; + +exports.getHashDigest = function getHashDigest(buffer, hashType, digestType, maxLength) { + hashType = hashType || "md5"; + maxLength = maxLength || 9999; + var hash = require("crypto").createHash(hashType); + hash.update(buffer); + if (digestType === "base26" || digestType === "base32" || digestType === "base36" || + digestType === "base49" || digestType === "base52" || digestType === "base58" || + digestType === "base62" || digestType === "base64") { + return encodeBufferToBase(hash.digest(), digestType.substr(4)).substr(0, maxLength); + } else { + return hash.digest(digestType || "hex").substr(0, maxLength); + } +}; + +exports.interpolateName = function interpolateName(loaderContext, name, options) { + var filename = name || "[hash].[ext]"; + var context = options.context; + var content = options.content; + var regExp = options.regExp; + var ext = "bin"; + var basename = "file"; + var directory = ""; + var folder = ""; + if(loaderContext.resourcePath) { + var resourcePath = loaderContext.resourcePath; + var idx = resourcePath.lastIndexOf("."); + var i = resourcePath.lastIndexOf("\\"); + var j = resourcePath.lastIndexOf("/"); + var p = i < 0 ? j : j < 0 ? i : i < j ? 
i : j; + if(idx >= 0) { + ext = resourcePath.substr(idx+1); + resourcePath = resourcePath.substr(0, idx); + } + if(p >= 0) { + basename = resourcePath.substr(p+1); + resourcePath = resourcePath.substr(0, p+1); + } + if (typeof context !== 'undefined') { + directory = path.relative(context, resourcePath + "_").replace(/\\/g, "/").replace(/\.\.(\/)?/g, "_$1"); + directory = directory.substr(0, directory.length-1); + } + else { + directory = resourcePath.replace(/\\/g, "/").replace(/\.\.(\/)?/g, "_$1"); + } + if (directory.length === 1) { + directory = ""; + } else if (directory.length > 1) { + folder = path.basename(directory); + } + } + var url = filename; + if(content) { + // Match hash template + url = url.replace(/\[(?:(\w+):)?hash(?::([a-z]+\d*))?(?::(\d+))?\]/ig, function() { + return exports.getHashDigest(content, arguments[1], arguments[2], parseInt(arguments[3], 10)); + }).replace(/\[emoji(?::(\d+))?\]/ig, function() { + return encodeStringToEmoji(content, arguments[1]); + }); + } + url = url.replace(/\[ext\]/ig, function() { + return ext; + }).replace(/\[name\]/ig, function() { + return basename; + }).replace(/\[path\]/ig, function() { + return directory; + }).replace(/\[folder\]/ig, function() { + return folder; + }); + if(regExp && loaderContext.resourcePath) { + var re = new RegExp(regExp); + var match = loaderContext.resourcePath.match(re); + if(match) { + for (var i = 0; i < match.length; i++) { + var re = new RegExp("\\[" + i + "\\]", "ig"); + url = url.replace(re, match[i]); + } + } + } + if(typeof loaderContext.options === "object" && typeof loaderContext.options.customInterpolateName === "function") { + url = loaderContext.options.customInterpolateName.call(loaderContext, url, name, options); + } + return url; +}; diff --git a/node_modules/loader-utils/node_modules/.bin/json5 b/node_modules/loader-utils/node_modules/.bin/json5 new file mode 120000 index 0000000..519e49e --- /dev/null +++ b/node_modules/loader-utils/node_modules/.bin/json5 @@ -0,0 +1 @@ +../../../json5/lib/cli.js \ No newline at end of file diff --git a/node_modules/loader-utils/package.json b/node_modules/loader-utils/package.json new file mode 100644 index 0000000..0229d44 --- /dev/null +++ b/node_modules/loader-utils/package.json @@ -0,0 +1,31 @@ +{ + "name": "loader-utils", + "version": "0.2.16", + "author": "Tobias Koppers @sokra", + "description": "utils for webpack loaders", + "dependencies": { + "big.js": "^3.1.3", + "emojis-list": "^2.0.0", + "json5": "^0.5.0", + "object-assign": "^4.0.1" + }, + "scripts": { + "test": "mocha", + "travis": "npm run cover -- --report lcovonly", + "cover": "istanbul cover -x *.runtime.js node_modules/mocha/bin/_mocha", + "publish-patch": "mocha && npm version patch && git push && git push --tags && npm publish" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/webpack/loader-utils.git" + }, + "devDependencies": { + "coveralls": "^2.11.2", + "istanbul": "^0.3.14", + "mocha": "^1.21.4" + }, + "files": [ + "index.js" + ] +} diff --git a/node_modules/lodash-node/LICENSE.txt b/node_modules/lodash-node/LICENSE.txt new file mode 100644 index 0000000..49869bb --- /dev/null +++ b/node_modules/lodash-node/LICENSE.txt @@ -0,0 +1,22 @@ +Copyright 2012-2013 The Dojo Foundation +Based on Underscore.js 1.5.2, copyright 2009-2013 Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the 
+"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/lodash-node/README.md b/node_modules/lodash-node/README.md new file mode 100644 index 0000000..3bc410e --- /dev/null +++ b/node_modules/lodash-node/README.md @@ -0,0 +1,44 @@ +# lodash-node v2.4.1 + +A collection of [Lo-Dash](http://lodash.com/) methods as [Node.js](http://nodejs.org/) modules generated by [lodash-cli](https://npmjs.org/package/lodash-cli). + +## Installation & usage + +Using [`npm`](http://npmjs.org/): + +```bash +npm i --save lodash-node + +{sudo} npm i -g lodash-node +npm ln lodash-node +``` + +In Node.js: + +```js +var _ = require('lodash-node'); + +// or as Underscore +var _ = require('lodash-node/underscore'); + +// or by method category +var collections = require('lodash-node/modern/collections'); + +// or individual methods +var isEqual = require('lodash-node/modern/objects/isEqual'); +var findWhere = require('lodash-node/underscore/collections/findWhere'); +``` + +## Author + +| [![twitter/jdalton](http://gravatar.com/avatar/299a3d891ff1920b69c364d061007043?s=70)](https://twitter.com/jdalton "Follow @jdalton on Twitter") | +|---| +| [John-David Dalton](http://allyoucanleet.com/) | + +## Contributors + +| [![twitter/blainebublitz](http://gravatar.com/avatar/ac1c67fd906c9fecd823ce302283b4c1?s=70)](https://twitter.com/blainebublitz "Follow @BlaineBublitz on Twitter") | [![twitter/kitcambridge](http://gravatar.com/avatar/6662a1d02f351b5ef2f8b4d815804661?s=70)](https://twitter.com/kitcambridge "Follow @kitcambridge on Twitter") | [![twitter/mathias](http://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---|---|---| +| [Blaine Bublitz](http://www.iceddev.com/) | [Kit Cambridge](http://kitcambridge.be/) | [Mathias Bynens](http://mathiasbynens.be/) | + +[![Bitdeli Badge](https://d2weczhvl823v0.cloudfront.net/lodash/lodash-node/trend.png)](https://bitdeli.com/free "Bitdeli Badge") diff --git a/node_modules/lodash-node/compat/arrays.js b/node_modules/lodash-node/compat/arrays.js new file mode 100644 index 0000000..86f6dcf --- /dev/null +++ b/node_modules/lodash-node/compat/arrays.js @@ -0,0 +1,40 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'compact': require('./arrays/compact'), + 'difference': require('./arrays/difference'), + 'drop': require('./arrays/rest'), + 'findIndex': 
require('./arrays/findIndex'), + 'findLastIndex': require('./arrays/findLastIndex'), + 'first': require('./arrays/first'), + 'flatten': require('./arrays/flatten'), + 'head': require('./arrays/first'), + 'indexOf': require('./arrays/indexOf'), + 'initial': require('./arrays/initial'), + 'intersection': require('./arrays/intersection'), + 'last': require('./arrays/last'), + 'lastIndexOf': require('./arrays/lastIndexOf'), + 'object': require('./arrays/zipObject'), + 'pull': require('./arrays/pull'), + 'range': require('./arrays/range'), + 'remove': require('./arrays/remove'), + 'rest': require('./arrays/rest'), + 'sortedIndex': require('./arrays/sortedIndex'), + 'tail': require('./arrays/rest'), + 'take': require('./arrays/first'), + 'union': require('./arrays/union'), + 'uniq': require('./arrays/uniq'), + 'unique': require('./arrays/uniq'), + 'unzip': require('./arrays/zip'), + 'without': require('./arrays/without'), + 'xor': require('./arrays/xor'), + 'zip': require('./arrays/zip'), + 'zipObject': require('./arrays/zipObject') +}; diff --git a/node_modules/lodash-node/compat/arrays/compact.js b/node_modules/lodash-node/compat/arrays/compact.js new file mode 100644 index 0000000..f52dc22 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/compact.js @@ -0,0 +1,38 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are all falsey. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to compact. + * @returns {Array} Returns a new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ +function compact(array) { + var index = -1, + length = array ? array.length : 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value) { + result.push(value); + } + } + return result; +} + +module.exports = compact; diff --git a/node_modules/lodash-node/compat/arrays/difference.js b/node_modules/lodash-node/compat/arrays/difference.js new file mode 100644 index 0000000..f646752 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/difference.js @@ -0,0 +1,31 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseDifference = require('../internals/baseDifference'), + baseFlatten = require('../internals/baseFlatten'); + +/** + * Creates an array excluding all values of the provided arrays using strict + * equality for comparisons, i.e. `===`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to process. + * @param {...Array} [values] The arrays of values to exclude. + * @returns {Array} Returns a new array of filtered values. 
+ * @example + * + * _.difference([1, 2, 3, 4, 5], [5, 2, 10]); + * // => [1, 3, 4] + */ +function difference(array) { + return baseDifference(array, baseFlatten(arguments, true, true, 1)); +} + +module.exports = difference; diff --git a/node_modules/lodash-node/compat/arrays/findIndex.js b/node_modules/lodash-node/compat/arrays/findIndex.js new file mode 100644 index 0000000..cabf7c0 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/findIndex.js @@ -0,0 +1,65 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'); + +/** + * This method is like `_.find` except that it returns the index of the first + * element that passes the callback check, instead of the element itself. + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to search. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': false }, + * { 'name': 'fred', 'age': 40, 'blocked': true }, + * { 'name': 'pebbles', 'age': 1, 'blocked': false } + * ]; + * + * _.findIndex(characters, function(chr) { + * return chr.age < 20; + * }); + * // => 2 + * + * // using "_.where" callback shorthand + * _.findIndex(characters, { 'age': 36 }); + * // => 0 + * + * // using "_.pluck" callback shorthand + * _.findIndex(characters, 'blocked'); + * // => 1 + */ +function findIndex(array, callback, thisArg) { + var index = -1, + length = array ? array.length : 0; + + callback = createCallback(callback, thisArg, 3); + while (++index < length) { + if (callback(array[index], index, array)) { + return index; + } + } + return -1; +} + +module.exports = findIndex; diff --git a/node_modules/lodash-node/compat/arrays/findLastIndex.js b/node_modules/lodash-node/compat/arrays/findLastIndex.js new file mode 100644 index 0000000..2a74e31 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/findLastIndex.js @@ -0,0 +1,63 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'); + +/** + * This method is like `_.findIndex` except that it iterates over elements + * of a `collection` from right to left. + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. 
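Both `findIndex` above and `findLastIndex` here accept the same three callback forms via `createCallback`. A rough illustration (not part of the vendored file, and assuming the compat module paths added by this diff) of how the forms are interchangeable:

```js
var findIndex = require('lodash-node/compat/arrays/findIndex');

var users = [
  { 'name': 'barney', 'blocked': false },
  { 'name': 'fred',   'blocked': true }
];

// the three callback forms accepted via createCallback:
findIndex(users, function(u) { return u.blocked; }); // plain function       => 1
findIndex(users, { 'name': 'fred' });                // "_.where" shorthand  => 1
findIndex(users, 'blocked');                         // "_.pluck" shorthand  => 1
```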
+ * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to search. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': true }, + * { 'name': 'fred', 'age': 40, 'blocked': false }, + * { 'name': 'pebbles', 'age': 1, 'blocked': true } + * ]; + * + * _.findLastIndex(characters, function(chr) { + * return chr.age > 30; + * }); + * // => 1 + * + * // using "_.where" callback shorthand + * _.findLastIndex(characters, { 'age': 36 }); + * // => 0 + * + * // using "_.pluck" callback shorthand + * _.findLastIndex(characters, 'blocked'); + * // => 2 + */ +function findLastIndex(array, callback, thisArg) { + var length = array ? array.length : 0; + callback = createCallback(callback, thisArg, 3); + while (length--) { + if (callback(array[length], length, array)) { + return length; + } + } + return -1; +} + +module.exports = findLastIndex; diff --git a/node_modules/lodash-node/compat/arrays/first.js b/node_modules/lodash-node/compat/arrays/first.js new file mode 100644 index 0000000..c4b1c38 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/first.js @@ -0,0 +1,86 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + slice = require('../internals/slice'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Gets the first element or first `n` elements of an array. If a callback + * is provided elements at the beginning of the array are returned as long + * as the callback returns truey. The callback is bound to `thisArg` and + * invoked with three arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias head, take + * @category Arrays + * @param {Array} array The array to query. + * @param {Function|Object|number|string} [callback] The function called + * per element or the number of elements to return. If a property name or + * object is provided it will be used to create a "_.pluck" or "_.where" + * style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the first element(s) of `array`. 
+ * @example + * + * _.first([1, 2, 3]); + * // => 1 + * + * _.first([1, 2, 3], 2); + * // => [1, 2] + * + * _.first([1, 2, 3], function(num) { + * return num < 3; + * }); + * // => [1, 2] + * + * var characters = [ + * { 'name': 'barney', 'blocked': true, 'employer': 'slate' }, + * { 'name': 'fred', 'blocked': false, 'employer': 'slate' }, + * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } + * ]; + * + * // using "_.pluck" callback shorthand + * _.first(characters, 'blocked'); + * // => [{ 'name': 'barney', 'blocked': true, 'employer': 'slate' }] + * + * // using "_.where" callback shorthand + * _.pluck(_.first(characters, { 'employer': 'slate' }), 'name'); + * // => ['barney', 'fred'] + */ +function first(array, callback, thisArg) { + var n = 0, + length = array ? array.length : 0; + + if (typeof callback != 'number' && callback != null) { + var index = -1; + callback = createCallback(callback, thisArg, 3); + while (++index < length && callback(array[index], index, array)) { + n++; + } + } else { + n = callback; + if (n == null || thisArg) { + return array ? array[0] : undefined; + } + } + return slice(array, 0, nativeMin(nativeMax(0, n), length)); +} + +module.exports = first; diff --git a/node_modules/lodash-node/compat/arrays/flatten.js b/node_modules/lodash-node/compat/arrays/flatten.js new file mode 100644 index 0000000..54dc494 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/flatten.js @@ -0,0 +1,66 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseFlatten = require('../internals/baseFlatten'), + map = require('../collections/map'); + +/** + * Flattens a nested array (the nesting can be to any depth). If `isShallow` + * is truey, the array will only be flattened a single level. If a callback + * is provided each element of the array is passed through the callback before + * flattening. The callback is bound to `thisArg` and invoked with three + * arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to flatten. + * @param {boolean} [isShallow=false] A flag to restrict flattening to a single level. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new flattened array. 
+ * @example + * + * _.flatten([1, [2], [3, [[4]]]]); + * // => [1, 2, 3, 4]; + * + * _.flatten([1, [2], [3, [[4]]]], true); + * // => [1, 2, 3, [[4]]]; + * + * var characters = [ + * { 'name': 'barney', 'age': 30, 'pets': ['hoppy'] }, + * { 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] } + * ]; + * + * // using "_.pluck" callback shorthand + * _.flatten(characters, 'pets'); + * // => ['hoppy', 'baby puss', 'dino'] + */ +function flatten(array, isShallow, callback, thisArg) { + // juggle arguments + if (typeof isShallow != 'boolean' && isShallow != null) { + thisArg = callback; + callback = (typeof isShallow != 'function' && thisArg && thisArg[isShallow] === array) ? null : isShallow; + isShallow = false; + } + if (callback != null) { + array = map(array, callback, thisArg); + } + return baseFlatten(array, isShallow); +} + +module.exports = flatten; diff --git a/node_modules/lodash-node/compat/arrays/indexOf.js b/node_modules/lodash-node/compat/arrays/indexOf.js new file mode 100644 index 0000000..19238b5 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/indexOf.js @@ -0,0 +1,50 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseIndexOf = require('../internals/baseIndexOf'), + sortedIndex = require('./sortedIndex'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * Gets the index at which the first occurrence of `value` is found using + * strict equality for comparisons, i.e. `===`. If the array is already sorted + * providing `true` for `fromIndex` will run a faster binary search. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to search. + * @param {*} value The value to search for. + * @param {boolean|number} [fromIndex=0] The index to search from or `true` + * to perform a binary search on a sorted array. + * @returns {number} Returns the index of the matched value or `-1`. + * @example + * + * _.indexOf([1, 2, 3, 1, 2, 3], 2); + * // => 1 + * + * _.indexOf([1, 2, 3, 1, 2, 3], 2, 3); + * // => 4 + * + * _.indexOf([1, 1, 2, 2, 3, 3], 2, true); + * // => 2 + */ +function indexOf(array, value, fromIndex) { + if (typeof fromIndex == 'number') { + var length = array ? array.length : 0; + fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex || 0); + } else if (fromIndex) { + var index = sortedIndex(array, value); + return array[index] === value ? 
index : -1; + } + return baseIndexOf(array, value, fromIndex); +} + +module.exports = indexOf; diff --git a/node_modules/lodash-node/compat/arrays/initial.js b/node_modules/lodash-node/compat/arrays/initial.js new file mode 100644 index 0000000..77ea68b --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/initial.js @@ -0,0 +1,82 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + slice = require('../internals/slice'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Gets all but the last element or last `n` elements of an array. If a + * callback is provided elements at the end of the array are excluded from + * the result as long as the callback returns truey. The callback is bound + * to `thisArg` and invoked with three arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to query. + * @param {Function|Object|number|string} [callback=1] The function called + * per element or the number of elements to exclude. If a property name or + * object is provided it will be used to create a "_.pluck" or "_.where" + * style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + * + * _.initial([1, 2, 3], 2); + * // => [1] + * + * _.initial([1, 2, 3], function(num) { + * return num > 1; + * }); + * // => [1] + * + * var characters = [ + * { 'name': 'barney', 'blocked': false, 'employer': 'slate' }, + * { 'name': 'fred', 'blocked': true, 'employer': 'slate' }, + * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } + * ]; + * + * // using "_.pluck" callback shorthand + * _.initial(characters, 'blocked'); + * // => [{ 'name': 'barney', 'blocked': false, 'employer': 'slate' }] + * + * // using "_.where" callback shorthand + * _.pluck(_.initial(characters, { 'employer': 'na' }), 'name'); + * // => ['barney', 'fred'] + */ +function initial(array, callback, thisArg) { + var n = 0, + length = array ? array.length : 0; + + if (typeof callback != 'number' && callback != null) { + var index = length; + callback = createCallback(callback, thisArg, 3); + while (index-- && callback(array[index], index, array)) { + n++; + } + } else { + n = (callback == null || thisArg) ? 
1 : callback || n; + } + return slice(array, 0, nativeMin(nativeMax(0, length - n), length)); +} + +module.exports = initial; diff --git a/node_modules/lodash-node/compat/arrays/intersection.js b/node_modules/lodash-node/compat/arrays/intersection.js new file mode 100644 index 0000000..fd7bf65 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/intersection.js @@ -0,0 +1,83 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseIndexOf = require('../internals/baseIndexOf'), + cacheIndexOf = require('../internals/cacheIndexOf'), + createCache = require('../internals/createCache'), + getArray = require('../internals/getArray'), + isArguments = require('../objects/isArguments'), + isArray = require('../objects/isArray'), + largeArraySize = require('../internals/largeArraySize'), + releaseArray = require('../internals/releaseArray'), + releaseObject = require('../internals/releaseObject'); + +/** + * Creates an array of unique values present in all provided arrays using + * strict equality for comparisons, i.e. `===`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {...Array} [array] The arrays to inspect. + * @returns {Array} Returns an array of shared values. + * @example + * + * _.intersection([1, 2, 3], [5, 2, 1, 4], [2, 1]); + * // => [1, 2] + */ +function intersection() { + var args = [], + argsIndex = -1, + argsLength = arguments.length, + caches = getArray(), + indexOf = baseIndexOf, + trustIndexOf = indexOf === baseIndexOf, + seen = getArray(); + + while (++argsIndex < argsLength) { + var value = arguments[argsIndex]; + if (isArray(value) || isArguments(value)) { + args.push(value); + caches.push(trustIndexOf && value.length >= largeArraySize && + createCache(argsIndex ? args[argsIndex] : seen)); + } + } + var array = args[0], + index = -1, + length = array ? array.length : 0, + result = []; + + outer: + while (++index < length) { + var cache = caches[0]; + value = array[index]; + + if ((cache ? cacheIndexOf(cache, value) : indexOf(seen, value)) < 0) { + argsIndex = argsLength; + (cache || seen).push(value); + while (--argsIndex) { + cache = caches[argsIndex]; + if ((cache ? 
cacheIndexOf(cache, value) : indexOf(args[argsIndex], value)) < 0) { + continue outer; + } + } + result.push(value); + } + } + while (argsLength--) { + cache = caches[argsLength]; + if (cache) { + releaseObject(cache); + } + } + releaseArray(caches); + releaseArray(seen); + return result; +} + +module.exports = intersection; diff --git a/node_modules/lodash-node/compat/arrays/last.js b/node_modules/lodash-node/compat/arrays/last.js new file mode 100644 index 0000000..b54f370 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/last.js @@ -0,0 +1,84 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + slice = require('../internals/slice'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * Gets the last element or last `n` elements of an array. If a callback is + * provided elements at the end of the array are returned as long as the + * callback returns truey. The callback is bound to `thisArg` and invoked + * with three arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to query. + * @param {Function|Object|number|string} [callback] The function called + * per element or the number of elements to return. If a property name or + * object is provided it will be used to create a "_.pluck" or "_.where" + * style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the last element(s) of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + * + * _.last([1, 2, 3], 2); + * // => [2, 3] + * + * _.last([1, 2, 3], function(num) { + * return num > 1; + * }); + * // => [2, 3] + * + * var characters = [ + * { 'name': 'barney', 'blocked': false, 'employer': 'slate' }, + * { 'name': 'fred', 'blocked': true, 'employer': 'slate' }, + * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } + * ]; + * + * // using "_.pluck" callback shorthand + * _.pluck(_.last(characters, 'blocked'), 'name'); + * // => ['fred', 'pebbles'] + * + * // using "_.where" callback shorthand + * _.last(characters, { 'employer': 'na' }); + * // => [{ 'name': 'pebbles', 'blocked': true, 'employer': 'na' }] + */ +function last(array, callback, thisArg) { + var n = 0, + length = array ? array.length : 0; + + if (typeof callback != 'number' && callback != null) { + var index = length; + callback = createCallback(callback, thisArg, 3); + while (index-- && callback(array[index], index, array)) { + n++; + } + } else { + n = callback; + if (n == null || thisArg) { + return array ? 
array[length - 1] : undefined; + } + } + return slice(array, nativeMax(0, length - n)); +} + +module.exports = last; diff --git a/node_modules/lodash-node/compat/arrays/lastIndexOf.js b/node_modules/lodash-node/compat/arrays/lastIndexOf.js new file mode 100644 index 0000000..c7b8817 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/lastIndexOf.js @@ -0,0 +1,54 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Gets the index at which the last occurrence of `value` is found using strict + * equality for comparisons, i.e. `===`. If `fromIndex` is negative, it is used + * as the offset from the end of the collection. + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to search. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value or `-1`. + * @example + * + * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2); + * // => 4 + * + * _.lastIndexOf([1, 2, 3, 1, 2, 3], 2, 3); + * // => 1 + */ +function lastIndexOf(array, value, fromIndex) { + var index = array ? array.length : 0; + if (typeof fromIndex == 'number') { + index = (fromIndex < 0 ? nativeMax(0, index + fromIndex) : nativeMin(fromIndex, index - 1)) + 1; + } + while (index--) { + if (array[index] === value) { + return index; + } + } + return -1; +} + +module.exports = lastIndexOf; diff --git a/node_modules/lodash-node/compat/arrays/pull.js b/node_modules/lodash-node/compat/arrays/pull.js new file mode 100644 index 0000000..d7f60d0 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/pull.js @@ -0,0 +1,57 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var splice = arrayRef.splice; + +/** + * Removes all provided values from the given array using strict equality for + * comparisons, i.e. `===`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to modify. + * @param {...*} [value] The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3, 1, 2, 3]; + * _.pull(array, 2, 3); + * console.log(array); + * // => [1, 1] + */ +function pull(array) { + var args = arguments, + argsIndex = 0, + argsLength = args.length, + length = array ? 
array.length : 0; + + while (++argsIndex < argsLength) { + var index = -1, + value = args[argsIndex]; + while (++index < length) { + if (array[index] === value) { + splice.call(array, index--, 1); + length--; + } + } + } + return array; +} + +module.exports = pull; diff --git a/node_modules/lodash-node/compat/arrays/range.js b/node_modules/lodash-node/compat/arrays/range.js new file mode 100644 index 0000000..5ea7f49 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/range.js @@ -0,0 +1,69 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Native method shortcuts */ +var ceil = Math.ceil; + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * Creates an array of numbers (positive and/or negative) progressing from + * `start` up to but not including `end`. If `start` is less than `stop` a + * zero-length range is created unless a negative `step` is specified. + * + * @static + * @memberOf _ + * @category Arrays + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @param {number} [step=1] The value to increment or decrement by. + * @returns {Array} Returns a new range array. + * @example + * + * _.range(4); + * // => [0, 1, 2, 3] + * + * _.range(1, 5); + * // => [1, 2, 3, 4] + * + * _.range(0, 20, 5); + * // => [0, 5, 10, 15] + * + * _.range(0, -4, -1); + * // => [0, -1, -2, -3] + * + * _.range(1, 4, 0); + * // => [1, 1, 1] + * + * _.range(0); + * // => [] + */ +function range(start, end, step) { + start = +start || 0; + step = typeof step == 'number' ? step : (+step || 1); + + if (end == null) { + end = start; + start = 0; + } + // use `Array(length)` so engines like Chakra and V8 avoid slower modes + // http://youtu.be/XAqIpGU8ZZk#t=17m25s + var index = -1, + length = nativeMax(0, ceil((end - start) / (step || 1))), + result = Array(length); + + while (++index < length) { + result[index] = start; + start += step; + } + return result; +} + +module.exports = range; diff --git a/node_modules/lodash-node/compat/arrays/remove.js b/node_modules/lodash-node/compat/arrays/remove.js new file mode 100644 index 0000000..7c0f8f0 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/remove.js @@ -0,0 +1,71 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'); + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var splice = arrayRef.splice; + +/** + * Removes all elements from an array that the callback returns truey for + * and returns an array of removed elements. The callback is bound to `thisArg` + * and invoked with three arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. 
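Like `remove` being documented here, `pull` above mutates its array in place, whereas `difference` (and `without`) return new arrays. A minimal sketch of that distinction, not part of the vendored file, assuming the compat module paths added by this diff:

```js
var pull = require('lodash-node/compat/arrays/pull');
var difference = require('lodash-node/compat/arrays/difference');

var nums = [1, 2, 3, 1, 2, 3];

var kept = difference(nums, [2, 3]); // new array => [1, 1]; nums is untouched
pull(nums, 2, 3);                    // splices matches out of nums itself; nums => [1, 1]
```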
+ * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to modify. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new array of removed elements. + * @example + * + * var array = [1, 2, 3, 4, 5, 6]; + * var evens = _.remove(array, function(num) { return num % 2 == 0; }); + * + * console.log(array); + * // => [1, 3, 5] + * + * console.log(evens); + * // => [2, 4, 6] + */ +function remove(array, callback, thisArg) { + var index = -1, + length = array ? array.length : 0, + result = []; + + callback = createCallback(callback, thisArg, 3); + while (++index < length) { + var value = array[index]; + if (callback(value, index, array)) { + result.push(value); + splice.call(array, index--, 1); + length--; + } + } + return result; +} + +module.exports = remove; diff --git a/node_modules/lodash-node/compat/arrays/rest.js b/node_modules/lodash-node/compat/arrays/rest.js new file mode 100644 index 0000000..d778fb6 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/rest.js @@ -0,0 +1,83 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + slice = require('../internals/slice'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * The opposite of `_.initial` this method gets all but the first element or + * first `n` elements of an array. If a callback function is provided elements + * at the beginning of the array are excluded from the result as long as the + * callback returns truey. The callback is bound to `thisArg` and invoked + * with three arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias drop, tail + * @category Arrays + * @param {Array} array The array to query. + * @param {Function|Object|number|string} [callback=1] The function called + * per element or the number of elements to exclude. If a property name or + * object is provided it will be used to create a "_.pluck" or "_.where" + * style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a slice of `array`. 
+ * @example + * + * _.rest([1, 2, 3]); + * // => [2, 3] + * + * _.rest([1, 2, 3], 2); + * // => [3] + * + * _.rest([1, 2, 3], function(num) { + * return num < 3; + * }); + * // => [3] + * + * var characters = [ + * { 'name': 'barney', 'blocked': true, 'employer': 'slate' }, + * { 'name': 'fred', 'blocked': false, 'employer': 'slate' }, + * { 'name': 'pebbles', 'blocked': true, 'employer': 'na' } + * ]; + * + * // using "_.pluck" callback shorthand + * _.pluck(_.rest(characters, 'blocked'), 'name'); + * // => ['fred', 'pebbles'] + * + * // using "_.where" callback shorthand + * _.rest(characters, { 'employer': 'slate' }); + * // => [{ 'name': 'pebbles', 'blocked': true, 'employer': 'na' }] + */ +function rest(array, callback, thisArg) { + if (typeof callback != 'number' && callback != null) { + var n = 0, + index = -1, + length = array ? array.length : 0; + + callback = createCallback(callback, thisArg, 3); + while (++index < length && callback(array[index], index, array)) { + n++; + } + } else { + n = (callback == null || thisArg) ? 1 : nativeMax(0, callback); + } + return slice(array, n); +} + +module.exports = rest; diff --git a/node_modules/lodash-node/compat/arrays/sortedIndex.js b/node_modules/lodash-node/compat/arrays/sortedIndex.js new file mode 100644 index 0000000..1bcb5b5 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/sortedIndex.js @@ -0,0 +1,77 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + identity = require('../utilities/identity'); + +/** + * Uses a binary search to determine the smallest index at which a value + * should be inserted into a given sorted array in order to maintain the sort + * order of the array. If a callback is provided it will be executed for + * `value` and each element of `array` to compute their sort ranking. The + * callback is bound to `thisArg` and invoked with one argument; (value). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to inspect. + * @param {*} value The value to evaluate. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. 
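A rough usage sketch of the binary-search insertion point `sortedIndex` computes, keeping an already-sorted array ordered (illustrative only; module path as added by this diff):

```js
var sortedIndex = require('lodash-node/compat/arrays/sortedIndex');

var sorted = [10, 20, 30, 50];
var at = sortedIndex(sorted, 40); // => 3, the index that preserves the sort order
sorted.splice(at, 0, 40);         // sorted => [10, 20, 30, 40, 50]
```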
+ * @example + * + * _.sortedIndex([20, 30, 50], 40); + * // => 2 + * + * // using "_.pluck" callback shorthand + * _.sortedIndex([{ 'x': 20 }, { 'x': 30 }, { 'x': 50 }], { 'x': 40 }, 'x'); + * // => 2 + * + * var dict = { + * 'wordToNumber': { 'twenty': 20, 'thirty': 30, 'fourty': 40, 'fifty': 50 } + * }; + * + * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { + * return dict.wordToNumber[word]; + * }); + * // => 2 + * + * _.sortedIndex(['twenty', 'thirty', 'fifty'], 'fourty', function(word) { + * return this.wordToNumber[word]; + * }, dict); + * // => 2 + */ +function sortedIndex(array, value, callback, thisArg) { + var low = 0, + high = array ? array.length : low; + + // explicitly reference `identity` for better inlining in Firefox + callback = callback ? createCallback(callback, thisArg, 1) : identity; + value = callback(value); + + while (low < high) { + var mid = (low + high) >>> 1; + (callback(array[mid]) < value) + ? low = mid + 1 + : high = mid; + } + return low; +} + +module.exports = sortedIndex; diff --git a/node_modules/lodash-node/compat/arrays/union.js b/node_modules/lodash-node/compat/arrays/union.js new file mode 100644 index 0000000..854f7b1 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/union.js @@ -0,0 +1,30 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseFlatten = require('../internals/baseFlatten'), + baseUniq = require('../internals/baseUniq'); + +/** + * Creates an array of unique values, in order, of the provided arrays using + * strict equality for comparisons, i.e. `===`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {...Array} [array] The arrays to inspect. + * @returns {Array} Returns an array of combined values. + * @example + * + * _.union([1, 2, 3], [5, 2, 1, 4], [2, 1]); + * // => [1, 2, 3, 5, 4] + */ +function union() { + return baseUniq(baseFlatten(arguments, true, true)); +} + +module.exports = union; diff --git a/node_modules/lodash-node/compat/arrays/uniq.js b/node_modules/lodash-node/compat/arrays/uniq.js new file mode 100644 index 0000000..50df292 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/uniq.js @@ -0,0 +1,69 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseUniq = require('../internals/baseUniq'), + createCallback = require('../functions/createCallback'); + +/** + * Creates a duplicate-value-free version of an array using strict equality + * for comparisons, i.e. `===`. If the array is sorted, providing + * `true` for `isSorted` will use a faster algorithm. If a callback is provided + * each element of `array` is passed through the callback before uniqueness + * is computed. The callback is bound to `thisArg` and invoked with three + * arguments; (value, index, array). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. 
+ * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias unique + * @category Arrays + * @param {Array} array The array to process. + * @param {boolean} [isSorted=false] A flag to indicate that `array` is sorted. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a duplicate-value-free array. + * @example + * + * _.uniq([1, 2, 1, 3, 1]); + * // => [1, 2, 3] + * + * _.uniq([1, 1, 2, 2, 3], true); + * // => [1, 2, 3] + * + * _.uniq(['A', 'b', 'C', 'a', 'B', 'c'], function(letter) { return letter.toLowerCase(); }); + * // => ['A', 'b', 'C'] + * + * _.uniq([1, 2.5, 3, 1.5, 2, 3.5], function(num) { return this.floor(num); }, Math); + * // => [1, 2.5, 3] + * + * // using "_.pluck" callback shorthand + * _.uniq([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ +function uniq(array, isSorted, callback, thisArg) { + // juggle arguments + if (typeof isSorted != 'boolean' && isSorted != null) { + thisArg = callback; + callback = (typeof isSorted != 'function' && thisArg && thisArg[isSorted] === array) ? null : isSorted; + isSorted = false; + } + if (callback != null) { + callback = createCallback(callback, thisArg, 3); + } + return baseUniq(array, isSorted, callback); +} + +module.exports = uniq; diff --git a/node_modules/lodash-node/compat/arrays/without.js b/node_modules/lodash-node/compat/arrays/without.js new file mode 100644 index 0000000..5fda56c --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/without.js @@ -0,0 +1,31 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseDifference = require('../internals/baseDifference'), + slice = require('../internals/slice'); + +/** + * Creates an array excluding all provided values using strict equality for + * comparisons, i.e. `===`. + * + * @static + * @memberOf _ + * @category Arrays + * @param {Array} array The array to filter. + * @param {...*} [value] The values to exclude. + * @returns {Array} Returns a new array of filtered values. 
+ * @example + * + * _.without([1, 2, 1, 0, 3, 1, 4], 0, 1); + * // => [2, 3, 4] + */ +function without(array) { + return baseDifference(array, slice(arguments, 1)); +} + +module.exports = without; diff --git a/node_modules/lodash-node/compat/arrays/xor.js b/node_modules/lodash-node/compat/arrays/xor.js new file mode 100644 index 0000000..1a444a1 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/xor.js @@ -0,0 +1,46 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseDifference = require('../internals/baseDifference'), + baseUniq = require('../internals/baseUniq'), + isArguments = require('../objects/isArguments'), + isArray = require('../objects/isArray'); + +/** + * Creates an array that is the symmetric difference of the provided arrays. + * See http://en.wikipedia.org/wiki/Symmetric_difference. + * + * @static + * @memberOf _ + * @category Arrays + * @param {...Array} [array] The arrays to inspect. + * @returns {Array} Returns an array of values. + * @example + * + * _.xor([1, 2, 3], [5, 2, 1, 4]); + * // => [3, 5, 4] + * + * _.xor([1, 2, 5], [2, 3, 5], [3, 4, 5]); + * // => [1, 4, 5] + */ +function xor() { + var index = -1, + length = arguments.length; + + while (++index < length) { + var array = arguments[index]; + if (isArray(array) || isArguments(array)) { + var result = result + ? baseUniq(baseDifference(result, array).concat(baseDifference(array, result))) + : array; + } + } + return result || []; +} + +module.exports = xor; diff --git a/node_modules/lodash-node/compat/arrays/zip.js b/node_modules/lodash-node/compat/arrays/zip.js new file mode 100644 index 0000000..7d3d6e6 --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/zip.js @@ -0,0 +1,40 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var max = require('../collections/max'), + pluck = require('../collections/pluck'); + +/** + * Creates an array of grouped elements, the first of which contains the first + * elements of the given arrays, the second of which contains the second + * elements of the given arrays, and so on. + * + * @static + * @memberOf _ + * @alias unzip + * @category Arrays + * @param {...Array} [array] Arrays to process. + * @returns {Array} Returns a new array of grouped elements. + * @example + * + * _.zip(['fred', 'barney'], [30, 40], [true, false]); + * // => [['fred', 30, true], ['barney', 40, false]] + */ +function zip() { + var array = arguments.length > 1 ? arguments : arguments[0], + index = -1, + length = array ? max(pluck(array, 'length')) : 0, + result = Array(length < 0 ? 
0 : length); + + while (++index < length) { + result[index] = pluck(array, index); + } + return result; +} + +module.exports = zip; diff --git a/node_modules/lodash-node/compat/arrays/zipObject.js b/node_modules/lodash-node/compat/arrays/zipObject.js new file mode 100644 index 0000000..6f901ef --- /dev/null +++ b/node_modules/lodash-node/compat/arrays/zipObject.js @@ -0,0 +1,48 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isArray = require('../objects/isArray'); + +/** + * Creates an object composed from arrays of `keys` and `values`. Provide + * either a single two dimensional array, i.e. `[[key1, value1], [key2, value2]]` + * or two arrays, one of `keys` and one of corresponding `values`. + * + * @static + * @memberOf _ + * @alias object + * @category Arrays + * @param {Array} keys The array of keys. + * @param {Array} [values=[]] The array of values. + * @returns {Object} Returns an object composed of the given keys and + * corresponding values. + * @example + * + * _.zipObject(['fred', 'barney'], [30, 40]); + * // => { 'fred': 30, 'barney': 40 } + */ +function zipObject(keys, values) { + var index = -1, + length = keys ? keys.length : 0, + result = {}; + + if (!values && length && !isArray(keys[0])) { + values = []; + } + while (++index < length) { + var key = keys[index]; + if (values) { + result[key] = values[index]; + } else if (key) { + result[key[0]] = key[1]; + } + } + return result; +} + +module.exports = zipObject; diff --git a/node_modules/lodash-node/compat/chaining.js b/node_modules/lodash-node/compat/chaining.js new file mode 100644 index 0000000..6cde453 --- /dev/null +++ b/node_modules/lodash-node/compat/chaining.js @@ -0,0 +1,17 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'chain': require('./chaining/chain'), + 'tap': require('./chaining/tap'), + 'value': require('./chaining/wrapperValueOf'), + 'wrapperChain': require('./chaining/wrapperChain'), + 'wrapperToString': require('./chaining/wrapperToString'), + 'wrapperValueOf': require('./chaining/wrapperValueOf') +}; diff --git a/node_modules/lodash-node/compat/chaining/chain.js b/node_modules/lodash-node/compat/chaining/chain.js new file mode 100644 index 0000000..d613c55 --- /dev/null +++ b/node_modules/lodash-node/compat/chaining/chain.js @@ -0,0 +1,41 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var lodashWrapper = require('../internals/lodashWrapper'); + +/** + * Creates a `lodash` object that wraps the given value with explicit + * method chaining enabled. + * + * @static + * @memberOf _ + * @category Chaining + * @param {*} value The value to wrap. + * @returns {Object} Returns the wrapper object. 
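// [Editor's aside -- illustrative only, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] `zipObject` accepts either
// calling convention documented above, so `_.zip` output round-trips through it:
_.zipObject([['fred', 30], ['barney', 40]]);
// => { 'fred': 30, 'barney': 40 }
_.zipObject(_.zip(['fred', 'barney'], [30, 40]));
// => { 'fred': 30, 'barney': 40 }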
+ * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 }, + * { 'name': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _.chain(characters) + * .sortBy('age') + * .map(function(chr) { return chr.name + ' is ' + chr.age; }) + * .first() + * .value(); + * // => 'pebbles is 1' + */ +function chain(value) { + value = new lodashWrapper(value); + value.__chain__ = true; + return value; +} + +module.exports = chain; diff --git a/node_modules/lodash-node/compat/chaining/tap.js b/node_modules/lodash-node/compat/chaining/tap.js new file mode 100644 index 0000000..0be66d3 --- /dev/null +++ b/node_modules/lodash-node/compat/chaining/tap.js @@ -0,0 +1,35 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Invokes `interceptor` with the `value` as the first argument and then + * returns `value`. The purpose of this method is to "tap into" a method + * chain in order to perform operations on intermediate results within + * the chain. + * + * @static + * @memberOf _ + * @category Chaining + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3, 4]) + * .tap(function(array) { array.pop(); }) + * .reverse() + * .value(); + * // => [3, 2, 1] + */ +function tap(value, interceptor) { + interceptor(value); + return value; +} + +module.exports = tap; diff --git a/node_modules/lodash-node/compat/chaining/wrapperChain.js b/node_modules/lodash-node/compat/chaining/wrapperChain.js new file mode 100644 index 0000000..e6bf9a8 --- /dev/null +++ b/node_modules/lodash-node/compat/chaining/wrapperChain.js @@ -0,0 +1,40 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Enables explicit method chaining on the wrapper object. + * + * @name chain + * @memberOf _ + * @category Chaining + * @returns {*} Returns the wrapper object. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * // without explicit chaining + * _(characters).first(); + * // => { 'name': 'barney', 'age': 36 } + * + * // with explicit chaining + * _(characters).chain() + * .first() + * .pick('age') + * .value(); + * // => { 'age': 36 } + */ +function wrapperChain() { + this.__chain__ = true; + return this; +} + +module.exports = wrapperChain; diff --git a/node_modules/lodash-node/compat/chaining/wrapperToString.js b/node_modules/lodash-node/compat/chaining/wrapperToString.js new file mode 100644 index 0000000..393d86d --- /dev/null +++ b/node_modules/lodash-node/compat/chaining/wrapperToString.js @@ -0,0 +1,26 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Produces the `toString` result of the wrapped value. 
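// [Editor's aside -- illustrative sketch, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] `chain` and `tap` compose as
// documented above: `tap` mutates the intermediate value and passes it along the
// explicit chain.
_.chain([1, 2, 3, 4])
  .tap(function(array) { array.pop(); })
  .reverse()
  .value();
// => [3, 2, 1]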
+ * + * @name toString + * @memberOf _ + * @category Chaining + * @returns {string} Returns the string result. + * @example + * + * _([1, 2, 3]).toString(); + * // => '1,2,3' + */ +function wrapperToString() { + return String(this.__wrapped__); +} + +module.exports = wrapperToString; diff --git a/node_modules/lodash-node/compat/chaining/wrapperValueOf.js b/node_modules/lodash-node/compat/chaining/wrapperValueOf.js new file mode 100644 index 0000000..96bd2f4 --- /dev/null +++ b/node_modules/lodash-node/compat/chaining/wrapperValueOf.js @@ -0,0 +1,28 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var support = require('../support'); + +/** + * Extracts the wrapped value. + * + * @name valueOf + * @memberOf _ + * @alias value + * @category Chaining + * @returns {*} Returns the wrapped value. + * @example + * + * _([1, 2, 3]).valueOf(); + * // => [1, 2, 3] + */ +function wrapperValueOf() { + return this.__wrapped__; +} + +module.exports = wrapperValueOf; diff --git a/node_modules/lodash-node/compat/collections.js b/node_modules/lodash-node/compat/collections.js new file mode 100644 index 0000000..5d2fc99 --- /dev/null +++ b/node_modules/lodash-node/compat/collections.js @@ -0,0 +1,49 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'all': require('./collections/every'), + 'any': require('./collections/some'), + 'at': require('./collections/at'), + 'collect': require('./collections/map'), + 'contains': require('./collections/contains'), + 'countBy': require('./collections/countBy'), + 'detect': require('./collections/find'), + 'each': require('./collections/forEach'), + 'eachRight': require('./collections/forEachRight'), + 'every': require('./collections/every'), + 'filter': require('./collections/filter'), + 'find': require('./collections/find'), + 'findLast': require('./collections/findLast'), + 'findWhere': require('./collections/find'), + 'foldl': require('./collections/reduce'), + 'foldr': require('./collections/reduceRight'), + 'forEach': require('./collections/forEach'), + 'forEachRight': require('./collections/forEachRight'), + 'groupBy': require('./collections/groupBy'), + 'include': require('./collections/contains'), + 'indexBy': require('./collections/indexBy'), + 'inject': require('./collections/reduce'), + 'invoke': require('./collections/invoke'), + 'map': require('./collections/map'), + 'max': require('./collections/max'), + 'min': require('./collections/min'), + 'pluck': require('./collections/pluck'), + 'reduce': require('./collections/reduce'), + 'reduceRight': require('./collections/reduceRight'), + 'reject': require('./collections/reject'), + 'sample': require('./collections/sample'), + 'select': require('./collections/filter'), + 'shuffle': require('./collections/shuffle'), + 'size': require('./collections/size'), + 'some': require('./collections/some'), + 'sortBy': require('./collections/sortBy'), + 'toArray': require('./collections/toArray'), + 'where': require('./collections/where') +}; diff --git 
a/node_modules/lodash-node/compat/collections/at.js b/node_modules/lodash-node/compat/collections/at.js new file mode 100644 index 0000000..a994bb7 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/at.js @@ -0,0 +1,50 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseFlatten = require('../internals/baseFlatten'), + isString = require('../objects/isString'), + support = require('../support'); + +/** + * Creates an array of elements from the specified indexes, or keys, of the + * `collection`. Indexes may be specified as individual arguments or as arrays + * of indexes. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {...(number|number[]|string|string[])} [index] The indexes of `collection` + * to retrieve, specified as individual indexes or arrays of indexes. + * @returns {Array} Returns a new array of elements corresponding to the + * provided indexes. + * @example + * + * _.at(['a', 'b', 'c', 'd', 'e'], [0, 2, 4]); + * // => ['a', 'c', 'e'] + * + * _.at(['fred', 'barney', 'pebbles'], 0, 2); + * // => ['fred', 'pebbles'] + */ +function at(collection) { + var args = arguments, + index = -1, + props = baseFlatten(args, true, false, 1), + length = (args[2] && args[2][args[1]] === collection) ? 1 : props.length, + result = Array(length); + + if (support.unindexedChars && isString(collection)) { + collection = collection.split(''); + } + while(++index < length) { + result[index] = collection[props[index]]; + } + return result; +} + +module.exports = at; diff --git a/node_modules/lodash-node/compat/collections/contains.js b/node_modules/lodash-node/compat/collections/contains.js new file mode 100644 index 0000000..69e552d --- /dev/null +++ b/node_modules/lodash-node/compat/collections/contains.js @@ -0,0 +1,65 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + baseIndexOf = require('../internals/baseIndexOf'), + isArray = require('../objects/isArray'), + isString = require('../objects/isString'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * Checks if a given value is present in a collection using strict equality + * for comparisons, i.e. `===`. If `fromIndex` is negative, it is used as the + * offset from the end of the collection. + * + * @static + * @memberOf _ + * @alias include + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {*} target The value to check for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {boolean} Returns `true` if the `target` element is found, else `false`. 
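// [Editor's aside -- illustrative only, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] Because `at` flattens its
// index arguments, individual indexes and arrays of indexes may be mixed:
_.at(['a', 'b', 'c', 'd', 'e'], 0, [2, 4]);
// => ['a', 'c', 'e']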
+ * @example + * + * _.contains([1, 2, 3], 1); + * // => true + * + * _.contains([1, 2, 3], 1, 2); + * // => false + * + * _.contains({ 'name': 'fred', 'age': 40 }, 'fred'); + * // => true + * + * _.contains('pebbles', 'eb'); + * // => true + */ +function contains(collection, target, fromIndex) { + var index = -1, + indexOf = baseIndexOf, + length = collection ? collection.length : 0, + result = false; + + fromIndex = (fromIndex < 0 ? nativeMax(0, length + fromIndex) : fromIndex) || 0; + if (isArray(collection)) { + result = indexOf(collection, target, fromIndex) > -1; + } else if (typeof length == 'number') { + result = (isString(collection) ? collection.indexOf(target, fromIndex) : indexOf(collection, target, fromIndex)) > -1; + } else { + baseEach(collection, function(value) { + if (++index >= fromIndex) { + return !(result = value === target); + } + }); + } + return result; +} + +module.exports = contains; diff --git a/node_modules/lodash-node/compat/collections/countBy.js b/node_modules/lodash-node/compat/collections/countBy.js new file mode 100644 index 0000000..ece9ece --- /dev/null +++ b/node_modules/lodash-node/compat/collections/countBy.js @@ -0,0 +1,55 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createAggregator = require('../internals/createAggregator'); + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Creates an object composed of keys generated from the results of running + * each element of `collection` through the callback. The corresponding value + * of each key is the number of times the key was returned by the callback. + * The callback is bound to `thisArg` and invoked with three arguments; + * (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.countBy([4.3, 6.1, 6.4], function(num) { return Math.floor(num); }); + * // => { '4': 1, '6': 2 } + * + * _.countBy([4.3, 6.1, 6.4], function(num) { return this.floor(num); }, Math); + * // => { '4': 1, '6': 2 } + * + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ +var countBy = createAggregator(function(result, value, key) { + (hasOwnProperty.call(result, key) ? 
result[key]++ : result[key] = 1); +}); + +module.exports = countBy; diff --git a/node_modules/lodash-node/compat/collections/every.js b/node_modules/lodash-node/compat/collections/every.js new file mode 100644 index 0000000..633489c --- /dev/null +++ b/node_modules/lodash-node/compat/collections/every.js @@ -0,0 +1,75 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Checks if the given callback returns truey value for **all** elements of + * a collection. The callback is bound to `thisArg` and invoked with three + * arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias all + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {boolean} Returns `true` if all elements passed the callback check, + * else `false`. 
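// [Editor's aside -- illustrative sketch, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] The "_.where" callback
// shorthand also feeds the `countBy` aggregator above; elements would be tallied
// under the 'true'/'false' keys the shorthand returns:
_.countBy([{ 'age': 36 }, { 'age': 40 }, { 'age': 36 }], { 'age': 36 });
// => { 'true': 2, 'false': 1 }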
+ * @example + * + * _.every([true, 1, null, 'yes']); + * // => false + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * // using "_.pluck" callback shorthand + * _.every(characters, 'age'); + * // => true + * + * // using "_.where" callback shorthand + * _.every(characters, { 'age': 36 }); + * // => false + */ +function every(collection, callback, thisArg) { + var result = true; + callback = createCallback(callback, thisArg, 3); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + if (!(result = !!callback(collection[index], index, collection))) { + break; + } + } + } else { + baseEach(collection, function(value, index, collection) { + return (result = !!callback(value, index, collection)); + }); + } + return result; +} + +module.exports = every; diff --git a/node_modules/lodash-node/compat/collections/filter.js b/node_modules/lodash-node/compat/collections/filter.js new file mode 100644 index 0000000..30c351e --- /dev/null +++ b/node_modules/lodash-node/compat/collections/filter.js @@ -0,0 +1,77 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Iterates over elements of a collection, returning an array of all elements + * the callback returns truey for. The callback is bound to `thisArg` and + * invoked with three arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias select + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new array of elements that passed the callback check. 
+ * @example + * + * var evens = _.filter([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); + * // => [2, 4, 6] + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': false }, + * { 'name': 'fred', 'age': 40, 'blocked': true } + * ]; + * + * // using "_.pluck" callback shorthand + * _.filter(characters, 'blocked'); + * // => [{ 'name': 'fred', 'age': 40, 'blocked': true }] + * + * // using "_.where" callback shorthand + * _.filter(characters, { 'age': 36 }); + * // => [{ 'name': 'barney', 'age': 36, 'blocked': false }] + */ +function filter(collection, callback, thisArg) { + var result = []; + callback = createCallback(callback, thisArg, 3); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + var value = collection[index]; + if (callback(value, index, collection)) { + result.push(value); + } + } + } else { + baseEach(collection, function(value, index, collection) { + if (callback(value, index, collection)) { + result.push(value); + } + }); + } + return result; +} + +module.exports = filter; diff --git a/node_modules/lodash-node/compat/collections/find.js b/node_modules/lodash-node/compat/collections/find.js new file mode 100644 index 0000000..59f9853 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/find.js @@ -0,0 +1,81 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Iterates over elements of a collection, returning the first element that + * the callback returns truey for. The callback is bound to `thisArg` and + * invoked with three arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias detect, findWhere + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the found element, else `undefined`. 
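// [Editor's aside -- illustrative only, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] `filter` above returns every
// matching element, while `find` stops at the first one:
var characters = [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }];
_.filter(characters, function(chr) { return chr.age > 30; });
// => [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }]
_.find(characters, function(chr) { return chr.age > 30; });
// => { 'name': 'barney', 'age': 36 }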
+ * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': false }, + * { 'name': 'fred', 'age': 40, 'blocked': true }, + * { 'name': 'pebbles', 'age': 1, 'blocked': false } + * ]; + * + * _.find(characters, function(chr) { + * return chr.age < 40; + * }); + * // => { 'name': 'barney', 'age': 36, 'blocked': false } + * + * // using "_.where" callback shorthand + * _.find(characters, { 'age': 1 }); + * // => { 'name': 'pebbles', 'age': 1, 'blocked': false } + * + * // using "_.pluck" callback shorthand + * _.find(characters, 'blocked'); + * // => { 'name': 'fred', 'age': 40, 'blocked': true } + */ +function find(collection, callback, thisArg) { + callback = createCallback(callback, thisArg, 3); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + var value = collection[index]; + if (callback(value, index, collection)) { + return value; + } + } + } else { + var result; + baseEach(collection, function(value, index, collection) { + if (callback(value, index, collection)) { + result = value; + return false; + } + }); + return result; + } +} + +module.exports = find; diff --git a/node_modules/lodash-node/compat/collections/findLast.js b/node_modules/lodash-node/compat/collections/findLast.js new file mode 100644 index 0000000..43d18e8 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/findLast.js @@ -0,0 +1,44 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + forEachRight = require('./forEachRight'); + +/** + * This method is like `_.find` except that it iterates over elements + * of a `collection` from right to left. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the found element, else `undefined`. 
+ * @example + * + * _.findLast([1, 2, 3, 4], function(num) { + * return num % 2 == 1; + * }); + * // => 3 + */ +function findLast(collection, callback, thisArg) { + var result; + callback = createCallback(callback, thisArg, 3); + forEachRight(collection, function(value, index, collection) { + if (callback(value, index, collection)) { + result = value; + return false; + } + }); + return result; +} + +module.exports = findLast; diff --git a/node_modules/lodash-node/compat/collections/forEach.js b/node_modules/lodash-node/compat/collections/forEach.js new file mode 100644 index 0000000..4f89582 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/forEach.js @@ -0,0 +1,55 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + baseEach = require('../internals/baseEach'), + isArray = require('../objects/isArray'); + +/** + * Iterates over elements of a collection, executing the callback for each + * element. The callback is bound to `thisArg` and invoked with three arguments; + * (value, index|key, collection). Callbacks may exit iteration early by + * explicitly returning `false`. + * + * Note: As with other "Collections" methods, objects with a `length` property + * are iterated like arrays. To avoid this behavior `_.forIn` or `_.forOwn` + * may be used for object iteration. + * + * @static + * @memberOf _ + * @alias each + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array|Object|string} Returns `collection`. 
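// [Editor's aside -- illustrative only, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] `findLast` above walks the
// collection from the right, so it returns the last matching element rather than
// the first:
_.find([1, 2, 3, 4], function(num) { return num % 2 == 1; });
// => 1
_.findLast([1, 2, 3, 4], function(num) { return num % 2 == 1; });
// => 3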
+ * @example + * + * _([1, 2, 3]).forEach(function(num) { console.log(num); }).join(','); + * // => logs each number and returns '1,2,3' + * + * _.forEach({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { console.log(num); }); + * // => logs each number and returns the object (property order is not guaranteed across environments) + */ +function forEach(collection, callback, thisArg) { + if (callback && typeof thisArg == 'undefined' && isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + if (callback(collection[index], index, collection) === false) { + break; + } + } + } else { + baseEach(collection, callback, thisArg); + } + return collection; +} + +module.exports = forEach; diff --git a/node_modules/lodash-node/compat/collections/forEachRight.js b/node_modules/lodash-node/compat/collections/forEachRight.js new file mode 100644 index 0000000..fc003a1 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/forEachRight.js @@ -0,0 +1,59 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + baseEach = require('../internals/baseEach'), + isArray = require('../objects/isArray'), + isString = require('../objects/isString'), + keys = require('../objects/keys'), + support = require('../support'); + +/** + * This method is like `_.forEach` except that it iterates over elements + * of a `collection` from right to left. + * + * @static + * @memberOf _ + * @alias eachRight + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array|Object|string} Returns `collection`. + * @example + * + * _([1, 2, 3]).forEachRight(function(num) { console.log(num); }).join(','); + * // => logs each number from right to left and returns '3,2,1' + */ +function forEachRight(collection, callback, thisArg) { + var iterable = collection, + length = collection ? collection.length : 0; + + callback = callback && typeof thisArg == 'undefined' ? callback : baseCreateCallback(callback, thisArg, 3); + if (isArray(collection)) { + while (length--) { + if (callback(collection[length], length, collection) === false) { + break; + } + } + } else { + if (typeof length != 'number') { + var props = keys(collection); + length = props.length; + } else if (support.unindexedChars && isString(collection)) { + iterable = collection.split(''); + } + baseEach(collection, function(value, key, collection) { + key = props ? 
props[--length] : --length; + return callback(iterable[key], key, collection); + }); + } + return collection; +} + +module.exports = forEachRight; diff --git a/node_modules/lodash-node/compat/collections/groupBy.js b/node_modules/lodash-node/compat/collections/groupBy.js new file mode 100644 index 0000000..90756c4 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/groupBy.js @@ -0,0 +1,56 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createAggregator = require('../internals/createAggregator'); + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Creates an object composed of keys generated from the results of running + * each element of a collection through the callback. The corresponding value + * of each key is an array of the elements responsible for generating the key. + * The callback is bound to `thisArg` and invoked with three arguments; + * (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false` + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([4.2, 6.1, 6.4], function(num) { return Math.floor(num); }); + * // => { '4': [4.2], '6': [6.1, 6.4] } + * + * _.groupBy([4.2, 6.1, 6.4], function(num) { return this.floor(num); }, Math); + * // => { '4': [4.2], '6': [6.1, 6.4] } + * + * // using "_.pluck" callback shorthand + * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ +var groupBy = createAggregator(function(result, value, key) { + (hasOwnProperty.call(result, key) ? result[key] : result[key] = []).push(value); +}); + +module.exports = groupBy; diff --git a/node_modules/lodash-node/compat/collections/indexBy.js b/node_modules/lodash-node/compat/collections/indexBy.js new file mode 100644 index 0000000..b8c277e --- /dev/null +++ b/node_modules/lodash-node/compat/collections/indexBy.js @@ -0,0 +1,54 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createAggregator = require('../internals/createAggregator'); + +/** + * Creates an object composed of keys generated from the results of running + * each element of the collection through the given callback. 
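// [Editor's aside -- illustrative sketch, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] Like `forEach`, the
// `forEachRight` shown above should exit early when the callback explicitly
// returns `false`:
var seen = [];
_.forEachRight([1, 2, 3, 4], function(num) { seen.push(num); return num != 3; });
// seen => [4, 3]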
The corresponding + * value of each key is the last element responsible for generating the key. + * The callback is bound to `thisArg` and invoked with three arguments; + * (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * var keys = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.indexBy(keys, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + * + * _.indexBy(keys, function(key) { return String.fromCharCode(key.code); }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.indexBy(characters, function(key) { this.fromCharCode(key.code); }, String); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + */ +var indexBy = createAggregator(function(result, value, key) { + result[key] = value; +}); + +module.exports = indexBy; diff --git a/node_modules/lodash-node/compat/collections/invoke.js b/node_modules/lodash-node/compat/collections/invoke.js new file mode 100644 index 0000000..17de1ee --- /dev/null +++ b/node_modules/lodash-node/compat/collections/invoke.js @@ -0,0 +1,47 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forEach = require('./forEach'), + slice = require('../internals/slice'); + +/** + * Invokes the method named by `methodName` on each element in the `collection` + * returning an array of the results of each invoked method. Additional arguments + * will be provided to each invoked method. If `methodName` is a function it + * will be invoked for, and `this` bound to, each element in the `collection`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|string} methodName The name of the method to invoke or + * the function invoked per iteration. + * @param {...*} [arg] Arguments to invoke the method with. + * @returns {Array} Returns a new array of the results of each invoked method. + * @example + * + * _.invoke([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invoke([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ +function invoke(collection, methodName) { + var args = slice(arguments, 2), + index = -1, + isFunc = typeof methodName == 'function', + length = collection ? collection.length : 0, + result = Array(typeof length == 'number' ? 
length : 0); + + forEach(collection, function(value) { + result[++index] = (isFunc ? methodName : value[methodName]).apply(value, args); + }); + return result; +} + +module.exports = invoke; diff --git a/node_modules/lodash-node/compat/collections/map.js b/node_modules/lodash-node/compat/collections/map.js new file mode 100644 index 0000000..cd371e8 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/map.js @@ -0,0 +1,70 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Creates an array of values by running each element in the collection + * through the callback. The callback is bound to `thisArg` and invoked with + * three arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias collect + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new array of the results of each `callback` execution. + * @example + * + * _.map([1, 2, 3], function(num) { return num * 3; }); + * // => [3, 6, 9] + * + * _.map({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { return num * 3; }); + * // => [3, 6, 9] (property order is not guaranteed across environments) + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * // using "_.pluck" callback shorthand + * _.map(characters, 'name'); + * // => ['barney', 'fred'] + */ +function map(collection, callback, thisArg) { + var index = -1, + length = collection ? collection.length : 0, + result = Array(typeof length == 'number' ? 
length : 0); + + callback = createCallback(callback, thisArg, 3); + if (isArray(collection)) { + while (++index < length) { + result[index] = callback(collection[index], index, collection); + } + } else { + baseEach(collection, function(value, key, collection) { + result[++index] = callback(value, key, collection); + }); + } + return result; +} + +module.exports = map; diff --git a/node_modules/lodash-node/compat/collections/max.js b/node_modules/lodash-node/compat/collections/max.js new file mode 100644 index 0000000..8547613 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/max.js @@ -0,0 +1,90 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + charAtCallback = require('../internals/charAtCallback'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'), + isString = require('../objects/isString'); + +/** + * Retrieves the maximum value of a collection. If the collection is empty or + * falsey `-Infinity` is returned. If a callback is provided it will be executed + * for each value in the collection to generate the criterion by which the value + * is ranked. The callback is bound to `thisArg` and invoked with three + * arguments; (value, index, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the maximum value. + * @example + * + * _.max([4, 2, 8, 6]); + * // => 8 + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * _.max(characters, function(chr) { return chr.age; }); + * // => { 'name': 'fred', 'age': 40 }; + * + * // using "_.pluck" callback shorthand + * _.max(characters, 'age'); + * // => { 'name': 'fred', 'age': 40 }; + */ +function max(collection, callback, thisArg) { + var computed = -Infinity, + result = computed; + + // allows working with functions like `_.map` without using + // their `index` argument as a callback + if (typeof callback != 'function' && thisArg && thisArg[callback] === collection) { + callback = null; + } + if (callback == null && isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + var value = collection[index]; + if (value > result) { + result = value; + } + } + } else { + callback = (callback == null && isString(collection)) + ? 
charAtCallback + : createCallback(callback, thisArg, 3); + + baseEach(collection, function(value, index, collection) { + var current = callback(value, index, collection); + if (current > computed) { + computed = current; + result = value; + } + }); + } + return result; +} + +module.exports = max; diff --git a/node_modules/lodash-node/compat/collections/min.js b/node_modules/lodash-node/compat/collections/min.js new file mode 100644 index 0000000..925f32c --- /dev/null +++ b/node_modules/lodash-node/compat/collections/min.js @@ -0,0 +1,90 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + charAtCallback = require('../internals/charAtCallback'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'), + isString = require('../objects/isString'); + +/** + * Retrieves the minimum value of a collection. If the collection is empty or + * falsey `Infinity` is returned. If a callback is provided it will be executed + * for each value in the collection to generate the criterion by which the value + * is ranked. The callback is bound to `thisArg` and invoked with three + * arguments; (value, index, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the minimum value. + * @example + * + * _.min([4, 2, 8, 6]); + * // => 2 + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * _.min(characters, function(chr) { return chr.age; }); + * // => { 'name': 'barney', 'age': 36 }; + * + * // using "_.pluck" callback shorthand + * _.min(characters, 'age'); + * // => { 'name': 'barney', 'age': 36 }; + */ +function min(collection, callback, thisArg) { + var computed = Infinity, + result = computed; + + // allows working with functions like `_.map` without using + // their `index` argument as a callback + if (typeof callback != 'function' && thisArg && thisArg[callback] === collection) { + callback = null; + } + if (callback == null && isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + var value = collection[index]; + if (value < result) { + result = value; + } + } + } else { + callback = (callback == null && isString(collection)) + ? 
charAtCallback + : createCallback(callback, thisArg, 3); + + baseEach(collection, function(value, index, collection) { + var current = callback(value, index, collection); + if (current < computed) { + computed = current; + result = value; + } + }); + } + return result; +} + +module.exports = min; diff --git a/node_modules/lodash-node/compat/collections/pluck.js b/node_modules/lodash-node/compat/collections/pluck.js new file mode 100644 index 0000000..8ef0be0 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/pluck.js @@ -0,0 +1,33 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var map = require('./map'); + +/** + * Retrieves the value of a specified property from all elements in the collection. + * + * @static + * @memberOf _ + * @type Function + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {string} property The name of the property to pluck. + * @returns {Array} Returns a new array of property values. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * _.pluck(characters, 'name'); + * // => ['barney', 'fred'] + */ +var pluck = map; + +module.exports = pluck; diff --git a/node_modules/lodash-node/compat/collections/reduce.js b/node_modules/lodash-node/compat/collections/reduce.js new file mode 100644 index 0000000..fb7854b --- /dev/null +++ b/node_modules/lodash-node/compat/collections/reduce.js @@ -0,0 +1,67 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Reduces a collection to a value which is the accumulated result of running + * each element in the collection through the callback, where each successive + * callback execution consumes the return value of the previous execution. If + * `accumulator` is not provided the first element of the collection will be + * used as the initial `accumulator` value. The callback is bound to `thisArg` + * and invoked with four arguments; (accumulator, value, index|key, collection). + * + * @static + * @memberOf _ + * @alias foldl, inject + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [accumulator] Initial value of the accumulator. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the accumulated value. 
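// [Editor's aside -- illustrative only, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] When no `accumulator` is
// supplied, `reduce` above seeds it with the first element:
_.reduce([1, 2, 3, 4], function(sum, num) { return sum + num; });
// => 10 (seeded with 1)
_.reduce([1, 2, 3, 4], function(sum, num) { return sum + num; }, 100);
// => 110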
+ * @example + * + * var sum = _.reduce([1, 2, 3], function(sum, num) { + * return sum + num; + * }); + * // => 6 + * + * var mapped = _.reduce({ 'a': 1, 'b': 2, 'c': 3 }, function(result, num, key) { + * result[key] = num * 3; + * return result; + * }, {}); + * // => { 'a': 3, 'b': 6, 'c': 9 } + */ +function reduce(collection, callback, accumulator, thisArg) { + var noaccum = arguments.length < 3; + callback = createCallback(callback, thisArg, 4); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + if (noaccum) { + accumulator = collection[++index]; + } + while (++index < length) { + accumulator = callback(accumulator, collection[index], index, collection); + } + } else { + baseEach(collection, function(value, index, collection) { + accumulator = noaccum + ? (noaccum = false, value) + : callback(accumulator, value, index, collection) + }); + } + return accumulator; +} + +module.exports = reduce; diff --git a/node_modules/lodash-node/compat/collections/reduceRight.js b/node_modules/lodash-node/compat/collections/reduceRight.js new file mode 100644 index 0000000..58625ea --- /dev/null +++ b/node_modules/lodash-node/compat/collections/reduceRight.js @@ -0,0 +1,42 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + forEachRight = require('./forEachRight'); + +/** + * This method is like `_.reduce` except that it iterates over elements + * of a `collection` from right to left. + * + * @static + * @memberOf _ + * @alias foldr + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [accumulator] Initial value of the accumulator. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the accumulated value. + * @example + * + * var list = [[0, 1], [2, 3], [4, 5]]; + * var flat = _.reduceRight(list, function(a, b) { return a.concat(b); }, []); + * // => [4, 5, 2, 3, 0, 1] + */ +function reduceRight(collection, callback, accumulator, thisArg) { + var noaccum = arguments.length < 3; + callback = createCallback(callback, thisArg, 4); + forEachRight(collection, function(value, index, collection) { + accumulator = noaccum + ? (noaccum = false, value) + : callback(accumulator, value, index, collection); + }); + return accumulator; +} + +module.exports = reduceRight; diff --git a/node_modules/lodash-node/compat/collections/reject.js b/node_modules/lodash-node/compat/collections/reject.js new file mode 100644 index 0000000..f33239b --- /dev/null +++ b/node_modules/lodash-node/compat/collections/reject.js @@ -0,0 +1,57 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + filter = require('./filter'); + +/** + * The opposite of `_.filter` this method returns the elements of a + * collection that the callback does **not** return truey for. 
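// [Editor's aside -- illustrative sketch, not part of the vendored lodash source;
// assumes `var _ = require('lodash-node/compat')`.] `reduceRight` above folds from
// the right, which matters for non-commutative operations:
_.reduce(['a', 'b', 'c'], function(acc, chr) { return acc + chr; });
// => 'abc'
_.reduceRight(['a', 'b', 'c'], function(acc, chr) { return acc + chr; });
// => 'cba'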
+ * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new array of elements that failed the callback check. + * @example + * + * var odds = _.reject([1, 2, 3, 4, 5, 6], function(num) { return num % 2 == 0; }); + * // => [1, 3, 5] + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': false }, + * { 'name': 'fred', 'age': 40, 'blocked': true } + * ]; + * + * // using "_.pluck" callback shorthand + * _.reject(characters, 'blocked'); + * // => [{ 'name': 'barney', 'age': 36, 'blocked': false }] + * + * // using "_.where" callback shorthand + * _.reject(characters, { 'age': 36 }); + * // => [{ 'name': 'fred', 'age': 40, 'blocked': true }] + */ +function reject(collection, callback, thisArg) { + callback = createCallback(callback, thisArg, 3); + return filter(collection, function(value, index, collection) { + return !callback(value, index, collection); + }); +} + +module.exports = reject; diff --git a/node_modules/lodash-node/compat/collections/sample.js b/node_modules/lodash-node/compat/collections/sample.js new file mode 100644 index 0000000..481c7e4 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/sample.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseRandom = require('../internals/baseRandom'), + isString = require('../objects/isString'), + shuffle = require('./shuffle'), + support = require('../support'), + values = require('../objects/values'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Retrieves a random element or `n` random elements from a collection. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to sample. + * @param {number} [n] The number of elements to sample. + * @param- {Object} [guard] Allows working with functions like `_.map` + * without using their `index` arguments as `n`. + * @returns {Array} Returns the random sample(s) of `collection`. + * @example + * + * _.sample([1, 2, 3, 4]); + * // => 2 + * + * _.sample([1, 2, 3, 4], 2); + * // => [3, 1] + */ +function sample(collection, n, guard) { + if (collection && typeof collection.length != 'number') { + collection = values(collection); + } else if (support.unindexedChars && isString(collection)) { + collection = collection.split(''); + } + if (n == null || guard) { + return collection ? 
collection[baseRandom(0, collection.length - 1)] : undefined; + } + var result = shuffle(collection); + result.length = nativeMin(nativeMax(0, n), result.length); + return result; +} + +module.exports = sample; diff --git a/node_modules/lodash-node/compat/collections/shuffle.js b/node_modules/lodash-node/compat/collections/shuffle.js new file mode 100644 index 0000000..d37777c --- /dev/null +++ b/node_modules/lodash-node/compat/collections/shuffle.js @@ -0,0 +1,39 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseRandom = require('../internals/baseRandom'), + forEach = require('./forEach'); + +/** + * Creates an array of shuffled values, using a version of the Fisher-Yates + * shuffle. See http://en.wikipedia.org/wiki/Fisher-Yates_shuffle. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to shuffle. + * @returns {Array} Returns a new shuffled collection. + * @example + * + * _.shuffle([1, 2, 3, 4, 5, 6]); + * // => [4, 1, 6, 3, 5, 2] + */ +function shuffle(collection) { + var index = -1, + length = collection ? collection.length : 0, + result = Array(typeof length == 'number' ? length : 0); + + forEach(collection, function(value) { + var rand = baseRandom(0, ++index); + result[index] = result[rand]; + result[rand] = value; + }); + return result; +} + +module.exports = shuffle; diff --git a/node_modules/lodash-node/compat/collections/size.js b/node_modules/lodash-node/compat/collections/size.js new file mode 100644 index 0000000..bfc821f --- /dev/null +++ b/node_modules/lodash-node/compat/collections/size.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('../objects/keys'); + +/** + * Gets the size of the `collection` by returning `collection.length` for arrays + * and array-like objects or the number of own enumerable properties for objects. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns `collection.length` or number of own enumerable properties. + * @example + * + * _.size([1, 2]); + * // => 2 + * + * _.size({ 'one': 1, 'two': 2, 'three': 3 }); + * // => 3 + * + * _.size('pebbles'); + * // => 7 + */ +function size(collection) { + var length = collection ? collection.length : 0; + return typeof length == 'number' ? 
length : keys(collection).length; +} + +module.exports = size; diff --git a/node_modules/lodash-node/compat/collections/some.js b/node_modules/lodash-node/compat/collections/some.js new file mode 100644 index 0000000..c8d7ee5 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/some.js @@ -0,0 +1,76 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Checks if the callback returns a truey value for **any** element of a + * collection. The function returns as soon as it finds a passing value and + * does not iterate over the entire collection. The callback is bound to + * `thisArg` and invoked with three arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @alias any + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {boolean} Returns `true` if any element passed the callback check, + * else `false`. 
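As the description above says, `some` returns as soon as it finds a passing value: for arrays it breaks out of the `while` loop, and for other collections the `baseEach` iteratee returns `false` to stop traversal. A usage sketch against the vendored module path:

    var some = require('lodash-node/compat/collections/some');

    var calls = 0;
    some([0, 1, 2, 3], function(n) { calls += 1; return n > 0; });
    // => true, with `calls` equal to 2, because iteration stopped at the first passing element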
+ * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'blocked': false }, + * { 'name': 'fred', 'age': 40, 'blocked': true } + * ]; + * + * // using "_.pluck" callback shorthand + * _.some(characters, 'blocked'); + * // => true + * + * // using "_.where" callback shorthand + * _.some(characters, { 'age': 1 }); + * // => false + */ +function some(collection, callback, thisArg) { + var result; + callback = createCallback(callback, thisArg, 3); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + if ((result = callback(collection[index], index, collection))) { + break; + } + } + } else { + baseEach(collection, function(value, index, collection) { + return !(result = callback(value, index, collection)); + }); + } + return !!result; +} + +module.exports = some; diff --git a/node_modules/lodash-node/compat/collections/sortBy.js b/node_modules/lodash-node/compat/collections/sortBy.js new file mode 100644 index 0000000..777b152 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/sortBy.js @@ -0,0 +1,101 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var compareAscending = require('../internals/compareAscending'), + createCallback = require('../functions/createCallback'), + forEach = require('./forEach'), + getArray = require('../internals/getArray'), + getObject = require('../internals/getObject'), + isArray = require('../objects/isArray'), + map = require('./map'), + releaseArray = require('../internals/releaseArray'), + releaseObject = require('../internals/releaseObject'); + +/** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection through the callback. This method + * performs a stable sort, that is, it will preserve the original sort order + * of equal elements. The callback is bound to `thisArg` and invoked with + * three arguments; (value, index|key, collection). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an array of property names is provided for `callback` the collection + * will be sorted by each property value. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Array|Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new array of sorted elements. 
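Because `sortBy` accepts an array of property names, equal values of the first key fall back to the next one, and the underlying sort is stable. A short sketch of that multi-key form, with the require path taken from this diff:

    var sortBy = require('lodash-node/compat/collections/sortBy');

    var characters = [
      { 'name': 'fred',   'age': 40 },
      { 'name': 'barney', 'age': 36 },
      { 'name': 'barney', 'age': 26 }
    ];

    sortBy(characters, ['name', 'age']);
    // => [{ name: 'barney', age: 26 }, { name: 'barney', age: 36 }, { name: 'fred', age: 40 }]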
+ * @example + * + * _.sortBy([1, 2, 3], function(num) { return Math.sin(num); }); + * // => [3, 1, 2] + * + * _.sortBy([1, 2, 3], function(num) { return this.sin(num); }, Math); + * // => [3, 1, 2] + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 }, + * { 'name': 'barney', 'age': 26 }, + * { 'name': 'fred', 'age': 30 } + * ]; + * + * // using "_.pluck" callback shorthand + * _.map(_.sortBy(characters, 'age'), _.values); + * // => [['barney', 26], ['fred', 30], ['barney', 36], ['fred', 40]] + * + * // sorting by multiple properties + * _.map(_.sortBy(characters, ['name', 'age']), _.values); + * // = > [['barney', 26], ['barney', 36], ['fred', 30], ['fred', 40]] + */ +function sortBy(collection, callback, thisArg) { + var index = -1, + isArr = isArray(callback), + length = collection ? collection.length : 0, + result = Array(typeof length == 'number' ? length : 0); + + if (!isArr) { + callback = createCallback(callback, thisArg, 3); + } + forEach(collection, function(value, key, collection) { + var object = result[++index] = getObject(); + if (isArr) { + object.criteria = map(callback, function(key) { return value[key]; }); + } else { + (object.criteria = getArray())[0] = callback(value, key, collection); + } + object.index = index; + object.value = value; + }); + + length = result.length; + result.sort(compareAscending); + while (length--) { + var object = result[length]; + result[length] = object.value; + if (!isArr) { + releaseArray(object.criteria); + } + releaseObject(object); + } + return result; +} + +module.exports = sortBy; diff --git a/node_modules/lodash-node/compat/collections/toArray.js b/node_modules/lodash-node/compat/collections/toArray.js new file mode 100644 index 0000000..223cf21 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/toArray.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isString = require('../objects/isString'), + slice = require('../internals/slice'), + support = require('../support'), + values = require('../objects/values'); + +/** + * Converts the `collection` to an array. + * + * @static + * @memberOf _ + * @category Collections + * @param {Array|Object|string} collection The collection to convert. + * @returns {Array} Returns the new converted array. + * @example + * + * (function() { return _.toArray(arguments).slice(1); })(1, 2, 3, 4); + * // => [2, 3, 4] + */ +function toArray(collection) { + if (collection && typeof collection.length == 'number') { + return (support.unindexedChars && isString(collection)) + ? 
collection.split('') + : slice(collection); + } + return values(collection); +} + +module.exports = toArray; diff --git a/node_modules/lodash-node/compat/collections/where.js b/node_modules/lodash-node/compat/collections/where.js new file mode 100644 index 0000000..c035b72 --- /dev/null +++ b/node_modules/lodash-node/compat/collections/where.js @@ -0,0 +1,38 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var filter = require('./filter'); + +/** + * Performs a deep comparison of each element in a `collection` to the given + * `properties` object, returning an array of all elements that have equivalent + * property values. + * + * @static + * @memberOf _ + * @type Function + * @category Collections + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Object} props The object of property values to filter by. + * @returns {Array} Returns a new array of elements that have the given properties. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36, 'pets': ['hoppy'] }, + * { 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] } + * ]; + * + * _.where(characters, { 'age': 36 }); + * // => [{ 'name': 'barney', 'age': 36, 'pets': ['hoppy'] }] + * + * _.where(characters, { 'pets': ['dino'] }); + * // => [{ 'name': 'fred', 'age': 40, 'pets': ['baby puss', 'dino'] }] + */ +var where = filter; + +module.exports = where; diff --git a/node_modules/lodash-node/compat/functions.js b/node_modules/lodash-node/compat/functions.js new file mode 100644 index 0000000..084bdd4 --- /dev/null +++ b/node_modules/lodash-node/compat/functions.js @@ -0,0 +1,27 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'after': require('./functions/after'), + 'bind': require('./functions/bind'), + 'bindAll': require('./functions/bindAll'), + 'bindKey': require('./functions/bindKey'), + 'compose': require('./functions/compose'), + 'createCallback': require('./functions/createCallback'), + 'curry': require('./functions/curry'), + 'debounce': require('./functions/debounce'), + 'defer': require('./functions/defer'), + 'delay': require('./functions/delay'), + 'memoize': require('./functions/memoize'), + 'once': require('./functions/once'), + 'partial': require('./functions/partial'), + 'partialRight': require('./functions/partialRight'), + 'throttle': require('./functions/throttle'), + 'wrap': require('./functions/wrap') +}; diff --git a/node_modules/lodash-node/compat/functions/after.js b/node_modules/lodash-node/compat/functions/after.js new file mode 100644 index 0000000..48ef88e --- /dev/null +++ b/node_modules/lodash-node/compat/functions/after.js @@ -0,0 +1,46 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'); + +/** + * Creates a 
function that executes `func`, with the `this` binding and + * arguments of the created function, only after being called `n` times. + * + * @static + * @memberOf _ + * @category Functions + * @param {number} n The number of times the function must be called before + * `func` is executed. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('Done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => logs 'Done saving!', after all saves have completed + */ +function after(n, func) { + if (!isFunction(func)) { + throw new TypeError; + } + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; +} + +module.exports = after; diff --git a/node_modules/lodash-node/compat/functions/bind.js b/node_modules/lodash-node/compat/functions/bind.js new file mode 100644 index 0000000..42284b3 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/bind.js @@ -0,0 +1,41 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'), + slice = require('../internals/slice'), + support = require('../support'); + +/** + * Creates a function that, when called, invokes `func` with the `this` + * binding of `thisArg` and prepends any additional `bind` arguments to those + * provided to the bound function. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to bind. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {...*} [arg] Arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var func = function(greeting) { + * return greeting + ' ' + this.name; + * }; + * + * func = _.bind(func, { 'name': 'fred' }, 'hi'); + * func(); + * // => 'hi fred' + */ +function bind(func, thisArg) { + return arguments.length > 2 + ? createWrapper(func, 17, slice(arguments, 2), null, thisArg) + : createWrapper(func, 1, null, null, thisArg); +} + +module.exports = bind; diff --git a/node_modules/lodash-node/compat/functions/bindAll.js b/node_modules/lodash-node/compat/functions/bindAll.js new file mode 100644 index 0000000..f62d906 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/bindAll.js @@ -0,0 +1,49 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseFlatten = require('../internals/baseFlatten'), + createWrapper = require('../internals/createWrapper'), + functions = require('../objects/functions'); + +/** + * Binds methods of an object to the object itself, overwriting the existing + * method. Method names may be specified as individual arguments or as arrays + * of method names. If no method names are provided all the function properties + * of `object` will be bound. 
+ * + * @static + * @memberOf _ + * @category Functions + * @param {Object} object The object to bind and assign the bound methods to. + * @param {...string} [methodName] The object method names to + * bind, specified as individual method names or arrays of method names. + * @returns {Object} Returns `object`. + * @example + * + * var view = { + * 'label': 'docs', + * 'onClick': function() { console.log('clicked ' + this.label); } + * }; + * + * _.bindAll(view); + * jQuery('#docs').on('click', view.onClick); + * // => logs 'clicked docs', when the button is clicked + */ +function bindAll(object) { + var funcs = arguments.length > 1 ? baseFlatten(arguments, true, false, 1) : functions(object), + index = -1, + length = funcs.length; + + while (++index < length) { + var key = funcs[index]; + object[key] = createWrapper(object[key], 1, null, null, object); + } + return object; +} + +module.exports = bindAll; diff --git a/node_modules/lodash-node/compat/functions/bindKey.js b/node_modules/lodash-node/compat/functions/bindKey.js new file mode 100644 index 0000000..7e07139 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/bindKey.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'), + slice = require('../internals/slice'); + +/** + * Creates a function that, when called, invokes the method at `object[key]` + * and prepends any additional `bindKey` arguments to those provided to the bound + * function. This method differs from `_.bind` by allowing bound functions to + * reference methods that will be redefined or don't yet exist. + * See http://michaux.ca/articles/lazy-function-definition-pattern. + * + * @static + * @memberOf _ + * @category Functions + * @param {Object} object The object the method belongs to. + * @param {string} key The key of the method. + * @param {...*} [arg] Arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'name': 'fred', + * 'greet': function(greeting) { + * return greeting + ' ' + this.name; + * } + * }; + * + * var func = _.bindKey(object, 'greet', 'hi'); + * func(); + * // => 'hi fred' + * + * object.greet = function(greeting) { + * return greeting + 'ya ' + this.name + '!'; + * }; + * + * func(); + * // => 'hiya fred!' + */ +function bindKey(object, key) { + return arguments.length > 2 + ? 
createWrapper(key, 19, slice(arguments, 2), null, object) + : createWrapper(key, 3, null, null, object); +} + +module.exports = bindKey; diff --git a/node_modules/lodash-node/compat/functions/compose.js b/node_modules/lodash-node/compat/functions/compose.js new file mode 100644 index 0000000..2a70d65 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/compose.js @@ -0,0 +1,61 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'); + +/** + * Creates a function that is the composition of the provided functions, + * where each function consumes the return value of the function that follows. + * For example, composing the functions `f()`, `g()`, and `h()` produces `f(g(h()))`. + * Each function is executed with the `this` binding of the composed function. + * + * @static + * @memberOf _ + * @category Functions + * @param {...Function} [func] Functions to compose. + * @returns {Function} Returns the new composed function. + * @example + * + * var realNameMap = { + * 'pebbles': 'penelope' + * }; + * + * var format = function(name) { + * name = realNameMap[name.toLowerCase()] || name; + * return name.charAt(0).toUpperCase() + name.slice(1).toLowerCase(); + * }; + * + * var greet = function(formatted) { + * return 'Hiya ' + formatted + '!'; + * }; + * + * var welcome = _.compose(greet, format); + * welcome('pebbles'); + * // => 'Hiya Penelope!' + */ +function compose() { + var funcs = arguments, + length = funcs.length; + + while (length--) { + if (!isFunction(funcs[length])) { + throw new TypeError; + } + } + return function() { + var args = arguments, + length = funcs.length; + + while (length--) { + args = [funcs[length].apply(this, args)]; + } + return args[0]; + }; +} + +module.exports = compose; diff --git a/node_modules/lodash-node/compat/functions/createCallback.js b/node_modules/lodash-node/compat/functions/createCallback.js new file mode 100644 index 0000000..460b80e --- /dev/null +++ b/node_modules/lodash-node/compat/functions/createCallback.js @@ -0,0 +1,81 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + baseIsEqual = require('../internals/baseIsEqual'), + isObject = require('../objects/isObject'), + keys = require('../objects/keys'), + property = require('../utilities/property'); + +/** + * Produces a callback bound to an optional `thisArg`. If `func` is a property + * name the created callback will return the property value for a given element. + * If `func` is an object the created callback will return `true` for elements + * that contain the equivalent object properties, otherwise it will return `false`. + * + * @static + * @memberOf _ + * @category Utilities + * @param {*} [func=identity] The value to convert to a callback. + * @param {*} [thisArg] The `this` binding of the created callback. + * @param {number} [argCount] The number of arguments the callback accepts. + * @returns {Function} Returns a callback function. 
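`createCallback` is the piece that turns the `'property'` and `{ properties }` shorthands accepted by the collection methods in this diff into real iterator functions; a sketch of its three dispatch paths, using the vendored module path:

    var createCallback = require('lodash-node/compat/functions/createCallback');

    createCallback(function(n) { return n * 2; })(21); // => 42 (functions pass through)
    createCallback('name')({ 'name': 'fred' });        // => 'fred' ("_.pluck" style)
    createCallback({ 'age': 36 })({ 'age': 36 });       // => true ("_.where" style)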
+ * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * // wrap to create custom callback shorthands + * _.createCallback = _.wrap(_.createCallback, function(func, callback, thisArg) { + * var match = /^(.+?)__([gl]t)(.+)$/.exec(callback); + * return !match ? func(callback, thisArg) : function(object) { + * return match[2] == 'gt' ? object[match[1]] > match[3] : object[match[1]] < match[3]; + * }; + * }); + * + * _.filter(characters, 'age__gt38'); + * // => [{ 'name': 'fred', 'age': 40 }] + */ +function createCallback(func, thisArg, argCount) { + var type = typeof func; + if (func == null || type == 'function') { + return baseCreateCallback(func, thisArg, argCount); + } + // handle "_.pluck" style callback shorthands + if (type != 'object') { + return property(func); + } + var props = keys(func), + key = props[0], + a = func[key]; + + // handle "_.where" style callback shorthands + if (props.length == 1 && a === a && !isObject(a)) { + // fast path the common case of providing an object with a single + // property containing a primitive value + return function(object) { + var b = object[key]; + return a === b && (a !== 0 || (1 / a == 1 / b)); + }; + } + return function(object) { + var length = props.length, + result = false; + + while (length--) { + if (!(result = baseIsEqual(object[props[length]], func[props[length]], null, true))) { + break; + } + } + return result; + }; +} + +module.exports = createCallback; diff --git a/node_modules/lodash-node/compat/functions/curry.js b/node_modules/lodash-node/compat/functions/curry.js new file mode 100644 index 0000000..77fb780 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/curry.js @@ -0,0 +1,44 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'); + +/** + * Creates a function which accepts one or more arguments of `func` that when + * invoked either executes `func` returning its result, if all `func` arguments + * have been provided, or returns a function that accepts one or more of the + * remaining `func` arguments, and so on. The arity of `func` can be specified + * if `func.length` is not sufficient. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @returns {Function} Returns the new curried function. + * @example + * + * var curried = _.curry(function(a, b, c) { + * console.log(a + b + c); + * }); + * + * curried(1)(2)(3); + * // => 6 + * + * curried(1, 2)(3); + * // => 6 + * + * curried(1, 2, 3); + * // => 6 + */ +function curry(func, arity) { + arity = typeof arity == 'number' ? 
arity : (+arity || func.length); + return createWrapper(func, 4, null, null, null, arity); +} + +module.exports = curry; diff --git a/node_modules/lodash-node/compat/functions/debounce.js b/node_modules/lodash-node/compat/functions/debounce.js new file mode 100644 index 0000000..6bc27f4 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/debounce.js @@ -0,0 +1,156 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'), + isObject = require('../objects/isObject'), + now = require('../utilities/now'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMax = Math.max; + +/** + * Creates a function that will delay the execution of `func` until after + * `wait` milliseconds have elapsed since the last time it was invoked. + * Provide an options object to indicate that `func` should be invoked on + * the leading and/or trailing edge of the `wait` timeout. Subsequent calls + * to the debounced function will return the result of the last `func` call. + * + * Note: If `leading` and `trailing` options are `true` `func` will be called + * on the trailing edge of the timeout only if the the debounced function is + * invoked more than once during the `wait` timeout. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to debounce. + * @param {number} wait The number of milliseconds to delay. + * @param {Object} [options] The options object. + * @param {boolean} [options.leading=false] Specify execution on the leading edge of the timeout. + * @param {number} [options.maxWait] The maximum time `func` is allowed to be delayed before it's called. + * @param {boolean} [options.trailing=true] Specify execution on the trailing edge of the timeout. + * @returns {Function} Returns the new debounced function. + * @example + * + * // avoid costly calculations while the window size is in flux + * var lazyLayout = _.debounce(calculateLayout, 150); + * jQuery(window).on('resize', lazyLayout); + * + * // execute `sendMail` when the click event is fired, debouncing subsequent calls + * jQuery('#postbox').on('click', _.debounce(sendMail, 300, { + * 'leading': true, + * 'trailing': false + * }); + * + * // ensure `batchLog` is executed once after 1 second of debounced calls + * var source = new EventSource('/stream'); + * source.addEventListener('message', _.debounce(batchLog, 250, { + * 'maxWait': 1000 + * }, false); + */ +function debounce(func, wait, options) { + var args, + maxTimeoutId, + result, + stamp, + thisArg, + timeoutId, + trailingCall, + lastCalled = 0, + maxWait = false, + trailing = true; + + if (!isFunction(func)) { + throw new TypeError; + } + wait = nativeMax(0, wait) || 0; + if (options === true) { + var leading = true; + trailing = false; + } else if (isObject(options)) { + leading = options.leading; + maxWait = 'maxWait' in options && (nativeMax(wait, options.maxWait) || 0); + trailing = 'trailing' in options ? 
options.trailing : trailing; + } + var delayed = function() { + var remaining = wait - (now() - stamp); + if (remaining <= 0) { + if (maxTimeoutId) { + clearTimeout(maxTimeoutId); + } + var isCalled = trailingCall; + maxTimeoutId = timeoutId = trailingCall = undefined; + if (isCalled) { + lastCalled = now(); + result = func.apply(thisArg, args); + if (!timeoutId && !maxTimeoutId) { + args = thisArg = null; + } + } + } else { + timeoutId = setTimeout(delayed, remaining); + } + }; + + var maxDelayed = function() { + if (timeoutId) { + clearTimeout(timeoutId); + } + maxTimeoutId = timeoutId = trailingCall = undefined; + if (trailing || (maxWait !== wait)) { + lastCalled = now(); + result = func.apply(thisArg, args); + if (!timeoutId && !maxTimeoutId) { + args = thisArg = null; + } + } + }; + + return function() { + args = arguments; + stamp = now(); + thisArg = this; + trailingCall = trailing && (timeoutId || !leading); + + if (maxWait === false) { + var leadingCall = leading && !timeoutId; + } else { + if (!maxTimeoutId && !leading) { + lastCalled = stamp; + } + var remaining = maxWait - (stamp - lastCalled), + isCalled = remaining <= 0; + + if (isCalled) { + if (maxTimeoutId) { + maxTimeoutId = clearTimeout(maxTimeoutId); + } + lastCalled = stamp; + result = func.apply(thisArg, args); + } + else if (!maxTimeoutId) { + maxTimeoutId = setTimeout(maxDelayed, remaining); + } + } + if (isCalled && timeoutId) { + timeoutId = clearTimeout(timeoutId); + } + else if (!timeoutId && wait !== maxWait) { + timeoutId = setTimeout(delayed, wait); + } + if (leadingCall) { + isCalled = true; + result = func.apply(thisArg, args); + } + if (isCalled && !timeoutId && !maxTimeoutId) { + args = thisArg = null; + } + return result; + }; +} + +module.exports = debounce; diff --git a/node_modules/lodash-node/compat/functions/defer.js b/node_modules/lodash-node/compat/functions/defer.js new file mode 100644 index 0000000..7a7bf32 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/defer.js @@ -0,0 +1,35 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'), + slice = require('../internals/slice'); + +/** + * Defers executing the `func` function until the current call stack has cleared. + * Additional arguments will be provided to `func` when it is invoked. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to defer. + * @param {...*} [arg] Arguments to invoke the function with. + * @returns {number} Returns the timer id. 
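`defer` here and `delay` in the next hunk are both thin `setTimeout` wrappers; the only difference is the timeout used (1 ms versus the caller's `wait`), with any extra arguments forwarded to `func`. A brief sketch:

    var defer = require('lodash-node/compat/functions/defer'),
        delay = require('lodash-node/compat/functions/delay');

    defer(function(msg) { console.log(msg); }, 'after the current call stack clears');
    delay(function(msg) { console.log(msg); }, 250, 'after roughly 250 ms');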
+ * @example + * + * _.defer(function(text) { console.log(text); }, 'deferred'); + * // logs 'deferred' after one or more milliseconds + */ +function defer(func) { + if (!isFunction(func)) { + throw new TypeError; + } + var args = slice(arguments, 1); + return setTimeout(function() { func.apply(undefined, args); }, 1); +} + +module.exports = defer; diff --git a/node_modules/lodash-node/compat/functions/delay.js b/node_modules/lodash-node/compat/functions/delay.js new file mode 100644 index 0000000..5a466ae --- /dev/null +++ b/node_modules/lodash-node/compat/functions/delay.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'), + slice = require('../internals/slice'); + +/** + * Executes the `func` function after `wait` milliseconds. Additional arguments + * will be provided to `func` when it is invoked. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay execution. + * @param {...*} [arg] Arguments to invoke the function with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { console.log(text); }, 1000, 'later'); + * // => logs 'later' after one second + */ +function delay(func, wait) { + if (!isFunction(func)) { + throw new TypeError; + } + var args = slice(arguments, 2); + return setTimeout(function() { func.apply(undefined, args); }, wait); +} + +module.exports = delay; diff --git a/node_modules/lodash-node/compat/functions/memoize.js b/node_modules/lodash-node/compat/functions/memoize.js new file mode 100644 index 0000000..4ba8192 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/memoize.js @@ -0,0 +1,71 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'), + keyPrefix = require('../internals/keyPrefix'); + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Creates a function that memoizes the result of `func`. If `resolver` is + * provided it will be used to determine the cache key for storing the result + * based on the arguments provided to the memoized function. By default, the + * first argument provided to the memoized function is used as the cache key. + * The `func` is executed with the `this` binding of the memoized function. + * The result cache is exposed as the `cache` property on the memoized function. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to have its output memoized. + * @param {Function} [resolver] A function used to resolve the cache key. + * @returns {Function} Returns the new memoizing function. + * @example + * + * var fibonacci = _.memoize(function(n) { + * return n < 2 ? 
n : fibonacci(n - 1) + fibonacci(n - 2); + * }); + * + * fibonacci(9) + * // => 34 + * + * var data = { + * 'fred': { 'name': 'fred', 'age': 40 }, + * 'pebbles': { 'name': 'pebbles', 'age': 1 } + * }; + * + * // modifying the result cache + * var get = _.memoize(function(name) { return data[name]; }, _.identity); + * get('pebbles'); + * // => { 'name': 'pebbles', 'age': 1 } + * + * get.cache.pebbles.name = 'penelope'; + * get('pebbles'); + * // => { 'name': 'penelope', 'age': 1 } + */ +function memoize(func, resolver) { + if (!isFunction(func)) { + throw new TypeError; + } + var memoized = function() { + var cache = memoized.cache, + key = resolver ? resolver.apply(this, arguments) : keyPrefix + arguments[0]; + + return hasOwnProperty.call(cache, key) + ? cache[key] + : (cache[key] = func.apply(this, arguments)); + } + memoized.cache = {}; + return memoized; +} + +module.exports = memoize; diff --git a/node_modules/lodash-node/compat/functions/once.js b/node_modules/lodash-node/compat/functions/once.js new file mode 100644 index 0000000..6409810 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/once.js @@ -0,0 +1,48 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'); + +/** + * Creates a function that is restricted to execute `func` once. Repeat calls to + * the function will return the value of the first call. The `func` is executed + * with the `this` binding of the created function. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // `initialize` executes `createApplication` once + */ +function once(func) { + var ran, + result; + + if (!isFunction(func)) { + throw new TypeError; + } + return function() { + if (ran) { + return result; + } + ran = true; + result = func.apply(this, arguments); + + // clear the `func` variable so the function may be garbage collected + func = null; + return result; + }; +} + +module.exports = once; diff --git a/node_modules/lodash-node/compat/functions/partial.js b/node_modules/lodash-node/compat/functions/partial.js new file mode 100644 index 0000000..64ff0ee --- /dev/null +++ b/node_modules/lodash-node/compat/functions/partial.js @@ -0,0 +1,34 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'), + slice = require('../internals/slice'); + +/** + * Creates a function that, when called, invokes `func` with any additional + * `partial` arguments prepended to those provided to the new function. This + * method is similar to `_.bind` except it does **not** alter the `this` binding. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [arg] Arguments to be partially applied. 
+ * @returns {Function} Returns the new partially applied function. + * @example + * + * var greet = function(greeting, name) { return greeting + ' ' + name; }; + * var hi = _.partial(greet, 'hi'); + * hi('fred'); + * // => 'hi fred' + */ +function partial(func) { + return createWrapper(func, 16, slice(arguments, 1)); +} + +module.exports = partial; diff --git a/node_modules/lodash-node/compat/functions/partialRight.js b/node_modules/lodash-node/compat/functions/partialRight.js new file mode 100644 index 0000000..3028819 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/partialRight.js @@ -0,0 +1,43 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'), + slice = require('../internals/slice'); + +/** + * This method is like `_.partial` except that `partial` arguments are + * appended to those provided to the new function. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [arg] Arguments to be partially applied. + * @returns {Function} Returns the new partially applied function. + * @example + * + * var defaultsDeep = _.partialRight(_.merge, _.defaults); + * + * var options = { + * 'variable': 'data', + * 'imports': { 'jq': $ } + * }; + * + * defaultsDeep(options, _.templateSettings); + * + * options.variable + * // => 'data' + * + * options.imports + * // => { '_': _, 'jq': $ } + */ +function partialRight(func) { + return createWrapper(func, 32, null, slice(arguments, 1)); +} + +module.exports = partialRight; diff --git a/node_modules/lodash-node/compat/functions/throttle.js b/node_modules/lodash-node/compat/functions/throttle.js new file mode 100644 index 0000000..8300c65 --- /dev/null +++ b/node_modules/lodash-node/compat/functions/throttle.js @@ -0,0 +1,71 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var debounce = require('./debounce'), + isFunction = require('../objects/isFunction'), + isObject = require('../objects/isObject'); + +/** Used as an internal `_.debounce` options object */ +var debounceOptions = { + 'leading': false, + 'maxWait': 0, + 'trailing': false +}; + +/** + * Creates a function that, when executed, will only call the `func` function + * at most once per every `wait` milliseconds. Provide an options object to + * indicate that `func` should be invoked on the leading and/or trailing edge + * of the `wait` timeout. Subsequent calls to the throttled function will + * return the result of the last `func` call. + * + * Note: If `leading` and `trailing` options are `true` `func` will be called + * on the trailing edge of the timeout only if the the throttled function is + * invoked more than once during the `wait` timeout. + * + * @static + * @memberOf _ + * @category Functions + * @param {Function} func The function to throttle. + * @param {number} wait The number of milliseconds to throttle executions to. + * @param {Object} [options] The options object. 
+ * @param {boolean} [options.leading=true] Specify execution on the leading edge of the timeout. + * @param {boolean} [options.trailing=true] Specify execution on the trailing edge of the timeout. + * @returns {Function} Returns the new throttled function. + * @example + * + * // avoid excessively updating the position while scrolling + * var throttled = _.throttle(updatePosition, 100); + * jQuery(window).on('scroll', throttled); + * + * // execute `renewToken` when the click event is fired, but not more than once every 5 minutes + * jQuery('.interactive').on('click', _.throttle(renewToken, 300000, { + * 'trailing': false + * })); + */ +function throttle(func, wait, options) { + var leading = true, + trailing = true; + + if (!isFunction(func)) { + throw new TypeError; + } + if (options === false) { + leading = false; + } else if (isObject(options)) { + leading = 'leading' in options ? options.leading : leading; + trailing = 'trailing' in options ? options.trailing : trailing; + } + debounceOptions.leading = leading; + debounceOptions.maxWait = wait; + debounceOptions.trailing = trailing; + + return debounce(func, wait, debounceOptions); +} + +module.exports = throttle; diff --git a/node_modules/lodash-node/compat/functions/wrap.js b/node_modules/lodash-node/compat/functions/wrap.js new file mode 100644 index 0000000..9168ffc --- /dev/null +++ b/node_modules/lodash-node/compat/functions/wrap.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createWrapper = require('../internals/createWrapper'); + +/** + * Creates a function that provides `value` to the wrapper function as its + * first argument. Additional arguments provided to the function are appended + * to those provided to the wrapper function. The wrapper is executed with + * the `this` binding of the created function. + * + * @static + * @memberOf _ + * @category Functions + * @param {*} value The value to wrap. + * @param {Function} wrapper The wrapper function. + * @returns {Function} Returns the new function. + * @example + * + * var p = _.wrap(_.escape, function(func, text) { + * return '
<p>' + func(text) + '</p>'; + * }); + * + * p('Fred, Wilma, & Pebbles'); + * // => '<p>Fred, Wilma, &amp; Pebbles</p>
' + */ +function wrap(value, wrapper) { + return createWrapper(wrapper, 16, [value]); +} + +module.exports = wrap; diff --git a/node_modules/lodash-node/compat/index.js b/node_modules/lodash-node/compat/index.js new file mode 100644 index 0000000..cbea241 --- /dev/null +++ b/node_modules/lodash-node/compat/index.js @@ -0,0 +1,376 @@ +/** + * @license + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var arrays = require('./arrays'), + chaining = require('./chaining'), + collections = require('./collections'), + functions = require('./functions'), + objects = require('./objects'), + utilities = require('./utilities'), + baseEach = require('./internals/baseEach'), + forOwn = require('./objects/forOwn'), + isArray = require('./objects/isArray'), + lodashWrapper = require('./internals/lodashWrapper'), + mixin = require('./utilities/mixin'), + support = require('./support'), + templateSettings = require('./utilities/templateSettings'); + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Creates a `lodash` object which wraps the given value to enable intuitive + * method chaining. + * + * In addition to Lo-Dash methods, wrappers also have the following `Array` methods: + * `concat`, `join`, `pop`, `push`, `reverse`, `shift`, `slice`, `sort`, `splice`, + * and `unshift` + * + * Chaining is supported in custom builds as long as the `value` method is + * implicitly or explicitly included in the build. 
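The wrapper behaviour described above is what drives the method wiring further down in this file: chainable methods hand back a new `lodashWrapper`, while the non-chainable ones return bare values. A sketch using the compat entry point added in this diff:

    var _ = require('lodash-node/compat');

    // chainable methods return a wrapper; value() unwraps the result
    _([3, 1, 2]).sortBy().value(); // => [1, 2, 3]

    // non-chainable methods such as size return plain values directly
    _([3, 1, 2]).size();           // => 3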
+ * + * The chainable wrapper functions are: + * `after`, `assign`, `bind`, `bindAll`, `bindKey`, `chain`, `compact`, + * `compose`, `concat`, `countBy`, `create`, `createCallback`, `curry`, + * `debounce`, `defaults`, `defer`, `delay`, `difference`, `filter`, `flatten`, + * `forEach`, `forEachRight`, `forIn`, `forInRight`, `forOwn`, `forOwnRight`, + * `functions`, `groupBy`, `indexBy`, `initial`, `intersection`, `invert`, + * `invoke`, `keys`, `map`, `max`, `memoize`, `merge`, `min`, `object`, `omit`, + * `once`, `pairs`, `partial`, `partialRight`, `pick`, `pluck`, `pull`, `push`, + * `range`, `reject`, `remove`, `rest`, `reverse`, `shuffle`, `slice`, `sort`, + * `sortBy`, `splice`, `tap`, `throttle`, `times`, `toArray`, `transform`, + * `union`, `uniq`, `unshift`, `unzip`, `values`, `where`, `without`, `wrap`, + * and `zip` + * + * The non-chainable wrapper functions are: + * `clone`, `cloneDeep`, `contains`, `escape`, `every`, `find`, `findIndex`, + * `findKey`, `findLast`, `findLastIndex`, `findLastKey`, `has`, `identity`, + * `indexOf`, `isArguments`, `isArray`, `isBoolean`, `isDate`, `isElement`, + * `isEmpty`, `isEqual`, `isFinite`, `isFunction`, `isNaN`, `isNull`, `isNumber`, + * `isObject`, `isPlainObject`, `isRegExp`, `isString`, `isUndefined`, `join`, + * `lastIndexOf`, `mixin`, `noConflict`, `parseInt`, `pop`, `random`, `reduce`, + * `reduceRight`, `result`, `shift`, `size`, `some`, `sortedIndex`, `runInContext`, + * `template`, `unescape`, `uniqueId`, and `value` + * + * The wrapper functions `first` and `last` return wrapped values when `n` is + * provided, otherwise they return unwrapped values. + * + * Explicit chaining can be enabled by using the `_.chain` method. + * + * @name _ + * @constructor + * @category Chaining + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns a `lodash` instance. + * @example + * + * var wrapped = _([1, 2, 3]); + * + * // returns an unwrapped value + * wrapped.reduce(function(sum, num) { + * return sum + num; + * }); + * // => 6 + * + * // returns a wrapped value + * var squares = wrapped.map(function(num) { + * return num * num; + * }); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ +function lodash(value) { + // don't wrap if already wrapped, even if wrapped by a different `lodash` constructor + return (value && typeof value == 'object' && !isArray(value) && hasOwnProperty.call(value, '__wrapped__')) + ? 
value + : new lodashWrapper(value); +} +// ensure `new lodashWrapper` is an instance of `lodash` +lodashWrapper.prototype = lodash.prototype; + +// wrap `_.mixin` so it works when provided only one argument +mixin = (function(fn) { + var functions = objects.functions; + return function(object, source, options) { + if (!source || (!options && !functions(source).length)) { + if (options == null) { + options = source; + } + source = object; + object = lodash; + } + return fn(object, source, options); + }; +}(mixin)); + +// add functions that return wrapped values when chaining +lodash.after = functions.after; +lodash.assign = objects.assign; +lodash.at = collections.at; +lodash.bind = functions.bind; +lodash.bindAll = functions.bindAll; +lodash.bindKey = functions.bindKey; +lodash.chain = chaining.chain; +lodash.compact = arrays.compact; +lodash.compose = functions.compose; +lodash.constant = utilities.constant; +lodash.countBy = collections.countBy; +lodash.create = objects.create; +lodash.createCallback = functions.createCallback; +lodash.curry = functions.curry; +lodash.debounce = functions.debounce; +lodash.defaults = objects.defaults; +lodash.defer = functions.defer; +lodash.delay = functions.delay; +lodash.difference = arrays.difference; +lodash.filter = collections.filter; +lodash.flatten = arrays.flatten; +lodash.forEach = collections.forEach; +lodash.forEachRight = collections.forEachRight; +lodash.forIn = objects.forIn; +lodash.forInRight = objects.forInRight; +lodash.forOwn = forOwn; +lodash.forOwnRight = objects.forOwnRight; +lodash.functions = objects.functions; +lodash.groupBy = collections.groupBy; +lodash.indexBy = collections.indexBy; +lodash.initial = arrays.initial; +lodash.intersection = arrays.intersection; +lodash.invert = objects.invert; +lodash.invoke = collections.invoke; +lodash.keys = objects.keys; +lodash.map = collections.map; +lodash.mapValues = objects.mapValues; +lodash.max = collections.max; +lodash.memoize = functions.memoize; +lodash.merge = objects.merge; +lodash.min = collections.min; +lodash.omit = objects.omit; +lodash.once = functions.once; +lodash.pairs = objects.pairs; +lodash.partial = functions.partial; +lodash.partialRight = functions.partialRight; +lodash.pick = objects.pick; +lodash.pluck = collections.pluck; +lodash.property = utilities.property; +lodash.pull = arrays.pull; +lodash.range = arrays.range; +lodash.reject = collections.reject; +lodash.remove = arrays.remove; +lodash.rest = arrays.rest; +lodash.shuffle = collections.shuffle; +lodash.sortBy = collections.sortBy; +lodash.tap = chaining.tap; +lodash.throttle = functions.throttle; +lodash.times = utilities.times; +lodash.toArray = collections.toArray; +lodash.transform = objects.transform; +lodash.union = arrays.union; +lodash.uniq = arrays.uniq; +lodash.values = objects.values; +lodash.where = collections.where; +lodash.without = arrays.without; +lodash.wrap = functions.wrap; +lodash.xor = arrays.xor; +lodash.zip = arrays.zip; +lodash.zipObject = arrays.zipObject; + +// add aliases +lodash.collect = collections.map; +lodash.drop = arrays.rest; +lodash.each = collections.forEach; +lodash.eachRight = collections.forEachRight; +lodash.extend = objects.assign; +lodash.methods = objects.functions; +lodash.object = arrays.zipObject; +lodash.select = collections.filter; +lodash.tail = arrays.rest; +lodash.unique = arrays.uniq; +lodash.unzip = arrays.zip; + +// add functions to `lodash.prototype` +mixin(lodash); + +// add functions that return unwrapped values when chaining +lodash.clone = 
objects.clone; +lodash.cloneDeep = objects.cloneDeep; +lodash.contains = collections.contains; +lodash.escape = utilities.escape; +lodash.every = collections.every; +lodash.find = collections.find; +lodash.findIndex = arrays.findIndex; +lodash.findKey = objects.findKey; +lodash.findLast = collections.findLast; +lodash.findLastIndex = arrays.findLastIndex; +lodash.findLastKey = objects.findLastKey; +lodash.has = objects.has; +lodash.identity = utilities.identity; +lodash.indexOf = arrays.indexOf; +lodash.isArguments = objects.isArguments; +lodash.isArray = isArray; +lodash.isBoolean = objects.isBoolean; +lodash.isDate = objects.isDate; +lodash.isElement = objects.isElement; +lodash.isEmpty = objects.isEmpty; +lodash.isEqual = objects.isEqual; +lodash.isFinite = objects.isFinite; +lodash.isFunction = objects.isFunction; +lodash.isNaN = objects.isNaN; +lodash.isNull = objects.isNull; +lodash.isNumber = objects.isNumber; +lodash.isObject = objects.isObject; +lodash.isPlainObject = objects.isPlainObject; +lodash.isRegExp = objects.isRegExp; +lodash.isString = objects.isString; +lodash.isUndefined = objects.isUndefined; +lodash.lastIndexOf = arrays.lastIndexOf; +lodash.mixin = mixin; +lodash.noConflict = utilities.noConflict; +lodash.noop = utilities.noop; +lodash.now = utilities.now; +lodash.parseInt = utilities.parseInt; +lodash.random = utilities.random; +lodash.reduce = collections.reduce; +lodash.reduceRight = collections.reduceRight; +lodash.result = utilities.result; +lodash.size = collections.size; +lodash.some = collections.some; +lodash.sortedIndex = arrays.sortedIndex; +lodash.template = utilities.template; +lodash.unescape = utilities.unescape; +lodash.uniqueId = utilities.uniqueId; + +// add aliases +lodash.all = collections.every; +lodash.any = collections.some; +lodash.detect = collections.find; +lodash.findWhere = collections.find; +lodash.foldl = collections.reduce; +lodash.foldr = collections.reduceRight; +lodash.include = collections.contains; +lodash.inject = collections.reduce; + +mixin(function() { + var source = {} + forOwn(lodash, function(func, methodName) { + if (!lodash.prototype[methodName]) { + source[methodName] = func; + } + }); + return source; +}(), false); + +// add functions capable of returning wrapped and unwrapped values when chaining +lodash.first = arrays.first; +lodash.last = arrays.last; +lodash.sample = collections.sample; + +// add aliases +lodash.take = arrays.first; +lodash.head = arrays.first; + +forOwn(lodash, function(func, methodName) { + var callbackable = methodName !== 'sample'; + if (!lodash.prototype[methodName]) { + lodash.prototype[methodName]= function(n, guard) { + var chainAll = this.__chain__, + result = func(this.__wrapped__, n, guard); + + return !chainAll && (n == null || (guard && !(callbackable && typeof n == 'function'))) + ? result + : new lodashWrapper(result, chainAll); + }; + } +}); + +/** + * The semantic version number. 
+ * + * @static + * @memberOf _ + * @type string + */ +lodash.VERSION = '2.4.1'; + +// add "Chaining" functions to the wrapper +lodash.prototype.chain = chaining.wrapperChain; +lodash.prototype.toString = chaining.wrapperToString; +lodash.prototype.value = chaining.wrapperValueOf; +lodash.prototype.valueOf = chaining.wrapperValueOf; + +// add `Array` functions that return unwrapped values +baseEach(['join', 'pop', 'shift'], function(methodName) { + var func = arrayRef[methodName]; + lodash.prototype[methodName] = function() { + var chainAll = this.__chain__, + result = func.apply(this.__wrapped__, arguments); + + return chainAll + ? new lodashWrapper(result, chainAll) + : result; + }; +}); + +// add `Array` functions that return the existing wrapped value +baseEach(['push', 'reverse', 'sort', 'unshift'], function(methodName) { + var func = arrayRef[methodName]; + lodash.prototype[methodName] = function() { + func.apply(this.__wrapped__, arguments); + return this; + }; +}); + +// add `Array` functions that return new wrapped values +baseEach(['concat', 'slice', 'splice'], function(methodName) { + var func = arrayRef[methodName]; + lodash.prototype[methodName] = function() { + return new lodashWrapper(func.apply(this.__wrapped__, arguments), this.__chain__); + }; +}); + +// avoid array-like object bugs with `Array#shift` and `Array#splice` +// in IE < 9, Firefox < 10, Narwhal, and RingoJS +if (!support.spliceObjects) { + baseEach(['pop', 'shift', 'splice'], function(methodName) { + var func = arrayRef[methodName], + isSplice = methodName == 'splice'; + + lodash.prototype[methodName] = function() { + var chainAll = this.__chain__, + value = this.__wrapped__, + result = func.apply(value, arguments); + + if (value.length === 0) { + delete value[0]; + } + return (chainAll || isSplice) + ? new lodashWrapper(result, chainAll) + : result; + }; + }); +} + +lodash.support = support; +(lodash.templateSettings = utilities.templateSettings).imports._ = lodash; +module.exports = lodash; diff --git a/node_modules/lodash-node/compat/internals/arrayPool.js b/node_modules/lodash-node/compat/internals/arrayPool.js new file mode 100644 index 0000000..56e34e5 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/arrayPool.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to pool arrays and objects used internally */ +var arrayPool = []; + +module.exports = arrayPool; diff --git a/node_modules/lodash-node/compat/internals/baseBind.js b/node_modules/lodash-node/compat/internals/baseBind.js new file mode 100644 index 0000000..5e328f7 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseBind.js @@ -0,0 +1,62 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreate = require('./baseCreate'), + isObject = require('../objects/isObject'), + setBindData = require('./setBindData'), + slice = require('./slice'); + +/** + * Used for `Array` method references. 
+ * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var push = arrayRef.push; + +/** + * The base implementation of `_.bind` that creates the bound function and + * sets its meta data. + * + * @private + * @param {Array} bindData The bind data array. + * @returns {Function} Returns the new bound function. + */ +function baseBind(bindData) { + var func = bindData[0], + partialArgs = bindData[2], + thisArg = bindData[4]; + + function bound() { + // `Function#bind` spec + // http://es5.github.io/#x15.3.4.5 + if (partialArgs) { + // avoid `arguments` object deoptimizations by using `slice` instead + // of `Array.prototype.slice.call` and not assigning `arguments` to a + // variable as a ternary expression + var args = slice(partialArgs); + push.apply(args, arguments); + } + // mimic the constructor's `return` behavior + // http://es5.github.io/#x13.2.2 + if (this instanceof bound) { + // ensure `new bound` is an instance of `func` + var thisBinding = baseCreate(func.prototype), + result = func.apply(thisBinding, args || arguments); + return isObject(result) ? result : thisBinding; + } + return func.apply(thisArg, args || arguments); + } + setBindData(bound, bindData); + return bound; +} + +module.exports = baseBind; diff --git a/node_modules/lodash-node/compat/internals/baseClone.js b/node_modules/lodash-node/compat/internals/baseClone.js new file mode 100644 index 0000000..fa262bf --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseClone.js @@ -0,0 +1,154 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var assign = require('../objects/assign'), + baseEach = require('./baseEach'), + forOwn = require('../objects/forOwn'), + getArray = require('./getArray'), + isArray = require('../objects/isArray'), + isNode = require('./isNode'), + isObject = require('../objects/isObject'), + releaseArray = require('./releaseArray'), + slice = require('./slice'), + support = require('../support'); + +/** Used to match regexp flags from their coerced string values */ +var reFlags = /\w*$/; + +/** `Object#toString` result shortcuts */ +var argsClass = '[object Arguments]', + arrayClass = '[object Array]', + boolClass = '[object Boolean]', + dateClass = '[object Date]', + funcClass = '[object Function]', + numberClass = '[object Number]', + objectClass = '[object Object]', + regexpClass = '[object RegExp]', + stringClass = '[object String]'; + +/** Used to identify object classifications that `_.clone` supports */ +var cloneableClasses = {}; +cloneableClasses[funcClass] = false; +cloneableClasses[argsClass] = cloneableClasses[arrayClass] = +cloneableClasses[boolClass] = cloneableClasses[dateClass] = +cloneableClasses[numberClass] = cloneableClasses[objectClass] = +cloneableClasses[regexpClass] = cloneableClasses[stringClass] = true; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Used to lookup a built-in constructor by [[Class]] */ +var ctorByClass = {}; +ctorByClass[arrayClass] = Array; 
+ctorByClass[boolClass] = Boolean; +ctorByClass[dateClass] = Date; +ctorByClass[funcClass] = Function; +ctorByClass[objectClass] = Object; +ctorByClass[numberClass] = Number; +ctorByClass[regexpClass] = RegExp; +ctorByClass[stringClass] = String; + +/** + * The base implementation of `_.clone` without argument juggling or support + * for `thisArg` binding. + * + * @private + * @param {*} value The value to clone. + * @param {boolean} [isDeep=false] Specify a deep clone. + * @param {Function} [callback] The function to customize cloning values. + * @param {Array} [stackA=[]] Tracks traversed source objects. + * @param {Array} [stackB=[]] Associates clones with source counterparts. + * @returns {*} Returns the cloned value. + */ +function baseClone(value, isDeep, callback, stackA, stackB) { + if (callback) { + var result = callback(value); + if (typeof result != 'undefined') { + return result; + } + } + // inspect [[Class]] + var isObj = isObject(value); + if (isObj) { + var className = toString.call(value); + if (!cloneableClasses[className] || (!support.nodeClass && isNode(value))) { + return value; + } + var ctor = ctorByClass[className]; + switch (className) { + case boolClass: + case dateClass: + return new ctor(+value); + + case numberClass: + case stringClass: + return new ctor(value); + + case regexpClass: + result = ctor(value.source, reFlags.exec(value)); + result.lastIndex = value.lastIndex; + return result; + } + } else { + return value; + } + var isArr = isArray(value); + if (isDeep) { + // check for circular references and return corresponding clone + var initedStack = !stackA; + stackA || (stackA = getArray()); + stackB || (stackB = getArray()); + + var length = stackA.length; + while (length--) { + if (stackA[length] == value) { + return stackB[length]; + } + } + result = isArr ? ctor(value.length) : {}; + } + else { + result = isArr ? slice(value) : assign({}, value); + } + // add array properties assigned by `RegExp#exec` + if (isArr) { + if (hasOwnProperty.call(value, 'index')) { + result.index = value.index; + } + if (hasOwnProperty.call(value, 'input')) { + result.input = value.input; + } + } + // exit for shallow clone + if (!isDeep) { + return result; + } + // add the source value to the stack of traversed objects + // and associate it with its clone + stackA.push(value); + stackB.push(result); + + // recursively populate clone (susceptible to call stack limits) + (isArr ? 
baseEach : forOwn)(value, function(objValue, key) { + result[key] = baseClone(objValue, isDeep, callback, stackA, stackB); + }); + + if (initedStack) { + releaseArray(stackA); + releaseArray(stackB); + } + return result; +} + +module.exports = baseClone; diff --git a/node_modules/lodash-node/compat/internals/baseCreate.js b/node_modules/lodash-node/compat/internals/baseCreate.js new file mode 100644 index 0000000..90fcd68 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseCreate.js @@ -0,0 +1,42 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNative = require('./isNative'), + isObject = require('../objects/isObject'), + noop = require('../utilities/noop'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeCreate = isNative(nativeCreate = Object.create) && nativeCreate; + +/** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} prototype The object to inherit from. + * @returns {Object} Returns the new object. + */ +function baseCreate(prototype, properties) { + return isObject(prototype) ? nativeCreate(prototype) : {}; +} +// fallback for browsers without `Object.create` +if (!nativeCreate) { + baseCreate = (function() { + function Object() {} + return function(prototype) { + if (isObject(prototype)) { + Object.prototype = prototype; + var result = new Object; + Object.prototype = null; + } + return result || global.Object(); + }; + }()); +} + +module.exports = baseCreate; diff --git a/node_modules/lodash-node/compat/internals/baseCreateCallback.js b/node_modules/lodash-node/compat/internals/baseCreateCallback.js new file mode 100644 index 0000000..cb9657b --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseCreateCallback.js @@ -0,0 +1,80 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var bind = require('../functions/bind'), + identity = require('../utilities/identity'), + setBindData = require('./setBindData'), + support = require('../support'); + +/** Used to detected named functions */ +var reFuncName = /^\s*function[ \n\r\t]+\w/; + +/** Used to detect functions containing a `this` reference */ +var reThis = /\bthis\b/; + +/** Native method shortcuts */ +var fnToString = Function.prototype.toString; + +/** + * The base implementation of `_.createCallback` without support for creating + * "_.pluck" or "_.where" style callbacks. + * + * @private + * @param {*} [func=identity] The value to convert to a callback. + * @param {*} [thisArg] The `this` binding of the created callback. + * @param {number} [argCount] The number of arguments the callback accepts. + * @returns {Function} Returns a callback function. 
+ */ +function baseCreateCallback(func, thisArg, argCount) { + if (typeof func != 'function') { + return identity; + } + // exit early for no `thisArg` or already bound by `Function#bind` + if (typeof thisArg == 'undefined' || !('prototype' in func)) { + return func; + } + var bindData = func.__bindData__; + if (typeof bindData == 'undefined') { + if (support.funcNames) { + bindData = !func.name; + } + bindData = bindData || !support.funcDecomp; + if (!bindData) { + var source = fnToString.call(func); + if (!support.funcNames) { + bindData = !reFuncName.test(source); + } + if (!bindData) { + // checks if `func` references the `this` keyword and stores the result + bindData = reThis.test(source); + setBindData(func, bindData); + } + } + } + // exit early if there are no `this` references or `func` is bound + if (bindData === false || (bindData !== true && bindData[1] & 1)) { + return func; + } + switch (argCount) { + case 1: return function(value) { + return func.call(thisArg, value); + }; + case 2: return function(a, b) { + return func.call(thisArg, a, b); + }; + case 3: return function(value, index, collection) { + return func.call(thisArg, value, index, collection); + }; + case 4: return function(accumulator, value, index, collection) { + return func.call(thisArg, accumulator, value, index, collection); + }; + } + return bind(func, thisArg); +} + +module.exports = baseCreateCallback; diff --git a/node_modules/lodash-node/compat/internals/baseCreateWrapper.js b/node_modules/lodash-node/compat/internals/baseCreateWrapper.js new file mode 100644 index 0000000..41bb35f --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseCreateWrapper.js @@ -0,0 +1,78 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreate = require('./baseCreate'), + isObject = require('../objects/isObject'), + setBindData = require('./setBindData'), + slice = require('./slice'); + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var push = arrayRef.push; + +/** + * The base implementation of `createWrapper` that creates the wrapper and + * sets its meta data. + * + * @private + * @param {Array} bindData The bind data array. + * @returns {Function} Returns the new function. + */ +function baseCreateWrapper(bindData) { + var func = bindData[0], + bitmask = bindData[1], + partialArgs = bindData[2], + partialRightArgs = bindData[3], + thisArg = bindData[4], + arity = bindData[5]; + + var isBind = bitmask & 1, + isBindKey = bitmask & 2, + isCurry = bitmask & 4, + isCurryBound = bitmask & 8, + key = func; + + function bound() { + var thisBinding = isBind ? thisArg : this; + if (partialArgs) { + var args = slice(partialArgs); + push.apply(args, arguments); + } + if (partialRightArgs || isCurry) { + args || (args = slice(arguments)); + if (partialRightArgs) { + push.apply(args, partialRightArgs); + } + if (isCurry && args.length < arity) { + bitmask |= 16 & ~32; + return baseCreateWrapper([func, (isCurryBound ? 
bitmask : bitmask & ~3), args, null, thisArg, arity]); + } + } + args || (args = arguments); + if (isBindKey) { + func = thisBinding[key]; + } + if (this instanceof bound) { + thisBinding = baseCreate(func.prototype); + var result = func.apply(thisBinding, args); + return isObject(result) ? result : thisBinding; + } + return func.apply(thisBinding, args); + } + setBindData(bound, bindData); + return bound; +} + +module.exports = baseCreateWrapper; diff --git a/node_modules/lodash-node/compat/internals/baseDifference.js b/node_modules/lodash-node/compat/internals/baseDifference.js new file mode 100644 index 0000000..cc32c28 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseDifference.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseIndexOf = require('./baseIndexOf'), + cacheIndexOf = require('./cacheIndexOf'), + createCache = require('./createCache'), + largeArraySize = require('./largeArraySize'), + releaseObject = require('./releaseObject'); + +/** + * The base implementation of `_.difference` that accepts a single array + * of values to exclude. + * + * @private + * @param {Array} array The array to process. + * @param {Array} [values] The array of values to exclude. + * @returns {Array} Returns a new array of filtered values. + */ +function baseDifference(array, values) { + var index = -1, + indexOf = baseIndexOf, + length = array ? array.length : 0, + isLarge = length >= largeArraySize, + result = []; + + if (isLarge) { + var cache = createCache(values); + if (cache) { + indexOf = cacheIndexOf; + values = cache; + } else { + isLarge = false; + } + } + while (++index < length) { + var value = array[index]; + if (indexOf(values, value) < 0) { + result.push(value); + } + } + if (isLarge) { + releaseObject(values); + } + return result; +} + +module.exports = baseDifference; diff --git a/node_modules/lodash-node/compat/internals/baseEach.js b/node_modules/lodash-node/compat/internals/baseEach.js new file mode 100644 index 0000000..0c833bc --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseEach.js @@ -0,0 +1,28 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('./createIterator'), + eachIteratorOptions = require('./eachIteratorOptions'); + +/** + * A function compiled to iterate `arguments` objects, arrays, objects, and + * strings consistenly across environments, executing the callback for each + * element in the collection. The callback is bound to `thisArg` and invoked + * with three arguments; (value, index|key, collection). Callbacks may exit + * iteration early by explicitly returning `false`. + * + * @private + * @type Function + * @param {Array|Object|string} collection The collection to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array|Object|string} Returns `collection`. 
+ */ +var baseEach = createIterator(eachIteratorOptions); + +module.exports = baseEach; diff --git a/node_modules/lodash-node/compat/internals/baseFlatten.js b/node_modules/lodash-node/compat/internals/baseFlatten.js new file mode 100644 index 0000000..aaad714 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseFlatten.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isArguments = require('../objects/isArguments'), + isArray = require('../objects/isArray'); + +/** + * The base implementation of `_.flatten` without support for callback + * shorthands or `thisArg` binding. + * + * @private + * @param {Array} array The array to flatten. + * @param {boolean} [isShallow=false] A flag to restrict flattening to a single level. + * @param {boolean} [isStrict=false] A flag to restrict flattening to arrays and `arguments` objects. + * @param {number} [fromIndex=0] The index to start from. + * @returns {Array} Returns a new flattened array. + */ +function baseFlatten(array, isShallow, isStrict, fromIndex) { + var index = (fromIndex || 0) - 1, + length = array ? array.length : 0, + result = []; + + while (++index < length) { + var value = array[index]; + + if (value && typeof value == 'object' && typeof value.length == 'number' + && (isArray(value) || isArguments(value))) { + // recursively flatten arrays (susceptible to call stack limits) + if (!isShallow) { + value = baseFlatten(value, isShallow, isStrict); + } + var valIndex = -1, + valLength = value.length, + resIndex = result.length; + + result.length += valLength; + while (++valIndex < valLength) { + result[resIndex++] = value[valIndex]; + } + } else if (!isStrict) { + result.push(value); + } + } + return result; +} + +module.exports = baseFlatten; diff --git a/node_modules/lodash-node/compat/internals/baseIndexOf.js b/node_modules/lodash-node/compat/internals/baseIndexOf.js new file mode 100644 index 0000000..bb12119 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseIndexOf.js @@ -0,0 +1,32 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * The base implementation of `_.indexOf` without support for binary searches + * or `fromIndex` constraints. + * + * @private + * @param {Array} array The array to search. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value or `-1`. + */ +function baseIndexOf(array, value, fromIndex) { + var index = (fromIndex || 0) - 1, + length = array ? 
array.length : 0; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; +} + +module.exports = baseIndexOf; diff --git a/node_modules/lodash-node/compat/internals/baseIsEqual.js b/node_modules/lodash-node/compat/internals/baseIsEqual.js new file mode 100644 index 0000000..79e26a9 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseIsEqual.js @@ -0,0 +1,212 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forIn = require('../objects/forIn'), + getArray = require('./getArray'), + isArguments = require('../objects/isArguments'), + isFunction = require('../objects/isFunction'), + isNode = require('./isNode'), + objectTypes = require('./objectTypes'), + releaseArray = require('./releaseArray'), + support = require('../support'); + +/** `Object#toString` result shortcuts */ +var argsClass = '[object Arguments]', + arrayClass = '[object Array]', + boolClass = '[object Boolean]', + dateClass = '[object Date]', + numberClass = '[object Number]', + objectClass = '[object Object]', + regexpClass = '[object RegExp]', + stringClass = '[object String]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * The base implementation of `_.isEqual`, without support for `thisArg` binding, + * that allows partial "_.where" style comparisons. + * + * @private + * @param {*} a The value to compare. + * @param {*} b The other value to compare. + * @param {Function} [callback] The function to customize comparing values. + * @param {Function} [isWhere=false] A flag to indicate performing partial comparisons. + * @param {Array} [stackA=[]] Tracks traversed `a` objects. + * @param {Array} [stackB=[]] Tracks traversed `b` objects. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + */ +function baseIsEqual(a, b, callback, isWhere, stackA, stackB) { + // used to indicate that when comparing objects, `a` has at least the properties of `b` + if (callback) { + var result = callback(a, b); + if (typeof result != 'undefined') { + return !!result; + } + } + // exit early for identical values + if (a === b) { + // treat `+0` vs. 
`-0` as not equal + return a !== 0 || (1 / a == 1 / b); + } + var type = typeof a, + otherType = typeof b; + + // exit early for unlike primitive values + if (a === a && + !(a && objectTypes[type]) && + !(b && objectTypes[otherType])) { + return false; + } + // exit early for `null` and `undefined` avoiding ES3's Function#call behavior + // http://es5.github.io/#x15.3.4.4 + if (a == null || b == null) { + return a === b; + } + // compare [[Class]] names + var className = toString.call(a), + otherClass = toString.call(b); + + if (className == argsClass) { + className = objectClass; + } + if (otherClass == argsClass) { + otherClass = objectClass; + } + if (className != otherClass) { + return false; + } + switch (className) { + case boolClass: + case dateClass: + // coerce dates and booleans to numbers, dates to milliseconds and booleans + // to `1` or `0` treating invalid dates coerced to `NaN` as not equal + return +a == +b; + + case numberClass: + // treat `NaN` vs. `NaN` as equal + return (a != +a) + ? b != +b + // but treat `+0` vs. `-0` as not equal + : (a == 0 ? (1 / a == 1 / b) : a == +b); + + case regexpClass: + case stringClass: + // coerce regexes to strings (http://es5.github.io/#x15.10.6.4) + // treat string primitives and their corresponding object instances as equal + return a == String(b); + } + var isArr = className == arrayClass; + if (!isArr) { + // unwrap any `lodash` wrapped values + var aWrapped = hasOwnProperty.call(a, '__wrapped__'), + bWrapped = hasOwnProperty.call(b, '__wrapped__'); + + if (aWrapped || bWrapped) { + return baseIsEqual(aWrapped ? a.__wrapped__ : a, bWrapped ? b.__wrapped__ : b, callback, isWhere, stackA, stackB); + } + // exit for functions and DOM nodes + if (className != objectClass || (!support.nodeClass && (isNode(a) || isNode(b)))) { + return false; + } + // in older versions of Opera, `arguments` objects have `Array` constructors + var ctorA = !support.argsObject && isArguments(a) ? Object : a.constructor, + ctorB = !support.argsObject && isArguments(b) ? 
Object : b.constructor; + + // non `Object` object instances with different constructors are not equal + if (ctorA != ctorB && + !(isFunction(ctorA) && ctorA instanceof ctorA && isFunction(ctorB) && ctorB instanceof ctorB) && + ('constructor' in a && 'constructor' in b) + ) { + return false; + } + } + // assume cyclic structures are equal + // the algorithm for detecting cyclic structures is adapted from ES 5.1 + // section 15.12.3, abstract operation `JO` (http://es5.github.io/#x15.12.3) + var initedStack = !stackA; + stackA || (stackA = getArray()); + stackB || (stackB = getArray()); + + var length = stackA.length; + while (length--) { + if (stackA[length] == a) { + return stackB[length] == b; + } + } + var size = 0; + result = true; + + // add `a` and `b` to the stack of traversed objects + stackA.push(a); + stackB.push(b); + + // recursively compare objects and arrays (susceptible to call stack limits) + if (isArr) { + // compare lengths to determine if a deep comparison is necessary + length = a.length; + size = b.length; + result = size == length; + + if (result || isWhere) { + // deep compare the contents, ignoring non-numeric properties + while (size--) { + var index = length, + value = b[size]; + + if (isWhere) { + while (index--) { + if ((result = baseIsEqual(a[index], value, callback, isWhere, stackA, stackB))) { + break; + } + } + } else if (!(result = baseIsEqual(a[size], value, callback, isWhere, stackA, stackB))) { + break; + } + } + } + } + else { + // deep compare objects using `forIn`, instead of `forOwn`, to avoid `Object.keys` + // which, in this case, is more costly + forIn(b, function(value, key, b) { + if (hasOwnProperty.call(b, key)) { + // count the number of properties. + size++; + // deep compare each property value. + return (result = hasOwnProperty.call(a, key) && baseIsEqual(a[key], value, callback, isWhere, stackA, stackB)); + } + }); + + if (result && !isWhere) { + // ensure both objects have the same number of properties + forIn(a, function(value, key, a) { + if (hasOwnProperty.call(a, key)) { + // `size` will be `-1` if `a` has more properties than `b` + return (result = --size > -1); + } + }); + } + } + stackA.pop(); + stackB.pop(); + + if (initedStack) { + releaseArray(stackA); + releaseArray(stackB); + } + return result; +} + +module.exports = baseIsEqual; diff --git a/node_modules/lodash-node/compat/internals/baseMerge.js b/node_modules/lodash-node/compat/internals/baseMerge.js new file mode 100644 index 0000000..f0dd885 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseMerge.js @@ -0,0 +1,79 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forEach = require('../collections/forEach'), + forOwn = require('../objects/forOwn'), + isArray = require('../objects/isArray'), + isPlainObject = require('../objects/isPlainObject'); + +/** + * The base implementation of `_.merge` without argument juggling or support + * for `thisArg` binding. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {Function} [callback] The function to customize merging properties. + * @param {Array} [stackA=[]] Tracks traversed source objects. + * @param {Array} [stackB=[]] Associates values with source counterparts. 
+ */ +function baseMerge(object, source, callback, stackA, stackB) { + (isArray(source) ? forEach : forOwn)(source, function(source, key) { + var found, + isArr, + result = source, + value = object[key]; + + if (source && ((isArr = isArray(source)) || isPlainObject(source))) { + // avoid merging previously merged cyclic sources + var stackLength = stackA.length; + while (stackLength--) { + if ((found = stackA[stackLength] == source)) { + value = stackB[stackLength]; + break; + } + } + if (!found) { + var isShallow; + if (callback) { + result = callback(value, source); + if ((isShallow = typeof result != 'undefined')) { + value = result; + } + } + if (!isShallow) { + value = isArr + ? (isArray(value) ? value : []) + : (isPlainObject(value) ? value : {}); + } + // add `source` and associated `value` to the stack of traversed objects + stackA.push(source); + stackB.push(value); + + // recursively merge objects and arrays (susceptible to call stack limits) + if (!isShallow) { + baseMerge(value, source, callback, stackA, stackB); + } + } + } + else { + if (callback) { + result = callback(value, source); + if (typeof result == 'undefined') { + result = source; + } + } + if (typeof result != 'undefined') { + value = result; + } + } + object[key] = value; + }); +} + +module.exports = baseMerge; diff --git a/node_modules/lodash-node/compat/internals/baseRandom.js b/node_modules/lodash-node/compat/internals/baseRandom.js new file mode 100644 index 0000000..a9f12b9 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseRandom.js @@ -0,0 +1,29 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Native method shortcuts */ +var floor = Math.floor; + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeRandom = Math.random; + +/** + * The base implementation of `_.random` without argument juggling or support + * for returning floating-point numbers. + * + * @private + * @param {number} min The minimum possible value. + * @param {number} max The maximum possible value. + * @returns {number} Returns a random number. + */ +function baseRandom(min, max) { + return min + floor(nativeRandom() * (max - min + 1)); +} + +module.exports = baseRandom; diff --git a/node_modules/lodash-node/compat/internals/baseUniq.js b/node_modules/lodash-node/compat/internals/baseUniq.js new file mode 100644 index 0000000..1d96d63 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/baseUniq.js @@ -0,0 +1,64 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseIndexOf = require('./baseIndexOf'), + cacheIndexOf = require('./cacheIndexOf'), + createCache = require('./createCache'), + getArray = require('./getArray'), + largeArraySize = require('./largeArraySize'), + releaseArray = require('./releaseArray'), + releaseObject = require('./releaseObject'); + +/** + * The base implementation of `_.uniq` without support for callback shorthands + * or `thisArg` binding. + * + * @private + * @param {Array} array The array to process. 
+ * @param {boolean} [isSorted=false] A flag to indicate that `array` is sorted. + * @param {Function} [callback] The function called per iteration. + * @returns {Array} Returns a duplicate-value-free array. + */ +function baseUniq(array, isSorted, callback) { + var index = -1, + indexOf = baseIndexOf, + length = array ? array.length : 0, + result = []; + + var isLarge = !isSorted && length >= largeArraySize, + seen = (callback || isLarge) ? getArray() : result; + + if (isLarge) { + var cache = createCache(seen); + indexOf = cacheIndexOf; + seen = cache; + } + while (++index < length) { + var value = array[index], + computed = callback ? callback(value, index, array) : value; + + if (isSorted + ? !index || seen[seen.length - 1] !== computed + : indexOf(seen, computed) < 0 + ) { + if (callback || isLarge) { + seen.push(computed); + } + result.push(value); + } + } + if (isLarge) { + releaseArray(seen.array); + releaseObject(seen); + } else if (callback) { + releaseArray(seen); + } + return result; +} + +module.exports = baseUniq; diff --git a/node_modules/lodash-node/compat/internals/cacheIndexOf.js b/node_modules/lodash-node/compat/internals/cacheIndexOf.js new file mode 100644 index 0000000..7d540fd --- /dev/null +++ b/node_modules/lodash-node/compat/internals/cacheIndexOf.js @@ -0,0 +1,39 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseIndexOf = require('./baseIndexOf'), + keyPrefix = require('./keyPrefix'); + +/** + * An implementation of `_.contains` for cache objects that mimics the return + * signature of `_.indexOf` by returning `0` if the value is found, else `-1`. + * + * @private + * @param {Object} cache The cache object to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns `0` if `value` is found, else `-1`. + */ +function cacheIndexOf(cache, value) { + var type = typeof value; + cache = cache.cache; + + if (type == 'boolean' || value == null) { + return cache[value] ? 0 : -1; + } + if (type != 'number' && type != 'string') { + type = 'object'; + } + var key = type == 'number' ? value : keyPrefix + value; + cache = (cache = cache[type]) && cache[key]; + + return type == 'object' + ? (cache && baseIndexOf(cache, value) > -1 ? 0 : -1) + : (cache ? 0 : -1); +} + +module.exports = cacheIndexOf; diff --git a/node_modules/lodash-node/compat/internals/cachePush.js b/node_modules/lodash-node/compat/internals/cachePush.js new file mode 100644 index 0000000..b16163a --- /dev/null +++ b/node_modules/lodash-node/compat/internals/cachePush.js @@ -0,0 +1,38 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keyPrefix = require('./keyPrefix'); + +/** + * Adds a given value to the corresponding cache object. + * + * @private + * @param {*} value The value to add to the cache. + */ +function cachePush(value) { + var cache = this.cache, + type = typeof value; + + if (type == 'boolean' || value == null) { + cache[value] = true; + } else { + if (type != 'number' && type != 'string') { + type = 'object'; + } + var key = type == 'number' ? 
value : keyPrefix + value, + typeCache = cache[type] || (cache[type] = {}); + + if (type == 'object') { + (typeCache[key] || (typeCache[key] = [])).push(value); + } else { + typeCache[key] = true; + } + } +} + +module.exports = cachePush; diff --git a/node_modules/lodash-node/compat/internals/charAtCallback.js b/node_modules/lodash-node/compat/internals/charAtCallback.js new file mode 100644 index 0000000..f754fe9 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/charAtCallback.js @@ -0,0 +1,22 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Used by `_.max` and `_.min` as the default callback when a given + * collection is a string value. + * + * @private + * @param {string} value The character to inspect. + * @returns {number} Returns the code unit of given character. + */ +function charAtCallback(value) { + return value.charCodeAt(0); +} + +module.exports = charAtCallback; diff --git a/node_modules/lodash-node/compat/internals/compareAscending.js b/node_modules/lodash-node/compat/internals/compareAscending.js new file mode 100644 index 0000000..7387123 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/compareAscending.js @@ -0,0 +1,47 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Used by `sortBy` to compare transformed `collection` elements, stable sorting + * them in ascending order. + * + * @private + * @param {Object} a The object to compare to `b`. + * @param {Object} b The object to compare to `a`. + * @returns {number} Returns the sort order indicator of `1` or `-1`. + */ +function compareAscending(a, b) { + var ac = a.criteria, + bc = b.criteria, + index = -1, + length = ac.length; + + while (++index < length) { + var value = ac[index], + other = bc[index]; + + if (value !== other) { + if (value > other || typeof value == 'undefined') { + return 1; + } + if (value < other || typeof other == 'undefined') { + return -1; + } + } + } + // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications + // that causes it, under certain circumstances, to return the same value for + // `a` and `b`. See https://github.com/jashkenas/underscore/pull/1247 + // + // This also ensures a stable sort in V8 and other engines. 
+ // See http://code.google.com/p/v8/issues/detail?id=90 + return a.index - b.index; +} + +module.exports = compareAscending; diff --git a/node_modules/lodash-node/compat/internals/createAggregator.js b/node_modules/lodash-node/compat/internals/createAggregator.js new file mode 100644 index 0000000..cef4290 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/createAggregator.js @@ -0,0 +1,45 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseEach = require('./baseEach'), + createCallback = require('../functions/createCallback'), + isArray = require('../objects/isArray'); + +/** + * Creates a function that aggregates a collection, creating an object composed + * of keys generated from the results of running each element of the collection + * through a callback. The given `setter` function sets the keys and values + * of the composed object. + * + * @private + * @param {Function} setter The setter function. + * @returns {Function} Returns the new aggregator function. + */ +function createAggregator(setter) { + return function(collection, callback, thisArg) { + var result = {}; + callback = createCallback(callback, thisArg, 3); + + if (isArray(collection)) { + var index = -1, + length = collection.length; + + while (++index < length) { + var value = collection[index]; + setter(result, value, callback(value, index, collection), collection); + } + } else { + baseEach(collection, function(value, key, collection) { + setter(result, value, callback(value, key, collection), collection); + }); + } + return result; + }; +} + +module.exports = createAggregator; diff --git a/node_modules/lodash-node/compat/internals/createCache.js b/node_modules/lodash-node/compat/internals/createCache.js new file mode 100644 index 0000000..fb94745 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/createCache.js @@ -0,0 +1,45 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var cachePush = require('./cachePush'), + getObject = require('./getObject'), + releaseObject = require('./releaseObject'); + +/** + * Creates a cache object to optimize linear searches of large arrays. + * + * @private + * @param {Array} [array=[]] The array to search. + * @returns {null|Object} Returns the cache object or `null` if caching should not be used. 
+ */ +function createCache(array) { + var index = -1, + length = array.length, + first = array[0], + mid = array[(length / 2) | 0], + last = array[length - 1]; + + if (first && typeof first == 'object' && + mid && typeof mid == 'object' && last && typeof last == 'object') { + return false; + } + var cache = getObject(); + cache['false'] = cache['null'] = cache['true'] = cache['undefined'] = false; + + var result = getObject(); + result.array = array; + result.cache = cache; + result.push = cachePush; + + while (++index < length) { + result.push(array[index]); + } + return result; +} + +module.exports = createCache; diff --git a/node_modules/lodash-node/compat/internals/createIterator.js b/node_modules/lodash-node/compat/internals/createIterator.js new file mode 100644 index 0000000..2dab121 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/createIterator.js @@ -0,0 +1,127 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('./baseCreateCallback'), + indicatorObject = require('./indicatorObject'), + isArguments = require('../objects/isArguments'), + isArray = require('../objects/isArray'), + isString = require('../objects/isString'), + iteratorTemplate = require('./iteratorTemplate'), + objectTypes = require('./objectTypes'); + +/** Used to fix the JScript [[DontEnum]] bug */ +var shadowedProps = [ + 'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', + 'toLocaleString', 'toString', 'valueOf' +]; + +/** `Object#toString` result shortcuts */ +var arrayClass = '[object Array]', + boolClass = '[object Boolean]', + dateClass = '[object Date]', + errorClass = '[object Error]', + funcClass = '[object Function]', + numberClass = '[object Number]', + objectClass = '[object Object]', + regexpClass = '[object RegExp]', + stringClass = '[object String]'; + +/** Used as the data object for `iteratorTemplate` */ +var iteratorData = { + 'args': '', + 'array': null, + 'bottom': '', + 'firstArg': '', + 'init': '', + 'keys': null, + 'loop': '', + 'shadowedProps': null, + 'support': null, + 'top': '', + 'useHas': false +}; + +/** Used for native method references */ +var errorProto = Error.prototype, + objectProto = Object.prototype, + stringProto = String.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Used to avoid iterating non-enumerable properties in IE < 9 */ +var nonEnumProps = {}; +nonEnumProps[arrayClass] = nonEnumProps[dateClass] = nonEnumProps[numberClass] = { 'constructor': true, 'toLocaleString': true, 'toString': true, 'valueOf': true }; +nonEnumProps[boolClass] = nonEnumProps[stringClass] = { 'constructor': true, 'toString': true, 'valueOf': true }; +nonEnumProps[errorClass] = nonEnumProps[funcClass] = nonEnumProps[regexpClass] = { 'constructor': true, 'toString': true }; +nonEnumProps[objectClass] = { 'constructor': true }; + +(function() { + var length = shadowedProps.length; + while (length--) { + var key = shadowedProps[length]; + for (var className in nonEnumProps) { + if (hasOwnProperty.call(nonEnumProps, className) && !hasOwnProperty.call(nonEnumProps[className], key)) { + nonEnumProps[className][key] = false; + } + } + } 
+}()); + +/** + * Creates compiled iteration functions. + * + * @private + * @param {...Object} [options] The compile options object(s). + * @param {string} [options.array] Code to determine if the iterable is an array or array-like. + * @param {boolean} [options.useHas] Specify using `hasOwnProperty` checks in the object loop. + * @param {Function} [options.keys] A reference to `_.keys` for use in own property iteration. + * @param {string} [options.args] A comma separated string of iteration function arguments. + * @param {string} [options.top] Code to execute before the iteration branches. + * @param {string} [options.loop] Code to execute in the object loop. + * @param {string} [options.bottom] Code to execute after the iteration branches. + * @returns {Function} Returns the compiled function. + */ +function createIterator() { + // data properties + iteratorData.shadowedProps = shadowedProps; + + // iterator options + iteratorData.array = iteratorData.bottom = iteratorData.loop = iteratorData.top = ''; + iteratorData.init = 'iterable'; + iteratorData.useHas = true; + + // merge options into a template data object + for (var object, index = 0; object = arguments[index]; index++) { + for (var key in object) { + iteratorData[key] = object[key]; + } + } + var args = iteratorData.args; + iteratorData.firstArg = /^[^,]+/.exec(args)[0]; + + // create the function factory + var factory = Function( + 'baseCreateCallback, errorClass, errorProto, hasOwnProperty, ' + + 'indicatorObject, isArguments, isArray, isString, keys, objectProto, ' + + 'objectTypes, nonEnumProps, stringClass, stringProto, toString', + 'return function(' + args + ') {\n' + iteratorTemplate(iteratorData) + '\n}' + ); + + // return the compiled function + return factory( + baseCreateCallback, errorClass, errorProto, hasOwnProperty, + indicatorObject, isArguments, isArray, isString, iteratorData.keys, objectProto, + objectTypes, nonEnumProps, stringClass, stringProto, toString + ); +} + +module.exports = createIterator; diff --git a/node_modules/lodash-node/compat/internals/createWrapper.js b/node_modules/lodash-node/compat/internals/createWrapper.js new file mode 100644 index 0000000..2525e10 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/createWrapper.js @@ -0,0 +1,106 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseBind = require('./baseBind'), + baseCreateWrapper = require('./baseCreateWrapper'), + isFunction = require('../objects/isFunction'), + slice = require('./slice'); + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var push = arrayRef.push, + unshift = arrayRef.unshift; + +/** + * Creates a function that, when called, either curries or invokes `func` + * with an optional `this` binding and partially applied arguments. + * + * @private + * @param {Function|string} func The function or method name to reference. + * @param {number} bitmask The bitmask of method flags to compose. 
+ * The bitmask may be composed of the following flags: + * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` + * 8 - `_.curry` (bound) + * 16 - `_.partial` + * 32 - `_.partialRight` + * @param {Array} [partialArgs] An array of arguments to prepend to those + * provided to the new function. + * @param {Array} [partialRightArgs] An array of arguments to append to those + * provided to the new function. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new function. + */ +function createWrapper(func, bitmask, partialArgs, partialRightArgs, thisArg, arity) { + var isBind = bitmask & 1, + isBindKey = bitmask & 2, + isCurry = bitmask & 4, + isCurryBound = bitmask & 8, + isPartial = bitmask & 16, + isPartialRight = bitmask & 32; + + if (!isBindKey && !isFunction(func)) { + throw new TypeError; + } + if (isPartial && !partialArgs.length) { + bitmask &= ~16; + isPartial = partialArgs = false; + } + if (isPartialRight && !partialRightArgs.length) { + bitmask &= ~32; + isPartialRight = partialRightArgs = false; + } + var bindData = func && func.__bindData__; + if (bindData && bindData !== true) { + // clone `bindData` + bindData = slice(bindData); + if (bindData[2]) { + bindData[2] = slice(bindData[2]); + } + if (bindData[3]) { + bindData[3] = slice(bindData[3]); + } + // set `thisBinding` is not previously bound + if (isBind && !(bindData[1] & 1)) { + bindData[4] = thisArg; + } + // set if previously bound but not currently (subsequent curried functions) + if (!isBind && bindData[1] & 1) { + bitmask |= 8; + } + // set curried arity if not yet set + if (isCurry && !(bindData[1] & 4)) { + bindData[5] = arity; + } + // append partial left arguments + if (isPartial) { + push.apply(bindData[2] || (bindData[2] = []), partialArgs); + } + // append partial right arguments + if (isPartialRight) { + unshift.apply(bindData[3] || (bindData[3] = []), partialRightArgs); + } + // merge flags + bindData[1] |= bitmask; + return createWrapper.apply(null, bindData); + } + // fast path for `_.bind` + var creater = (bitmask == 1 || bitmask === 17) ? baseBind : baseCreateWrapper; + return creater([func, bitmask, partialArgs, partialRightArgs, thisArg, arity]); +} + +module.exports = createWrapper; diff --git a/node_modules/lodash-node/compat/internals/defaultsIteratorOptions.js b/node_modules/lodash-node/compat/internals/defaultsIteratorOptions.js new file mode 100644 index 0000000..97c87dd --- /dev/null +++ b/node_modules/lodash-node/compat/internals/defaultsIteratorOptions.js @@ -0,0 +1,26 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('../objects/keys'); + +/** Reusable iterator options for `assign` and `defaults` */ +var defaultsIteratorOptions = { + 'args': 'object, source, guard', + 'top': + 'var args = arguments,\n' + + ' argsIndex = 0,\n' + + " argsLength = typeof guard == 'number' ? 
2 : args.length;\n" + + 'while (++argsIndex < argsLength) {\n' + + ' iterable = args[argsIndex];\n' + + ' if (iterable && objectTypes[typeof iterable]) {', + 'keys': keys, + 'loop': "if (typeof result[index] == 'undefined') result[index] = iterable[index]", + 'bottom': ' }\n}' +}; + +module.exports = defaultsIteratorOptions; diff --git a/node_modules/lodash-node/compat/internals/eachIteratorOptions.js b/node_modules/lodash-node/compat/internals/eachIteratorOptions.js new file mode 100644 index 0000000..0112c82 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/eachIteratorOptions.js @@ -0,0 +1,20 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('../objects/keys'); + +/** Reusable iterator options shared by `each`, `forIn`, and `forOwn` */ +var eachIteratorOptions = { + 'args': 'collection, callback, thisArg', + 'top': "callback = callback && typeof thisArg == 'undefined' ? callback : baseCreateCallback(callback, thisArg, 3)", + 'array': "typeof length == 'number'", + 'keys': keys, + 'loop': 'if (callback(iterable[index], index, collection) === false) return result' +}; + +module.exports = eachIteratorOptions; diff --git a/node_modules/lodash-node/compat/internals/escapeHtmlChar.js b/node_modules/lodash-node/compat/internals/escapeHtmlChar.js new file mode 100644 index 0000000..10fe163 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/escapeHtmlChar.js @@ -0,0 +1,22 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var htmlEscapes = require('./htmlEscapes'); + +/** + * Used by `escape` to convert characters to HTML entities. + * + * @private + * @param {string} match The matched character to escape. + * @returns {string} Returns the escaped character. + */ +function escapeHtmlChar(match) { + return htmlEscapes[match]; +} + +module.exports = escapeHtmlChar; diff --git a/node_modules/lodash-node/compat/internals/escapeStringChar.js b/node_modules/lodash-node/compat/internals/escapeStringChar.js new file mode 100644 index 0000000..00ad70a --- /dev/null +++ b/node_modules/lodash-node/compat/internals/escapeStringChar.js @@ -0,0 +1,33 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to escape characters for inclusion in compiled string literals */ +var stringEscapes = { + '\\': '\\', + "'": "'", + '\n': 'n', + '\r': 'r', + '\t': 't', + '\u2028': 'u2028', + '\u2029': 'u2029' +}; + +/** + * Used by `template` to escape characters for inclusion in compiled + * string literals. + * + * @private + * @param {string} match The matched character to escape. + * @returns {string} Returns the escaped character. 
+ */ +function escapeStringChar(match) { + return '\\' + stringEscapes[match]; +} + +module.exports = escapeStringChar; diff --git a/node_modules/lodash-node/compat/internals/forOwnIteratorOptions.js b/node_modules/lodash-node/compat/internals/forOwnIteratorOptions.js new file mode 100644 index 0000000..831d622 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/forOwnIteratorOptions.js @@ -0,0 +1,17 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var eachIteratorOptions = require('./eachIteratorOptions'); + +/** Reusable iterator options for `forIn` and `forOwn` */ +var forOwnIteratorOptions = { + 'top': 'if (!objectTypes[typeof iterable]) return result;\n' + eachIteratorOptions.top, + 'array': false +}; + +module.exports = forOwnIteratorOptions; diff --git a/node_modules/lodash-node/compat/internals/getArray.js b/node_modules/lodash-node/compat/internals/getArray.js new file mode 100644 index 0000000..9420559 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/getArray.js @@ -0,0 +1,21 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var arrayPool = require('./arrayPool'); + +/** + * Gets an array from the array pool or creates a new one if the pool is empty. + * + * @private + * @returns {Array} The array from the pool. + */ +function getArray() { + return arrayPool.pop() || []; +} + +module.exports = getArray; diff --git a/node_modules/lodash-node/compat/internals/getObject.js b/node_modules/lodash-node/compat/internals/getObject.js new file mode 100644 index 0000000..67b0cb5 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/getObject.js @@ -0,0 +1,35 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var objectPool = require('./objectPool'); + +/** + * Gets an object from the object pool or creates a new one if the pool is empty. + * + * @private + * @returns {Object} The object from the pool. 
+ */ +function getObject() { + return objectPool.pop() || { + 'array': null, + 'cache': null, + 'criteria': null, + 'false': false, + 'index': 0, + 'null': false, + 'number': null, + 'object': null, + 'push': null, + 'string': null, + 'true': false, + 'undefined': false, + 'value': null + }; +} + +module.exports = getObject; diff --git a/node_modules/lodash-node/compat/internals/htmlEscapes.js b/node_modules/lodash-node/compat/internals/htmlEscapes.js new file mode 100644 index 0000000..0fd6c1d --- /dev/null +++ b/node_modules/lodash-node/compat/internals/htmlEscapes.js @@ -0,0 +1,26 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Used to convert characters to HTML entities: + * + * Though the `>` character is escaped for symmetry, characters like `>` and `/` + * don't require escaping in HTML and have no special meaning unless they're part + * of a tag or an unquoted attribute value. + * http://mathiasbynens.be/notes/ambiguous-ampersands (under "semi-related fun fact") + */ +var htmlEscapes = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&quot;', + "'": '&#39;' +}; + +module.exports = htmlEscapes; diff --git a/node_modules/lodash-node/compat/internals/htmlUnescapes.js b/node_modules/lodash-node/compat/internals/htmlUnescapes.js new file mode 100644 index 0000000..9401df9 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/htmlUnescapes.js @@ -0,0 +1,15 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var htmlEscapes = require('./htmlEscapes'), + invert = require('../objects/invert'); + +/** Used to convert HTML entities to characters */ +var htmlUnescapes = invert(htmlEscapes); + +module.exports = htmlUnescapes; diff --git a/node_modules/lodash-node/compat/internals/indicatorObject.js b/node_modules/lodash-node/compat/internals/indicatorObject.js new file mode 100644 index 0000000..a0f71ef --- /dev/null +++ b/node_modules/lodash-node/compat/internals/indicatorObject.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used internally to indicate various things */ +var indicatorObject = {}; + +module.exports = indicatorObject; diff --git a/node_modules/lodash-node/compat/internals/isNative.js b/node_modules/lodash-node/compat/internals/isNative.js new file mode 100644 index 0000000..8d729ff --- /dev/null +++ b/node_modules/lodash-node/compat/internals/isNative.js @@ -0,0 +1,34 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal
[[Class]] of values */ +var toString = objectProto.toString; + +/** Used to detect if a method is native */ +var reNative = RegExp('^' + + String(toString) + .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + .replace(/toString| for [^\]]+/g, '.*?') + '$' +); + +/** + * Checks if `value` is a native function. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a native function, else `false`. + */ +function isNative(value) { + return typeof value == 'function' && reNative.test(value); +} + +module.exports = isNative; diff --git a/node_modules/lodash-node/compat/internals/isNode.js b/node_modules/lodash-node/compat/internals/isNode.js new file mode 100644 index 0000000..50c5c06 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/isNode.js @@ -0,0 +1,23 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Checks if `value` is a DOM node in IE < 9. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a DOM node, else `false`. + */ +function isNode(value) { + // IE < 9 presents DOM nodes as `Object` objects except they have `toString` + // methods that are `typeof` "string" and still can coerce nodes to strings + return typeof value.toString != 'function' && typeof (value + '') == 'string'; +} + +module.exports = isNode; diff --git a/node_modules/lodash-node/compat/internals/iteratorTemplate.js b/node_modules/lodash-node/compat/internals/iteratorTemplate.js new file mode 100644 index 0000000..e5526dc --- /dev/null +++ b/node_modules/lodash-node/compat/internals/iteratorTemplate.js @@ -0,0 +1,109 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var support = require('../support'); + +/** + * The template used to create iterator functions. + * + * @private + * @param {Object} data The data object used to populate the text. + * @returns {string} Returns the interpolated text. 
+ */ +var iteratorTemplate = function(obj) { + + var __p = 'var index, iterable = ' + + (obj.firstArg) + + ', result = ' + + (obj.init) + + ';\nif (!iterable) return result;\n' + + (obj.top) + + ';'; + if (obj.array) { + __p += '\nvar length = iterable.length; index = -1;\nif (' + + (obj.array) + + ') { '; + if (support.unindexedChars) { + __p += '\n if (isString(iterable)) {\n iterable = iterable.split(\'\')\n } '; + } + __p += '\n while (++index < length) {\n ' + + (obj.loop) + + ';\n }\n}\nelse { '; + } else if (support.nonEnumArgs) { + __p += '\n var length = iterable.length; index = -1;\n if (length && isArguments(iterable)) {\n while (++index < length) {\n index += \'\';\n ' + + (obj.loop) + + ';\n }\n } else { '; + } + + if (support.enumPrototypes) { + __p += '\n var skipProto = typeof iterable == \'function\';\n '; + } + + if (support.enumErrorProps) { + __p += '\n var skipErrorProps = iterable === errorProto || iterable instanceof Error;\n '; + } + + var conditions = []; if (support.enumPrototypes) { conditions.push('!(skipProto && index == "prototype")'); } if (support.enumErrorProps) { conditions.push('!(skipErrorProps && (index == "message" || index == "name"))'); } + + if (obj.useHas && obj.keys) { + __p += '\n var ownIndex = -1,\n ownProps = objectTypes[typeof iterable] && keys(iterable),\n length = ownProps ? ownProps.length : 0;\n\n while (++ownIndex < length) {\n index = ownProps[ownIndex];\n'; + if (conditions.length) { + __p += ' if (' + + (conditions.join(' && ')) + + ') {\n '; + } + __p += + (obj.loop) + + '; '; + if (conditions.length) { + __p += '\n }'; + } + __p += '\n } '; + } else { + __p += '\n for (index in iterable) {\n'; + if (obj.useHas) { conditions.push("hasOwnProperty.call(iterable, index)"); } if (conditions.length) { + __p += ' if (' + + (conditions.join(' && ')) + + ') {\n '; + } + __p += + (obj.loop) + + '; '; + if (conditions.length) { + __p += '\n }'; + } + __p += '\n } '; + if (support.nonEnumShadows) { + __p += '\n\n if (iterable !== objectProto) {\n var ctor = iterable.constructor,\n isProto = iterable === (ctor && ctor.prototype),\n className = iterable === stringProto ? stringClass : iterable === errorProto ? 
errorClass : toString.call(iterable),\n nonEnum = nonEnumProps[className];\n '; + for (k = 0; k < 7; k++) { + __p += '\n index = \'' + + (obj.shadowedProps[k]) + + '\';\n if ((!(isProto && nonEnum[index]) && hasOwnProperty.call(iterable, index))'; + if (!obj.useHas) { + __p += ' || (!nonEnum[index] && iterable[index] !== objectProto[index])'; + } + __p += ') {\n ' + + (obj.loop) + + ';\n } '; + } + __p += '\n } '; + } + + } + + if (obj.array || support.nonEnumArgs) { + __p += '\n}'; + } + __p += + (obj.bottom) + + ';\nreturn result'; + + return __p +}; + +module.exports = iteratorTemplate; diff --git a/node_modules/lodash-node/compat/internals/keyPrefix.js b/node_modules/lodash-node/compat/internals/keyPrefix.js new file mode 100644 index 0000000..be57f36 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/keyPrefix.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to prefix keys to avoid issues with `__proto__` and properties on `Object.prototype` */ +var keyPrefix = +new Date + ''; + +module.exports = keyPrefix; diff --git a/node_modules/lodash-node/compat/internals/largeArraySize.js b/node_modules/lodash-node/compat/internals/largeArraySize.js new file mode 100644 index 0000000..b61e52d --- /dev/null +++ b/node_modules/lodash-node/compat/internals/largeArraySize.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used as the size when optimizations are enabled for large arrays */ +var largeArraySize = 75; + +module.exports = largeArraySize; diff --git a/node_modules/lodash-node/compat/internals/lodashWrapper.js b/node_modules/lodash-node/compat/internals/lodashWrapper.js new file mode 100644 index 0000000..b8d2442 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/lodashWrapper.js @@ -0,0 +1,23 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * A fast path for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap in a `lodash` instance. + * @param {boolean} chainAll A flag to enable chaining for all methods + * @returns {Object} Returns a `lodash` instance. 
+ */ +function lodashWrapper(value, chainAll) { + this.__chain__ = !!chainAll; + this.__wrapped__ = value; +} + +module.exports = lodashWrapper; diff --git a/node_modules/lodash-node/compat/internals/maxPoolSize.js b/node_modules/lodash-node/compat/internals/maxPoolSize.js new file mode 100644 index 0000000..600c92b --- /dev/null +++ b/node_modules/lodash-node/compat/internals/maxPoolSize.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used as the max size of the `arrayPool` and `objectPool` */ +var maxPoolSize = 40; + +module.exports = maxPoolSize; diff --git a/node_modules/lodash-node/compat/internals/objectPool.js b/node_modules/lodash-node/compat/internals/objectPool.js new file mode 100644 index 0000000..ab798f1 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/objectPool.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to pool arrays and objects used internally */ +var objectPool = []; + +module.exports = objectPool; diff --git a/node_modules/lodash-node/compat/internals/objectTypes.js b/node_modules/lodash-node/compat/internals/objectTypes.js new file mode 100644 index 0000000..42a9573 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/objectTypes.js @@ -0,0 +1,20 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to determine if values are of the language type Object */ +var objectTypes = { + 'boolean': false, + 'function': true, + 'object': true, + 'number': false, + 'string': false, + 'undefined': false +}; + +module.exports = objectTypes; diff --git a/node_modules/lodash-node/compat/internals/reEscapedHtml.js b/node_modules/lodash-node/compat/internals/reEscapedHtml.js new file mode 100644 index 0000000..311180d --- /dev/null +++ b/node_modules/lodash-node/compat/internals/reEscapedHtml.js @@ -0,0 +1,15 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var htmlUnescapes = require('./htmlUnescapes'), + keys = require('../objects/keys'); + +/** Used to match HTML entities and HTML characters */ +var reEscapedHtml = RegExp('(' + keys(htmlUnescapes).join('|') + ')', 'g'); + +module.exports = reEscapedHtml; diff --git a/node_modules/lodash-node/compat/internals/reInterpolate.js b/node_modules/lodash-node/compat/internals/reInterpolate.js new file mode 100644 index 0000000..18287e4 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/reInterpolate.js @@ -0,0 +1,13 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 
2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to match "interpolate" template delimiters */ +var reInterpolate = /<%=([\s\S]+?)%>/g; + +module.exports = reInterpolate; diff --git a/node_modules/lodash-node/compat/internals/reUnescapedHtml.js b/node_modules/lodash-node/compat/internals/reUnescapedHtml.js new file mode 100644 index 0000000..10d1871 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/reUnescapedHtml.js @@ -0,0 +1,15 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var htmlEscapes = require('./htmlEscapes'), + keys = require('../objects/keys'); + +/** Used to match HTML entities and HTML characters */ +var reUnescapedHtml = RegExp('[' + keys(htmlEscapes).join('') + ']', 'g'); + +module.exports = reUnescapedHtml; diff --git a/node_modules/lodash-node/compat/internals/releaseArray.js b/node_modules/lodash-node/compat/internals/releaseArray.js new file mode 100644 index 0000000..2c7ccba --- /dev/null +++ b/node_modules/lodash-node/compat/internals/releaseArray.js @@ -0,0 +1,25 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var arrayPool = require('./arrayPool'), + maxPoolSize = require('./maxPoolSize'); + +/** + * Releases the given array back to the array pool. + * + * @private + * @param {Array} [array] The array to release. + */ +function releaseArray(array) { + array.length = 0; + if (arrayPool.length < maxPoolSize) { + arrayPool.push(array); + } +} + +module.exports = releaseArray; diff --git a/node_modules/lodash-node/compat/internals/releaseObject.js b/node_modules/lodash-node/compat/internals/releaseObject.js new file mode 100644 index 0000000..6ab3d91 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/releaseObject.js @@ -0,0 +1,29 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var maxPoolSize = require('./maxPoolSize'), + objectPool = require('./objectPool'); + +/** + * Releases the given object back to the object pool. + * + * @private + * @param {Object} [object] The object to release. 
+ */ +function releaseObject(object) { + var cache = object.cache; + if (cache) { + releaseObject(cache); + } + object.array = object.cache = object.criteria = object.object = object.number = object.string = object.value = null; + if (objectPool.length < maxPoolSize) { + objectPool.push(object); + } +} + +module.exports = releaseObject; diff --git a/node_modules/lodash-node/compat/internals/setBindData.js b/node_modules/lodash-node/compat/internals/setBindData.js new file mode 100644 index 0000000..f12bd86 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/setBindData.js @@ -0,0 +1,43 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNative = require('./isNative'), + noop = require('../utilities/noop'); + +/** Used as the property descriptor for `__bindData__` */ +var descriptor = { + 'configurable': false, + 'enumerable': false, + 'value': null, + 'writable': false +}; + +/** Used to set meta data on functions */ +var defineProperty = (function() { + // IE 8 only accepts DOM elements + try { + var o = {}, + func = isNative(func = Object.defineProperty) && func, + result = func(o, o, o) && func; + } catch(e) { } + return result; +}()); + +/** + * Sets `this` binding data on a given function. + * + * @private + * @param {Function} func The function to set data on. + * @param {Array} value The data array to set. + */ +var setBindData = !defineProperty ? noop : function(func, value) { + descriptor.value = value; + defineProperty(func, '__bindData__', descriptor); +}; + +module.exports = setBindData; diff --git a/node_modules/lodash-node/compat/internals/shimIsPlainObject.js b/node_modules/lodash-node/compat/internals/shimIsPlainObject.js new file mode 100644 index 0000000..32bf0f0 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/shimIsPlainObject.js @@ -0,0 +1,67 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forIn = require('../objects/forIn'), + isArguments = require('../objects/isArguments'), + isFunction = require('../objects/isFunction'), + isNode = require('./isNode'), + support = require('../support'); + +/** `Object#toString` result shortcuts */ +var objectClass = '[object Object]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * A fallback implementation of `isPlainObject` which checks if a given value + * is an object created by the `Object` constructor, assuming objects created + * by the `Object` constructor have no inherited enumerable properties and that + * there are no `Object.prototype` extensions. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. 
+ */ +function shimIsPlainObject(value) { + var ctor, + result; + + // avoid non Object objects, `arguments` objects, and DOM elements + if (!(value && toString.call(value) == objectClass) || + (ctor = value.constructor, isFunction(ctor) && !(ctor instanceof ctor)) || + (!support.argsClass && isArguments(value)) || + (!support.nodeClass && isNode(value))) { + return false; + } + // IE < 9 iterates inherited properties before own properties. If the first + // iterated property is an object's own property then there are no inherited + // enumerable properties. + if (support.ownLast) { + forIn(value, function(value, key, object) { + result = hasOwnProperty.call(object, key); + return false; + }); + return result !== false; + } + // In most environments an object's own properties are iterated before + // its inherited properties. If the last iterated property is an object's + // own property then there are no inherited enumerable properties. + forIn(value, function(value, key) { + result = key; + }); + return typeof result == 'undefined' || hasOwnProperty.call(value, result); +} + +module.exports = shimIsPlainObject; diff --git a/node_modules/lodash-node/compat/internals/shimKeys.js b/node_modules/lodash-node/compat/internals/shimKeys.js new file mode 100644 index 0000000..6d70860 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/shimKeys.js @@ -0,0 +1,27 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('./createIterator'); + +/** + * A fallback implementation of `Object.keys` which produces an array of the + * given object's own enumerable property names. + * + * @private + * @type Function + * @param {Object} object The object to inspect. + * @returns {Array} Returns an array of property names. + */ +var shimKeys = createIterator({ + 'args': 'object', + 'init': '[]', + 'top': 'if (!(objectTypes[typeof object])) return result', + 'loop': 'result.push(index)' +}); + +module.exports = shimKeys; diff --git a/node_modules/lodash-node/compat/internals/slice.js b/node_modules/lodash-node/compat/internals/slice.js new file mode 100644 index 0000000..2f8318b --- /dev/null +++ b/node_modules/lodash-node/compat/internals/slice.js @@ -0,0 +1,38 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Slices the `collection` from the `start` index up to, but not including, + * the `end` index. + * + * Note: This function is used instead of `Array#slice` to support node lists + * in IE < 9 and to ensure dense arrays are returned. + * + * @private + * @param {Array|Object|string} collection The collection to slice. + * @param {number} start The start index. + * @param {number} end The end index. + * @returns {Array} Returns the new array. + */ +function slice(array, start, end) { + start || (start = 0); + if (typeof end == 'undefined') { + end = array ? array.length : 0; + } + var index = -1, + length = end - start || 0, + result = Array(length < 0 ? 
0 : length); + + while (++index < length) { + result[index] = array[start + index]; + } + return result; +} + +module.exports = slice; diff --git a/node_modules/lodash-node/compat/internals/unescapeHtmlChar.js b/node_modules/lodash-node/compat/internals/unescapeHtmlChar.js new file mode 100644 index 0000000..3b8fd57 --- /dev/null +++ b/node_modules/lodash-node/compat/internals/unescapeHtmlChar.js @@ -0,0 +1,22 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var htmlUnescapes = require('./htmlUnescapes'); + +/** + * Used by `unescape` to convert HTML entities to characters. + * + * @private + * @param {string} match The matched character to unescape. + * @returns {string} Returns the unescaped character. + */ +function unescapeHtmlChar(match) { + return htmlUnescapes[match]; +} + +module.exports = unescapeHtmlChar; diff --git a/node_modules/lodash-node/compat/objects.js b/node_modules/lodash-node/compat/objects.js new file mode 100644 index 0000000..dca0d30 --- /dev/null +++ b/node_modules/lodash-node/compat/objects.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'assign': require('./objects/assign'), + 'clone': require('./objects/clone'), + 'cloneDeep': require('./objects/cloneDeep'), + 'create': require('./objects/create'), + 'defaults': require('./objects/defaults'), + 'extend': require('./objects/assign'), + 'findKey': require('./objects/findKey'), + 'findLastKey': require('./objects/findLastKey'), + 'forIn': require('./objects/forIn'), + 'forInRight': require('./objects/forInRight'), + 'forOwn': require('./objects/forOwn'), + 'forOwnRight': require('./objects/forOwnRight'), + 'functions': require('./objects/functions'), + 'has': require('./objects/has'), + 'invert': require('./objects/invert'), + 'isArguments': require('./objects/isArguments'), + 'isArray': require('./objects/isArray'), + 'isBoolean': require('./objects/isBoolean'), + 'isDate': require('./objects/isDate'), + 'isElement': require('./objects/isElement'), + 'isEmpty': require('./objects/isEmpty'), + 'isEqual': require('./objects/isEqual'), + 'isFinite': require('./objects/isFinite'), + 'isFunction': require('./objects/isFunction'), + 'isNaN': require('./objects/isNaN'), + 'isNull': require('./objects/isNull'), + 'isNumber': require('./objects/isNumber'), + 'isObject': require('./objects/isObject'), + 'isPlainObject': require('./objects/isPlainObject'), + 'isRegExp': require('./objects/isRegExp'), + 'isString': require('./objects/isString'), + 'isUndefined': require('./objects/isUndefined'), + 'keys': require('./objects/keys'), + 'mapValues': require('./objects/mapValues'), + 'merge': require('./objects/merge'), + 'methods': require('./objects/functions'), + 'omit': require('./objects/omit'), + 'pairs': require('./objects/pairs'), + 'pick': require('./objects/pick'), + 'transform': require('./objects/transform'), + 'values': require('./objects/values') +}; diff --git a/node_modules/lodash-node/compat/objects/assign.js b/node_modules/lodash-node/compat/objects/assign.js new 
file mode 100644 index 0000000..73f88ae --- /dev/null +++ b/node_modules/lodash-node/compat/objects/assign.js @@ -0,0 +1,55 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('../internals/createIterator'), + defaultsIteratorOptions = require('../internals/defaultsIteratorOptions'); + +/** + * Assigns own enumerable properties of source object(s) to the destination + * object. Subsequent sources will overwrite property assignments of previous + * sources. If a callback is provided it will be executed to produce the + * assigned values. The callback is bound to `thisArg` and invoked with two + * arguments; (objectValue, sourceValue). + * + * @static + * @memberOf _ + * @type Function + * @alias extend + * @category Objects + * @param {Object} object The destination object. + * @param {...Object} [source] The source objects. + * @param {Function} [callback] The function to customize assigning values. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns the destination object. + * @example + * + * _.assign({ 'name': 'fred' }, { 'employer': 'slate' }); + * // => { 'name': 'fred', 'employer': 'slate' } + * + * var defaults = _.partialRight(_.assign, function(a, b) { + * return typeof a == 'undefined' ? b : a; + * }); + * + * var object = { 'name': 'barney' }; + * defaults(object, { 'name': 'fred', 'employer': 'slate' }); + * // => { 'name': 'barney', 'employer': 'slate' } + */ +var assign = createIterator(defaultsIteratorOptions, { + 'top': + defaultsIteratorOptions.top.replace(';', + ';\n' + + "if (argsLength > 3 && typeof args[argsLength - 2] == 'function') {\n" + + ' var callback = baseCreateCallback(args[--argsLength - 1], args[argsLength--], 2);\n' + + "} else if (argsLength > 2 && typeof args[argsLength - 1] == 'function') {\n" + + ' callback = args[--argsLength];\n' + + '}' + ), + 'loop': 'result[index] = callback ? callback(result[index], iterable[index]) : iterable[index]' +}); + +module.exports = assign; diff --git a/node_modules/lodash-node/compat/objects/clone.js b/node_modules/lodash-node/compat/objects/clone.js new file mode 100644 index 0000000..c81b204 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/clone.js @@ -0,0 +1,63 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseClone = require('../internals/baseClone'), + baseCreateCallback = require('../internals/baseCreateCallback'); + +/** + * Creates a clone of `value`. If `isDeep` is `true` nested objects will also + * be cloned, otherwise they will be assigned by reference. If a callback + * is provided it will be executed to produce the cloned values. If the + * callback returns `undefined` cloning will be handled by the method instead. + * The callback is bound to `thisArg` and invoked with one argument; (value). + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to clone. + * @param {boolean} [isDeep=false] Specify a deep clone. + * @param {Function} [callback] The function to customize cloning values. 
+ * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the cloned value. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * var shallow = _.clone(characters); + * shallow[0] === characters[0]; + * // => true + * + * var deep = _.clone(characters, true); + * deep[0] === characters[0]; + * // => false + * + * _.mixin({ + * 'clone': _.partialRight(_.clone, function(value) { + * return _.isElement(value) ? value.cloneNode(false) : undefined; + * }) + * }); + * + * var clone = _.clone(document.body); + * clone.childNodes.length; + * // => 0 + */ +function clone(value, isDeep, callback, thisArg) { + // allows working with "Collections" methods without using their `index` + // and `collection` arguments for `isDeep` and `callback` + if (typeof isDeep != 'boolean' && isDeep != null) { + thisArg = callback; + callback = isDeep; + isDeep = false; + } + return baseClone(value, isDeep, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 1)); +} + +module.exports = clone; diff --git a/node_modules/lodash-node/compat/objects/cloneDeep.js b/node_modules/lodash-node/compat/objects/cloneDeep.js new file mode 100644 index 0000000..922214e --- /dev/null +++ b/node_modules/lodash-node/compat/objects/cloneDeep.js @@ -0,0 +1,57 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseClone = require('../internals/baseClone'), + baseCreateCallback = require('../internals/baseCreateCallback'); + +/** + * Creates a deep clone of `value`. If a callback is provided it will be + * executed to produce the cloned values. If the callback returns `undefined` + * cloning will be handled by the method instead. The callback is bound to + * `thisArg` and invoked with one argument; (value). + * + * Note: This method is loosely based on the structured clone algorithm. Functions + * and DOM nodes are **not** cloned. The enumerable properties of `arguments` objects and + * objects created by constructors other than `Object` are cloned to plain `Object` objects. + * See http://www.w3.org/TR/html5/infrastructure.html#internal-structured-cloning-algorithm. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to deep clone. + * @param {Function} [callback] The function to customize cloning values. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the deep cloned value. + * @example + * + * var characters = [ + * { 'name': 'barney', 'age': 36 }, + * { 'name': 'fred', 'age': 40 } + * ]; + * + * var deep = _.cloneDeep(characters); + * deep[0] === characters[0]; + * // => false + * + * var view = { + * 'label': 'docs', + * 'node': element + * }; + * + * var clone = _.cloneDeep(view, function(value) { + * return _.isElement(value) ? 
value.cloneNode(true) : undefined; + * }); + * + * clone.node == view.node; + * // => false + */ +function cloneDeep(value, callback, thisArg) { + return baseClone(value, true, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 1)); +} + +module.exports = cloneDeep; diff --git a/node_modules/lodash-node/compat/objects/create.js b/node_modules/lodash-node/compat/objects/create.js new file mode 100644 index 0000000..ec60f54 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/create.js @@ -0,0 +1,48 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var assign = require('./assign'), + baseCreate = require('../internals/baseCreate'); + +/** + * Creates an object that inherits from the given `prototype` object. If a + * `properties` object is provided its own enumerable properties are assigned + * to the created object. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { 'constructor': Circle }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ +function create(prototype, properties) { + var result = baseCreate(prototype); + return properties ? assign(result, properties) : result; +} + +module.exports = create; diff --git a/node_modules/lodash-node/compat/objects/defaults.js b/node_modules/lodash-node/compat/objects/defaults.js new file mode 100644 index 0000000..64b5f4d --- /dev/null +++ b/node_modules/lodash-node/compat/objects/defaults.js @@ -0,0 +1,34 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('../internals/createIterator'), + defaultsIteratorOptions = require('../internals/defaultsIteratorOptions'); + +/** + * Assigns own enumerable properties of source object(s) to the destination + * object for all destination properties that resolve to `undefined`. Once a + * property is set, additional defaults of the same property will be ignored. + * + * @static + * @memberOf _ + * @type Function + * @category Objects + * @param {Object} object The destination object. + * @param {...Object} [source] The source objects. + * @param- {Object} [guard] Allows working with `_.reduce` without using its + * `key` and `object` arguments as sources. + * @returns {Object} Returns the destination object. 
+ * @example + * + * var object = { 'name': 'barney' }; + * _.defaults(object, { 'name': 'fred', 'employer': 'slate' }); + * // => { 'name': 'barney', 'employer': 'slate' } + */ +var defaults = createIterator(defaultsIteratorOptions); + +module.exports = defaults; diff --git a/node_modules/lodash-node/compat/objects/findKey.js b/node_modules/lodash-node/compat/objects/findKey.js new file mode 100644 index 0000000..65a06d3 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/findKey.js @@ -0,0 +1,65 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + forOwn = require('./forOwn'); + +/** + * This method is like `_.findIndex` except that it returns the key of the + * first element that passes the callback check, instead of the element itself. + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to search. + * @param {Function|Object|string} [callback=identity] The function called per + * iteration. If a property name or object is provided it will be used to + * create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {string|undefined} Returns the key of the found element, else `undefined`. + * @example + * + * var characters = { + * 'barney': { 'age': 36, 'blocked': false }, + * 'fred': { 'age': 40, 'blocked': true }, + * 'pebbles': { 'age': 1, 'blocked': false } + * }; + * + * _.findKey(characters, function(chr) { + * return chr.age < 40; + * }); + * // => 'barney' (property order is not guaranteed across environments) + * + * // using "_.where" callback shorthand + * _.findKey(characters, { 'age': 1 }); + * // => 'pebbles' + * + * // using "_.pluck" callback shorthand + * _.findKey(characters, 'blocked'); + * // => 'fred' + */ +function findKey(object, callback, thisArg) { + var result; + callback = createCallback(callback, thisArg, 3); + forOwn(object, function(value, key, object) { + if (callback(value, key, object)) { + result = key; + return false; + } + }); + return result; +} + +module.exports = findKey; diff --git a/node_modules/lodash-node/compat/objects/findLastKey.js b/node_modules/lodash-node/compat/objects/findLastKey.js new file mode 100644 index 0000000..86ca4dd --- /dev/null +++ b/node_modules/lodash-node/compat/objects/findLastKey.js @@ -0,0 +1,65 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + forOwnRight = require('./forOwnRight'); + +/** + * This method is like `_.findKey` except that it iterates over elements + * of a `collection` in the opposite order. 
+ * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to search. + * @param {Function|Object|string} [callback=identity] The function called per + * iteration. If a property name or object is provided it will be used to + * create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {string|undefined} Returns the key of the found element, else `undefined`. + * @example + * + * var characters = { + * 'barney': { 'age': 36, 'blocked': true }, + * 'fred': { 'age': 40, 'blocked': false }, + * 'pebbles': { 'age': 1, 'blocked': true } + * }; + * + * _.findLastKey(characters, function(chr) { + * return chr.age < 40; + * }); + * // => returns `pebbles`, assuming `_.findKey` returns `barney` + * + * // using "_.where" callback shorthand + * _.findLastKey(characters, { 'age': 40 }); + * // => 'fred' + * + * // using "_.pluck" callback shorthand + * _.findLastKey(characters, 'blocked'); + * // => 'pebbles' + */ +function findLastKey(object, callback, thisArg) { + var result; + callback = createCallback(callback, thisArg, 3); + forOwnRight(object, function(value, key, object) { + if (callback(value, key, object)) { + result = key; + return false; + } + }); + return result; +} + +module.exports = findLastKey; diff --git a/node_modules/lodash-node/compat/objects/forIn.js b/node_modules/lodash-node/compat/objects/forIn.js new file mode 100644 index 0000000..8f0c0f9 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/forIn.js @@ -0,0 +1,48 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('../internals/createIterator'), + eachIteratorOptions = require('../internals/eachIteratorOptions'), + forOwnIteratorOptions = require('../internals/forOwnIteratorOptions'); + +/** + * Iterates over own and inherited enumerable properties of an object, + * executing the callback for each property. The callback is bound to `thisArg` + * and invoked with three arguments; (value, key, object). Callbacks may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @type Function + * @category Objects + * @param {Object} object The object to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns `object`. 
+ * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * Shape.prototype.move = function(x, y) { + * this.x += x; + * this.y += y; + * }; + * + * _.forIn(new Shape, function(value, key) { + * console.log(key); + * }); + * // => logs 'x', 'y', and 'move' (property order is not guaranteed across environments) + */ +var forIn = createIterator(eachIteratorOptions, forOwnIteratorOptions, { + 'useHas': false +}); + +module.exports = forIn; diff --git a/node_modules/lodash-node/compat/objects/forInRight.js b/node_modules/lodash-node/compat/objects/forInRight.js new file mode 100644 index 0000000..2cdabf5 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/forInRight.js @@ -0,0 +1,57 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + forIn = require('./forIn'); + +/** + * This method is like `_.forIn` except that it iterates over elements + * of a `collection` in the opposite order. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns `object`. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * Shape.prototype.move = function(x, y) { + * this.x += x; + * this.y += y; + * }; + * + * _.forInRight(new Shape, function(value, key) { + * console.log(key); + * }); + * // => logs 'move', 'y', and 'x' assuming `_.forIn ` logs 'x', 'y', and 'move' + */ +function forInRight(object, callback, thisArg) { + var pairs = []; + + forIn(object, function(value, key) { + pairs.push(key, value); + }); + + var length = pairs.length; + callback = baseCreateCallback(callback, thisArg, 3); + while (length--) { + if (callback(pairs[length--], pairs[length], object) === false) { + break; + } + } + return object; +} + +module.exports = forInRight; diff --git a/node_modules/lodash-node/compat/objects/forOwn.js b/node_modules/lodash-node/compat/objects/forOwn.js new file mode 100644 index 0000000..a4ab74c --- /dev/null +++ b/node_modules/lodash-node/compat/objects/forOwn.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createIterator = require('../internals/createIterator'), + eachIteratorOptions = require('../internals/eachIteratorOptions'), + forOwnIteratorOptions = require('../internals/forOwnIteratorOptions'); + +/** + * Iterates over own enumerable properties of an object, executing the callback + * for each property. The callback is bound to `thisArg` and invoked with three + * arguments; (value, key, object). Callbacks may exit iteration early by + * explicitly returning `false`. + * + * @static + * @memberOf _ + * @type Function + * @category Objects + * @param {Object} object The object to iterate over. + * @param {Function} [callback=identity] The function called per iteration. 
+ * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns `object`. + * @example + * + * _.forOwn({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) { + * console.log(key); + * }); + * // => logs '0', '1', and 'length' (property order is not guaranteed across environments) + */ +var forOwn = createIterator(eachIteratorOptions, forOwnIteratorOptions); + +module.exports = forOwn; diff --git a/node_modules/lodash-node/compat/objects/forOwnRight.js b/node_modules/lodash-node/compat/objects/forOwnRight.js new file mode 100644 index 0000000..5d1e3b9 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/forOwnRight.js @@ -0,0 +1,44 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + keys = require('./keys'); + +/** + * This method is like `_.forOwn` except that it iterates over elements + * of a `collection` in the opposite order. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns `object`. + * @example + * + * _.forOwnRight({ '0': 'zero', '1': 'one', 'length': 2 }, function(num, key) { + * console.log(key); + * }); + * // => logs 'length', '1', and '0' assuming `_.forOwn` logs '0', '1', and 'length' + */ +function forOwnRight(object, callback, thisArg) { + var props = keys(object), + length = props.length; + + callback = baseCreateCallback(callback, thisArg, 3); + while (length--) { + var key = props[length]; + if (callback(object[key], key, object) === false) { + break; + } + } + return object; +} + +module.exports = forOwnRight; diff --git a/node_modules/lodash-node/compat/objects/functions.js b/node_modules/lodash-node/compat/objects/functions.js new file mode 100644 index 0000000..3b78842 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/functions.js @@ -0,0 +1,37 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forIn = require('./forIn'), + isFunction = require('./isFunction'); + +/** + * Creates a sorted array of property names of all enumerable properties, + * own and inherited, of `object` that have function values. + * + * @static + * @memberOf _ + * @alias methods + * @category Objects + * @param {Object} object The object to inspect. + * @returns {Array} Returns an array of property names that have function values. + * @example + * + * _.functions(_); + * // => ['all', 'any', 'bind', 'bindAll', 'clone', 'compact', 'compose', ...] 
+ */ +function functions(object) { + var result = []; + forIn(object, function(value, key) { + if (isFunction(value)) { + result.push(key); + } + }); + return result.sort(); +} + +module.exports = functions; diff --git a/node_modules/lodash-node/compat/objects/has.js b/node_modules/lodash-node/compat/objects/has.js new file mode 100644 index 0000000..055689c --- /dev/null +++ b/node_modules/lodash-node/compat/objects/has.js @@ -0,0 +1,35 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Checks if the specified property name exists as a direct property of `object`, + * instead of an inherited property. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to inspect. + * @param {string} key The name of the property to check. + * @returns {boolean} Returns `true` if key is a direct property, else `false`. + * @example + * + * _.has({ 'a': 1, 'b': 2, 'c': 3 }, 'b'); + * // => true + */ +function has(object, key) { + return object ? hasOwnProperty.call(object, key) : false; +} + +module.exports = has; diff --git a/node_modules/lodash-node/compat/objects/invert.js b/node_modules/lodash-node/compat/objects/invert.js new file mode 100644 index 0000000..a623f7c --- /dev/null +++ b/node_modules/lodash-node/compat/objects/invert.js @@ -0,0 +1,37 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('./keys'); + +/** + * Creates an object composed of the inverted keys and values of the given object. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to invert. + * @returns {Object} Returns the created inverted object. 
+ * @example + * + * _.invert({ 'first': 'fred', 'second': 'barney' }); + * // => { 'fred': 'first', 'barney': 'second' } + */ +function invert(object) { + var index = -1, + props = keys(object), + length = props.length, + result = {}; + + while (++index < length) { + var key = props[index]; + result[object[key]] = key; + } + return result; +} + +module.exports = invert; diff --git a/node_modules/lodash-node/compat/objects/isArguments.js b/node_modules/lodash-node/compat/objects/isArguments.js new file mode 100644 index 0000000..7efc264 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isArguments.js @@ -0,0 +1,52 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var support = require('../support'); + +/** `Object#toString` result shortcuts */ +var argsClass = '[object Arguments]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var hasOwnProperty = objectProto.hasOwnProperty, + propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/** + * Checks if `value` is an `arguments` object. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is an `arguments` object, else `false`. + * @example + * + * (function() { return _.isArguments(arguments); })(1, 2, 3); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ +function isArguments(value) { + return value && typeof value == 'object' && typeof value.length == 'number' && + toString.call(value) == argsClass || false; +} +// fallback for browsers that can't detect `arguments` objects by [[Class]] +if (!support.argsClass) { + isArguments = function(value) { + return value && typeof value == 'object' && typeof value.length == 'number' && + hasOwnProperty.call(value, 'callee') && !propertyIsEnumerable.call(value, 'callee') || false; + }; +} + +module.exports = isArguments; diff --git a/node_modules/lodash-node/compat/objects/isArray.js b/node_modules/lodash-node/compat/objects/isArray.js new file mode 100644 index 0000000..ceb9f39 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isArray.js @@ -0,0 +1,45 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNative = require('../internals/isNative'); + +/** `Object#toString` result shortcuts */ +var arrayClass = '[object Array]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeIsArray = isNative(nativeIsArray = Array.isArray) && nativeIsArray; + +/** + * Checks if `value` is an array. + * + * @static + * @memberOf _ + * @type Function + * @category Objects + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if the `value` is an array, else `false`. + * @example + * + * (function() { return _.isArray(arguments); })(); + * // => false + * + * _.isArray([1, 2, 3]); + * // => true + */ +var isArray = nativeIsArray || function(value) { + return value && typeof value == 'object' && typeof value.length == 'number' && + toString.call(value) == arrayClass || false; +}; + +module.exports = isArray; diff --git a/node_modules/lodash-node/compat/objects/isBoolean.js b/node_modules/lodash-node/compat/objects/isBoolean.js new file mode 100644 index 0000000..aa97cc4 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isBoolean.js @@ -0,0 +1,37 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result shortcuts */ +var boolClass = '[object Boolean]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a boolean value. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a boolean value, else `false`. + * @example + * + * _.isBoolean(null); + * // => false + */ +function isBoolean(value) { + return value === true || value === false || + value && typeof value == 'object' && toString.call(value) == boolClass || false; +} + +module.exports = isBoolean; diff --git a/node_modules/lodash-node/compat/objects/isDate.js b/node_modules/lodash-node/compat/objects/isDate.js new file mode 100644 index 0000000..9084a4c --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isDate.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result shortcuts */ +var dateClass = '[object Date]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a date. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a date, else `false`. 
+ * @example + * + * _.isDate(new Date); + * // => true + */ +function isDate(value) { + return value && typeof value == 'object' && toString.call(value) == dateClass || false; +} + +module.exports = isDate; diff --git a/node_modules/lodash-node/compat/objects/isElement.js b/node_modules/lodash-node/compat/objects/isElement.js new file mode 100644 index 0000000..9538278 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isElement.js @@ -0,0 +1,27 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Checks if `value` is a DOM element. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + */ +function isElement(value) { + return value && value.nodeType === 1 || false; +} + +module.exports = isElement; diff --git a/node_modules/lodash-node/compat/objects/isEmpty.js b/node_modules/lodash-node/compat/objects/isEmpty.js new file mode 100644 index 0000000..c9899e3 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isEmpty.js @@ -0,0 +1,66 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forOwn = require('./forOwn'), + isArguments = require('./isArguments'), + isFunction = require('./isFunction'), + support = require('../support'); + +/** `Object#toString` result shortcuts */ +var argsClass = '[object Arguments]', + arrayClass = '[object Array]', + objectClass = '[object Object]', + stringClass = '[object String]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is empty. Arrays, strings, or `arguments` objects with a + * length of `0` and objects with no own enumerable properties are considered + * "empty". + * + * @static + * @memberOf _ + * @category Objects + * @param {Array|Object|string} value The value to inspect. + * @returns {boolean} Returns `true` if the `value` is empty, else `false`. + * @example + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({}); + * // => true + * + * _.isEmpty(''); + * // => true + */ +function isEmpty(value) { + var result = true; + if (!value) { + return result; + } + var className = toString.call(value), + length = value.length; + + if ((className == arrayClass || className == stringClass || + (support.argsClass ? 
className == argsClass : isArguments(value))) || + (className == objectClass && typeof length == 'number' && isFunction(value.splice))) { + return !length; + } + forOwn(value, function() { + return (result = false); + }); + return result; +} + +module.exports = isEmpty; diff --git a/node_modules/lodash-node/compat/objects/isEqual.js b/node_modules/lodash-node/compat/objects/isEqual.js new file mode 100644 index 0000000..fbb4960 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isEqual.js @@ -0,0 +1,54 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + baseIsEqual = require('../internals/baseIsEqual'); + +/** + * Performs a deep comparison between two values to determine if they are + * equivalent to each other. If a callback is provided it will be executed + * to compare values. If the callback returns `undefined` comparisons will + * be handled by the method instead. The callback is bound to `thisArg` and + * invoked with two arguments; (a, b). + * + * @static + * @memberOf _ + * @category Objects + * @param {*} a The value to compare. + * @param {*} b The other value to compare. + * @param {Function} [callback] The function to customize comparing values. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'name': 'fred' }; + * var copy = { 'name': 'fred' }; + * + * object == copy; + * // => false + * + * _.isEqual(object, copy); + * // => true + * + * var words = ['hello', 'goodbye']; + * var otherWords = ['hi', 'goodbye']; + * + * _.isEqual(words, otherWords, function(a, b) { + * var reGreet = /^(?:hello|hi)$/i, + * aGreet = _.isString(a) && reGreet.test(a), + * bGreet = _.isString(b) && reGreet.test(b); + * + * return (aGreet || bGreet) ? (aGreet == bGreet) : undefined; + * }); + * // => true + */ +function isEqual(a, b, callback, thisArg) { + return baseIsEqual(a, b, typeof callback == 'function' && baseCreateCallback(callback, thisArg, 2)); +} + +module.exports = isEqual; diff --git a/node_modules/lodash-node/compat/objects/isFinite.js b/node_modules/lodash-node/compat/objects/isFinite.js new file mode 100644 index 0000000..8546112 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isFinite.js @@ -0,0 +1,46 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeIsFinite = global.isFinite, + nativeIsNaN = global.isNaN; + +/** + * Checks if `value` is, or can be coerced to, a finite number. + * + * Note: This is not the same as native `isFinite` which will return true for + * booleans and empty strings. See http://es5.github.io/#x15.1.2.5. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is finite, else `false`. 
+ * @example + * + * _.isFinite(-101); + * // => true + * + * _.isFinite('10'); + * // => true + * + * _.isFinite(true); + * // => false + * + * _.isFinite(''); + * // => false + * + * _.isFinite(Infinity); + * // => false + */ +function isFinite(value) { + return nativeIsFinite(value) && !nativeIsNaN(parseFloat(value)); +} + +module.exports = isFinite; diff --git a/node_modules/lodash-node/compat/objects/isFunction.js b/node_modules/lodash-node/compat/objects/isFunction.js new file mode 100644 index 0000000..22608cd --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isFunction.js @@ -0,0 +1,42 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result shortcuts */ +var funcClass = '[object Function]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a function. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + */ +function isFunction(value) { + return typeof value == 'function'; +} +// fallback for older versions of Chrome and Safari +if (isFunction(/x/)) { + isFunction = function(value) { + return typeof value == 'function' && toString.call(value) == funcClass; + }; +} + +module.exports = isFunction; diff --git a/node_modules/lodash-node/compat/objects/isNaN.js b/node_modules/lodash-node/compat/objects/isNaN.js new file mode 100644 index 0000000..426652d --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isNaN.js @@ -0,0 +1,42 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNumber = require('./isNumber'); + +/** + * Checks if `value` is `NaN`. + * + * Note: This is not the same as native `isNaN` which will return `true` for + * `undefined` and other non-numeric values. See http://es5.github.io/#x15.1.2.4. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is `NaN`, else `false`. 
+ * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ +function isNaN(value) { + // `NaN` as a primitive is the only value that is not equal to itself + // (perform the [[Class]] check first to avoid errors with some host objects in IE) + return isNumber(value) && value != +value; +} + +module.exports = isNaN; diff --git a/node_modules/lodash-node/compat/objects/isNull.js b/node_modules/lodash-node/compat/objects/isNull.js new file mode 100644 index 0000000..4789d5d --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isNull.js @@ -0,0 +1,30 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(undefined); + * // => false + */ +function isNull(value) { + return value === null; +} + +module.exports = isNull; diff --git a/node_modules/lodash-node/compat/objects/isNumber.js b/node_modules/lodash-node/compat/objects/isNumber.js new file mode 100644 index 0000000..f242787 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isNumber.js @@ -0,0 +1,39 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result shortcuts */ +var numberClass = '[object Number]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a number. + * + * Note: `NaN` is considered a number. See http://es5.github.io/#x8.5. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a number, else `false`. + * @example + * + * _.isNumber(8.4 * 5); + * // => true + */ +function isNumber(value) { + return typeof value == 'number' || + value && typeof value == 'object' && toString.call(value) == numberClass || false; +} + +module.exports = isNumber; diff --git a/node_modules/lodash-node/compat/objects/isObject.js b/node_modules/lodash-node/compat/objects/isObject.js new file mode 100644 index 0000000..a156308 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isObject.js @@ -0,0 +1,39 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var objectTypes = require('../internals/objectTypes'); + +/** + * Checks if `value` is the language type of Object. + * (e.g. 
arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(1); + * // => false + */ +function isObject(value) { + // check if the value is the ECMAScript language type of Object + // http://es5.github.io/#x8 + // and avoid a V8 bug + // http://code.google.com/p/v8/issues/detail?id=2291 + return !!(value && objectTypes[typeof value]); +} + +module.exports = isObject; diff --git a/node_modules/lodash-node/compat/objects/isPlainObject.js b/node_modules/lodash-node/compat/objects/isPlainObject.js new file mode 100644 index 0000000..9f2fe81 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isPlainObject.js @@ -0,0 +1,62 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isArguments = require('./isArguments'), + isNative = require('../internals/isNative'), + shimIsPlainObject = require('../internals/shimIsPlainObject'), + support = require('../support'); + +/** `Object#toString` result shortcuts */ +var objectClass = '[object Object]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var getPrototypeOf = isNative(getPrototypeOf = Object.getPrototypeOf) && getPrototypeOf; + +/** + * Checks if `value` is an object created by the `Object` constructor. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * _.isPlainObject(new Shape); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + */ +var isPlainObject = !getPrototypeOf ? shimIsPlainObject : function(value) { + if (!(value && toString.call(value) == objectClass) || (!support.argsClass && isArguments(value))) { + return false; + } + var valueOf = value.valueOf, + objProto = isNative(valueOf) && (objProto = getPrototypeOf(valueOf)) && getPrototypeOf(objProto); + + return objProto + ? 
(value == objProto || getPrototypeOf(value) == objProto) + : shimIsPlainObject(value); +}; + +module.exports = isPlainObject; diff --git a/node_modules/lodash-node/compat/objects/isRegExp.js b/node_modules/lodash-node/compat/objects/isRegExp.js new file mode 100644 index 0000000..d7800d0 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isRegExp.js @@ -0,0 +1,37 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var objectTypes = require('../internals/objectTypes'); + +/** `Object#toString` result shortcuts */ +var regexpClass = '[object RegExp]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a regular expression. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a regular expression, else `false`. + * @example + * + * _.isRegExp(/fred/); + * // => true + */ +function isRegExp(value) { + return value && objectTypes[typeof value] && toString.call(value) == regexpClass || false; +} + +module.exports = isRegExp; diff --git a/node_modules/lodash-node/compat/objects/isString.js b/node_modules/lodash-node/compat/objects/isString.js new file mode 100644 index 0000000..e1fabd5 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isString.js @@ -0,0 +1,37 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** `Object#toString` result shortcuts */ +var stringClass = '[object String]'; + +/** Used for native method references */ +var objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** + * Checks if `value` is a string. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is a string, else `false`. + * @example + * + * _.isString('fred'); + * // => true + */ +function isString(value) { + return typeof value == 'string' || + value && typeof value == 'object' && toString.call(value) == stringClass || false; +} + +module.exports = isString; diff --git a/node_modules/lodash-node/compat/objects/isUndefined.js b/node_modules/lodash-node/compat/objects/isUndefined.js new file mode 100644 index 0000000..1e28a1f --- /dev/null +++ b/node_modules/lodash-node/compat/objects/isUndefined.js @@ -0,0 +1,27 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Checks if `value` is `undefined`. + * + * @static + * @memberOf _ + * @category Objects + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if the `value` is `undefined`, else `false`. 
+ * @example + * + * _.isUndefined(void 0); + * // => true + */ +function isUndefined(value) { + return typeof value == 'undefined'; +} + +module.exports = isUndefined; diff --git a/node_modules/lodash-node/compat/objects/keys.js b/node_modules/lodash-node/compat/objects/keys.js new file mode 100644 index 0000000..9959b2b --- /dev/null +++ b/node_modules/lodash-node/compat/objects/keys.js @@ -0,0 +1,42 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isArguments = require('./isArguments'), + isNative = require('../internals/isNative'), + isObject = require('./isObject'), + shimKeys = require('../internals/shimKeys'), + support = require('../support'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeKeys = isNative(nativeKeys = Object.keys) && nativeKeys; + +/** + * Creates an array composed of the own enumerable property names of an object. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to inspect. + * @returns {Array} Returns an array of property names. + * @example + * + * _.keys({ 'one': 1, 'two': 2, 'three': 3 }); + * // => ['one', 'two', 'three'] (property order is not guaranteed across environments) + */ +var keys = !nativeKeys ? shimKeys : function(object) { + if (!isObject(object)) { + return []; + } + if ((support.enumPrototypes && typeof object == 'function') || + (support.nonEnumArgs && object.length && isArguments(object))) { + return shimKeys(object); + } + return nativeKeys(object); +}; + +module.exports = keys; diff --git a/node_modules/lodash-node/compat/objects/mapValues.js b/node_modules/lodash-node/compat/objects/mapValues.js new file mode 100644 index 0000000..431f55b --- /dev/null +++ b/node_modules/lodash-node/compat/objects/mapValues.js @@ -0,0 +1,58 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var createCallback = require('../functions/createCallback'), + forOwn = require('./forOwn'); + +/** + * Creates an object with the same keys as `object` and values generated by + * running each own enumerable property of `object` through the callback. + * The callback is bound to `thisArg` and invoked with three arguments; + * (value, key, object). + * + * If a property name is provided for `callback` the created "_.pluck" style + * callback will return the property value of the given element. + * + * If an object is provided for `callback` the created "_.where" style callback + * will return `true` for elements that have the properties of the given object, + * else `false`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to iterate over. + * @param {Function|Object|string} [callback=identity] The function called + * per iteration. If a property name or object is provided it will be used + * to create a "_.pluck" or "_.where" style callback, respectively. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Array} Returns a new object with values of the results of each `callback` execution. 
+ * @example + * + * _.mapValues({ 'a': 1, 'b': 2, 'c': 3} , function(num) { return num * 3; }); + * // => { 'a': 3, 'b': 6, 'c': 9 } + * + * var characters = { + * 'fred': { 'name': 'fred', 'age': 40 }, + * 'pebbles': { 'name': 'pebbles', 'age': 1 } + * }; + * + * // using "_.pluck" callback shorthand + * _.mapValues(characters, 'age'); + * // => { 'fred': 40, 'pebbles': 1 } + */ +function mapValues(object, callback, thisArg) { + var result = {}; + callback = createCallback(callback, thisArg, 3); + + forOwn(object, function(value, key, object) { + result[key] = callback(value, key, object); + }); + return result; +} + +module.exports = mapValues; diff --git a/node_modules/lodash-node/compat/objects/merge.js b/node_modules/lodash-node/compat/objects/merge.js new file mode 100644 index 0000000..cb602a2 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/merge.js @@ -0,0 +1,97 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreateCallback = require('../internals/baseCreateCallback'), + baseMerge = require('../internals/baseMerge'), + getArray = require('../internals/getArray'), + isObject = require('./isObject'), + releaseArray = require('../internals/releaseArray'), + slice = require('../internals/slice'); + +/** + * Recursively merges own enumerable properties of the source object(s), that + * don't resolve to `undefined` into the destination object. Subsequent sources + * will overwrite property assignments of previous sources. If a callback is + * provided it will be executed to produce the merged values of the destination + * and source properties. If the callback returns `undefined` merging will + * be handled by the method instead. The callback is bound to `thisArg` and + * invoked with two arguments; (objectValue, sourceValue). + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The destination object. + * @param {...Object} [source] The source objects. + * @param {Function} [callback] The function to customize merging properties. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns the destination object. + * @example + * + * var names = { + * 'characters': [ + * { 'name': 'barney' }, + * { 'name': 'fred' } + * ] + * }; + * + * var ages = { + * 'characters': [ + * { 'age': 36 }, + * { 'age': 40 } + * ] + * }; + * + * _.merge(names, ages); + * // => { 'characters': [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }] } + * + * var food = { + * 'fruits': ['apple'], + * 'vegetables': ['beet'] + * }; + * + * var otherFood = { + * 'fruits': ['banana'], + * 'vegetables': ['carrot'] + * }; + * + * _.merge(food, otherFood, function(a, b) { + * return _.isArray(a) ? 
a.concat(b) : undefined; + * }); + * // => { 'fruits': ['apple', 'banana'], 'vegetables': ['beet', 'carrot] } + */ +function merge(object) { + var args = arguments, + length = 2; + + if (!isObject(object)) { + return object; + } + // allows working with `_.reduce` and `_.reduceRight` without using + // their `index` and `collection` arguments + if (typeof args[2] != 'number') { + length = args.length; + } + if (length > 3 && typeof args[length - 2] == 'function') { + var callback = baseCreateCallback(args[--length - 1], args[length--], 2); + } else if (length > 2 && typeof args[length - 1] == 'function') { + callback = args[--length]; + } + var sources = slice(arguments, 1, length), + index = -1, + stackA = getArray(), + stackB = getArray(); + + while (++index < length) { + baseMerge(object, sources[index], callback, stackA, stackB); + } + releaseArray(stackA); + releaseArray(stackB); + return object; +} + +module.exports = merge; diff --git a/node_modules/lodash-node/compat/objects/omit.js b/node_modules/lodash-node/compat/objects/omit.js new file mode 100644 index 0000000..6517419 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/omit.js @@ -0,0 +1,67 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseDifference = require('../internals/baseDifference'), + baseFlatten = require('../internals/baseFlatten'), + createCallback = require('../functions/createCallback'), + forIn = require('./forIn'); + +/** + * Creates a shallow clone of `object` excluding the specified properties. + * Property names may be specified as individual arguments or as arrays of + * property names. If a callback is provided it will be executed for each + * property of `object` omitting the properties the callback returns truey + * for. The callback is bound to `thisArg` and invoked with three arguments; + * (value, key, object). + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The source object. + * @param {Function|...string|string[]} [callback] The properties to omit or the + * function called per iteration. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns an object without the omitted properties. 
+ * @example + * + * _.omit({ 'name': 'fred', 'age': 40 }, 'age'); + * // => { 'name': 'fred' } + * + * _.omit({ 'name': 'fred', 'age': 40 }, function(value) { + * return typeof value == 'number'; + * }); + * // => { 'name': 'fred' } + */ +function omit(object, callback, thisArg) { + var result = {}; + if (typeof callback != 'function') { + var props = []; + forIn(object, function(value, key) { + props.push(key); + }); + props = baseDifference(props, baseFlatten(arguments, true, false, 1)); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + result[key] = object[key]; + } + } else { + callback = createCallback(callback, thisArg, 3); + forIn(object, function(value, key, object) { + if (!callback(value, key, object)) { + result[key] = value; + } + }); + } + return result; +} + +module.exports = omit; diff --git a/node_modules/lodash-node/compat/objects/pairs.js b/node_modules/lodash-node/compat/objects/pairs.js new file mode 100644 index 0000000..b334c4a --- /dev/null +++ b/node_modules/lodash-node/compat/objects/pairs.js @@ -0,0 +1,38 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('./keys'); + +/** + * Creates a two dimensional array of an object's key-value pairs, + * i.e. `[[key1, value1], [key2, value2]]`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to inspect. + * @returns {Array} Returns new array of key-value pairs. + * @example + * + * _.pairs({ 'barney': 36, 'fred': 40 }); + * // => [['barney', 36], ['fred', 40]] (property order is not guaranteed across environments) + */ +function pairs(object) { + var index = -1, + props = keys(object), + length = props.length, + result = Array(length); + + while (++index < length) { + var key = props[index]; + result[index] = [key, object[key]]; + } + return result; +} + +module.exports = pairs; diff --git a/node_modules/lodash-node/compat/objects/pick.js b/node_modules/lodash-node/compat/objects/pick.js new file mode 100644 index 0000000..fb581ad --- /dev/null +++ b/node_modules/lodash-node/compat/objects/pick.js @@ -0,0 +1,65 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseFlatten = require('../internals/baseFlatten'), + createCallback = require('../functions/createCallback'), + forIn = require('./forIn'), + isObject = require('./isObject'); + +/** + * Creates a shallow clone of `object` composed of the specified properties. + * Property names may be specified as individual arguments or as arrays of + * property names. If a callback is provided it will be executed for each + * property of `object` picking the properties the callback returns truey + * for. The callback is bound to `thisArg` and invoked with three arguments; + * (value, key, object). + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The source object. 
+ * @param {Function|...string|string[]} [callback] The function called per + * iteration or property names to pick, specified as individual property + * names or arrays of property names. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {Object} Returns an object composed of the picked properties. + * @example + * + * _.pick({ 'name': 'fred', '_userid': 'fred1' }, 'name'); + * // => { 'name': 'fred' } + * + * _.pick({ 'name': 'fred', '_userid': 'fred1' }, function(value, key) { + * return key.charAt(0) != '_'; + * }); + * // => { 'name': 'fred' } + */ +function pick(object, callback, thisArg) { + var result = {}; + if (typeof callback != 'function') { + var index = -1, + props = baseFlatten(arguments, true, false, 1), + length = isObject(object) ? props.length : 0; + + while (++index < length) { + var key = props[index]; + if (key in object) { + result[key] = object[key]; + } + } + } else { + callback = createCallback(callback, thisArg, 3); + forIn(object, function(value, key, object) { + if (callback(value, key, object)) { + result[key] = value; + } + }); + } + return result; +} + +module.exports = pick; diff --git a/node_modules/lodash-node/compat/objects/transform.js b/node_modules/lodash-node/compat/objects/transform.js new file mode 100644 index 0000000..f04b078 --- /dev/null +++ b/node_modules/lodash-node/compat/objects/transform.js @@ -0,0 +1,67 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseCreate = require('../internals/baseCreate'), + baseEach = require('../internals/baseEach'), + createCallback = require('../functions/createCallback'), + forOwn = require('./forOwn'), + isArray = require('./isArray'); + +/** + * An alternative to `_.reduce` this method transforms `object` to a new + * `accumulator` object which is the result of running each of its own + * enumerable properties through a callback, with each callback execution + * potentially mutating the `accumulator` object. The callback is bound to + * `thisArg` and invoked with four arguments; (accumulator, value, key, object). + * Callbacks may exit iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Array|Object} object The object to iterate over. + * @param {Function} [callback=identity] The function called per iteration. + * @param {*} [accumulator] The custom accumulator value. + * @param {*} [thisArg] The `this` binding of `callback`. + * @returns {*} Returns the accumulated value. + * @example + * + * var squares = _.transform([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], function(result, num) { + * num *= num; + * if (num % 2) { + * return result.push(num) < 3; + * } + * }); + * // => [1, 9, 25] + * + * var mapped = _.transform({ 'a': 1, 'b': 2, 'c': 3 }, function(result, num, key) { + * result[key] = num * 3; + * }); + * // => { 'a': 3, 'b': 6, 'c': 9 } + */ +function transform(object, callback, accumulator, thisArg) { + var isArr = isArray(object); + if (accumulator == null) { + if (isArr) { + accumulator = []; + } else { + var ctor = object && object.constructor, + proto = ctor && ctor.prototype; + + accumulator = baseCreate(proto); + } + } + if (callback) { + callback = createCallback(callback, thisArg, 4); + (isArr ? 
baseEach : forOwn)(object, function(value, index, object) { + return callback(accumulator, value, index, object); + }); + } + return accumulator; +} + +module.exports = transform; diff --git a/node_modules/lodash-node/compat/objects/values.js b/node_modules/lodash-node/compat/objects/values.js new file mode 100644 index 0000000..e3135ae --- /dev/null +++ b/node_modules/lodash-node/compat/objects/values.js @@ -0,0 +1,36 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var keys = require('./keys'); + +/** + * Creates an array composed of the own enumerable property values of `object`. + * + * @static + * @memberOf _ + * @category Objects + * @param {Object} object The object to inspect. + * @returns {Array} Returns an array of property values. + * @example + * + * _.values({ 'one': 1, 'two': 2, 'three': 3 }); + * // => [1, 2, 3] (property order is not guaranteed across environments) + */ +function values(object) { + var index = -1, + props = keys(object), + length = props.length, + result = Array(length); + + while (++index < length) { + result[index] = object[props[index]]; + } + return result; +} + +module.exports = values; diff --git a/node_modules/lodash-node/compat/support.js b/node_modules/lodash-node/compat/support.js new file mode 100644 index 0000000..fb6ec48 --- /dev/null +++ b/node_modules/lodash-node/compat/support.js @@ -0,0 +1,177 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNative = require('./internals/isNative'); + +/** Used to detect functions containing a `this` reference */ +var reThis = /\bthis\b/; + +/** `Object#toString` result shortcuts */ +var argsClass = '[object Arguments]', + objectClass = '[object Object]'; + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Used for native method references */ +var errorProto = Error.prototype, + objectProto = Object.prototype; + +/** Used to resolve the internal [[Class]] of values */ +var toString = objectProto.toString; + +/** Native method shortcuts */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/** + * An object used to flag environments features. + * + * @static + * @memberOf _ + * @type Object + */ +var support = {}; + +(function() { + var ctor = function() { this.x = 1; }, + object = { '0': 1, 'length': 1 }, + props = []; + + ctor.prototype = { 'valueOf': 1, 'y': 1 }; + for (var key in new ctor) { props.push(key); } + for (key in arguments) { } + + /** + * Detect if an `arguments` object's [[Class]] is resolvable (all but Firefox < 4, IE < 9). + * + * @memberOf _.support + * @type boolean + */ + support.argsClass = toString.call(arguments) == argsClass; + + /** + * Detect if `arguments` objects are `Object` objects (all but Narwhal and Opera < 10.5). 
+ * + * @memberOf _.support + * @type boolean + */ + support.argsObject = arguments.constructor == Object && !(arguments instanceof Array); + + /** + * Detect if `name` or `message` properties of `Error.prototype` are + * enumerable by default. (IE < 9, Safari < 5.1) + * + * @memberOf _.support + * @type boolean + */ + support.enumErrorProps = propertyIsEnumerable.call(errorProto, 'message') || propertyIsEnumerable.call(errorProto, 'name'); + + /** + * Detect if `prototype` properties are enumerable by default. + * + * Firefox < 3.6, Opera > 9.50 - Opera < 11.60, and Safari < 5.1 + * (if the prototype or a property on the prototype has been set) + * incorrectly sets a function's `prototype` property [[Enumerable]] + * value to `true`. + * + * @memberOf _.support + * @type boolean + */ + support.enumPrototypes = propertyIsEnumerable.call(ctor, 'prototype'); + + /** + * Detect if functions can be decompiled by `Function#toString` + * (all but PS3 and older Opera mobile browsers & avoided in Windows 8 apps). + * + * @memberOf _.support + * @type boolean + */ + support.funcDecomp = !isNative(global.WinRTError) && reThis.test(function() { return this; }); + + /** + * Detect if `Function#name` is supported (all but IE). + * + * @memberOf _.support + * @type boolean + */ + support.funcNames = typeof Function.name == 'string'; + + /** + * Detect if `arguments` object indexes are non-enumerable + * (Firefox < 4, IE < 9, PhantomJS, Safari < 5.1). + * + * @memberOf _.support + * @type boolean + */ + support.nonEnumArgs = key != 0; + + /** + * Detect if properties shadowing those on `Object.prototype` are non-enumerable. + * + * In IE < 9 an objects own properties, shadowing non-enumerable ones, are + * made non-enumerable as well (a.k.a the JScript [[DontEnum]] bug). + * + * @memberOf _.support + * @type boolean + */ + support.nonEnumShadows = !/valueOf/.test(props); + + /** + * Detect if own properties are iterated after inherited properties (all but IE < 9). + * + * @memberOf _.support + * @type boolean + */ + support.ownLast = props[0] != 'x'; + + /** + * Detect if `Array#shift` and `Array#splice` augment array-like objects correctly. + * + * Firefox < 10, IE compatibility mode, and IE < 9 have buggy Array `shift()` + * and `splice()` functions that fail to remove the last element, `value[0]`, + * of array-like objects even though the `length` property is set to `0`. + * The `shift()` method is buggy in IE 8 compatibility mode, while `splice()` + * is buggy regardless of mode in IE < 9 and buggy in compatibility mode in IE 9. + * + * @memberOf _.support + * @type boolean + */ + support.spliceObjects = (arrayRef.splice.call(object, 0, 1), !object[0]); + + /** + * Detect lack of support for accessing string characters by index. + * + * IE < 8 can't access characters by index and IE 8 can only access + * characters by index on string literals. + * + * @memberOf _.support + * @type boolean + */ + support.unindexedChars = ('x'[0] + Object('x')[0]) != 'xx'; + + /** + * Detect if a DOM node's [[Class]] is resolvable (all but IE < 9) + * and that the JS engine errors when attempting to coerce an object to + * a string without a `toString` function. 
+ * + * @memberOf _.support + * @type boolean + */ + try { + support.nodeClass = !(toString.call(document) == objectClass && !({ 'toString': 0 } + '')); + } catch(e) { + support.nodeClass = true; + } +}(1)); + +module.exports = support; diff --git a/node_modules/lodash-node/compat/utilities.js b/node_modules/lodash-node/compat/utilities.js new file mode 100644 index 0000000..0fc2d98 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities.js @@ -0,0 +1,28 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +module.exports = { + 'constant': require('./utilities/constant'), + 'createCallback': require('./functions/createCallback'), + 'escape': require('./utilities/escape'), + 'identity': require('./utilities/identity'), + 'mixin': require('./utilities/mixin'), + 'noConflict': require('./utilities/noConflict'), + 'noop': require('./utilities/noop'), + 'now': require('./utilities/now'), + 'parseInt': require('./utilities/parseInt'), + 'property': require('./utilities/property'), + 'random': require('./utilities/random'), + 'result': require('./utilities/result'), + 'template': require('./utilities/template'), + 'templateSettings': require('./utilities/templateSettings'), + 'times': require('./utilities/times'), + 'unescape': require('./utilities/unescape'), + 'uniqueId': require('./utilities/uniqueId') +}; diff --git a/node_modules/lodash-node/compat/utilities/constant.js b/node_modules/lodash-node/compat/utilities/constant.js new file mode 100644 index 0000000..ae112ed --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/constant.js @@ -0,0 +1,31 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Creates a function that returns `value`. + * + * @static + * @memberOf _ + * @category Utilities + * @param {*} value The value to return from the new function. + * @returns {Function} Returns the new function. + * @example + * + * var object = { 'name': 'fred' }; + * var getter = _.constant(object); + * getter() === object; + * // => true + */ +function constant(value) { + return function() { + return value; + }; +} + +module.exports = constant; diff --git a/node_modules/lodash-node/compat/utilities/escape.js b/node_modules/lodash-node/compat/utilities/escape.js new file mode 100644 index 0000000..00582b9 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/escape.js @@ -0,0 +1,31 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var escapeHtmlChar = require('../internals/escapeHtmlChar'), + keys = require('../objects/keys'), + reUnescapedHtml = require('../internals/reUnescapedHtml'); + +/** + * Converts the characters `&`, `<`, `>`, `"`, and `'` in `string` to their + * corresponding HTML entities. + * + * @static + * @memberOf _ + * @category Utilities + * @param {string} string The string to escape. 
+ * @returns {string} Returns the escaped string. + * @example + * + * _.escape('Fred, Wilma, & Pebbles'); + * // => 'Fred, Wilma, & Pebbles' + */ +function escape(string) { + return string == null ? '' : String(string).replace(reUnescapedHtml, escapeHtmlChar); +} + +module.exports = escape; diff --git a/node_modules/lodash-node/compat/utilities/identity.js b/node_modules/lodash-node/compat/utilities/identity.js new file mode 100644 index 0000000..838700c --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/identity.js @@ -0,0 +1,28 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * This method returns the first argument provided to it. + * + * @static + * @memberOf _ + * @category Utilities + * @param {*} value Any value. + * @returns {*} Returns `value`. + * @example + * + * var object = { 'name': 'fred' }; + * _.identity(object) === object; + * // => true + */ +function identity(value) { + return value; +} + +module.exports = identity; diff --git a/node_modules/lodash-node/compat/utilities/mixin.js b/node_modules/lodash-node/compat/utilities/mixin.js new file mode 100644 index 0000000..26db74f --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/mixin.js @@ -0,0 +1,88 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var forEach = require('../collections/forEach'), + functions = require('../objects/functions'), + isFunction = require('../objects/isFunction'), + isObject = require('../objects/isObject'); + +/** + * Used for `Array` method references. + * + * Normally `Array.prototype` would suffice, however, using an array literal + * avoids issues in Narwhal. + */ +var arrayRef = []; + +/** Native method shortcuts */ +var push = arrayRef.push; + +/** + * Adds function properties of a source object to the destination object. + * If `object` is a function methods will be added to its prototype as well. + * + * @static + * @memberOf _ + * @category Utilities + * @param {Function|Object} [object=lodash] object The destination object. + * @param {Object} source The object of functions to add. + * @param {Object} [options] The options object. + * @param {boolean} [options.chain=true] Specify whether the functions added are chainable. 
+ * @example + * + * function capitalize(string) { + * return string.charAt(0).toUpperCase() + string.slice(1).toLowerCase(); + * } + * + * _.mixin({ 'capitalize': capitalize }); + * _.capitalize('fred'); + * // => 'Fred' + * + * _('fred').capitalize().value(); + * // => 'Fred' + * + * _.mixin({ 'capitalize': capitalize }, { 'chain': false }); + * _('fred').capitalize(); + * // => 'Fred' + */ +function mixin(object, source, options) { + var chain = true, + methodNames = source && functions(source); + + if (options === false) { + chain = false; + } else if (isObject(options) && 'chain' in options) { + chain = options.chain; + } + var ctor = object, + isFunc = isFunction(ctor); + + forEach(methodNames, function(methodName) { + var func = object[methodName] = source[methodName]; + if (isFunc) { + ctor.prototype[methodName] = function() { + var chainAll = this.__chain__, + value = this.__wrapped__, + args = [value]; + + push.apply(args, arguments); + var result = func.apply(object, args); + if (chain || chainAll) { + if (value === result && isObject(result)) { + return this; + } + result = new ctor(result); + result.__chain__ = chainAll; + } + return result; + }; + } + }); +} + +module.exports = mixin; diff --git a/node_modules/lodash-node/compat/utilities/noConflict.js b/node_modules/lodash-node/compat/utilities/noConflict.js new file mode 100644 index 0000000..eb78149 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/noConflict.js @@ -0,0 +1,30 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** Used to restore the original `_` reference in `noConflict` */ +var oldDash = global._; + +/** + * Reverts the '_' variable to its previous value and returns a reference to + * the `lodash` function. + * + * @static + * @memberOf _ + * @category Utilities + * @returns {Function} Returns the `lodash` function. + * @example + * + * var lodash = _.noConflict(); + */ +function noConflict() { + global._ = oldDash; + return this; +} + +module.exports = noConflict; diff --git a/node_modules/lodash-node/compat/utilities/noop.js b/node_modules/lodash-node/compat/utilities/noop.js new file mode 100644 index 0000000..3cd2d53 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/noop.js @@ -0,0 +1,26 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * A no-operation function. 
+ * + * @static + * @memberOf _ + * @category Utilities + * @example + * + * var object = { 'name': 'fred' }; + * _.noop(object) === undefined; + * // => true + */ +function noop() { + // no operation performed +} + +module.exports = noop; diff --git a/node_modules/lodash-node/compat/utilities/now.js b/node_modules/lodash-node/compat/utilities/now.js new file mode 100644 index 0000000..ee091dc --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/now.js @@ -0,0 +1,28 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isNative = require('../internals/isNative'); + +/** + * Gets the number of milliseconds that have elapsed since the Unix epoch + * (1 January 1970 00:00:00 UTC). + * + * @static + * @memberOf _ + * @category Utilities + * @example + * + * var stamp = _.now(); + * _.defer(function() { console.log(_.now() - stamp); }); + * // => logs the number of milliseconds it took for the deferred function to be called + */ +var now = isNative(now = Date.now) && now || function() { + return new Date().getTime(); +}; + +module.exports = now; diff --git a/node_modules/lodash-node/compat/utilities/parseInt.js b/node_modules/lodash-node/compat/utilities/parseInt.js new file mode 100644 index 0000000..6227ccd --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/parseInt.js @@ -0,0 +1,53 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isString = require('../objects/isString'); + +/** Used to detect and test whitespace */ +var whitespace = ( + // whitespace + ' \t\x0B\f\xA0\ufeff' + + + // line terminators + '\n\r\u2028\u2029' + + + // unicode category "Zs" space separators + '\u1680\u180e\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200a\u202f\u205f\u3000' +); + +/** Used to match leading whitespace and zeros to be removed */ +var reLeadingSpacesAndZeros = RegExp('^[' + whitespace + ']*0+(?=.$)'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeParseInt = global.parseInt; + +/** + * Converts the given value into an integer of the specified radix. + * If `radix` is `undefined` or `0` a `radix` of `10` is used unless the + * `value` is a hexadecimal, in which case a `radix` of `16` is used. + * + * Note: This method avoids differences in native ES3 and ES5 `parseInt` + * implementations. See http://es5.github.io/#E. + * + * @static + * @memberOf _ + * @category Utilities + * @param {string} value The value to parse. + * @param {number} [radix] The radix used to interpret the value to parse. + * @returns {number} Returns the new integer value. + * @example + * + * _.parseInt('08'); + * // => 8 + */ +var parseInt = nativeParseInt(whitespace + '08') == 8 ? nativeParseInt : function(value, radix) { + // Firefox < 21 and Opera < 15 follow the ES3 specified implementation of `parseInt` + return nativeParseInt(isString(value) ? 
value.replace(reLeadingSpacesAndZeros, '') : value, radix || 0); +}; + +module.exports = parseInt; diff --git a/node_modules/lodash-node/compat/utilities/property.js b/node_modules/lodash-node/compat/utilities/property.js new file mode 100644 index 0000000..f3446dd --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/property.js @@ -0,0 +1,40 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ + +/** + * Creates a "_.pluck" style function, which returns the `key` value of a + * given object. + * + * @static + * @memberOf _ + * @category Utilities + * @param {string} key The name of the property to retrieve. + * @returns {Function} Returns the new function. + * @example + * + * var characters = [ + * { 'name': 'fred', 'age': 40 }, + * { 'name': 'barney', 'age': 36 } + * ]; + * + * var getName = _.property('name'); + * + * _.map(characters, getName); + * // => ['barney', 'fred'] + * + * _.sortBy(characters, getName); + * // => [{ 'name': 'barney', 'age': 36 }, { 'name': 'fred', 'age': 40 }] + */ +function property(key) { + return function(object) { + return object[key]; + }; +} + +module.exports = property; diff --git a/node_modules/lodash-node/compat/utilities/random.js b/node_modules/lodash-node/compat/utilities/random.js new file mode 100644 index 0000000..286bd1e --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/random.js @@ -0,0 +1,73 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var baseRandom = require('../internals/baseRandom'); + +/* Native method shortcuts for methods with the same name as other `lodash` methods */ +var nativeMin = Math.min, + nativeRandom = Math.random; + +/** + * Produces a random number between `min` and `max` (inclusive). If only one + * argument is provided a number between `0` and the given number will be + * returned. If `floating` is truey or either `min` or `max` are floats a + * floating-point number will be returned instead of an integer. + * + * @static + * @memberOf _ + * @category Utilities + * @param {number} [min=0] The minimum possible value. + * @param {number} [max=1] The maximum possible value. + * @param {boolean} [floating=false] Specify returning a floating-point number. + * @returns {number} Returns a random number. 
+ * @example + * + * _.random(0, 5); + * // => an integer between 0 and 5 + * + * _.random(5); + * // => also an integer between 0 and 5 + * + * _.random(5, true); + * // => a floating-point number between 0 and 5 + * + * _.random(1.2, 5.2); + * // => a floating-point number between 1.2 and 5.2 + */ +function random(min, max, floating) { + var noMin = min == null, + noMax = max == null; + + if (floating == null) { + if (typeof min == 'boolean' && noMax) { + floating = min; + min = 1; + } + else if (!noMax && typeof max == 'boolean') { + floating = max; + noMax = true; + } + } + if (noMin && noMax) { + max = 1; + } + min = +min || 0; + if (noMax) { + max = min; + min = 0; + } else { + max = +max || 0; + } + if (floating || min % 1 || max % 1) { + var rand = nativeRandom(); + return nativeMin(min + (rand * (max - min + parseFloat('1e-' + ((rand +'').length - 1)))), max); + } + return baseRandom(min, max); +} + +module.exports = random; diff --git a/node_modules/lodash-node/compat/utilities/result.js b/node_modules/lodash-node/compat/utilities/result.js new file mode 100644 index 0000000..306b563 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/result.js @@ -0,0 +1,45 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var isFunction = require('../objects/isFunction'); + +/** + * Resolves the value of property `key` on `object`. If `key` is a function + * it will be invoked with the `this` binding of `object` and its result returned, + * else the property value is returned. If `object` is falsey then `undefined` + * is returned. + * + * @static + * @memberOf _ + * @category Utilities + * @param {Object} object The object to inspect. + * @param {string} key The name of the property to resolve. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { + * 'cheese': 'crumpets', + * 'stuff': function() { + * return 'nonsense'; + * } + * }; + * + * _.result(object, 'cheese'); + * // => 'crumpets' + * + * _.result(object, 'stuff'); + * // => 'nonsense' + */ +function result(object, key) { + if (object) { + var value = object[key]; + return isFunction(value) ? 
object[key]() : value; + } +} + +module.exports = result; diff --git a/node_modules/lodash-node/compat/utilities/template.js b/node_modules/lodash-node/compat/utilities/template.js new file mode 100644 index 0000000..b4906b3 --- /dev/null +++ b/node_modules/lodash-node/compat/utilities/template.js @@ -0,0 +1,216 @@ +/** + * Lo-Dash 2.4.1 (Custom Build) + * Build: `lodash modularize exports="node" -o ./compat/` + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.5.2 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Available under MIT license + */ +var defaults = require('../objects/defaults'), + escape = require('./escape'), + escapeStringChar = require('../internals/escapeStringChar'), + keys = require('../objects/keys'), + reInterpolate = require('../internals/reInterpolate'), + templateSettings = require('./templateSettings'), + values = require('../objects/values'); + +/** Used to match empty string literals in compiled template source */ +var reEmptyStringLeading = /\b__p \+= '';/g, + reEmptyStringMiddle = /\b(__p \+=) '' \+/g, + reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; + +/** + * Used to match ES6 template delimiters + * http://people.mozilla.org/~jorendorff/es6-draft.html#sec-literals-string-literals + */ +var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; + +/** Used to ensure capturing order of template delimiters */ +var reNoMatch = /($^)/; + +/** Used to match unescaped characters in compiled string literals */ +var reUnescapedString = /['\n\r\t\u2028\u2029\\]/g; + +/** + * A micro-templating method that handles arbitrary delimiters, preserves + * whitespace, and correctly escapes quotes within interpolated code. + * + * Note: In the development build, `_.template` utilizes sourceURLs for easier + * debugging. See http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl + * + * For more information on precompiling templates see: + * http://lodash.com/custom-builds + * + * For more information on Chrome extension sandboxes see: + * http://developer.chrome.com/stable/extensions/sandboxingEval.html + * + * @static + * @memberOf _ + * @category Utilities + * @param {string} text The template text. + * @param {Object} data The data object used to populate the text. + * @param {Object} [options] The options object. + * @param {RegExp} [options.escape] The "escape" delimiter. + * @param {RegExp} [options.evaluate] The "evaluate" delimiter. + * @param {Object} [options.imports] An object to import into the template as local variables. + * @param {RegExp} [options.interpolate] The "interpolate" delimiter. + * @param {string} [sourceURL] The sourceURL of the template's compiled source. + * @param {string} [variable] The data object variable name. + * @returns {Function|string} Returns a compiled function when no `data` object + * is given, else it returns the interpolated text. + * @example + * + * // using the "interpolate" delimiter to create a compiled template + * var compiled = _.template('hello <%= name %>'); + * compiled({ 'name': 'fred' }); + * // => 'hello fred' + * + * // using the "escape" delimiter to escape HTML in data property values + * _.template('<%- value %>', { 'value': ' + + +

[... remainder of compat/utilities/template.js and node_modules/node-pre-gyp/lib/util/nw-pre-gyp/index.html (a "Node-webkit-based module test" page) omitted ...]

+ + diff --git a/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json new file mode 100644 index 0000000..71d03f8 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/nw-pre-gyp/package.json @@ -0,0 +1,9 @@ +{ +"main": "index.html", +"name": "nw-pre-gyp-module-test", +"description": "Node-webkit-based module test.", +"version": "0.0.1", +"window": { + "show": false +} +} diff --git a/node_modules/node-pre-gyp/lib/util/s3_setup.js b/node_modules/node-pre-gyp/lib/util/s3_setup.js new file mode 100644 index 0000000..5bc42e9 --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/s3_setup.js @@ -0,0 +1,27 @@ +"use strict"; + +module.exports = exports; + +var url = require('url'); + +var URI_REGEX="^(.*)\.(s3(?:-.*)?)\.amazonaws\.com$"; + +module.exports.detect = function(to,config) { + var uri = url.parse(to); + var hostname_matches = uri.hostname.match(URI_REGEX); + config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/',''); + if(!hostname_matches) { + return; + } + if (!config.bucket) { + config.bucket = hostname_matches[1]; + } + if (!config.region) { + var s3_domain = hostname_matches[2]; + if (s3_domain.slice(0,3) == 's3-' && + s3_domain.length >= 3) { + // it appears the region is explicit in the url + config.region = s3_domain.replace('s3-',''); + } + } +}; diff --git a/node_modules/node-pre-gyp/lib/util/versioning.js b/node_modules/node-pre-gyp/lib/util/versioning.js new file mode 100644 index 0000000..8ebbe3a --- /dev/null +++ b/node_modules/node-pre-gyp/lib/util/versioning.js @@ -0,0 +1,308 @@ +"use strict"; + +module.exports = exports; + +var path = require('path'); +var semver = require('semver'); +var url = require('url'); + +var abi_crosswalk; + +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = require('./abi_crosswalk.json'); +} + +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_electron_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if electron is the target."); + } + // Electron guarantees that patch version update won't break native modules. + var sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' 
+ sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; + +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_node_webkit_abi requires valid runtime arg"); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error("Empty target version is not supported if node-webkit is the target."); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; + +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error("get_node_abi requires valid runtime arg"); + } + if (!versions) { + throw new Error("get_node_abi requires valid process.versions object"); + } + var sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime+'-v'+versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? runtime+'-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0,2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; + +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error("get_runtime_abi requires valid runtime arg"); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime != 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime,process.versions); + } else { + var cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + var target_parts = target_version.split('.').map(function(i) { return +i; }); + if (target_parts.length != 3) { // parse failed + throw new Error("Unknown target version: " + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: + + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. 
+ - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. + + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). + + TODO: use semver module instead of custom version parsing + */ + var major = target_parts[0]; + var minor = target_parts[1]; + var patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + var new_iojs_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + var new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error("Unsupported target version: " + target_version); + } + // emulate process.versions + var versions_obj = { + node: target_version, + v8: cross_obj.v8+'.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; + +var required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; + +function validate_config(package_json) { + var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + var missing = []; + if (!package_json.main) { + missing.push('main'); + } + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + var o = package_json.binary; + required_parameters.forEach(function(p) { + if (missing.indexOf('binary') > -1) { + missing.pop('binary'); + } + if (!o || o[p] === undefined) { + missing.push('binary.' 
+ p); + } + }); + if (missing.length >= 1) { + throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n')); + } + if (o) { + // enforce https over http + var protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted"); + } + } +} + +module.exports.validate_config = validate_config; + +function eval_template(template,opts) { + Object.keys(opts).forEach(function(key) { + var pattern = '{'+key+'}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern,opts[key]); + } + }); + return template; +} + +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) != '/') { + return pathname + '/'; + } + return pathname; +} + +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g,'/'); +} + +function get_process_runtime(versions) { + var runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; +} + +module.exports.get_process_runtime = get_process_runtime; + +var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +var default_remote_path = ''; + +module.exports.evaluate = function(package_json,options) { + options = options || {}; + validate_config(package_json); + var v = package_json.version; + var module_version = semver.parse(v); + var runtime = options.runtime || get_process_runtime(process.versions); + var opts = { + name: package_json.name, + configuration: Boolean(options.debug) ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? 
module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime,options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + module_main: package_json.main, + toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119 + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + var host = process.env['npm_config_' + opts.module_name + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host,opts)); + opts.module_path = eval_template(package_json.binary.module_path,opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root,opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path,opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path; + var package_name = package_json.binary.package_name ? 
package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name,opts); + opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name); + opts.hosted_path = url.resolve(opts.host,opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name); + return opts; +}; diff --git a/node_modules/node-pre-gyp/node_modules/.bin/mkdirp b/node_modules/node-pre-gyp/node_modules/.bin/mkdirp new file mode 120000 index 0000000..91a5f62 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/mkdirp @@ -0,0 +1 @@ +../../../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/nopt b/node_modules/node-pre-gyp/node_modules/.bin/nopt new file mode 120000 index 0000000..d6493b6 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/nopt @@ -0,0 +1 @@ +../../../nopt/bin/nopt.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/rc b/node_modules/node-pre-gyp/node_modules/.bin/rc new file mode 120000 index 0000000..62203c8 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/rc @@ -0,0 +1 @@ +../../../rc/index.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/rimraf b/node_modules/node-pre-gyp/node_modules/.bin/rimraf new file mode 120000 index 0000000..632d6da --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../../../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/node-pre-gyp/node_modules/.bin/semver b/node_modules/node-pre-gyp/node_modules/.bin/semver new file mode 120000 index 0000000..b3ca603 --- /dev/null +++ b/node_modules/node-pre-gyp/node_modules/.bin/semver @@ -0,0 +1 @@ +../../../semver/bin/semver \ No newline at end of file diff --git a/node_modules/node-pre-gyp/package.json b/node_modules/node-pre-gyp/package.json new file mode 100644 index 0000000..b92e636 --- /dev/null +++ b/node_modules/node-pre-gyp/package.json @@ -0,0 +1,52 @@ +{ + "name": "node-pre-gyp", + "description": "Node.js native addon binary install tool", + "version" : "0.6.31", + "keywords": [ + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "binary" + ], + "license": "BSD-3-Clause", + "author": "Dane Springmeyer ", + "repository": { + "type": "git", + "url": "git://github.com/mapbox/node-pre-gyp.git" + }, + "bin": "./bin/node-pre-gyp", + "main": "./lib/node-pre-gyp.js", + "dependencies": { + "mkdirp": "~0.5.1", + "nopt": "~3.0.6", + "npmlog": "^4.0.0", + "rc": "~1.1.6", + "request": "^2.75.0", + "rimraf": "~2.5.4", + "semver": "~5.3.0", + "tar": "~2.2.1", + "tar-pack": "~3.3.0" + }, + "devDependencies": { + "aws-sdk": "2.x", + "mocha": "3.x", + "retire": "1.2.10", + "jshint": "2.x" + }, + "jshintConfig": { + "node": true, + "globalstrict": true, + "undef": true, + "unused": true, + "noarg": true, + "mocha": true + }, + "scripts": { + "prepublish": "retire -n && npm ls && jshint test/build.test.js test/s3_setup.test.js test/versioning.test.js", + "update-crosswalk": "node scripts/abi_crosswalk.js", + "test": "jshint lib lib/util scripts bin/node-pre-gyp && mocha -R spec --timeout 500000" + } +} diff --git a/node_modules/node-uuid/.npmignore b/node_modules/node-uuid/.npmignore new file mode 100644 index 0000000..8886139 --- /dev/null +++ b/node_modules/node-uuid/.npmignore @@ -0,0 +1,4 @@ +node_modules +.DS_Store +.nyc_output +coverage diff --git a/node_modules/node-uuid/LICENSE.md b/node_modules/node-uuid/LICENSE.md new file 
mode 100644 index 0000000..652609b --- /dev/null +++ b/node_modules/node-uuid/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010-2012 Robert Kieffer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/node-uuid/README.md b/node_modules/node-uuid/README.md new file mode 100644 index 0000000..5cd8555 --- /dev/null +++ b/node_modules/node-uuid/README.md @@ -0,0 +1,254 @@ +# node-uuid + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Generate RFC4122 version 1 or version 4 UUIDs +* Runs in node.js and all browsers. +* Registered as a [ComponentJS](https://github.com/component/component) [component](https://github.com/component/component/wiki/Components) ('broofa/node-uuid'). +* Cryptographically strong random # generation + * `crypto.randomBytes(n)` in node.js + * `window.crypto.getRandomValues(ta)` in [supported browsers](https://developer.mozilla.org/en-US/docs/Web/API/RandomSource/getRandomValues#Browser_Compatibility) +* 1.1K minified and gzip'ed (Want something smaller? Check this [crazy shit](https://gist.github.com/982883) out! ) +* [Annotated source code](http://broofa.github.com/node-uuid/docs/uuid.html) +* Comes with a Command Line Interface for generating uuids on the command line + +## Getting Started + +Install it in your browser: + +```html + +``` + +Or in node.js: + +``` +npm install node-uuid +``` + +```javascript +var uuid = require('node-uuid'); +``` + +Then create some ids ... + +```javascript +// Generate a v1 (time-based) id +uuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' + +// Generate a v4 (random) id +uuid.v4(); // -> '110ec58a-a0f2-4ac4-8393-c866d813b8d1' +``` + +## API + +### uuid.v1([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. + * `msecs` - (Number | Date) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. 
+* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Notes: + +1. The randomly generated node id is only guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.) + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v1({ + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}); // -> "710b962e-041c-11e1-9234-0123456789ab" +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +var arr = new Array(32); // -> [] +uuid.v1(null, arr, 0); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15] +uuid.v1(null, arr, 16); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15 02 a3 1c b0 14 32 11 e1 85 58 0b 48 8e 4f c1 15] + +// Optionally use uuid.unparse() to get stringify the ids +uuid.unparse(buffer); // -> '02a2ce90-1432-11e1-8558-0b488e4fc115' +uuid.unparse(buffer, 16) // -> '02a31cb0-1432-11e1-8558-0b488e4fc115' +``` + +### uuid.v4([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator to use. Set to one of the built-in generators - `uuid.mathRNG` (all platforms), `uuid.nodeRNG` (node.js only), `uuid.whatwgRNG` (WebKit only) - or a custom function that returns an array[16] of byte values. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v4({ + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}); +// -> "109156be-c4fb-41ea-b1b4-efe1671c5836" +``` + +Example: Generate two IDs in a single buffer + +```javascript +var buffer = new Array(32); // (or 'new Buffer' in node.js) +uuid.v4(null, buffer, 0); +uuid.v4(null, buffer, 16); +``` + +### uuid.parse(id[, buffer[, offset]]) +### uuid.unparse(buffer[, offset]) + +Parse and unparse UUIDs + + * `id` - (String) UUID(-like) string + * `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. Default: A new Array or Buffer is used + * `offset` - (Number) Starting index in `buffer` at which to begin writing. Default: 0 + +Example parsing and unparsing a UUID string + +```javascript +var bytes = uuid.parse('797ff043-11eb-11e1-80d6-510998755d10'); // -> +var string = uuid.unparse(bytes); // -> '797ff043-11eb-11e1-80d6-510998755d10' +``` + +### uuid.noConflict() + +(Browsers only) Set `uuid` property back to it's previous value. + +Returns the node-uuid object. + +Example: + +```javascript +var myUuid = uuid.noConflict(); +myUuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' +``` + +## Deprecated APIs + +Support for the following v1.2 APIs is available in v1.3, but is deprecated and will be removed in the next major version. + +### uuid([format [, buffer [, offset]]]) + +uuid() has become uuid.v4(), and the `format` argument is now implicit in the `buffer` argument. (i.e. 
if you specify a buffer, the format is assumed to be binary). + +### uuid.BufferClass + +The class of container created when generating binary uuid data if no buffer argument is specified. This is expected to go away, with no replacement API. + +## Command Line Interface + +To use the executable, it's probably best to install this library globally. + +`npm install -g node-uuid` + +Usage: + +``` +USAGE: uuid [version] [options] + + +options: + +--help Display this message and exit +``` + +`version` must be an RFC4122 version that is supported by this library, which is currently version 1 and version 4 (denoted by "v1" and "v4", respectively). `version` defaults to version 4 when not supplied. + +### Examples + +``` +> uuid +3a91f950-dec8-4688-ba14-5b7bbfc7a563 +``` + +``` +> uuid v1 +9d0b43e0-7696-11e3-964b-250efa37a98e +``` + +``` +> uuid v4 +6790ac7c-24ac-4f98-8464-42f6d98a53ae +``` + +## Testing + +In node.js + +``` +npm test +``` + +In Browser + +``` +open test/test.html +``` + +### Benchmarking + +Requires node.js + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +For a more complete discussion of node-uuid performance, please see the `benchmark/README.md` file, and the [benchmark wiki](https://github.com/broofa/node-uuid/wiki/Benchmark) + +For browser performance [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance). + +## Release notes + +### 1.4.6 + +* Properly detect node crypto and whatwg crypto +* Workaround phantomjs/browserify bug +* Explicit check for `window` rather implicit this-global +* Issue warning if Math.random() is being used +* "use strict"; +* A few jshint / stylistic updates (=== and such) + +### 1.4.0 + +* Improved module context detection +* Removed public RNG functions + +### 1.3.2 + +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + +### 1.3.0 + +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/node-uuid/benchmark/README.md b/node_modules/node-uuid/benchmark/README.md new file mode 100644 index 0000000..aaeb2ea --- /dev/null +++ b/node_modules/node-uuid/benchmark/README.md @@ -0,0 +1,53 @@ +# node-uuid Benchmarks + +### Results + +To see the results of our benchmarks visit https://github.com/broofa/node-uuid/wiki/Benchmark + +### Run them yourself + +node-uuid comes with some benchmarks to measure performance of generating UUIDs. These can be run using node.js. node-uuid is being benchmarked against some other uuid modules, that are available through npm namely `uuid` and `uuid-js`. + +To prepare and run the benchmark issue; + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +You'll see an output like this one: + +``` +# v4 +nodeuuid.v4(): 854700 uuids/second +nodeuuid.v4('binary'): 788643 uuids/second +nodeuuid.v4('binary', buffer): 1336898 uuids/second +uuid(): 479386 uuids/second +uuid('binary'): 582072 uuids/second +uuidjs.create(4): 312304 uuids/second + +# v1 +nodeuuid.v1(): 938086 uuids/second +nodeuuid.v1('binary'): 683060 uuids/second +nodeuuid.v1('binary', buffer): 1644736 uuids/second +uuidjs.create(1): 190621 uuids/second +``` + +* The `uuid()` entries are for Nikhil Marathe's [uuid module](https://bitbucket.org/nikhilm/uuidjs) which is a wrapper around the native libuuid library. 
+* The `uuidjs()` entries are for Patrick Negri's [uuid-js module](https://github.com/pnegri/uuid-js) which is a pure javascript implementation based on [UUID.js](https://github.com/LiosK/UUID.js) by LiosK. + +If you want to get more reliable results you can run the benchmark multiple times and write the output into a log file: + +``` +for i in {0..9}; do node benchmark/benchmark.js >> benchmark/bench_0.4.12.log; done; +``` + +If you're interested in how performance varies between different node versions, you can issue the above command multiple times. + +You can then use the shell script `bench.sh` provided in this directory to calculate the averages over all benchmark runs and draw a nice plot: + +``` +(cd benchmark/ && ./bench.sh) +``` + +This assumes you have [gnuplot](http://www.gnuplot.info/) and [ImageMagick](http://www.imagemagick.org/) installed. You'll find a nice `bench.png` graph in the `benchmark/` directory then. diff --git a/node_modules/node-uuid/benchmark/bench.gnu b/node_modules/node-uuid/benchmark/bench.gnu new file mode 100644 index 0000000..a342fbb --- /dev/null +++ b/node_modules/node-uuid/benchmark/bench.gnu @@ -0,0 +1,174 @@ +#!/opt/local/bin/gnuplot -persist +# +# +# G N U P L O T +# Version 4.4 patchlevel 3 +# last modified March 2011 +# System: Darwin 10.8.0 +# +# Copyright (C) 1986-1993, 1998, 2004, 2007-2010 +# Thomas Williams, Colin Kelley and many others +# +# gnuplot home: http://www.gnuplot.info +# faq, bugs, etc: type "help seeking-assistance" +# immediate help: type "help" +# plot window: hit 'h' +set terminal postscript eps noenhanced defaultplex \ + leveldefault color colortext \ + solid linewidth 1.2 butt noclip \ + palfuncparam 2000,0.003 \ + "Helvetica" 14 +set output 'bench.eps' +unset clip points +set clip one +unset clip two +set bar 1.000000 front +set border 31 front linetype -1 linewidth 1.000 +set xdata +set ydata +set zdata +set x2data +set y2data +set timefmt x "%d/%m/%y,%H:%M" +set timefmt y "%d/%m/%y,%H:%M" +set timefmt z "%d/%m/%y,%H:%M" +set timefmt x2 "%d/%m/%y,%H:%M" +set timefmt y2 "%d/%m/%y,%H:%M" +set timefmt cb "%d/%m/%y,%H:%M" +set boxwidth +set style fill empty border +set style rectangle back fc lt -3 fillstyle solid 1.00 border lt -1 +set style circle radius graph 0.02, first 0, 0 +set dummy x,y +set format x "% g" +set format y "% g" +set format x2 "% g" +set format y2 "% g" +set format z "% g" +set format cb "% g" +set angles radians +unset grid +set key title "" +set key outside left top horizontal Right noreverse enhanced autotitles columnhead nobox +set key noinvert samplen 4 spacing 1 width 0 height 0 +set key maxcolumns 2 maxrows 0 +unset label +unset arrow +set style increment default +unset style line +set style line 1 linetype 1 linewidth 2.000 pointtype 1 pointsize default pointinterval 0 +unset style arrow +set style histogram clustered gap 2 title offset character 0, 0, 0 +unset logscale +set offsets graph 0.05, 0.15, 0, 0 +set pointsize 1.5 +set pointintervalbox 1 +set encoding default +unset polar +unset parametric +unset decimalsign +set view 60, 30, 1, 1 +set samples 100, 100 +set isosamples 10, 10 +set surface +unset contour +set clabel '%8.3g' +set mapping cartesian +set datafile separator whitespace +unset hidden3d +set cntrparam order 4 +set cntrparam linear +set cntrparam levels auto 5 +set cntrparam points 5 +set size ratio 0 1,1 +set origin 0,0 +set style data points +set style function lines +set xzeroaxis linetype -2 linewidth 1.000 +set yzeroaxis linetype -2 linewidth 1.000 +set zzeroaxis linetype -2 
linewidth 1.000 +set x2zeroaxis linetype -2 linewidth 1.000 +set y2zeroaxis linetype -2 linewidth 1.000 +set ticslevel 0.5 +set mxtics default +set mytics default +set mztics default +set mx2tics default +set my2tics default +set mcbtics default +set xtics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set xtics norangelimit +set xtics () +set ytics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set ytics autofreq norangelimit +set ztics border in scale 1,0.5 nomirror norotate offset character 0, 0, 0 +set ztics autofreq norangelimit +set nox2tics +set noy2tics +set cbtics border in scale 1,0.5 mirror norotate offset character 0, 0, 0 +set cbtics autofreq norangelimit +set title "" +set title offset character 0, 0, 0 font "" norotate +set timestamp bottom +set timestamp "" +set timestamp offset character 0, 0, 0 font "" norotate +set rrange [ * : * ] noreverse nowriteback # (currently [8.98847e+307:-8.98847e+307] ) +set autoscale rfixmin +set autoscale rfixmax +set trange [ * : * ] noreverse nowriteback # (currently [-5.00000:5.00000] ) +set autoscale tfixmin +set autoscale tfixmax +set urange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale ufixmin +set autoscale ufixmax +set vrange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale vfixmin +set autoscale vfixmax +set xlabel "" +set xlabel offset character 0, 0, 0 font "" textcolor lt -1 norotate +set x2label "" +set x2label offset character 0, 0, 0 font "" textcolor lt -1 norotate +set xrange [ * : * ] noreverse nowriteback # (currently [-0.150000:3.15000] ) +set autoscale xfixmin +set autoscale xfixmax +set x2range [ * : * ] noreverse nowriteback # (currently [0.00000:3.00000] ) +set autoscale x2fixmin +set autoscale x2fixmax +set ylabel "" +set ylabel offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set y2label "" +set y2label offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set yrange [ 0.00000 : 1.90000e+06 ] noreverse nowriteback # (currently [:] ) +set autoscale yfixmin +set autoscale yfixmax +set y2range [ * : * ] noreverse nowriteback # (currently [0.00000:1.90000e+06] ) +set autoscale y2fixmin +set autoscale y2fixmax +set zlabel "" +set zlabel offset character 0, 0, 0 font "" textcolor lt -1 norotate +set zrange [ * : * ] noreverse nowriteback # (currently [-10.0000:10.0000] ) +set autoscale zfixmin +set autoscale zfixmax +set cblabel "" +set cblabel offset character 0, 0, 0 font "" textcolor lt -1 rotate by -270 +set cbrange [ * : * ] noreverse nowriteback # (currently [8.98847e+307:-8.98847e+307] ) +set autoscale cbfixmin +set autoscale cbfixmax +set zero 1e-08 +set lmargin -1 +set bmargin -1 +set rmargin -1 +set tmargin -1 +set pm3d explicit at s +set pm3d scansautomatic +set pm3d interpolate 1,1 flush begin noftriangles nohidden3d corners2color mean +set palette positive nops_allcF maxcolors 0 gamma 1.5 color model RGB +set palette rgbformulae 7, 5, 15 +set colorbox default +set colorbox vertical origin screen 0.9, 0.2, 0 size screen 0.05, 0.6, 0 front bdefault +set loadpath +set fontpath +set fit noerrorvariables +GNUTERM = "aqua" +plot 'bench_results.txt' using 2:xticlabel(1) w lp lw 2, '' using 3:xticlabel(1) w lp lw 2, '' using 4:xticlabel(1) w lp lw 2, '' using 5:xticlabel(1) w lp lw 2, '' using 6:xticlabel(1) w lp lw 2, '' using 7:xticlabel(1) w lp lw 2, '' using 8:xticlabel(1) w lp lw 2, '' using 9:xticlabel(1) w lp lw 2 +# EOF diff --git a/node_modules/node-uuid/benchmark/bench.sh 
b/node_modules/node-uuid/benchmark/bench.sh new file mode 100755 index 0000000..d870a0c --- /dev/null +++ b/node_modules/node-uuid/benchmark/bench.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# for a given node version run: +# for i in {0..9}; do node benchmark.js >> bench_0.6.2.log; done; + +PATTERNS=('nodeuuid.v1()' "nodeuuid.v1('binary'," 'nodeuuid.v4()' "nodeuuid.v4('binary'," "uuid()" "uuid('binary')" 'uuidjs.create(1)' 'uuidjs.create(4)' '140byte') +FILES=(node_uuid_v1_string node_uuid_v1_buf node_uuid_v4_string node_uuid_v4_buf libuuid_v4_string libuuid_v4_binary uuidjs_v1_string uuidjs_v4_string 140byte_es) +INDICES=(2 3 2 3 2 2 2 2 2) +VERSIONS=$( ls bench_*.log | sed -e 's/^bench_\([0-9\.]*\)\.log/\1/' | tr "\\n" " " ) +TMPJOIN="tmp_join" +OUTPUT="bench_results.txt" + +for I in ${!FILES[*]}; do + F=${FILES[$I]} + P=${PATTERNS[$I]} + INDEX=${INDICES[$I]} + echo "version $F" > $F + for V in $VERSIONS; do + (VAL=$( grep "$P" bench_$V.log | LC_ALL=en_US awk '{ sum += $'$INDEX' } END { print sum/NR }' ); echo $V $VAL) >> $F + done + if [ $I == 0 ]; then + cat $F > $TMPJOIN + else + join $TMPJOIN $F > $OUTPUT + cp $OUTPUT $TMPJOIN + fi + rm $F +done + +rm $TMPJOIN + +gnuplot bench.gnu +convert -density 200 -resize 800x560 -flatten bench.eps bench.png +rm bench.eps diff --git a/node_modules/node-uuid/benchmark/benchmark-native.c b/node_modules/node-uuid/benchmark/benchmark-native.c new file mode 100644 index 0000000..dbfc75f --- /dev/null +++ b/node_modules/node-uuid/benchmark/benchmark-native.c @@ -0,0 +1,34 @@ +/* +Test performance of native C UUID generation + +To Compile: cc -luuid benchmark-native.c -o benchmark-native +*/ + +#include +#include +#include +#include + +int main() { + uuid_t myid; + char buf[36+1]; + int i; + struct timeval t; + double start, finish; + + gettimeofday(&t, NULL); + start = t.tv_sec + t.tv_usec/1e6; + + int n = 2e5; + for (i = 0; i < n; i++) { + uuid_generate(myid); + uuid_unparse(myid, buf); + } + + gettimeofday(&t, NULL); + finish = t.tv_sec + t.tv_usec/1e6; + double dur = finish - start; + + printf("%d uuids/sec", (int)(n/dur)); + return 0; +} diff --git a/node_modules/node-uuid/benchmark/benchmark.js b/node_modules/node-uuid/benchmark/benchmark.js new file mode 100644 index 0000000..40e6efb --- /dev/null +++ b/node_modules/node-uuid/benchmark/benchmark.js @@ -0,0 +1,84 @@ +try { + var nodeuuid = require('../uuid'); +} catch (e) { + console.error('node-uuid require failed - skipping tests'); +} + +try { + var uuid = require('uuid'); +} catch (e) { + console.error('uuid require failed - skipping tests'); +} + +try { + var uuidjs = require('uuid-js'); +} catch (e) { + console.error('uuid-js require failed - skipping tests'); +} + +var N = 5e5; + +function rate(msg, t) { + console.log(msg + ': ' + + (N / (Date.now() - t) * 1e3 | 0) + + ' uuids/second'); +} + +console.log('# v4'); + +// node-uuid - string form +if (nodeuuid) { + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4(); + rate('nodeuuid.v4() - using node.js crypto RNG', t); + + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4({rng: nodeuuid.mathRNG}); + rate('nodeuuid.v4() - using Math.random() RNG', t); + + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4('binary'); + rate('nodeuuid.v4(\'binary\')', t); + + var buffer = new nodeuuid.BufferClass(16); + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v4('binary', buffer); + rate('nodeuuid.v4(\'binary\', buffer)', t); +} + +// libuuid - string form +if (uuid) { + for (var i = 0, t = Date.now(); i < N; i++) uuid(); + rate('uuid()', 
t); + + for (var i = 0, t = Date.now(); i < N; i++) uuid('binary'); + rate('uuid(\'binary\')', t); +} + +// uuid-js - string form +if (uuidjs) { + for (var i = 0, t = Date.now(); i < N; i++) uuidjs.create(4); + rate('uuidjs.create(4)', t); +} + +// 140byte.es +for (var i = 0, t = Date.now(); i < N; i++) 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g,function(s,r){r=Math.random()*16|0;return (s=='x'?r:r&0x3|0x8).toString(16)}); +rate('140byte.es_v4', t); + +console.log(''); +console.log('# v1'); + +// node-uuid - v1 string form +if (nodeuuid) { + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v1(); + rate('nodeuuid.v1()', t); + + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v1('binary'); + rate('nodeuuid.v1(\'binary\')', t); + + var buffer = new nodeuuid.BufferClass(16); + for (var i = 0, t = Date.now(); i < N; i++) nodeuuid.v1('binary', buffer); + rate('nodeuuid.v1(\'binary\', buffer)', t); +} + +// uuid-js - v1 string form +if (uuidjs) { + for (var i = 0, t = Date.now(); i < N; i++) uuidjs.create(1); + rate('uuidjs.create(1)', t); +} diff --git a/node_modules/node-uuid/bin/uuid b/node_modules/node-uuid/bin/uuid new file mode 100755 index 0000000..f732e99 --- /dev/null +++ b/node_modules/node-uuid/bin/uuid @@ -0,0 +1,26 @@ +#!/usr/bin/env node + +var path = require('path'); +var uuid = require(path.join(__dirname, '..')); + +var arg = process.argv[2]; + +if ('--help' === arg) { + console.log('\n USAGE: uuid [version] [options]\n\n'); + console.log(' options:\n'); + console.log(' --help Display this message and exit\n'); + process.exit(0); +} + +if (null == arg) { + console.log(uuid()); + process.exit(0); +} + +if ('v1' !== arg && 'v4' !== arg) { + console.error('Version must be RFC4122 version 1 or version 4, denoted as "v1" or "v4"'); + process.exit(1); +} + +console.log(uuid[arg]()); +process.exit(0); diff --git a/node_modules/node-uuid/bower.json b/node_modules/node-uuid/bower.json new file mode 100644 index 0000000..c0925e1 --- /dev/null +++ b/node_modules/node-uuid/bower.json @@ -0,0 +1,23 @@ +{ + "name": "node-uuid", + "version": "1.4.7", + "homepage": "https://github.com/broofa/node-uuid", + "authors": [ + "Robert Kieffer " + ], + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "main": "uuid.js", + "keywords": [ + "uuid", + "gid", + "rfc4122" + ], + "license": "MIT", + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ] +} diff --git a/node_modules/node-uuid/component.json b/node_modules/node-uuid/component.json new file mode 100644 index 0000000..3ff4633 --- /dev/null +++ b/node_modules/node-uuid/component.json @@ -0,0 +1,25 @@ +{ + "name": "node-uuid", + "repo": "broofa/node-uuid", + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "version": "1.4.7", + "author": "Robert Kieffer ", + "contributors": [ + { + "name": "Christoph Tavan ", + "github": "https://github.com/ctavan" + } + ], + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "dependencies": {}, + "development": {}, + "main": "uuid.js", + "scripts": [ + "uuid.js" + ], + "license": "MIT" +} \ No newline at end of file diff --git a/node_modules/node-uuid/package.json b/node_modules/node-uuid/package.json new file mode 100644 index 0000000..5adef42 --- /dev/null +++ b/node_modules/node-uuid/package.json @@ -0,0 +1,61 @@ +{ + "author": { + "email": "robert@broofa.com", + "name": "Robert Kieffer" + }, + "bin": { + "uuid": "./bin/uuid" + }, + "bugs": { + "url": "https://github.com/broofa/node-uuid/issues" + }, + 
"contributors": [ + { + "name": "AJ ONeal", + "email": "coolaj86@gmail.com" + }, + { + "name": "Christoph Tavan", + "email": "dev@tavan.de" + } + ], + "dependencies": {}, + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "devDependencies": { + "nyc": "^2.2.0" + }, + "directories": {}, + "homepage": "https://github.com/broofa/node-uuid", + "installable": true, + "keywords": [ + "guid", + "rfc4122", + "uuid" + ], + "lib": ".", + "licenses": [ + { + "type": "MIT", + "url": "https://raw.github.com/broofa/node-uuid/master/LICENSE.md" + } + ], + "main": "./uuid.js", + "maintainers": [ + { + "name": "broofa", + "email": "robert@broofa.com" + } + ], + "name": "node-uuid", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "https://github.com/broofa/node-uuid.git" + }, + "scripts": { + "coverage": "nyc npm test && nyc report", + "test": "node test/test.js" + }, + "url": "http://github.com/broofa/node-uuid", + "version": "1.4.7" +} diff --git a/node_modules/node-uuid/test/compare_v1.js b/node_modules/node-uuid/test/compare_v1.js new file mode 100644 index 0000000..05af822 --- /dev/null +++ b/node_modules/node-uuid/test/compare_v1.js @@ -0,0 +1,63 @@ +var assert = require('assert'), + nodeuuid = require('../uuid'), + uuidjs = require('uuid-js'), + libuuid = require('uuid').generate, + util = require('util'), + exec = require('child_process').exec, + os = require('os'); + +// On Mac Os X / macports there's only the ossp-uuid package that provides uuid +// On Linux there's uuid-runtime which provides uuidgen +var uuidCmd = os.type() === 'Darwin' ? 'uuid -1' : 'uuidgen -t'; + +function compare(ids) { + console.log(ids); + for (var i = 0; i < ids.length; i++) { + var id = ids[i].split('-'); + id = [id[2], id[1], id[0]].join(''); + ids[i] = id; + } + var sorted = ([].concat(ids)).sort(); + + if (sorted.toString() !== ids.toString()) { + console.log('Warning: sorted !== ids'); + } else { + console.log('everything in order!'); + } +} + +// Test time order of v1 uuids +var ids = []; +while (ids.length < 10e3) ids.push(nodeuuid.v1()); + +var max = 10; +console.log('node-uuid:'); +ids = []; +for (var i = 0; i < max; i++) ids.push(nodeuuid.v1()); +compare(ids); + +console.log(''); +console.log('uuidjs:'); +ids = []; +for (var i = 0; i < max; i++) ids.push(uuidjs.create(1).toString()); +compare(ids); + +console.log(''); +console.log('libuuid:'); +ids = []; +var count = 0; +var last = function() { + compare(ids); +} +var cb = function(err, stdout, stderr) { + ids.push(stdout.substring(0, stdout.length-1)); + count++; + if (count < max) { + return next(); + } + last(); +}; +var next = function() { + exec(uuidCmd, cb); +}; +next(); diff --git a/node_modules/node-uuid/test/test.html b/node_modules/node-uuid/test/test.html new file mode 100644 index 0000000..d80326e --- /dev/null +++ b/node_modules/node-uuid/test/test.html @@ -0,0 +1,17 @@ + + + + + + + + + diff --git a/node_modules/node-uuid/test/test.js b/node_modules/node-uuid/test/test.js new file mode 100644 index 0000000..5f1113d --- /dev/null +++ b/node_modules/node-uuid/test/test.js @@ -0,0 +1,231 @@ +if (!this.uuid) { + // node.js + uuid = require('../uuid'); + if (!/_rb/.test(uuid._rng.toString())) { + throw new Error("should use crypto for node.js"); + } +} + +// +// x-platform log/assert shims +// + +function _log(msg, type) { + type = type || 'log'; + + if (typeof(document) != 'undefined') { + document.write('
<div class="' + type + '">' + msg.replace(/\n/g, '
<br />') + '</div>
'); + } + if (typeof(console) != 'undefined') { + var color = { + log: '\033[39m', + warn: '\033[33m', + error: '\033[31m' + }; + console[type](color[type] + msg + color.log); + } +} + +function log(msg) {_log(msg, 'log');} +function warn(msg) {_log(msg, 'warn');} +function error(msg) {_log(msg, 'error');} + +function assert(res, msg) { + if (!res) { + error('FAIL: ' + msg); + } else { + log('Pass: ' + msg); + } +} + +// +// Unit tests +// + +// Verify ordering of v1 ids created with explicit times +var TIME = 1321644961388; // 2011-11-18 11:36:01.388-08:00 + +function compare(name, ids) { + ids = ids.map(function(id) { + return id.split('-').reverse().join('-'); + }).sort(); + var sorted = ([].concat(ids)).sort(); + + assert(sorted.toString() == ids.toString(), name + ' have expected order'); +} + +// Verify ordering of v1 ids created using default behavior +compare('uuids with current time', [ + uuid.v1(), + uuid.v1(), + uuid.v1(), + uuid.v1(), + uuid.v1() +]); + +// Verify ordering of v1 ids created with explicit times +compare('uuids with time option', [ + uuid.v1({msecs: TIME - 10*3600*1000}), + uuid.v1({msecs: TIME - 1}), + uuid.v1({msecs: TIME}), + uuid.v1({msecs: TIME + 1}), + uuid.v1({msecs: TIME + 28*24*3600*1000}) +]); + +assert( + uuid.v1({msecs: TIME}) != uuid.v1({msecs: TIME}), + 'IDs created at same msec are different' +); + +// Verify throw if too many ids created +var thrown = false; +try { + uuid.v1({msecs: TIME, nsecs: 10000}); +} catch (e) { + thrown = true; +} +assert(thrown, 'Exception thrown when > 10K ids created in 1 ms'); + +// Verify clock regression bumps clockseq +var uidt = uuid.v1({msecs: TIME}); +var uidtb = uuid.v1({msecs: TIME - 1}); +assert( + parseInt(uidtb.split('-')[3], 16) - parseInt(uidt.split('-')[3], 16) === 1, + 'Clock regression by msec increments the clockseq' +); + +// Verify clock regression bumps clockseq +var uidtn = uuid.v1({msecs: TIME, nsecs: 10}); +var uidtnb = uuid.v1({msecs: TIME, nsecs: 9}); +assert( + parseInt(uidtnb.split('-')[3], 16) - parseInt(uidtn.split('-')[3], 16) === 1, + 'Clock regression by nsec increments the clockseq' +); + +// Verify explicit options produce expected id +var id = uuid.v1({ + msecs: 1321651533573, + nsecs: 5432, + clockseq: 0x385c, + node: [ 0x61, 0xcd, 0x3c, 0xbb, 0x32, 0x10 ] +}); +assert(id == 'd9428888-122b-11e1-b85c-61cd3cbb3210', 'Explicit options produce expected id'); + +// Verify adjacent ids across a msec boundary are 1 time unit apart +var u0 = uuid.v1({msecs: TIME, nsecs: 9999}); +var u1 = uuid.v1({msecs: TIME + 1, nsecs: 0}); + +var before = u0.split('-')[0], after = u1.split('-')[0]; +var dt = parseInt(after, 16) - parseInt(before, 16); +assert(dt === 1, 'Ids spanning 1ms boundary are 100ns apart'); + +// +// Test parse/unparse +// + +id = '00112233445566778899aabbccddeeff'; +assert(uuid.unparse(uuid.parse(id.substr(0,10))) == + '00112233-4400-0000-0000-000000000000', 'Short parse'); +assert(uuid.unparse(uuid.parse('(this is the uuid -> ' + id + id)) == + '00112233-4455-6677-8899-aabbccddeeff', 'Dirty parse'); + +// +// Perf tests +// + +var generators = { + v1: uuid.v1, + v4: uuid.v4 +}; + +var UUID_FORMAT = { + v1: /[0-9a-f]{8}-[0-9a-f]{4}-1[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i, + v4: /[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i +}; + +var N = 1e4; + +// Get %'age an actual value differs from the ideal value +function divergence(actual, ideal) { + return Math.round(100*100*(actual - ideal)/ideal)/100; +} + +function rate(msg, t) { + log(msg + ': ' + (N / 
(Date.now() - t) * 1e3 | 0) + ' uuids\/second'); +} + +for (var version in generators) { + var counts = {}, max = 0; + var generator = generators[version]; + var format = UUID_FORMAT[version]; + + log('\nSanity check ' + N + ' ' + version + ' uuids'); + for (var i = 0, ok = 0; i < N; i++) { + id = generator(); + if (!format.test(id)) { + throw Error(id + ' is not a valid UUID string'); + } + + if (id != uuid.unparse(uuid.parse(id))) { + assert(fail, id + ' is not a valid id'); + } + + // Count digits for our randomness check + if (version == 'v4') { + var digits = id.replace(/-/g, '').split(''); + for (var j = digits.length-1; j >= 0; j--) { + var c = digits[j]; + max = Math.max(max, counts[c] = (counts[c] || 0) + 1); + } + } + } + + // Check randomness for v4 UUIDs + if (version == 'v4') { + // Limit that we get worried about randomness. (Purely empirical choice, this!) + var limit = 2*100*Math.sqrt(1/N); + + log('\nChecking v4 randomness. Distribution of Hex Digits (% deviation from ideal)'); + + for (var i = 0; i < 16; i++) { + var c = i.toString(16); + var bar = '', n = counts[c], p = Math.round(n/max*100|0); + + // 1-3,5-8, and D-F: 1:16 odds over 30 digits + var ideal = N*30/16; + if (i == 4) { + // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1 + 30/16); + } else if (i >= 8 && i <= 11) { + // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits + ideal = N*(1/4 + 30/16); + } else { + // Otherwise: 1:16 odds on 30 digits + ideal = N*30/16; + } + var d = divergence(n, ideal); + + // Draw bar using UTF squares (just for grins) + var s = n/max*50 | 0; + while (s--) bar += '='; + + assert(Math.abs(d) < limit, c + ' |' + bar + '| ' + counts[c] + ' (' + d + '% < ' + limit + '%)'); + } + } +} + +// Perf tests +for (var version in generators) { + log('\nPerformance testing ' + version + ' UUIDs'); + var generator = generators[version]; + var buf = new uuid.BufferClass(16); + + for (var i = 0, t = Date.now(); i < N; i++) generator(); + rate('uuid.' + version + '()', t); + + for (var i = 0, t = Date.now(); i < N; i++) generator('binary'); + rate('uuid.' + version + '(\'binary\')', t); + + for (var i = 0, t = Date.now(); i < N; i++) generator('binary', buf); + rate('uuid.' + version + '(\'binary\', buffer)', t); +} diff --git a/node_modules/node-uuid/uuid.js b/node_modules/node-uuid/uuid.js new file mode 100644 index 0000000..89c5b8f --- /dev/null +++ b/node_modules/node-uuid/uuid.js @@ -0,0 +1,272 @@ +// uuid.js +// +// Copyright (c) 2010-2012 Robert Kieffer +// MIT License - http://opensource.org/licenses/mit-license.php + +/*global window, require, define */ +(function(_window) { + 'use strict'; + + // Unique ID creation requires a high quality random # generator. We feature + // detect to determine the best RNG source, normalizing to a function that + // returns 128-bits of randomness, since that's what's usually required + var _rng, _mathRNG, _nodeRNG, _whatwgRNG, _previousRoot; + + function setupBrowser() { + // Allow for MSIE11 msCrypto + var _crypto = _window.crypto || _window.msCrypto; + + if (!_rng && _crypto && _crypto.getRandomValues) { + // WHATWG crypto-based RNG - http://wiki.whatwg.org/wiki/Crypto + // + // Moderately fast, high quality + try { + var _rnds8 = new Uint8Array(16); + _whatwgRNG = _rng = function whatwgRNG() { + _crypto.getRandomValues(_rnds8); + return _rnds8; + }; + _rng(); + } catch(e) {} + } + + if (!_rng) { + // Math.random()-based (RNG) + // + // If all else fails, use Math.random(). It's fast, but is of unspecified + // quality. 
+ var _rnds = new Array(16); + _mathRNG = _rng = function() { + for (var i = 0, r; i < 16; i++) { + if ((i & 0x03) === 0) { r = Math.random() * 0x100000000; } + _rnds[i] = r >>> ((i & 0x03) << 3) & 0xff; + } + + return _rnds; + }; + if ('undefined' !== typeof console && console.warn) { + console.warn("[SECURITY] node-uuid: crypto not usable, falling back to insecure Math.random()"); + } + } + } + + function setupNode() { + // Node.js crypto-based RNG - http://nodejs.org/docs/v0.6.2/api/crypto.html + // + // Moderately fast, high quality + if ('function' === typeof require) { + try { + var _rb = require('crypto').randomBytes; + _nodeRNG = _rng = _rb && function() {return _rb(16);}; + _rng(); + } catch(e) {} + } + } + + if (_window) { + setupBrowser(); + } else { + setupNode(); + } + + // Buffer class to use + var BufferClass = ('function' === typeof Buffer) ? Buffer : Array; + + // Maps for number <-> hex string conversion + var _byteToHex = []; + var _hexToByte = {}; + for (var i = 0; i < 256; i++) { + _byteToHex[i] = (i + 0x100).toString(16).substr(1); + _hexToByte[_byteToHex[i]] = i; + } + + // **`parse()` - Parse a UUID into it's component bytes** + function parse(s, buf, offset) { + var i = (buf && offset) || 0, ii = 0; + + buf = buf || []; + s.toLowerCase().replace(/[0-9a-f]{2}/g, function(oct) { + if (ii < 16) { // Don't overflow! + buf[i + ii++] = _hexToByte[oct]; + } + }); + + // Zero out remaining bytes if string was short + while (ii < 16) { + buf[i + ii++] = 0; + } + + return buf; + } + + // **`unparse()` - Convert UUID byte array (ala parse()) into a string** + function unparse(buf, offset) { + var i = offset || 0, bth = _byteToHex; + return bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + '-' + + bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]] + + bth[buf[i++]] + bth[buf[i++]]; + } + + // **`v1()` - Generate time-based UUID** + // + // Inspired by https://github.com/LiosK/UUID.js + // and http://docs.python.org/library/uuid.html + + // random #'s we need to init node and clockseq + var _seedBytes = _rng(); + + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + var _nodeId = [ + _seedBytes[0] | 0x01, + _seedBytes[1], _seedBytes[2], _seedBytes[3], _seedBytes[4], _seedBytes[5] + ]; + + // Per 4.2.2, randomize (14 bit) clockseq + var _clockseq = (_seedBytes[6] << 8 | _seedBytes[7]) & 0x3fff; + + // Previous uuid creation time + var _lastMSecs = 0, _lastNSecs = 0; + + // See https://github.com/broofa/node-uuid for API details + function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + + var clockseq = (options.clockseq != null) ? options.clockseq : _clockseq; + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = (options.msecs != null) ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = (options.nsecs != null) ? 
options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq == null) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs == null) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + var node = options.node || _nodeId; + for (var n = 0; n < 6; n++) { + b[i + n] = node[n]; + } + + return buf ? buf : unparse(b); + } + + // **`v4()` - Generate random UUID** + + // See https://github.com/broofa/node-uuid for API details + function v4(options, buf, offset) { + // Deprecated - 'format' argument, as supported in v1.2 + var i = buf && offset || 0; + + if (typeof(options) === 'string') { + buf = (options === 'binary') ? new BufferClass(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || _rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ii++) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || unparse(rnds); + } + + // Export public API + var uuid = v4; + uuid.v1 = v1; + uuid.v4 = v4; + uuid.parse = parse; + uuid.unparse = unparse; + uuid.BufferClass = BufferClass; + uuid._rng = _rng; + uuid._mathRNG = _mathRNG; + uuid._nodeRNG = _nodeRNG; + uuid._whatwgRNG = _whatwgRNG; + + if (('undefined' !== typeof module) && module.exports) { + // Publish as node.js module + module.exports = uuid; + } else if (typeof define === 'function' && define.amd) { + // Publish as AMD module + define(function() {return uuid;}); + + + } else { + // Publish as global (in browsers) + _previousRoot = _window.uuid; + + // **`noConflict()` - (browser only) to reset global 'uuid' var** + uuid.noConflict = function() { + _window.uuid = _previousRoot; + return uuid; + }; + + _window.uuid = uuid; + } +})('undefined' !== typeof window ? 
window : null); diff --git a/node_modules/nopt/.npmignore b/node_modules/nopt/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/nopt/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/nopt/.travis.yml b/node_modules/nopt/.travis.yml new file mode 100644 index 0000000..99f2bbf --- /dev/null +++ b/node_modules/nopt/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +language: node_js +node_js: + - '0.8' + - '0.10' + - '0.12' + - 'iojs' +before_install: + - npm install -g npm@latest diff --git a/node_modules/nopt/LICENSE b/node_modules/nopt/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/nopt/README.md b/node_modules/nopt/README.md new file mode 100644 index 0000000..f21a4b3 --- /dev/null +++ b/node_modules/nopt/README.md @@ -0,0 +1,211 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + + // my-program.js + var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + console.log(parsed) + +This would give you support for any of the following: + +```bash +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. 
Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. 
+ +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. 
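Before moving on to the implementation, a short hedged sketch tying the error-handling and slicing notes together; the option names and values here are invented for illustration:

```javascript
var nopt = require("nopt")

var types = { port: Number, verbose: Boolean }

// replace the default stderr warning for invalid values
nopt.invalidHandler = function (key, val, type) {
  console.log("ignoring bad value for --" + key + ": " + val)
}

// a slice of 0 parses the array as-is (handy in tests); with process.argv
// you would normally omit it and let it default to 2
var parsed = nopt(types, {}, ["--port", "banana", "--verbose"], 0)
// "banana" fails the Number check, so the handler fires and "port" is dropped:
// parsed => { verbose: true, argv: { remain: [], cooked: [...], original: [...] } }
```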
diff --git a/node_modules/nopt/bin/nopt.js b/node_modules/nopt/bin/nopt.js new file mode 100755 index 0000000..3232d4c --- /dev/null +++ b/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/nopt/examples/my-program.js b/node_modules/nopt/examples/my-program.js new file mode 100755 index 0000000..142447e --- /dev/null +++ b/node_modules/nopt/examples/my-program.js @@ -0,0 +1,30 @@ +#!/usr/bin/env node + +//process.env.DEBUG_NOPT = 1 + +// my-program.js +var nopt = require("../lib/nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag", "true"] + , "g" : ["--flag"] + , "s" : "--flag" + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) + +console.log("parsed =\n"+ require("util").inspect(parsed)) diff --git a/node_modules/nopt/lib/nopt.js b/node_modules/nopt/lib/nopt.js new file mode 100644 index 0000000..97707e7 --- /dev/null +++ b/node_modules/nopt/lib/nopt.js @@ -0,0 +1,415 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? 
function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , remain = [] + , cooked = args + , original = args.slice(0) + + parse(args, data, remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = {remain:remain,cooked:cooked,original:original} + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + if (!val.length) delete data[k] + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + var 
homePattern = process.platform === 'win32' ? /^~(\/|\\)/ : /^~\// + if (val.match(homePattern) && process.env.HOME) { + val = path.resolve(process.env.HOME, val.substr(2)) + } + data[k] = path.resolve(String(val)) + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + debug("validate Date %j %j %j", k, val, Date.parse(val)) + var s = Date.parse(val) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + if (arg.indexOf("=") !== -1) { + hadEq = true + var v = arg.split("=") + arg = v.shift() + v = v.join("=") + args.splice.apply(args, [i, 1].concat([arg, v])) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. 
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var isArray = types[arg] === Array || + Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + types[arg] === Boolean || + Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 || + (typeof types[arg] === 'undefined' && !hadEq) || + (la === "false" && + (types[arg] === null || + Array.isArray(types[arg]) && ~types[arg].indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (Array.isArray(types[arg]) && la) { + if (~types[arg].indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~types[arg].indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~types[arg].indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (types[arg] === String && la === undefined) + la = "" + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
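  // worked example (values invented for illustration): given
  //   shorthands = { g: "--global", l: "--long", p: "--parseable" }
  // the glommed arg "-glp" expands to ["--global", "--long", "--parseable"],
  // while "-glx" is not expanded, because "x" is not a single-char shorthand.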
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json new file mode 100644 index 0000000..bb2745d --- /dev/null +++ b/node_modules/nopt/package.json @@ -0,0 +1,22 @@ +{ + "name": "nopt", + "version": "3.0.6", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "main": "lib/nopt.js", + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/nopt.git" + }, + "bin": "./bin/nopt.js", + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "devDependencies": { + "tap": "^1.2.0" + } +} diff --git a/node_modules/nopt/test/basic.js b/node_modules/nopt/test/basic.js new file mode 100644 index 0000000..d399de9 --- /dev/null +++ b/node_modules/nopt/test/basic.js @@ -0,0 +1,273 @@ +var nopt = require("../") + , test = require('tap').test + + +test("passing a string results in a string", function (t) { + var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0) + t.same(parsed.key, "myvalue") + t.end() +}) + +// https://github.com/npm/nopt/issues/31 +test("Empty String results in empty string, not true", function (t) { + var parsed = nopt({ empty: String }, {}, ["--empty"], 0) + t.same(parsed.empty, "") + t.end() +}) + +test("~ path is resolved to $HOME", function (t) { + var path = require("path") + if (!process.env.HOME) process.env.HOME = "/tmp" + var parsed = nopt({key: path}, {}, ["--key=~/val"], 0) + t.same(parsed.key, path.resolve(process.env.HOME, "val")) + t.end() +}) + +// https://github.com/npm/nopt/issues/24 +test("Unknown options are not parsed as numbers", function (t) { + var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0) + t.equal(parsed['leave-as-is'], '1.20') + t.equal(parsed['parse-me'], 1.2) + t.end() +}); + +// https://github.com/npm/nopt/issues/48 +test("Check types based on name of type", function (t) { + var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0) + t.equal(parsed['parse-me'], 1.2) + t.end() +}) + + +test("Missing types are not parsed", function (t) { + 
var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("Types passed without a name are not parsed", function (t) { + var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0) + //should only contain argv + t.equal(Object.keys(parsed).length, 1) + t.end() +}) + +test("other tests", function (t) { + + var util = require("util") + , Stream = require("stream") + , path = require("path") + , url = require("url") + + , shorthands = + { s : ["--loglevel", "silent"] + , d : ["--loglevel", "info"] + , dd : ["--loglevel", "verbose"] + , ddd : ["--loglevel", "silly"] + , noreg : ["--no-registry"] + , reg : ["--registry"] + , "no-reg" : ["--no-registry"] + , silent : ["--loglevel", "silent"] + , verbose : ["--loglevel", "verbose"] + , h : ["--usage"] + , H : ["--usage"] + , "?" : ["--usage"] + , help : ["--usage"] + , v : ["--version"] + , f : ["--force"] + , desc : ["--description"] + , "no-desc" : ["--no-description"] + , "local" : ["--no-global"] + , l : ["--long"] + , p : ["--parseable"] + , porcelain : ["--parseable"] + , g : ["--global"] + } + + , types = + { aoa: Array + , nullstream: [null, Stream] + , date: Date + , str: String + , browser : String + , cache : path + , color : ["always", Boolean] + , depth : Number + , description : Boolean + , dev : Boolean + , editor : path + , force : Boolean + , global : Boolean + , globalconfig : path + , group : [String, Number] + , gzipbin : String + , logfd : [Number, Stream] + , loglevel : ["silent","win","error","warn","info","verbose","silly"] + , long : Boolean + , "node-version" : [false, String] + , npaturl : url + , npat : Boolean + , "onload-script" : [false, String] + , outfd : [Number, Stream] + , parseable : Boolean + , pre: Boolean + , prefix: path + , proxy : url + , "rebuild-bundle" : Boolean + , registry : url + , searchopts : String + , searchexclude: [null, String] + , shell : path + , t: [Array, String] + , tag : String + , tar : String + , tmp : path + , "unsafe-perm" : Boolean + , usage : Boolean + , user : String + , username : String + , userconfig : path + , version : Boolean + , viewer: path + , _exit : Boolean + , path: path + } + + ; [["-v", {version:true}, []] + ,["---v", {version:true}, []] + ,["ls -s --no-reg connect -d", + {loglevel:"info",registry:null},["ls","connect"]] + ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]] + ,["ls --registry blargle", {}, ["ls"]] + ,["--no-registry", {registry:null}, []] + ,["--no-color true", {color:false}, []] + ,["--no-color false", {color:true}, []] + ,["--no-color", {color:false}, []] + ,["--color false", {color:false}, []] + ,["--color --logfd 7", {logfd:7,color:true}, []] + ,["--color=true", {color:true}, []] + ,["--logfd=10", {logfd:10}, []] + ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]] + ,["--tmp=tmp -tar=gtar", + {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]] + ,["--logfd x", {}, []] + ,["a -true -- -no-false", {true:true},["a","-no-false"]] + ,["a -no-false", {false:false},["a"]] + ,["a -no-no-true", {true:true}, ["a"]] + ,["a -no-no-no-false", {false:false}, ["a"]] + ,["---NO-no-No-no-no-no-nO-no-no"+ + "-No-no-no-no-no-no-no-no-no"+ + "-no-no-no-no-NO-NO-no-no-no-no-no-no"+ + "-no-body-can-do-the-boogaloo-like-I-do" + ,{"body-can-do-the-boogaloo-like-I-do":false}, []] + ,["we are -no-strangers-to-love "+ + "--you-know=the-rules --and=so-do-i "+ + "---im-thinking-of=a-full-commitment "+ + "--no-you-would-get-this-from-any-other-guy "+ + 
"--no-gonna-give-you-up "+ + "-no-gonna-let-you-down=true "+ + "--no-no-gonna-run-around false "+ + "--desert-you=false "+ + "--make-you-cry false "+ + "--no-tell-a-lie "+ + "--no-no-and-hurt-you false" + ,{"strangers-to-love":false + ,"you-know":"the-rules" + ,"and":"so-do-i" + ,"you-would-get-this-from-any-other-guy":false + ,"gonna-give-you-up":false + ,"gonna-let-you-down":false + ,"gonna-run-around":false + ,"desert-you":false + ,"make-you-cry":false + ,"tell-a-lie":false + ,"and-hurt-you":false + },["we", "are"]] + ,["-t one -t two -t three" + ,{t: ["one", "two", "three"]} + ,[]] + ,["-t one -t null -t three four five null" + ,{t: ["one", "null", "three"]} + ,["four", "five", "null"]] + ,["-t foo" + ,{t:["foo"]} + ,[]] + ,["--no-t" + ,{t:["false"]} + ,[]] + ,["-no-no-t" + ,{t:["true"]} + ,[]] + ,["-aoa one -aoa null -aoa 100" + ,{aoa:["one", null, '100']} + ,[]] + ,["-str 100" + ,{str:"100"} + ,[]] + ,["--color always" + ,{color:"always"} + ,[]] + ,["--no-nullstream" + ,{nullstream:null} + ,[]] + ,["--nullstream false" + ,{nullstream:null} + ,[]] + ,["--notadate=2011-01-25" + ,{notadate: "2011-01-25"} + ,[]] + ,["--date 2011-01-25" + ,{date: new Date("2011-01-25")} + ,[]] + ,["-cl 1" + ,{config: true, length: 1} + ,[] + ,{config: Boolean, length: Number, clear: Boolean} + ,{c: "--config", l: "--length"}] + ,["--acount bla" + ,{"acount":true} + ,["bla"] + ,{account: Boolean, credentials: Boolean, options: String} + ,{a:"--account", c:"--credentials",o:"--options"}] + ,["--clear" + ,{clear:true} + ,[] + ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean} + ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}] + ,["--file -" + ,{"file":"-"} + ,[] + ,{file:String} + ,{}] + ,["--file -" + ,{"file":true} + ,["-"] + ,{file:Boolean} + ,{}] + ,["--path" + ,{"path":null} + ,[]] + ,["--path ." + ,{"path":process.cwd()} + ,[]] + ].forEach(function (test) { + var argv = test[0].split(/\s+/) + , opts = test[1] + , rem = test[2] + , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0) + , parsed = actual.argv + delete actual.argv + for (var i in opts) { + var e = JSON.stringify(opts[i]) + , a = JSON.stringify(actual[i] === undefined ? null : actual[i]) + if (e && typeof e === "object") { + t.deepEqual(e, a) + } else { + t.equal(e, a) + } + } + t.deepEqual(rem, parsed.remain) + }) + t.end() +}) diff --git a/node_modules/normalize-path/LICENSE b/node_modules/normalize-path/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/normalize-path/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/normalize-path/README.md b/node_modules/normalize-path/README.md new file mode 100644 index 0000000..e75d4bd --- /dev/null +++ b/node_modules/normalize-path/README.md @@ -0,0 +1,75 @@ +# normalize-path [![NPM version](https://badge.fury.io/js/normalize-path.svg)](http://badge.fury.io/js/normalize-path) [![Build Status](https://travis-ci.org/jonschlinkert/normalize-path.svg)](https://travis-ci.org/jonschlinkert/normalize-path) + +> Normalize file path slashes to be unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i normalize-path --save +``` + +## Usage + +```js +var normalize = require('normalize-path'); + +normalize('\\foo\\bar\\baz\\'); +//=> '/foo/bar/baz' + +normalize('./foo/bar/baz/'); +//=> './foo/bar/baz' +``` + +Pass `false` as the last argument to **not** strip trailing slashes: + +```js +normalize('./foo/bar/baz/', false); +//=> './foo/bar/baz/' + +normalize('foo\\bar\\baz\\', false); +//=> 'foo/bar/baz/' +``` + +## Related + +Other useful libraries for working with paths in node.js: + +* [contains-path](https://www.npmjs.com/package/contains-path): Return true if a file path contains the given path. | [homepage](https://github.com/jonschlinkert/contains-path) +* [ends-with](https://www.npmjs.com/package/ends-with): Returns `true` if the given `string` or `array` ends with `suffix` using strict equality for… [more](https://www.npmjs.com/package/ends-with) | [homepage](https://github.com/jonschlinkert/ends-with) +* [is-absolute](https://www.npmjs.com/package/is-absolute): Returns true if a file path is absolute. | [homepage](https://github.com/jonschlinkert/is-absolute) +* [is-relative](https://www.npmjs.com/package/is-relative): Returns `true` if the path appears to be relative. | [homepage](https://github.com/jonschlinkert/is-relative) +* [parse-filepath](https://www.npmjs.com/package/parse-filepath): Parse a filepath into an object. Falls back on the native node.js `path.parse` method if… [more](https://www.npmjs.com/package/parse-filepath) | [homepage](https://github.com/jonschlinkert/parse-filepath) +* [path-ends-with](https://www.npmjs.com/package/path-ends-with): Return `true` if a file path ends with the given string/suffix. | [homepage](https://github.com/jonschlinkert/path-ends-with) +* [path-segments](https://www.npmjs.com/package/path-segments): Get n specific segments of a file path, e.g. first 2, last 3, etc. | [homepage](https://github.com/jonschlinkert/path-segments) +* [rewrite-ext](https://www.npmjs.com/package/rewrite-ext): Automatically re-write the destination extension of a filepath based on the source extension. e.g … [more](https://www.npmjs.com/package/rewrite-ext) | [homepage](https://github.com/jonschlinkert/rewrite-ext) +* [unixify](https://www.npmjs.com/package/unixify): Convert Windows file paths to unix paths. | [homepage](https://github.com/jonschlinkert/unixify) + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/normalize-path/issues/new). 
+ +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on November 17, 2015._ \ No newline at end of file diff --git a/node_modules/normalize-path/index.js b/node_modules/normalize-path/index.js new file mode 100644 index 0000000..137f601 --- /dev/null +++ b/node_modules/normalize-path/index.js @@ -0,0 +1,17 @@ +/*! + * normalize-path + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License + */ + +module.exports = function normalizePath(str, stripTrailing) { + if (typeof str !== 'string') { + throw new TypeError('expected a string'); + } + str = str.replace(/[\\\/]+/g, '/'); + if (stripTrailing !== false) { + str = str.replace(/\/$/, ''); + } + return str; +}; diff --git a/node_modules/normalize-path/package.json b/node_modules/normalize-path/package.json new file mode 100644 index 0000000..44dc399 --- /dev/null +++ b/node_modules/normalize-path/package.json @@ -0,0 +1,59 @@ +{ + "name": "normalize-path", + "description": "Normalize file path slashes to be unix-like forward slashes. Also condenses repeat slashes to a single slash and removes and trailing slashes.", + "version": "2.0.1", + "homepage": "https://github.com/jonschlinkert/normalize-path", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/normalize-path", + "bugs": { + "url": "https://github.com/jonschlinkert/normalize-path/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "benchmarked": "^0.1.1", + "minimist": "^1.2.0", + "mocha": "*" + }, + "keywords": [ + "backslash", + "file", + "filepath", + "fix", + "forward", + "fp", + "fs", + "normalize", + "path", + "slash", + "slashes", + "trailing", + "unix", + "urix" + ], + "verb": { + "related": { + "list": [ + "rewrite-ext", + "contains-path", + "ends-with", + "path-ends-with", + "path-segments", + "is-absolute", + "is-relative", + "parse-filepath", + "unixify" + ], + "description": "Other useful libraries for working with paths in node.js:" + } + } +} diff --git a/node_modules/npm-programmatic/.npmignore b/node_modules/npm-programmatic/.npmignore new file mode 100644 index 0000000..a478b79 --- /dev/null +++ b/node_modules/npm-programmatic/.npmignore @@ -0,0 +1,39 @@ +# Logs +logs +*.log +npm-debug.log* + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules +jspm_packages + +# Optional npm cache directory +.npm + +# Optional REPL history +.node_repl_history + +test/backup \ No newline at end of file diff --git a/node_modules/npm-programmatic/.travis.yml b/node_modules/npm-programmatic/.travis.yml new file mode 100644 index 0000000..4796715 --- /dev/null +++ b/node_modules/npm-programmatic/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "node" \ No newline at end of file diff 
--git a/node_modules/npm-programmatic/README.md b/node_modules/npm-programmatic/README.md new file mode 100644 index 0000000..f1ba0a5 --- /dev/null +++ b/node_modules/npm-programmatic/README.md @@ -0,0 +1,84 @@ +# npm-programmatic +[![Build Status](https://travis-ci.org/Manak/npm-programmatic.svg?branch=master)](https://travis-ci.org/Manak/npm-programmatic) + +npm-programmatic is a library that allows you to access npm commands programmatically from javascript +## Usage +Every function returns a Bluebird promise. +CWD refers to current working directory, allowing you to ensure the command executes in a certain folder in the filesystem. +If output is set, the output of npm will be shown in the console. + +## Installation of Packages + +``` + npm.install(packages, opts).then(function) +``` +| Name | Type | Value | +| ------------- |:-------------:| -----:| +| packages | Array | packages to be installed | +| opts | Object | save:true/false; global:true/false; cwd:string; saveDev:true/false; output:true/false| + +### Example +``` + var npm = require('npm-programmatic'); + npm.install(['left-pad'], { + cwd:'/path/to/my/project', + save:true + }) + .then(function(){ + console.log("SUCCESS!!!"); + }) + .catch(function(){ + console.log("Unable to install package"); + }); +``` + +## Unistallation of Packages + +``` + npm.uninstall(packages, opts).then(function) +``` +| Name | Type | Value | +| ------------- |:-------------:| -----:| +| packages | Array | packages to be uninstalled | +| opts | Object | save:true/false; global:true/false; cwd:string; saveDev:true/false; output:true/false| + +### Example +``` + var npm = require('npm-programmatic'); + npm.install(['left-pad'], { + cwd:'/path/to/my/project', + save:true + }) + .then(function(){ + console.log("SUCCESS!!!"); + }) + .catch(function(){ + console.log("Unable to uninstall package"); + }); +``` + + +## List Installed Packages + +``` + npm.list(path).then(function) +``` +| Name | Type | Value | +| ------------- |:-------------:| -----:| +| path | String | path at which to look | + +### Example +``` + var npm = require('npm-programmatic'); + npm.list('/path/to/project') + .then(function(arrayOfPackages){ + console.log(arrayOfPackages); + }) + .catch(function(){ + console.log("Unable to uninstall package"); + }); +``` + +## Tests +install mocha and dev dependencies. Then run +``` npm test ``` diff --git a/node_modules/npm-programmatic/index.js b/node_modules/npm-programmatic/index.js new file mode 100644 index 0000000..6d99fcf --- /dev/null +++ b/node_modules/npm-programmatic/index.js @@ -0,0 +1,96 @@ +const Promise = require('bluebird'); +const exec = require('child_process').exec; + +module.exports = { + install: function(packages, opts){ + if(packages.length == 0 || !packages || !packages.length){Promise.reject("No packages found");} + if(typeof packages == "string") packages = [packages]; + if(!opts) opts = {}; + var cmdString = "npm install " + packages.join(" ") + " " + + (opts.global ? " -g":"") + + (opts.save ? " --save":"") + + (opts.saveDev? 
" --saveDev":""); + + return new Promise(function(resolve, reject){ + var cmd = exec(cmdString, {cwd: opts.cwd?opts.cwd:"/"},(error, stdout, stderr) => { + if (error) { + reject(error); + } else { + resolve(true); + } + }); + + if(opts.output) { + var consoleOutput = function(msg) { + console.log('npm: ' + msg); + }; + + cmd.stdout.on('data', consoleOutput); + cmd.stderr.on('data', consoleOutput); + } + }); + }, + + uninstall: function(packages, opts){ + if(packages.length == 0 || !packages || !packages.length){Promise.reject(new Error("No packages found"));} + if(typeof packages == "string") packages = [packages]; + if(!opts) opts = {}; + var cmdString = "npm uninstall " + packages.join(" ") + " " + + (opts.global ? " -g":"") + + (opts.save ? " --save":"") + + (opts.saveDev? " --saveDev":""); + + return new Promise(function(resolve, reject){ + var cmd = exec(cmdString, {cwd: opts.cwd?opts.cwd:"/"},(error, stdout, stderr) => { + if (error) { + reject(error); + } else { + resolve(true); + } + }); + + if(opts.output) { + var consoleOutput = function(msg) { + console.log('npm: ' + msg); + }; + + cmd.stdout.on('data', consoleOutput); + cmd.stderr.on('data', consoleOutput); + } + }); + }, + + list:function(path){ + var global = false; + if(!path) global = true; + var cmdString = "npm ls --depth=0 " + (global?"-g ":" "); + return new Promise(function(resolve, reject){ + exec(cmdString, {cwd: path?path:"/"},(error, stdout, stderr) => { + if (stderr.indexOf("missing")== -1 && stderr.indexOf("required") == -1) { + reject(error); + } + var packages = []; + packages = stdout.split('\n'); + packages = packages.filter(function(item){ + if(item.match(/^├──.+/g) != null){ + return true + } + if(item.match(/^└──.+/g) != null){ + return true + } + return undefined; + }); + packages = packages.map(function(item){ + if(item.match(/^├──.+/g) != null){ + return item.replace(/^├──\s/g, ""); + } + if(item.match(/^└──.+/g) != null){ + return item.replace(/^└──\s/g, ""); + } + }) + resolve(packages); + + }); + }); + } +} diff --git a/node_modules/npm-programmatic/package.json b/node_modules/npm-programmatic/package.json new file mode 100644 index 0000000..f70e446 --- /dev/null +++ b/node_modules/npm-programmatic/package.json @@ -0,0 +1,32 @@ +{ + "name": "npm-programmatic", + "version": "0.0.5", + "description": "Use NPM commands programmatically", + "main": "index.js", + "scripts": { + "test": "mocha --recursive ./test/**/*.test.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Manak/npm-programmatic.git" + }, + "keywords": [ + "NPM", + "Programmatic", + "npm", + "programmatic" + ], + "author": "manak", + "license": "ISC", + "bugs": { + "url": "https://github.com/Manak/npm-programmatic/issues" + }, + "homepage": "https://github.com/Manak/npm-programmatic#readme", + "devDependencies": { + "mocha": "^2.5.3", + "should": "^9.0.2" + }, + "dependencies": { + "bluebird": "^3.4.1" + } +} diff --git a/node_modules/npm-programmatic/test/install/install.test.js b/node_modules/npm-programmatic/test/install/install.test.js new file mode 100644 index 0000000..e042ffa --- /dev/null +++ b/node_modules/npm-programmatic/test/install/install.test.js @@ -0,0 +1,48 @@ +var npm = require("../../index"); +var fs = require("fs"); +var should = require("should"); +var exec = require('child_process').exec,child; + +describe("Test installation of packages", ()=>{ + beforeEach(()=>{ + child = exec('rm -rf ./node_modules/left-pad',function(err,out) {}); + child = exec('cp ./package.json 
./test/backup/package.json',function(err,out) {}); + }); + afterEach(()=>{ + child = exec('rm -rf ./node_modules/left-pad',function(err,out) {}); + child = exec('cp ./test/backup/package.json ./package.json' ,function(err,out) {}); + }); + it("should install package", function(done){ + this.timeout(5000); + npm.install(["left-pad"], {cwd:'.'}).then((result)=>{ + try{ + var checkExists = fs.accessSync('./node_modules/left-pad'); + } catch(err){ + return done(err); + } + return done(); + }).catch((err)=>{ + return done(err) + }); + }); + + it("should install package inside a node project and save it to package.json", function(done){ + this.timeout(5000); + npm.install(["left-pad"], {cwd:'.', save:true}).then((result)=>{ + try{ + var checkExists = fs.accessSync('./node_modules/left-pad'); + var contents = require('../../package.json'); + if(!contents.dependencies['left-pad']){ + throw new Error(); + } + } catch(err){ + return done(err); + } + + return done(); + + }).catch((err)=>{ + return done(err); + }); + }); +}); \ No newline at end of file diff --git a/node_modules/npm-programmatic/test/list/list.test.js b/node_modules/npm-programmatic/test/list/list.test.js new file mode 100644 index 0000000..08e5fbe --- /dev/null +++ b/node_modules/npm-programmatic/test/list/list.test.js @@ -0,0 +1,19 @@ +var npm = require("../../index"); + +describe("Test listing of installed packages", ()=>{ + it("should list all installed packages by npm-programmatic", function(done){ + this.timeout(5000); + npm.list('.') + .then(function(packages){ + for(var i in packages){ + if(packages[i].indexOf('bluebird')!=-1){ + return done(); + } + } + return done(new Error("bluebird not found!")); + }) + .catch(function(err, stderr){ + return done(err, stderr); + }) + }); +}); diff --git a/node_modules/npm-programmatic/test/uninstall/uninstall.test.js b/node_modules/npm-programmatic/test/uninstall/uninstall.test.js new file mode 100644 index 0000000..8356b1d --- /dev/null +++ b/node_modules/npm-programmatic/test/uninstall/uninstall.test.js @@ -0,0 +1,54 @@ +var npm = require("../../index"); +var fs = require("fs"); +var should = require("should"); +var exec = require('child_process').exec,child; + +describe("Test uninstallation of packages", ()=>{ + beforeEach(()=>{ + child = exec('cp -R ./node_modules/bluebird ./test/backup/node_modules/bluebird',function(err,out) { + }); + child = exec('cp ./package.json ./test/backup/package.json',function(err,out) {}); + }); + + afterEach(()=>{ + child = exec('cp -R ./test/backup/node_modules/bluebird ./node_modules/bluebird ',function(err,out) { + }); + child = exec('cp ./test/backup/package.json ./package.json' ,function(err,out) {}); + }); + + + it("should uninstall package", function(done){ + this.timeout(5000); + npm.uninstall('bluebird', {cwd:"."}) + .then(function(status){ + try{ + var checkExists = fs.accessSync('./node_modules/bluebird'); + } catch(err){ + return done(); + } + return done(new Error("Uninstallation failed, package still exists in node_modules.")); + }) + .catch(function(err){ + done(err); + }); + }); + + it("should uninstall package inside a node project and save it to package.json", function(done){ + this.timeout(5000); + npm.uninstall('bluebird', {cwd:'.', save:true}).then(()=>{ + try{ + var contents = fs.readFileSync('./package.json','UTF-8'); + contents = JSON.parse(contents); + if(!contents.dependencies['bluebird']){ + var checkExists = fs.accessSync('./node_modules/bluebird'); + return done(new Error()); + } + } catch(err){ + return done(); + } + 
}).catch((err)=>{ + return done(err); + }); + }); + +}); \ No newline at end of file diff --git a/node_modules/npmlog/CHANGELOG.md b/node_modules/npmlog/CHANGELOG.md new file mode 100644 index 0000000..f549a1f --- /dev/null +++ b/node_modules/npmlog/CHANGELOG.md @@ -0,0 +1,36 @@ +### v3.1.2 + +* Update to `gauge@1.6.0` adding support for default values for template + items. + +### v3.1.1 + +* Update to `gauge@1.5.3` to fix to `1.x` compatibility when it comes to + when a progress bar is enabled. In `1.x` if you didn't have a TTY the + progress bar was never shown. In `2.x` it merely defaults to disabled, + but you can enable it explicitly if you still want progress updates. + +### v3.1.0 + +* Update to `gauge@2.5.2`: + * Updates the `signal-exit` dependency which fixes an incompatibility with + the node profiler. + * Uses externalizes its ansi code generation in `console-control-strings` +* Make the default progress bar include the last line printed, colored as it + would be when printing to a tty. + +### v3.0.0 + +* Switch to `gauge@2.0.0`, for better performance, better look. +* Set stderr/stdout blocking if they're tty's, so that we can hide a + progress bar going to stderr and then safely print to stdout. Without + this the two can end up overlapping producing confusing and sometimes + corrupted output. + +### v2.0.0 + +* Make the `error` event non-fatal so that folks can use it as a prefix. + +### v1.0.0 + +* Add progress bar with `gauge@1.1.0` diff --git a/node_modules/npmlog/LICENSE b/node_modules/npmlog/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npmlog/README.md b/node_modules/npmlog/README.md new file mode 100644 index 0000000..dba3550 --- /dev/null +++ b/node_modules/npmlog/README.md @@ -0,0 +1,210 @@ +# npmlog + +The logger util that npm uses. + +This logger is very basic. It does the logging for npm. It supports +custom levels and colored output. + +By default, logs are written to stderr. If you want to send log messages +to outputs other than streams, then you can change the `log.stream` +member, or you can just listen to the events that it emits, and do +whatever you want with them. + +# Basic Usage + +``` +var log = require('npmlog') + +// additional stuff ---------------------------+ +// message ----------+ | +// prefix ----+ | | +// level -+ | | | +// v v v v + log.info('fyi', 'I have a kitty cat: %j', myKittyCat) +``` + +## log.level + +* {String} + +The level to display logs at. Any logs at or above this level will be +displayed. The special level `silent` will prevent anything from being +displayed ever. + +## log.record + +* {Array} + +An array of all the log messages that have been entered. 
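A small hedged sketch of how the level, prefix, and `log.record` pieces described here fit together (the prefix and messages are invented):

```
var log = require('npmlog')

log.level = 'verbose'   // show verbose and above
log.verbose('config', 'loaded %d entries', 3)
log.warn('config', 'falling back to the default registry')

// every message is also appended to log.record
var last = log.record[log.record.length - 1]
console.log(last.level, last.prefix, last.message)
// -> 'warn' 'config' 'falling back to the default registry'
```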
+ +## log.maxRecordSize + +* {Number} + +The maximum number of records to keep. If log.record gets bigger than +10% over this value, then it is sliced down to 90% of this value. + +The reason for the 10% window is so that it doesn't have to resize a +large array on every log entry. + +## log.prefixStyle + +* {Object} + +A style object that specifies how prefixes are styled. (See below) + +## log.headingStyle + +* {Object} + +A style object that specifies how the heading is styled. (See below) + +## log.heading + +* {String} Default: "" + +If set, a heading that is printed at the start of every line. + +## log.stream + +* {Stream} Default: `process.stderr` + +The stream where output is written. + +## log.enableColor() + +Force colors to be used on all messages, regardless of the output +stream. + +## log.disableColor() + +Disable colors on all messages. + +## log.enableProgress() + +Enable the display of log activity spinner and progress bar + +## log.disableProgress() + +Disable the display of a progress bar + +## log.enableUnicode() + +Force the unicode theme to be used for the progress bar. + +## log.disableUnicode() + +Disable the use of unicode in the progress bar. + +## log.setGaugeTemplate(template) + +Set a template for outputting the progress bar. See the [gauge documentation] for details. + +[gauge documentation]: https://npmjs.com/package/gauge + +## log.setGaugeThemeset(themes) + +Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. + +## log.pause() + +Stop emitting messages to the stream, but do not drop them. + +## log.resume() + +Emit all buffered messages that were written while paused. + +## log.log(level, prefix, message, ...) + +* `level` {String} The level to emit the message at +* `prefix` {String} A string prefix. Set to "" to skip. +* `message...` Arguments to `util.format` + +Emit a log message at the specified level. + +## log\[level](prefix, message, ...) + +For example, + +* log.silly(prefix, message, ...) +* log.verbose(prefix, message, ...) +* log.info(prefix, message, ...) +* log.http(prefix, message, ...) +* log.warn(prefix, message, ...) +* log.error(prefix, message, ...) + +Like `log.log(level, prefix, message, ...)`. In this way, each level is +given a shorthand, so you can do `log.info(prefix, message)`. + +## log.addLevel(level, n, style, disp) + +* `level` {String} Level indicator +* `n` {Number} The numeric level +* `style` {Object} Object with fg, bg, inverse, etc. +* `disp` {String} Optional replacement for `level` in the output. + +Sets up a new level with a shorthand function and so forth. + +Note that if the number is `Infinity`, then setting the level to that +will cause all log messages to be suppressed. If the number is +`-Infinity`, then the only way to show it is to enable all log messages. + +## log.newItem(name, todo, weight) + +* `name` {String} Optional; progress item name. +* `todo` {Number} Optional; total amount of work to be done. Default 0. +* `weight` {Number} Optional; the weight of this item relative to others. Default 1. + +This adds a new `are-we-there-yet` item tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `Tracker` object. + +## log.newStream(name, todo, weight) + +This adds a new `are-we-there-yet` stream tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerStream` object. 
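As a hedged illustration of `log.addLevel` and the tracker objects described above (the level name and numbers are invented, and the `completeWork`/`finish` calls are standard `are-we-there-yet` Tracker methods, not something documented in this README):

```
var log = require('npmlog')

// custom level shown as "NOTE"; 3000 sits between the stock info and warn levels
log.addLevel('notice', 3000, { fg: 'blue' }, 'NOTE')
log.notice('build', 'starting')

// a tracker item with 4 units of work drives the progress bar
log.enableProgress()
var tracker = log.newItem('copy files', 4)
tracker.completeWork(1)             // 25% of this item done
tracker.info('copy', '1 of 4 files copied')
tracker.finish()
log.disableProgress()
```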
+ +## log.newGroup(name, weight) + +This adds a new `are-we-there-yet` tracker group to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerGroup` object. + +# Events + +Events are all emitted with the message object. + +* `log` Emitted for all messages +* `log.<level>` Emitted for all messages with the `<level>` level. +* `<prefix>` Messages with prefixes also emit their prefix as an event. + +# Style Objects + +Style objects can have the following fields: + +* `fg` {String} Color for the foreground text +* `bg` {String} Color for the background +* `bold`, `inverse`, `underline` {Boolean} Set the associated property +* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) + +# Message Objects + +Every log event is emitted with a message object, and the `log.record` +list contains all of them that have been created. They have the +following fields: + +* `id` {Number} +* `level` {String} +* `prefix` {String} +* `message` {String} Result of `util.format()` +* `messageRaw` {Array} Arguments to `util.format()` + +# Blocking TTYs + +We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set +stderr and stdout blocking if they are tty's and have the setBlocking call. +This is a workaround for an issue in early versions of Node.js 6.x, which +made stderr and stdout non-blocking on OSX. (They are always blocking on +Windows and were never blocking on Linux.) `npmlog` needs them to be blocking +so that it can allow output to stdout and stderr to be interlaced. diff --git a/node_modules/npmlog/log.js b/node_modules/npmlog/log.js new file mode 100644 index 0000000..bf894fb --- /dev/null +++ b/node_modules/npmlog/log.js @@ -0,0 +1,301 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) this.gauge.setWriteTo(stream, stream) + }, + get: function () { + return stream + } +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ?
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''} + ] +}) + +log.tracker = new Progress.TrackerGroup() + +// no progress bars unless asked +log.progressEnabled = false + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) return + this.progressEnabled = true + if (this._pause) return + this.tracker.on('change', this.showProgress) + this.gauge.enable() + this.showProgress() +} + +log.disableProgress = function () { + if (!this.progressEnabled) return + this.clearProgress() + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') return + if (trackerConstructors.filter(function (C) { return C === P }).length) return + if (tracker[P]) return + if (typeof log[P] !== 'function') return + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) return cb && process.nextTick(cb) + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) return + var values = {} + if (name) values.section = name + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle) + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true +} + 
+log.resume = function () { + if (!this._paused) return + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) this.enableProgress() +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg && + (arg instanceof Error) && arg.stack) { + arg.stack = stack = arg.stack + '' + } + } + if (stack) a.unshift(stack + '\n') + message = util.format.apply(util, a) + + var m = { id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) this.emit(m.prefix, m) + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) this.gauge.pulse(m.prefix) + var l = this.levels[m.level] + if (l === undefined) return + if (l < this.levels[this.level]) return + if (l > 0 && !isFinite(l)) return + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) this.write(' ') + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) return + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) settings.push(style.fg) + if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + if (style.bold) settings.push('bold') + if (style.underline) settings.push('underline') + if (style.inverse) settings.push('inverse') + if (settings.length) output += consoleControl.color(settings) + if (style.beep) output += consoleControl.beep() + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + return output +} + +log.write = function (msg, style) { + if (!stream) return + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) disp = lvl + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 
1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/npmlog/package.json b/node_modules/npmlog/package.json new file mode 100644 index 0000000..f899f4a --- /dev/null +++ b/node_modules/npmlog/package.json @@ -0,0 +1,28 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "npmlog", + "description": "logger for npm", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "https://github.com/npm/npmlog.git" + }, + "main": "log.js", + "files": [ + "log.js" + ], + "scripts": { + "test": "standard && tap test/*.js" + }, + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.6.0", + "set-blocking": "~2.0.0" + }, + "devDependencies": { + "standard": "~7.1.2", + "tap": "~5.7.0" + }, + "license": "ISC" +} diff --git a/node_modules/number-is-nan/index.js b/node_modules/number-is-nan/index.js new file mode 100644 index 0000000..79be4b9 --- /dev/null +++ b/node_modules/number-is-nan/index.js @@ -0,0 +1,4 @@ +'use strict'; +module.exports = Number.isNaN || function (x) { + return x !== x; +}; diff --git a/node_modules/number-is-nan/license b/node_modules/number-is-nan/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/number-is-nan/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/number-is-nan/package.json b/node_modules/number-is-nan/package.json new file mode 100644 index 0000000..d2f51d4 --- /dev/null +++ b/node_modules/number-is-nan/package.json @@ -0,0 +1,35 @@ +{ + "name": "number-is-nan", + "version": "1.0.1", + "description": "ES2015 Number.isNaN() ponyfill", + "license": "MIT", + "repository": "sindresorhus/number-is-nan", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "es2015", + "ecmascript", + "ponyfill", + "polyfill", + "shim", + "number", + "is", + "nan", + "not" + ], + "devDependencies": { + "ava": "*" + } +} diff --git a/node_modules/number-is-nan/readme.md b/node_modules/number-is-nan/readme.md new file mode 100644 index 0000000..2463508 --- /dev/null +++ b/node_modules/number-is-nan/readme.md @@ -0,0 +1,28 @@ +# number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) + +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save number-is-nan +``` + + +## Usage + +```js +var numberIsNan = require('number-is-nan'); + +numberIsNan(NaN); +//=> true + +numberIsNan('unicorn'); +//=> false +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/oauth-sign/LICENSE b/node_modules/oauth-sign/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/oauth-sign/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
+ +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/oauth-sign/README.md b/node_modules/oauth-sign/README.md new file mode 100644 index 0000000..34c4a85 --- /dev/null +++ b/node_modules/oauth-sign/README.md @@ -0,0 +1,4 @@ +oauth-sign +========== + +OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module. diff --git a/node_modules/oauth-sign/index.js b/node_modules/oauth-sign/index.js new file mode 100644 index 0000000..dadcba9 --- /dev/null +++ b/node_modules/oauth-sign/index.js @@ -0,0 +1,136 @@ +var crypto = require('crypto') + , qs = require('querystring') + ; + +function sha1 (key, body) { + return crypto.createHmac('sha1', key).update(body).digest('base64') +} + +function rsa (key, body) { + return crypto.createSign("RSA-SHA1").update(body).sign(key, 'base64'); +} + +function rfc3986 (str) { + return encodeURIComponent(str) + .replace(/!/g,'%21') + .replace(/\*/g,'%2A') + .replace(/\(/g,'%28') + .replace(/\)/g,'%29') + .replace(/'/g,'%27') + ; +} + +// Maps object to bi-dimensional array +// Converts { foo: 'A', bar: [ 'b', 'B' ]} to +// [ ['foo', 'A'], ['bar', 'b'], ['bar', 'B'] ] +function map (obj) { + var key, val, arr = [] + for (key in obj) { + val = obj[key] + if (Array.isArray(val)) + for (var i = 0; i < val.length; i++) + arr.push([key, val[i]]) + else if (typeof val === "object") + for (var prop in val) + arr.push([key + '[' + prop + ']', val[prop]]); + else + arr.push([key, val]) + } + return arr +} + +// Compare function for sort +function compare (a, b) { + return a > b ? 1 : a < b ? -1 : 0 +} + +function generateBase (httpMethod, base_uri, params) { + // adapted from https://dev.twitter.com/docs/auth/oauth and + // https://dev.twitter.com/docs/auth/creating-signature + + // Parameter normalization + // http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2 + var normalized = map(params) + // 1. First, the name and value of each parameter are encoded + .map(function (p) { + return [ rfc3986(p[0]), rfc3986(p[1] || '') ] + }) + // 2. The parameters are sorted by name, using ascending byte value + // ordering. If two or more parameters share the same name, they + // are sorted by their value. 
+ .sort(function (a, b) { + return compare(a[0], b[0]) || compare(a[1], b[1]) + }) + // 3. The name of each parameter is concatenated to its corresponding + // value using an "=" character (ASCII code 61) as a separator, even + // if the value is empty. + .map(function (p) { return p.join('=') }) + // 4. The sorted name/value pairs are concatenated together into a + // single string by using an "&" character (ASCII code 38) as + // separator. + .join('&') + + var base = [ + rfc3986(httpMethod ? httpMethod.toUpperCase() : 'GET'), + rfc3986(base_uri), + rfc3986(normalized) + ].join('&') + + return base +} + +function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') + + return sha1(key, base) +} + +function rsasign (httpMethod, base_uri, params, private_key, token_secret) { + var base = generateBase(httpMethod, base_uri, params) + var key = private_key || '' + + return rsa(key, base) +} + +function plaintext (consumer_secret, token_secret) { + var key = [ + consumer_secret || '', + token_secret || '' + ].map(rfc3986).join('&') + + return key +} + +function sign (signMethod, httpMethod, base_uri, params, consumer_secret, token_secret) { + var method + var skipArgs = 1 + + switch (signMethod) { + case 'RSA-SHA1': + method = rsasign + break + case 'HMAC-SHA1': + method = hmacsign + break + case 'PLAINTEXT': + method = plaintext + skipArgs = 4 + break + default: + throw new Error("Signature method not supported: " + signMethod) + } + + return method.apply(null, [].slice.call(arguments, skipArgs)) +} + +exports.hmacsign = hmacsign +exports.rsasign = rsasign +exports.plaintext = plaintext +exports.sign = sign +exports.rfc3986 = rfc3986 +exports.generateBase = generateBase + diff --git a/node_modules/oauth-sign/package.json b/node_modules/oauth-sign/package.json new file mode 100644 index 0000000..4c7991e --- /dev/null +++ b/node_modules/oauth-sign/package.json @@ -0,0 +1,23 @@ +{ + "author": "Mikeal Rogers (http://www.futurealoof.com)", + "name": "oauth-sign", + "description": "OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module.", + "version": "0.8.2", + "license": "Apache-2.0", + "repository": { + "url": "https://github.com/mikeal/oauth-sign" + }, + "main": "index.js", + "files": [ + "index.js" + ], + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + }, + "scripts": { + "test": "node test.js" + } +} diff --git a/node_modules/object-assign/index.js b/node_modules/object-assign/index.js new file mode 100644 index 0000000..5085048 --- /dev/null +++ b/node_modules/object-assign/index.js @@ -0,0 +1,83 @@ +'use strict'; +/* eslint-disable no-unused-vars */ +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or undefined'); + } + + return Object(val); +} + +function shouldUseNative() { + try { + if (!Object.assign) { + return false; + } + + // Detect buggy property enumeration order in older V8 versions. 
+ + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; + } + + return true; + } catch (e) { + // We don't expect any of the above to throw, but better to be safe. + return false; + } +} + +module.exports = shouldUseNative() ? Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; + + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); + + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } + + if (Object.getOwnPropertySymbols) { + symbols = Object.getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } + } + } + } + + return to; +}; diff --git a/node_modules/object-assign/license b/node_modules/object-assign/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/object-assign/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/object-assign/package.json b/node_modules/object-assign/package.json new file mode 100644 index 0000000..a25d428 --- /dev/null +++ b/node_modules/object-assign/package.json @@ -0,0 +1,42 @@ +{ + "name": "object-assign", + "version": "4.1.0", + "description": "ES2015 Object.assign() ponyfill", + "license": "MIT", + "repository": "sindresorhus/object-assign", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && mocha", + "bench": "matcha bench.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "assign", + "extend", + "properties", + "es2015", + "ecmascript", + "harmony", + "ponyfill", + "prollyfill", + "polyfill", + "shim", + "browser" + ], + "devDependencies": { + "lodash": "^4.8.2", + "matcha": "^0.7.0", + "mocha": "*", + "xo": "*" + } +} diff --git a/node_modules/object-assign/readme.md b/node_modules/object-assign/readme.md new file mode 100644 index 0000000..13c0977 --- /dev/null +++ b/node_modules/object-assign/readme.md @@ -0,0 +1,56 @@ +# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign) + +> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) ponyfill + +> Ponyfill: A polyfill that doesn't overwrite the native method + + +## Install + +``` +$ npm install --save object-assign +``` + + +## Usage + +```js +const objectAssign = require('object-assign'); + +objectAssign({foo: 0}, {bar: 1}); +//=> {foo: 0, bar: 1} + +// multiple sources +objectAssign({foo: 0}, {bar: 1}, {baz: 2}); +//=> {foo: 0, bar: 1, baz: 2} + +// overwrites equal keys +objectAssign({foo: 0}, {foo: 1}, {foo: 2}); +//=> {foo: 2} + +// ignores null and undefined sources +objectAssign({foo: 0}, null, {bar: 1}, undefined); +//=> {foo: 0, bar: 1} +``` + + +## API + +### objectAssign(target, source, [source, ...]) + +Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones. + + +## Resources + +- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign) + + +## Related + +- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/object.omit/LICENSE b/node_modules/object.omit/LICENSE new file mode 100644 index 0000000..fa30c4c --- /dev/null +++ b/node_modules/object.omit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/object.omit/README.md b/node_modules/object.omit/README.md new file mode 100644 index 0000000..3925183 --- /dev/null +++ b/node_modules/object.omit/README.md @@ -0,0 +1,104 @@ +# object.omit [![NPM version](https://badge.fury.io/js/object.omit.svg)](http://badge.fury.io/js/object.omit) + +> Return a copy of an object excluding the given key, or array of keys. Also accepts an optional filter function as the last argument. + +## Install + +```sh +$ npm i object.omit --save-dev +``` + +## Usage + +```js +var omit = require('object.omit'); +``` + +Pass a string `key` to omit: + +```js +omit({a: 'a', b: 'b', c: 'c'}, 'a') +//=> { b: 'b', c: 'c' } +``` + +Pass an array of `keys` to omit: + +```js +omit({a: 'a', b: 'b', c: 'c'}, ['a', 'c']) +//=> { b: 'b' } +``` + +Returns the object if no keys are passed: + +```js +omit({a: 'a', b: 'b', c: 'c'}) +//=> {a: 'a', b: 'b', c: 'c'} +``` + +Returns an empty object if no value is passed. + +```js +omit() +//=> {} +``` + +### Filter function + +An optional filter function may be passed as the last argument, with or without keys passed on the arguments: + +**filter on keys** + +```js +var res = omit({a: 'a', b: 'b', c: 'c'}, function (val, key) { + return key === 'a'; +}); +//=> {a: 'a'} +``` + +**filter on values** + +```js +var fn = function() {}; +var obj = {a: 'a', b: 'b', c: fn}; + +var res = omit(obj, ['a'], function (val, key) { + return typeof val !== 'function'; +}); +//=> {b: 'b'} +``` + +### Other awesome javascript/node.js utils + +* [object.filter](https://github.com/jonschlinkert/object.filter): Create a new object filtered to have only properties for which the callback returns true. +* [object.pick](https://github.com/jonschlinkert/object.pick): Returns a filtered copy of an object with only the specified keys, like `pick` from… [more](https://github.com/jonschlinkert/object.pick) +* [object.pluck](https://github.com/jonschlinkert/object.pluck): Like pluck from underscore / lo-dash, but returns an object composed of specified properties, with… [more](https://github.com/jonschlinkert/object.pluck) +* [object.reduce](https://github.com/jonschlinkert/object.reduce): Reduces an object to a value that is the accumulated result of running each property… [more](https://github.com/jonschlinkert/object.reduce) +* [object.defaults](https://github.com/jonschlinkert/object.defaults): Like `extend` but only copies missing properties/values to the target object. + +## Runing tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/object.omit/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2014-2015 Jon Schlinkert +Released under the MIT license. 
+ +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on July 22, 2015._ \ No newline at end of file diff --git a/node_modules/object.omit/index.js b/node_modules/object.omit/index.js new file mode 100644 index 0000000..576a578 --- /dev/null +++ b/node_modules/object.omit/index.js @@ -0,0 +1,40 @@ +/*! + * object.omit + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var isObject = require('is-extendable'); +var forOwn = require('for-own'); + +module.exports = function omit(obj, keys) { + if (!isObject(obj)) return {}; + + var keys = [].concat.apply([], [].slice.call(arguments, 1)); + var last = keys[keys.length - 1]; + var res = {}, fn; + + if (typeof last === 'function') { + fn = keys.pop(); + } + + var isFunction = typeof fn === 'function'; + if (!keys.length && !isFunction) { + return obj; + } + + forOwn(obj, function (value, key) { + if (keys.indexOf(key) === -1) { + + if (!isFunction) { + res[key] = value; + } else if (fn(value, key, obj)) { + res[key] = value; + } + } + }); + return res; +}; diff --git a/node_modules/object.omit/package.json b/node_modules/object.omit/package.json new file mode 100644 index 0000000..c8778db --- /dev/null +++ b/node_modules/object.omit/package.json @@ -0,0 +1,51 @@ +{ + "name": "object.omit", + "description": "Return a copy of an object excluding the given key, or array of keys. Also accepts an optional filter function as the last argument.", + "version": "2.0.0", + "homepage": "https://github.com/jonschlinkert/object.omit", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/object.omit", + "bugs": { + "url": "https://github.com/jonschlinkert/object.omit/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "for-own": "^0.1.3", + "is-extendable": "^0.1.1" + }, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "verb": { + "related": { + "list": [ + "object.filter", + "object.pick", + "object.pluck", + "object.reduce", + "object.defaults" + ] + } + }, + "keywords": [ + "clear", + "delete", + "key", + "value", + "object", + "omit", + "property", + "remove" + ] +} diff --git a/node_modules/on-finished/HISTORY.md b/node_modules/on-finished/HISTORY.md new file mode 100644 index 0000000..98ff0e9 --- /dev/null +++ b/node_modules/on-finished/HISTORY.md @@ -0,0 +1,88 @@ +2.3.0 / 2015-05-26 +================== + + * Add defined behavior for HTTP `CONNECT` requests + * Add defined behavior for HTTP `Upgrade` requests + * deps: ee-first@1.1.1 + +2.2.1 / 2015-04-22 +================== + + * Fix `isFinished(req)` when data buffered + +2.2.0 / 2014-12-22 +================== + + * Add message object to callback arguments + +2.1.1 / 2014-10-22 +================== + + * Fix handling of pipelined requests + +2.1.0 / 2014-08-16 +================== + + * Check if `socket` is detached + * Return `undefined` for `isFinished` if state unknown + +2.0.0 / 2014-08-16 +================== + + * Add `isFinished` function + * Move to `jshttp` organization + * Remove support for plain socket argument + * Rename to `on-finished` + * Support both `req` and `res` as arguments + * deps: ee-first@1.0.5 + +1.2.2 / 2014-06-10 +================== + + * Reduce listeners added to emitters + - avoids "event emitter leak" warnings when used multiple times on same request + +1.2.1 / 2014-06-08 +================== + + 
* Fix returned value when already finished + +1.2.0 / 2014-06-05 +================== + + * Call callback when called on already-finished socket + +1.1.4 / 2014-05-27 +================== + + * Support node.js 0.8 + +1.1.3 / 2014-04-30 +================== + + * Make sure errors passed as instanceof `Error` + +1.1.2 / 2014-04-18 +================== + + * Default the `socket` to passed-in object + +1.1.1 / 2014-01-16 +================== + + * Rename module to `finished` + +1.1.0 / 2013-12-25 +================== + + * Call callback when called on already-errored socket + +1.0.1 / 2013-12-20 +================== + + * Actually pass the error to the callback + +1.0.0 / 2013-12-20 +================== + + * Initial release diff --git a/node_modules/on-finished/LICENSE b/node_modules/on-finished/LICENSE new file mode 100644 index 0000000..5931fd2 --- /dev/null +++ b/node_modules/on-finished/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2013 Jonathan Ong +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/on-finished/README.md b/node_modules/on-finished/README.md new file mode 100644 index 0000000..a0e1157 --- /dev/null +++ b/node_modules/on-finished/README.md @@ -0,0 +1,154 @@ +# on-finished + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Execute a callback when a HTTP request closes, finishes, or errors. + +## Install + +```sh +$ npm install on-finished +``` + +## API + +```js +var onFinished = require('on-finished') +``` + +### onFinished(res, listener) + +Attach a listener to listen for the response to finish. The listener will +be invoked only once when the response finished. If the response finished +to an error, the first argument will contain the error. If the response +has already finished, the listener will be invoked. + +Listening to the end of a response would be used to close things associated +with the response, like open files. + +Listener is invoked as `listener(err, res)`. + +```js +onFinished(res, function (err, res) { + // clean up open fds, etc. + // err contains the error is request error'd +}) +``` + +### onFinished(req, listener) + +Attach a listener to listen for the request to finish. The listener will +be invoked only once when the request finished. 
If the request finished +to an error, the first argument will contain the error. If the request +has already finished, the listener will be invoked. + +Listening to the end of a request would be used to know when to continue +after reading the data. + +Listener is invoked as `listener(err, req)`. + +```js +var data = '' + +req.setEncoding('utf8') +req.on('data', function (str) { + data += str +}) + +onFinished(req, function (err, req) { + // data is read unless there is err +}) +``` + +### onFinished.isFinished(res) + +Determine if `res` is already finished. This would be useful to check and +not even start certain operations if the response has already finished. + +### onFinished.isFinished(req) + +Determine if `req` is already finished. This would be useful to check and +not even start certain operations if the request has already finished. + +## Special Node.js requests + +### HTTP CONNECT method + +The meaning of the `CONNECT` method from RFC 7231, section 4.3.6: + +> The CONNECT method requests that the recipient establish a tunnel to +> the destination origin server identified by the request-target and, +> if successful, thereafter restrict its behavior to blind forwarding +> of packets, in both directions, until the tunnel is closed. Tunnels +> are commonly used to create an end-to-end virtual connection, through +> one or more proxies, which can then be secured using TLS (Transport +> Layer Security, [RFC5246]). + +In Node.js, these request objects come from the `'connect'` event on +the HTTP server. + +When this module is used on a HTTP `CONNECT` request, the request is +considered "finished" immediately, **due to limitations in the Node.js +interface**. This means if the `CONNECT` request contains a request entity, +the request will be considered "finished" even before it has been read. + +There is no such thing as a response object to a `CONNECT` request in +Node.js, so there is no support for one. + +### HTTP Upgrade request + +The meaning of the `Upgrade` header from RFC 7230, section 6.1: + +> The "Upgrade" header field is intended to provide a simple mechanism +> for transitioning from HTTP/1.1 to some other protocol on the same +> connection. + +In Node.js, these request objects come from the `'upgrade'` event on +the HTTP server. + +When this module is used on a HTTP request with an `Upgrade` header, the +request is considered "finished" immediately, **due to limitations in the +Node.js interface**. This means if the `Upgrade` request contains a request +entity, the request will be considered "finished" even before it has been +read. + +There is no such thing as a response object to an `Upgrade` request in +Node.js, so there is no support for one. + +## Example + +The following code ensures that file descriptors are always closed +once the response finishes.
+ +```js +var destroy = require('destroy') +var fs = require('fs') +var http = require('http') +var onFinished = require('on-finished') + +http.createServer(function onRequest(req, res) { + var stream = fs.createReadStream('package.json') + stream.pipe(res) + onFinished(res, function (err) { + destroy(stream) + }) +}) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/on-finished.svg +[npm-url]: https://npmjs.org/package/on-finished +[node-version-image]: https://img.shields.io/node/v/on-finished.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/on-finished/master.svg +[travis-url]: https://travis-ci.org/jshttp/on-finished +[coveralls-image]: https://img.shields.io/coveralls/jshttp/on-finished/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/on-finished?branch=master +[downloads-image]: https://img.shields.io/npm/dm/on-finished.svg +[downloads-url]: https://npmjs.org/package/on-finished diff --git a/node_modules/on-finished/index.js b/node_modules/on-finished/index.js new file mode 100644 index 0000000..9abd98f --- /dev/null +++ b/node_modules/on-finished/index.js @@ -0,0 +1,196 @@ +/*! + * on-finished + * Copyright(c) 2013 Jonathan Ong + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = onFinished +module.exports.isFinished = isFinished + +/** + * Module dependencies. + * @private + */ + +var first = require('ee-first') + +/** + * Variables. + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? setImmediate + : function(fn){ process.nextTick(fn.bind.apply(fn, arguments)) } + +/** + * Invoke callback when the response has finished, useful for + * cleaning up resources afterwards. + * + * @param {object} msg + * @param {function} listener + * @return {object} + * @public + */ + +function onFinished(msg, listener) { + if (isFinished(msg) !== false) { + defer(listener, null, msg) + return msg + } + + // attach the listener to the message + attachListener(msg, listener) + + return msg +} + +/** + * Determine if message is already finished. + * + * @param {object} msg + * @return {boolean} + * @public + */ + +function isFinished(msg) { + var socket = msg.socket + + if (typeof msg.finished === 'boolean') { + // OutgoingMessage + return Boolean(msg.finished || (socket && !socket.writable)) + } + + if (typeof msg.complete === 'boolean') { + // IncomingMessage + return Boolean(msg.upgrade || !socket || !socket.readable || (msg.complete && !msg.readable)) + } + + // don't know + return undefined +} + +/** + * Attach a finished listener to the message.
+ * + * @param {object} msg + * @param {function} callback + * @private + */ + +function attachFinishedListener(msg, callback) { + var eeMsg + var eeSocket + var finished = false + + function onFinish(error) { + eeMsg.cancel() + eeSocket.cancel() + + finished = true + callback(error) + } + + // finished on first message event + eeMsg = eeSocket = first([[msg, 'end', 'finish']], onFinish) + + function onSocket(socket) { + // remove listener + msg.removeListener('socket', onSocket) + + if (finished) return + if (eeMsg !== eeSocket) return + + // finished on first socket event + eeSocket = first([[socket, 'error', 'close']], onFinish) + } + + if (msg.socket) { + // socket already assigned + onSocket(msg.socket) + return + } + + // wait for socket to be assigned + msg.on('socket', onSocket) + + if (msg.socket === undefined) { + // node.js 0.8 patch + patchAssignSocket(msg, onSocket) + } +} + +/** + * Attach the listener to the message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function attachListener(msg, listener) { + var attached = msg.__onFinished + + // create a private single listener with queue + if (!attached || !attached.queue) { + attached = msg.__onFinished = createListener(msg) + attachFinishedListener(msg, attached) + } + + attached.queue.push(listener) +} + +/** + * Create listener on message. + * + * @param {object} msg + * @return {function} + * @private + */ + +function createListener(msg) { + function listener(err) { + if (msg.__onFinished === listener) msg.__onFinished = null + if (!listener.queue) return + + var queue = listener.queue + listener.queue = null + + for (var i = 0; i < queue.length; i++) { + queue[i](err, msg) + } + } + + listener.queue = [] + + return listener +} + +/** + * Patch ServerResponse.prototype.assignSocket for node.js 0.8. + * + * @param {ServerResponse} res + * @param {function} callback + * @private + */ + +function patchAssignSocket(res, callback) { + var assignSocket = res.assignSocket + + if (typeof assignSocket !== 'function') return + + // res.on('socket', callback) is broken in 0.8 + res.assignSocket = function _assignSocket(socket) { + assignSocket.call(this, socket) + callback(socket) + } +} diff --git a/node_modules/on-finished/package.json b/node_modules/on-finished/package.json new file mode 100644 index 0000000..b9df1bd --- /dev/null +++ b/node_modules/on-finished/package.json @@ -0,0 +1,31 @@ +{ + "name": "on-finished", + "description": "Execute a callback when a request closes, finishes, or errors", + "version": "2.3.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/on-finished", + "dependencies": { + "ee-first": "1.1.1" + }, + "devDependencies": { + "istanbul": "0.3.9", + "mocha": "2.2.5" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 0000000..1f1ffca --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether your function has been called, use `fn.called`. Once the +function is called for the first time, the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors.
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(log)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback is called a second time +greet(null, once.strict(log)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 0000000..2354067 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 0000000..16815b2 --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/optimist/.travis.yml b/node_modules/optimist/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/optimist/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/optimist/LICENSE b/node_modules/optimist/LICENSE new file mode 100644 index 0000000..432d1ae --- /dev/null +++ b/node_modules/optimist/LICENSE @@ -0,0 +1,21 @@ +Copyright 2010 James Halliday (mail@substack.net) + +This project is free software released under the MIT/X11 license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/optimist/example/bool.js b/node_modules/optimist/example/bool.js new file mode 100644 index 0000000..a998fb7 --- /dev/null +++ b/node_modules/optimist/example/bool.js @@ -0,0 +1,10 @@ +#!/usr/bin/env node +var util = require('util'); +var argv = require('optimist').argv; + +if (argv.s) { + util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: '); +} +console.log( + (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' : '') +); diff --git a/node_modules/optimist/example/boolean_double.js b/node_modules/optimist/example/boolean_double.js new file mode 100644 index 0000000..a35a7e6 --- /dev/null +++ b/node_modules/optimist/example/boolean_double.js @@ -0,0 +1,7 @@ +#!/usr/bin/env node +var argv = require('optimist') + .boolean(['x','y','z']) + .argv +; +console.dir([ argv.x, argv.y, argv.z ]); +console.dir(argv._); diff --git a/node_modules/optimist/example/boolean_single.js b/node_modules/optimist/example/boolean_single.js new file mode 100644 index 0000000..017bb68 --- /dev/null +++ b/node_modules/optimist/example/boolean_single.js @@ -0,0 +1,7 @@ +#!/usr/bin/env node +var argv = require('optimist') + .boolean('v') + .argv +; +console.dir(argv.v); +console.dir(argv._); diff --git a/node_modules/optimist/example/default_hash.js b/node_modules/optimist/example/default_hash.js new file mode 100644 index 0000000..ade7768 --- /dev/null +++ b/node_modules/optimist/example/default_hash.js @@ -0,0 +1,8 @@ +#!/usr/bin/env node + +var argv = require('optimist') + .default({ x : 10, y : 10 }) + .argv +; + +console.log(argv.x + argv.y); diff --git a/node_modules/optimist/example/default_singles.js b/node_modules/optimist/example/default_singles.js new file mode 100644 index 0000000..d9b1ff4 --- /dev/null +++ b/node_modules/optimist/example/default_singles.js @@ -0,0 +1,7 @@ +#!/usr/bin/env node +var argv = require('optimist') + .default('x', 10) + .default('y', 10) + .argv +; +console.log(argv.x + argv.y); diff --git a/node_modules/optimist/example/divide.js b/node_modules/optimist/example/divide.js new file mode 100644 index 0000000..5e2ee82 --- /dev/null +++ b/node_modules/optimist/example/divide.js @@ -0,0 +1,8 @@ +#!/usr/bin/env node + +var argv = require('optimist') + .usage('Usage: $0 -x [num] -y [num]') + .demand(['x','y']) + .argv; + +console.log(argv.x / argv.y); diff --git a/node_modules/optimist/example/line_count.js b/node_modules/optimist/example/line_count.js new file mode 100644 index 0000000..b5f95bf --- /dev/null +++ b/node_modules/optimist/example/line_count.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node +var argv = require('optimist') + .usage('Count the lines in a file.\nUsage: $0') + .demand('f') + .alias('f', 'file') + .describe('f', 'Load a file') + .argv +; + +var fs = require('fs'); +var s = fs.createReadStream(argv.file); + +var lines = 0; +s.on('data', function (buf) { + lines += buf.toString().match(/\n/g).length; +}); + +s.on('end', function () { + console.log(lines); +}); diff --git a/node_modules/optimist/example/line_count_options.js b/node_modules/optimist/example/line_count_options.js new file 
mode 100644 index 0000000..d9ac709 --- /dev/null +++ b/node_modules/optimist/example/line_count_options.js @@ -0,0 +1,29 @@ +#!/usr/bin/env node +var argv = require('optimist') + .usage('Count the lines in a file.\nUsage: $0') + .options({ + file : { + demand : true, + alias : 'f', + description : 'Load a file' + }, + base : { + alias : 'b', + description : 'Numeric base to use for output', + default : 10, + }, + }) + .argv +; + +var fs = require('fs'); +var s = fs.createReadStream(argv.file); + +var lines = 0; +s.on('data', function (buf) { + lines += buf.toString().match(/\n/g).length; +}); + +s.on('end', function () { + console.log(lines.toString(argv.base)); +}); diff --git a/node_modules/optimist/example/line_count_wrap.js b/node_modules/optimist/example/line_count_wrap.js new file mode 100644 index 0000000..4267511 --- /dev/null +++ b/node_modules/optimist/example/line_count_wrap.js @@ -0,0 +1,29 @@ +#!/usr/bin/env node +var argv = require('optimist') + .usage('Count the lines in a file.\nUsage: $0') + .wrap(80) + .demand('f') + .alias('f', [ 'file', 'filename' ]) + .describe('f', + "Load a file. It's pretty important." + + " Required even. So you'd better specify it." + ) + .alias('b', 'base') + .describe('b', 'Numeric base to display the number of lines in') + .default('b', 10) + .describe('x', 'Super-secret optional parameter which is secret') + .default('x', '') + .argv +; + +var fs = require('fs'); +var s = fs.createReadStream(argv.file); + +var lines = 0; +s.on('data', function (buf) { + lines += buf.toString().match(/\n/g).length; +}); + +s.on('end', function () { + console.log(lines.toString(argv.base)); +}); diff --git a/node_modules/optimist/example/nonopt.js b/node_modules/optimist/example/nonopt.js new file mode 100644 index 0000000..ee633ee --- /dev/null +++ b/node_modules/optimist/example/nonopt.js @@ -0,0 +1,4 @@ +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); +console.log(argv._); diff --git a/node_modules/optimist/example/reflect.js b/node_modules/optimist/example/reflect.js new file mode 100644 index 0000000..816b3e1 --- /dev/null +++ b/node_modules/optimist/example/reflect.js @@ -0,0 +1,2 @@ +#!/usr/bin/env node +console.dir(require('optimist').argv); diff --git a/node_modules/optimist/example/short.js b/node_modules/optimist/example/short.js new file mode 100644 index 0000000..1db0ad0 --- /dev/null +++ b/node_modules/optimist/example/short.js @@ -0,0 +1,3 @@ +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); diff --git a/node_modules/optimist/example/string.js b/node_modules/optimist/example/string.js new file mode 100644 index 0000000..a8e5aeb --- /dev/null +++ b/node_modules/optimist/example/string.js @@ -0,0 +1,11 @@ +#!/usr/bin/env node +var argv = require('optimist') + .string('x', 'y') + .argv +; +console.dir([ argv.x, argv.y ]); + +/* Turns off numeric coercion: + ./node string.js -x 000123 -y 9876 + [ '000123', '9876' ] +*/ diff --git a/node_modules/optimist/example/usage-options.js b/node_modules/optimist/example/usage-options.js new file mode 100644 index 0000000..b999977 --- /dev/null +++ b/node_modules/optimist/example/usage-options.js @@ -0,0 +1,19 @@ +var optimist = require('./../index'); + +var argv = optimist.usage('This is my awesome program', { + 'about': { + description: 'Provide some details about the author of this program', + required: true, + short: 'a', + }, + 'info': { + description: 'Provide some information about the node.js 
agains!!!!!!', + boolean: true, + short: 'i' + } +}).argv; + +optimist.showHelp(); + +console.log('\n\nInspecting options'); +console.dir(argv); \ No newline at end of file diff --git a/node_modules/optimist/example/xup.js b/node_modules/optimist/example/xup.js new file mode 100644 index 0000000..8f6ecd2 --- /dev/null +++ b/node_modules/optimist/example/xup.js @@ -0,0 +1,10 @@ +#!/usr/bin/env node +var argv = require('optimist').argv; + +if (argv.rif - 5 * argv.xup > 7.138) { + console.log('Buy more riffiwobbles'); +} +else { + console.log('Sell the xupptumblers'); +} + diff --git a/node_modules/optimist/index.js b/node_modules/optimist/index.js new file mode 100644 index 0000000..4da5a6d --- /dev/null +++ b/node_modules/optimist/index.js @@ -0,0 +1,343 @@ +var path = require('path'); +var minimist = require('minimist'); +var wordwrap = require('wordwrap'); + +/* Hack an instance of Argv with process.argv into Argv + so people can do + require('optimist')(['--beeble=1','-z','zizzle']).argv + to parse a list of args and + require('optimist').argv + to get a parsed version of process.argv. +*/ + +var inst = Argv(process.argv.slice(2)); +Object.keys(inst).forEach(function (key) { + Argv[key] = typeof inst[key] == 'function' + ? inst[key].bind(inst) + : inst[key]; +}); + +var exports = module.exports = Argv; +function Argv (processArgs, cwd) { + var self = {}; + if (!cwd) cwd = process.cwd(); + + self.$0 = process.argv + .slice(0,2) + .map(function (x) { + var b = rebase(cwd, x); + return x.match(/^\//) && b.length < x.length + ? b : x + }) + .join(' ') + ; + + if (process.env._ != undefined && process.argv[1] == process.env._) { + self.$0 = process.env._.replace( + path.dirname(process.execPath) + '/', '' + ); + } + + var options = { + boolean: [], + string: [], + alias: {}, + default: [] + }; + + self.boolean = function (bools) { + options.boolean.push.apply(options.boolean, [].concat(bools)); + return self; + }; + + self.string = function (strings) { + options.string.push.apply(options.string, [].concat(strings)); + return self; + }; + + self.default = function (key, value) { + if (typeof key === 'object') { + Object.keys(key).forEach(function (k) { + self.default(k, key[k]); + }); + } + else { + options.default[key] = value; + } + return self; + }; + + self.alias = function (x, y) { + if (typeof x === 'object') { + Object.keys(x).forEach(function (key) { + self.alias(key, x[key]); + }); + } + else { + options.alias[x] = (options.alias[x] || []).concat(y); + } + return self; + }; + + var demanded = {}; + self.demand = function (keys) { + if (typeof keys == 'number') { + if (!demanded._) demanded._ = 0; + demanded._ += keys; + } + else if (Array.isArray(keys)) { + keys.forEach(function (key) { + self.demand(key); + }); + } + else { + demanded[keys] = true; + } + + return self; + }; + + var usage; + self.usage = function (msg, opts) { + if (!opts && typeof msg === 'object') { + opts = msg; + msg = null; + } + + usage = msg; + + if (opts) self.options(opts); + + return self; + }; + + function fail (msg) { + self.showHelp(); + if (msg) console.error(msg); + process.exit(1); + } + + var checks = []; + self.check = function (f) { + checks.push(f); + return self; + }; + + var descriptions = {}; + self.describe = function (key, desc) { + if (typeof key === 'object') { + Object.keys(key).forEach(function (k) { + self.describe(k, key[k]); + }); + } + else { + descriptions[key] = desc; + } + return self; + }; + + self.parse = function (args) { + return parseArgs(args); + }; + + self.option = 
self.options = function (key, opt) { + if (typeof key === 'object') { + Object.keys(key).forEach(function (k) { + self.options(k, key[k]); + }); + } + else { + if (opt.alias) self.alias(key, opt.alias); + if (opt.demand) self.demand(key); + if (typeof opt.default !== 'undefined') { + self.default(key, opt.default); + } + + if (opt.boolean || opt.type === 'boolean') { + self.boolean(key); + } + if (opt.string || opt.type === 'string') { + self.string(key); + } + + var desc = opt.describe || opt.description || opt.desc; + if (desc) { + self.describe(key, desc); + } + } + + return self; + }; + + var wrap = null; + self.wrap = function (cols) { + wrap = cols; + return self; + }; + + self.showHelp = function (fn) { + if (!fn) fn = console.error; + fn(self.help()); + }; + + self.help = function () { + var keys = Object.keys( + Object.keys(descriptions) + .concat(Object.keys(demanded)) + .concat(Object.keys(options.default)) + .reduce(function (acc, key) { + if (key !== '_') acc[key] = true; + return acc; + }, {}) + ); + + var help = keys.length ? [ 'Options:' ] : []; + + if (usage) { + help.unshift(usage.replace(/\$0/g, self.$0), ''); + } + + var switches = keys.reduce(function (acc, key) { + acc[key] = [ key ].concat(options.alias[key] || []) + .map(function (sw) { + return (sw.length > 1 ? '--' : '-') + sw + }) + .join(', ') + ; + return acc; + }, {}); + + var switchlen = longest(Object.keys(switches).map(function (s) { + return switches[s] || ''; + })); + + var desclen = longest(Object.keys(descriptions).map(function (d) { + return descriptions[d] || ''; + })); + + keys.forEach(function (key) { + var kswitch = switches[key]; + var desc = descriptions[key] || ''; + + if (wrap) { + desc = wordwrap(switchlen + 4, wrap)(desc) + .slice(switchlen + 4) + ; + } + + var spadding = new Array( + Math.max(switchlen - kswitch.length + 3, 0) + ).join(' '); + + var dpadding = new Array( + Math.max(desclen - desc.length + 1, 0) + ).join(' '); + + var type = null; + + if (options.boolean[key]) type = '[boolean]'; + if (options.string[key]) type = '[string]'; + + if (!wrap && dpadding.length > 0) { + desc += dpadding; + } + + var prelude = ' ' + kswitch + spadding; + var extra = [ + type, + demanded[key] + ? '[required]' + : null + , + options.default[key] !== undefined + ? '[default: ' + JSON.stringify(options.default[key]) + ']' + : null + , + ].filter(Boolean).join(' '); + + var body = [ desc, extra ].filter(Boolean).join(' '); + + if (wrap) { + var dlines = desc.split('\n'); + var dlen = dlines.slice(-1)[0].length + + (dlines.length === 1 ? prelude.length : 0) + + body = desc + (dlen + extra.length > wrap - 2 + ? 
'\n' + + new Array(wrap - extra.length + 1).join(' ') + + extra + : new Array(wrap - extra.length - dlen + 1).join(' ') + + extra + ); + } + + help.push(prelude + body); + }); + + help.push(''); + return help.join('\n'); + }; + + Object.defineProperty(self, 'argv', { + get : function () { return parseArgs(processArgs) }, + enumerable : true, + }); + + function parseArgs (args) { + var argv = minimist(args, options); + argv.$0 = self.$0; + + if (demanded._ && argv._.length < demanded._) { + fail('Not enough non-option arguments: got ' + + argv._.length + ', need at least ' + demanded._ + ); + } + + var missing = []; + Object.keys(demanded).forEach(function (key) { + if (!argv[key]) missing.push(key); + }); + + if (missing.length) { + fail('Missing required arguments: ' + missing.join(', ')); + } + + checks.forEach(function (f) { + try { + if (f(argv) === false) { + fail('Argument check failed: ' + f.toString()); + } + } + catch (err) { + fail(err) + } + }); + + return argv; + } + + function longest (xs) { + return Math.max.apply( + null, + xs.map(function (x) { return x.length }) + ); + } + + return self; +}; + +// rebase an absolute path to a relative one with respect to a base directory +// exported for tests +exports.rebase = rebase; +function rebase (base, dir) { + var ds = path.normalize(dir).split('/').slice(1); + var bs = path.normalize(base).split('/').slice(1); + + for (var i = 0; ds[i] && ds[i] == bs[i]; i++); + ds.splice(0, i); bs.splice(0, i); + + var p = path.normalize( + bs.map(function () { return '..' }).concat(ds).join('/') + ).replace(/\/$/,'').replace(/^$/, '.'); + return p.match(/^[.\/]/) ? p : './' + p; +}; diff --git a/node_modules/optimist/package.json b/node_modules/optimist/package.json new file mode 100644 index 0000000..edd7517 --- /dev/null +++ b/node_modules/optimist/package.json @@ -0,0 +1,39 @@ +{ + "name" : "optimist", + "version" : "0.6.1", + "description" : "Light-weight option parsing with an argv hash. No optstrings attached.", + "main" : "./index.js", + "dependencies" : { + "wordwrap" : "~0.0.2", + "minimist" : "~0.0.1" + }, + "devDependencies" : { + "hashish": "~0.0.4", + "tap" : "~0.4.0" + }, + "scripts" : { + "test" : "tap ./test/*.js" + }, + "repository" : { + "type" : "git", + "url" : "http://github.com/substack/node-optimist.git" + }, + "keywords" : [ + "argument", + "args", + "option", + "parser", + "parsing", + "cli", + "command" + ], + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "license" : "MIT/X11", + "engine" : { + "node" : ">=0.4" + } +} diff --git a/node_modules/optimist/readme.markdown b/node_modules/optimist/readme.markdown new file mode 100644 index 0000000..b74b437 --- /dev/null +++ b/node_modules/optimist/readme.markdown @@ -0,0 +1,513 @@ +# DEPRECATION NOTICE + +I don't want to maintain this module anymore since I just use +[minimist](https://npmjs.org/package/minimist), the argument parsing engine, +directly instead nowadays. + +See [yargs](https://github.com/chevex/yargs) for the modern, pirate-themed +successor to optimist. + +[![yarrrrrrrgs!](http://i.imgur.com/4WFGVJ9.png)](https://github.com/chevex/yargs) + +You should also consider [nomnom](https://github.com/harthur/nomnom). + +optimist +======== + +Optimist is a node.js library for option parsing for people who hate option +parsing. More specifically, this module is for people who like all the --bells +and -whistlz of program usage but think optstrings are a waste of time. 
+ +With optimist, option parsing doesn't have to suck (as much). + +[![build status](https://secure.travis-ci.org/substack/node-optimist.png)](http://travis-ci.org/substack/node-optimist) + +examples +======== + +With Optimist, the options are just a hash! No optstrings attached. +------------------------------------------------------------------- + +xup.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; + +if (argv.rif - 5 * argv.xup > 7.138) { + console.log('Buy more riffiwobbles'); +} +else { + console.log('Sell the xupptumblers'); +} +```` + +*** + + $ ./xup.js --rif=55 --xup=9.52 + Buy more riffiwobbles + + $ ./xup.js --rif 12 --xup 8.1 + Sell the xupptumblers + +![This one's optimistic.](http://substack.net/images/optimistic.png) + +But wait! There's more! You can do short options: +------------------------------------------------- + +short.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); +```` + +*** + + $ ./short.js -x 10 -y 21 + (10,21) + +And booleans, both long and short (and grouped): +---------------------------------- + +bool.js: + +````javascript +#!/usr/bin/env node +var util = require('util'); +var argv = require('optimist').argv; + +if (argv.s) { + util.print(argv.fr ? 'Le chat dit: ' : 'The cat says: '); +} +console.log( + (argv.fr ? 'miaou' : 'meow') + (argv.p ? '.' : '') +); +```` + +*** + + $ ./bool.js -s + The cat says: meow + + $ ./bool.js -sp + The cat says: meow. + + $ ./bool.js -sp --fr + Le chat dit: miaou. + +And non-hypenated options too! Just use `argv._`! +------------------------------------------------- + +nonopt.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist').argv; +console.log('(%d,%d)', argv.x, argv.y); +console.log(argv._); +```` + +*** + + $ ./nonopt.js -x 6.82 -y 3.35 moo + (6.82,3.35) + [ 'moo' ] + + $ ./nonopt.js foo -x 0.54 bar -y 1.12 baz + (0.54,1.12) + [ 'foo', 'bar', 'baz' ] + +Plus, Optimist comes with .usage() and .demand()! +------------------------------------------------- + +divide.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .usage('Usage: $0 -x [num] -y [num]') + .demand(['x','y']) + .argv; + +console.log(argv.x / argv.y); +```` + +*** + + $ ./divide.js -x 55 -y 11 + 5 + + $ node ./divide.js -x 4.91 -z 2.51 + Usage: node ./divide.js -x [num] -y [num] + + Options: + -x [required] + -y [required] + + Missing required arguments: y + +EVEN MORE HOLY COW +------------------ + +default_singles.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .default('x', 10) + .default('y', 10) + .argv +; +console.log(argv.x + argv.y); +```` + +*** + + $ ./default_singles.js -x 5 + 15 + +default_hash.js: + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .default({ x : 10, y : 10 }) + .argv +; +console.log(argv.x + argv.y); +```` + +*** + + $ ./default_hash.js -y 7 + 17 + +And if you really want to get all descriptive about it... 
+--------------------------------------------------------- + +boolean_single.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .boolean('v') + .argv +; +console.dir(argv); +```` + +*** + + $ ./boolean_single.js -v foo bar baz + true + [ 'bar', 'baz', 'foo' ] + +boolean_double.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .boolean(['x','y','z']) + .argv +; +console.dir([ argv.x, argv.y, argv.z ]); +console.dir(argv._); +```` + +*** + + $ ./boolean_double.js -x -z one two three + [ true, false, true ] + [ 'one', 'two', 'three' ] + +Optimist is here to help... +--------------------------- + +You can describe parameters for help messages and set aliases. Optimist figures +out how to format a handy help string automatically. + +line_count.js + +````javascript +#!/usr/bin/env node +var argv = require('optimist') + .usage('Count the lines in a file.\nUsage: $0') + .demand('f') + .alias('f', 'file') + .describe('f', 'Load a file') + .argv +; + +var fs = require('fs'); +var s = fs.createReadStream(argv.file); + +var lines = 0; +s.on('data', function (buf) { + lines += buf.toString().match(/\n/g).length; +}); + +s.on('end', function () { + console.log(lines); +}); +```` + +*** + + $ node line_count.js + Count the lines in a file. + Usage: node ./line_count.js + + Options: + -f, --file Load a file [required] + + Missing required arguments: f + + $ node line_count.js --file line_count.js + 20 + + $ node line_count.js -f line_count.js + 20 + +methods +======= + +By itself, + +````javascript +require('optimist').argv +````` + +will use `process.argv` array to construct the `argv` object. + +You can pass in the `process.argv` yourself: + +````javascript +require('optimist')([ '-x', '1', '-y', '2' ]).argv +```` + +or use .parse() to do the same thing: + +````javascript +require('optimist').parse([ '-x', '1', '-y', '2' ]) +```` + +The rest of these methods below come in just before the terminating `.argv`. + +.alias(key, alias) +------------------ + +Set key names as equivalent such that updates to a key will propagate to aliases +and vice-versa. + +Optionally `.alias()` can take an object that maps keys to aliases. + +.default(key, value) +-------------------- + +Set `argv[key]` to `value` if no option was specified on `process.argv`. + +Optionally `.default()` can take an object that maps keys to default values. + +.demand(key) +------------ + +If `key` is a string, show the usage information and exit if `key` wasn't +specified in `process.argv`. + +If `key` is a number, demand at least as many non-option arguments, which show +up in `argv._`. + +If `key` is an Array, demand each element. + +.describe(key, desc) +-------------------- + +Describe a `key` for the generated usage information. + +Optionally `.describe()` can take an object that maps keys to descriptions. + +.options(key, opt) +------------------ + +Instead of chaining together `.alias().demand().default()`, you can specify +keys in `opt` for each of the chainable methods. + +For example: + +````javascript +var argv = require('optimist') + .options('f', { + alias : 'file', + default : '/etc/passwd', + }) + .argv +; +```` + +is the same as + +````javascript +var argv = require('optimist') + .alias('f', 'file') + .default('f', '/etc/passwd') + .argv +; +```` + +Optionally `.options()` can take an object that maps keys to `opt` parameters. + +.usage(message) +--------------- + +Set a usage message to show which commands to use. 
Inside `message`, the string +`$0` will get interpolated to the current script name or node command for the +present script similar to how `$0` works in bash or perl. + +.check(fn) +---------- + +Check that certain conditions are met in the provided arguments. + +If `fn` throws or returns `false`, show the thrown error, usage information, and +exit. + +.boolean(key) +------------- + +Interpret `key` as a boolean. If a non-flag option follows `key` in +`process.argv`, that string won't get set as the value of `key`. + +If `key` never shows up as a flag in `process.argv`, `argv[key]` will be +`false`. + +If `key` is an Array, interpret all the elements as booleans. + +.string(key) +------------ + +Tell the parser logic not to interpret `key` as a number or boolean. +This can be useful if you need to preserve leading zeros in an input. + +If `key` is an Array, interpret all the elements as strings. + +.wrap(columns) +-------------- + +Format usage output to wrap at `columns` many columns. + +.help() +------- + +Return the generated usage string. + +.showHelp(fn=console.error) +--------------------------- + +Print the usage data using `fn` for printing. + +.parse(args) +------------ + +Parse `args` instead of `process.argv`. Returns the `argv` object. + +.argv +----- + +Get the arguments as a plain old object. + +Arguments without a corresponding flag show up in the `argv._` array. + +The script name or node command is available at `argv.$0` similarly to how `$0` +works in bash or perl. + +parsing tricks +============== + +stop parsing +------------ + +Use `--` to stop parsing flags and stuff the remainder into `argv._`. + + $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4 + { _: [ '-c', '3', '-d', '4' ], + '$0': 'node ./examples/reflect.js', + a: 1, + b: 2 } + +negate fields +------------- + +If you want to explicitly set a field to false instead of just leaving it +undefined, or to override a default, you can do `--no-key`. + + $ node examples/reflect.js -a --no-b + { _: [], + '$0': 'node ./examples/reflect.js', + a: true, + b: false } + +numbers +------- + +Every argument that looks like a number (`!isNaN(Number(arg))`) is converted to +one. This way you can just `net.createConnection(argv.port)` and you can add +numbers out of `argv` with `+` without having that mean concatenation, +which is super frustrating. + +duplicates +---------- + +If you specify a flag multiple times, it will get turned into an array containing +all the values in order. + + $ node examples/reflect.js -x 5 -x 8 -x 0 + { _: [], + '$0': 'node ./examples/reflect.js', + x: [ 5, 8, 0 ] } + +dot notation +------------ + +When you use dots (`.`s) in argument names, an implicit object path is assumed. +This lets you organize arguments into nested objects.
+ + $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5 + { _: [], + '$0': 'node ./examples/reflect.js', + foo: { bar: { baz: 33 }, quux: 5 } } + +short numbers +------------- + +Short numeric `head -n5` style arguments work too: + + $ node reflect.js -n123 -m456 + { '3': true, + '6': true, + _: [], + '$0': 'node ./reflect.js', + n: 123, + m: 456 } + +installation +============ + +With [npm](http://github.com/isaacs/npm), just do: + npm install optimist + +or clone this project on github: + + git clone http://github.com/substack/node-optimist.git + +To run the tests with [expresso](http://github.com/visionmedia/expresso), +just do: + + expresso + +inspired By +=========== + +This module is loosely inspired by Perl's +[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm). diff --git a/node_modules/optimist/test/_.js b/node_modules/optimist/test/_.js new file mode 100644 index 0000000..d9c58b3 --- /dev/null +++ b/node_modules/optimist/test/_.js @@ -0,0 +1,71 @@ +var spawn = require('child_process').spawn; +var test = require('tap').test; + +test('dotSlashEmpty', testCmd('./bin.js', [])); + +test('dotSlashArgs', testCmd('./bin.js', [ 'a', 'b', 'c' ])); + +test('nodeEmpty', testCmd('node bin.js', [])); + +test('nodeArgs', testCmd('node bin.js', [ 'x', 'y', 'z' ])); + +test('whichNodeEmpty', function (t) { + var which = spawn('which', ['node']); + + which.stdout.on('data', function (buf) { + t.test( + testCmd(buf.toString().trim() + ' bin.js', []) + ); + t.end(); + }); + + which.stderr.on('data', function (err) { + t.error(err); + t.end(); + }); +}); + +test('whichNodeArgs', function (t) { + var which = spawn('which', ['node']); + + which.stdout.on('data', function (buf) { + t.test( + testCmd(buf.toString().trim() + ' bin.js', [ 'q', 'r' ]) + ); + t.end(); + }); + + which.stderr.on('data', function (err) { + t.error(err); + t.end(); + }); +}); + +function testCmd (cmd, args) { + + return function (t) { + var to = setTimeout(function () { + t.fail('Never got stdout data.') + }, 5000); + + var oldDir = process.cwd(); + process.chdir(__dirname + '/_'); + + var cmds = cmd.split(' '); + + var bin = spawn(cmds[0], cmds.slice(1).concat(args.map(String))); + process.chdir(oldDir); + + bin.stderr.on('data', function (err) { + t.error(err); + t.end(); + }); + + bin.stdout.on('data', function (buf) { + clearTimeout(to); + var _ = JSON.parse(buf.toString()); + t.same(_.map(String), args.map(String)); + t.end(); + }); + }; +} diff --git a/node_modules/optimist/test/_/argv.js b/node_modules/optimist/test/_/argv.js new file mode 100644 index 0000000..3d09606 --- /dev/null +++ b/node_modules/optimist/test/_/argv.js @@ -0,0 +1,2 @@ +#!/usr/bin/env node +console.log(JSON.stringify(process.argv)); diff --git a/node_modules/optimist/test/_/bin.js b/node_modules/optimist/test/_/bin.js new file mode 100755 index 0000000..4a18d85 --- /dev/null +++ b/node_modules/optimist/test/_/bin.js @@ -0,0 +1,3 @@ +#!/usr/bin/env node +var argv = require('../../index').argv +console.log(JSON.stringify(argv._)); diff --git a/node_modules/optimist/test/dash.js b/node_modules/optimist/test/dash.js new file mode 100644 index 0000000..af8ed6f --- /dev/null +++ b/node_modules/optimist/test/dash.js @@ -0,0 +1,31 @@ +var optimist = require('../index'); +var test = require('tap').test; + +test('-', function (t) { + t.plan(5); + t.deepEqual( + fix(optimist.parse([ '-n', '-' ])), + { n: '-', _: [] } + ); + t.deepEqual( + fix(optimist.parse([ '-' ])), + { _: [ '-' ] } + ); + t.deepEqual( +
fix(optimist.parse([ '-f-' ])), + { f: '-', _: [] } + ); + t.deepEqual( + fix(optimist([ '-b', '-' ]).boolean('b').argv), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + fix(optimist([ '-s', '-' ]).string('s').argv), + { s: '-', _: [] } + ); +}); + +function fix (obj) { + delete obj.$0; + return obj; +} diff --git a/node_modules/optimist/test/parse.js b/node_modules/optimist/test/parse.js new file mode 100644 index 0000000..d320f43 --- /dev/null +++ b/node_modules/optimist/test/parse.js @@ -0,0 +1,446 @@ +var optimist = require('../index'); +var path = require('path'); +var test = require('tap').test; + +var $0 = 'node ./' + path.relative(process.cwd(), __filename); + +test('short boolean', function (t) { + var parse = optimist.parse([ '-b' ]); + t.same(parse, { b : true, _ : [], $0 : $0 }); + t.same(typeof parse.b, 'boolean'); + t.end(); +}); + +test('long boolean', function (t) { + t.same( + optimist.parse([ '--bool' ]), + { bool : true, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('bare', function (t) { + t.same( + optimist.parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ], $0 : $0 } + ); + t.end(); +}); + +test('short group', function (t) { + t.same( + optimist.parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('short group next', function (t) { + t.same( + optimist.parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [], $0 : $0 } + ); + t.end(); +}); + +test('short capture', function (t) { + t.same( + optimist.parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [], $0 : $0 } + ); + t.end(); +}); + +test('short captures', function (t) { + t.same( + optimist.parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('long capture sp', function (t) { + t.same( + optimist.parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [], $0 : $0 } + ); + t.end(); +}); + +test('long capture eq', function (t) { + t.same( + optimist.parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [], $0 : $0 } + ); + t.end() +}); + +test('long captures sp', function (t) { + t.same( + optimist.parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('long captures eq', function (t) { + t.same( + optimist.parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + optimist.parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ], $0 : $0, + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.same( + optimist.parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ], $0 : $0, + } + ); + t.end(); +}); + +test('no', function (t) { + t.same( + optimist.parse([ '--no-moo' ]), + { moo : false, _ : [], $0 : $0 } + ); + t.end(); +}); + +test('multi', function (t) { + t.same( + optimist.parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [], $0 : $0 } + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.same( + optimist.parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + 
bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ], + $0 : $0 + } + ); + t.end(); +}); + +test('nums', function (t) { + var argv = optimist.parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789', + ]); + t.same(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ], + $0 : $0 + }); + t.same(typeof argv.x, 'number'); + t.same(typeof argv.y, 'number'); + t.same(typeof argv.z, 'number'); + t.same(typeof argv.w, 'string'); + t.same(typeof argv.hex, 'number'); + t.same(typeof argv._[0], 'number'); + t.end(); +}); + +test('flag boolean', function (t) { + var parse = optimist([ '-t', 'moo' ]).boolean(['t']).argv; + t.same(parse, { t : true, _ : [ 'moo' ], $0 : $0 }); + t.same(typeof parse.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var parse = optimist(['--verbose', 'false', 'moo', '-t', 'true']) + .boolean(['t', 'verbose']).default('verbose', true).argv; + + t.same(parse, { + verbose: false, + t: true, + _: ['moo'], + $0 : $0 + }); + + t.same(typeof parse.verbose, 'boolean'); + t.same(typeof parse.t, 'boolean'); + t.end(); +}); + +test('flag boolean default false', function (t) { + var parse = optimist(['moo']) + .boolean(['t', 'verbose']) + .default('verbose', false) + .default('t', false).argv; + + t.same(parse, { + verbose: false, + t: false, + _: ['moo'], + $0 : $0 + }); + + t.same(typeof parse.verbose, 'boolean'); + t.same(typeof parse.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var parse = optimist([ '-x', '-z', 'one', 'two', 'three' ]) + .boolean(['x','y','z']).argv; + + t.same(parse, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ], + $0 : $0 + }); + + t.same(typeof parse.x, 'boolean'); + t.same(typeof parse.y, 'boolean'); + t.same(typeof parse.z, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = optimist.parse([ '-s', "X\nX" ]) + t.same(args, { _ : [], s : "X\nX", $0 : $0 }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = optimist.parse([ "--s=X\nX" ]) + t.same(args, { _ : [], s : "X\nX", $0 : $0 }); + t.end(); +}); + +test('strings' , function (t) { + var s = optimist([ '-s', '0001234' ]).string('s').argv.s; + t.same(s, '0001234'); + t.same(typeof s, 'string'); + + var x = optimist([ '-x', '56' ]).string('x').argv.x; + t.same(x, '56'); + t.same(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = optimist([ ' ', ' ' ]).string('_').argv._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + optimist.parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [], $0 : $0 } + ); + t.same( + optimist.parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [], $0 : $0 } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = optimist([ '-f', '11', '--zoom', '55' ]) + .alias('z', 'zoom') + .argv + ; + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = optimist([ '-f', '11', '--zoom', '55' ]) + .alias('z', [ 'zm', 'zoom' ]) + .argv + ; + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + 
+test('boolean default true', function (t) { + var argv = optimist.options({ + sometrue: { + boolean: true, + default: true + } + }).argv; + + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = optimist.options({ + somefalse: { + boolean: true, + default: false + } + }).argv; + + t.equal(argv.somefalse, false); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = optimist([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]).argv; + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + }, + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); + +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = optimist(aliased) + .boolean('herp') + .alias('h', 'herp') + .argv; + var propertyArgv = optimist(regular) + .boolean('herp') + .alias('h', 'herp') + .argv; + var expected = { + herp: true, + h: true, + '_': [ 'derp' ], + '$0': $0, + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = optimist(aliased) + .options(opts) + .argv; + var propertyArgv = optimist(regular).options(opts).argv; + var expected = { + herp: true, + h: true, + '_': [ 'derp' ], + '$0': $0, + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = optimist(aliased) + .boolean('h') + .alias('h', 'herp') + .argv; + var propertyArgv = optimist(regular) + .boolean('h') + .alias('h', 'herp') + .argv; + var expected = { + herp: true, + h: true, + '_': [ ], + '$0': $0, + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = optimist(['--boool', '--other=true']).boolean('boool').argv; + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = optimist(['--boool', '--other=false']).boolean('boool').argv; + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); diff --git a/node_modules/optimist/test/parse_modified.js b/node_modules/optimist/test/parse_modified.js new file mode 100644 index 0000000..a57dc84 --- /dev/null +++ b/node_modules/optimist/test/parse_modified.js @@ -0,0 +1,14 @@ +var optimist = require('../'); +var test = require('tap').test; + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = optimist().boolean('b').parse([ '-b', '123' ]); + t.deepEqual(fix(argv), { b: true, _: ['123'] }); +}); + +function fix (obj) { + delete obj.$0; + return obj; +} diff --git a/node_modules/optimist/test/short.js b/node_modules/optimist/test/short.js new file mode 100644 index 0000000..b2c38ad --- /dev/null +++ b/node_modules/optimist/test/short.js @@ -0,0 +1,16 @@ +var optimist = require('../index'); +var test = require('tap').test; + +test('-n123', function (t) { + t.plan(1); + var parse = 
optimist.parse([ '-n123' ]); + t.equal(parse.n, 123); +}); + +test('-123', function (t) { + t.plan(3); + var parse = optimist.parse([ '-123', '456' ]); + t.equal(parse['1'], true); + t.equal(parse['2'], true); + t.equal(parse['3'], 456); +}); diff --git a/node_modules/optimist/test/usage.js b/node_modules/optimist/test/usage.js new file mode 100644 index 0000000..300454c --- /dev/null +++ b/node_modules/optimist/test/usage.js @@ -0,0 +1,292 @@ +var Hash = require('hashish'); +var optimist = require('../index'); +var test = require('tap').test; + +test('usageFail', function (t) { + var r = checkUsage(function () { + return optimist('-x 10 -z 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .demand(['x','y']) + .argv; + }); + t.same( + r.result, + { x : 10, z : 20, _ : [], $0 : './usage' } + ); + + t.same( + r.errors.join('\n').split(/\n+/), + [ + 'Usage: ./usage -x NUM -y NUM', + 'Options:', + ' -x [required]', + ' -y [required]', + 'Missing required arguments: y', + ] + ); + t.same(r.logs, []); + t.ok(r.exit); + t.end(); +}); + + +test('usagePass', function (t) { + var r = checkUsage(function () { + return optimist('-x 10 -y 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .demand(['x','y']) + .argv; + }); + t.same(r, { + result : { x : 10, y : 20, _ : [], $0 : './usage' }, + errors : [], + logs : [], + exit : false, + }); + t.end(); +}); + +test('checkPass', function (t) { + var r = checkUsage(function () { + return optimist('-x 10 -y 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .check(function (argv) { + if (!('x' in argv)) throw 'You forgot about -x'; + if (!('y' in argv)) throw 'You forgot about -y'; + }) + .argv; + }); + t.same(r, { + result : { x : 10, y : 20, _ : [], $0 : './usage' }, + errors : [], + logs : [], + exit : false, + }); + t.end(); +}); + +test('checkFail', function (t) { + var r = checkUsage(function () { + return optimist('-x 10 -z 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .check(function (argv) { + if (!('x' in argv)) throw 'You forgot about -x'; + if (!('y' in argv)) throw 'You forgot about -y'; + }) + .argv; + }); + + t.same( + r.result, + { x : 10, z : 20, _ : [], $0 : './usage' } + ); + + t.same( + r.errors.join('\n').split(/\n+/), + [ + 'Usage: ./usage -x NUM -y NUM', + 'You forgot about -y' + ] + ); + + t.same(r.logs, []); + t.ok(r.exit); + t.end(); +}); + +test('checkCondPass', function (t) { + function checker (argv) { + return 'x' in argv && 'y' in argv; + } + + var r = checkUsage(function () { + return optimist('-x 10 -y 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .check(checker) + .argv; + }); + t.same(r, { + result : { x : 10, y : 20, _ : [], $0 : './usage' }, + errors : [], + logs : [], + exit : false, + }); + t.end(); +}); + +test('checkCondFail', function (t) { + function checker (argv) { + return 'x' in argv && 'y' in argv; + } + + var r = checkUsage(function () { + return optimist('-x 10 -z 20'.split(' ')) + .usage('Usage: $0 -x NUM -y NUM') + .check(checker) + .argv; + }); + + t.same( + r.result, + { x : 10, z : 20, _ : [], $0 : './usage' } + ); + + t.same( + r.errors.join('\n').split(/\n+/).join('\n'), + 'Usage: ./usage -x NUM -y NUM\n' + + 'Argument check failed: ' + checker.toString() + ); + + t.same(r.logs, []); + t.ok(r.exit); + t.end(); +}); + +test('countPass', function (t) { + var r = checkUsage(function () { + return optimist('1 2 3 --moo'.split(' ')) + .usage('Usage: $0 [x] [y] [z] {OPTIONS}') + .demand(3) + .argv; + }); + t.same(r, { + result : { _ : [ '1', '2', '3' ], moo : true, $0 : './usage' }, 
+ errors : [], + logs : [], + exit : false, + }); + t.end(); +}); + +test('countFail', function (t) { + var r = checkUsage(function () { + return optimist('1 2 --moo'.split(' ')) + .usage('Usage: $0 [x] [y] [z] {OPTIONS}') + .demand(3) + .argv; + }); + t.same( + r.result, + { _ : [ '1', '2' ], moo : true, $0 : './usage' } + ); + + t.same( + r.errors.join('\n').split(/\n+/), + [ + 'Usage: ./usage [x] [y] [z] {OPTIONS}', + 'Not enough non-option arguments: got 2, need at least 3', + ] + ); + + t.same(r.logs, []); + t.ok(r.exit); + t.end(); +}); + +test('defaultSingles', function (t) { + var r = checkUsage(function () { + return optimist('--foo 50 --baz 70 --powsy'.split(' ')) + .default('foo', 5) + .default('bar', 6) + .default('baz', 7) + .argv + ; + }); + t.same(r.result, { + foo : '50', + bar : 6, + baz : '70', + powsy : true, + _ : [], + $0 : './usage', + }); + t.end(); +}); + +test('defaultAliases', function (t) { + var r = checkUsage(function () { + return optimist('') + .alias('f', 'foo') + .default('f', 5) + .argv + ; + }); + t.same(r.result, { + f : '5', + foo : '5', + _ : [], + $0 : './usage', + }); + t.end(); +}); + +test('defaultHash', function (t) { + var r = checkUsage(function () { + return optimist('--foo 50 --baz 70'.split(' ')) + .default({ foo : 10, bar : 20, quux : 30 }) + .argv + ; + }); + t.same(r.result, { + _ : [], + $0 : './usage', + foo : 50, + baz : 70, + bar : 20, + quux : 30, + }); + t.end(); +}); + +test('rebase', function (t) { + t.equal( + optimist.rebase('/home/substack', '/home/substack/foo/bar/baz'), + './foo/bar/baz' + ); + t.equal( + optimist.rebase('/home/substack/foo/bar/baz', '/home/substack'), + '../../..' + ); + t.equal( + optimist.rebase('/home/substack/foo', '/home/substack/pow/zoom.txt'), + '../pow/zoom.txt' + ); + t.end(); +}); + +function checkUsage (f) { + + var exit = false; + + process._exit = process.exit; + process._env = process.env; + process._argv = process.argv; + + process.exit = function (t) { exit = true }; + process.env = Hash.merge(process.env, { _ : 'node' }); + process.argv = [ './usage' ]; + + var errors = []; + var logs = []; + + console._error = console.error; + console.error = function (msg) { errors.push(msg) }; + console._log = console.log; + console.log = function (msg) { logs.push(msg) }; + + var result = f(); + + process.exit = process._exit; + process.env = process._env; + process.argv = process._argv; + + console.error = console._error; + console.log = console._log; + + return { + errors : errors, + logs : logs, + exit : exit, + result : result, + }; +}; diff --git a/node_modules/optimist/test/whitespace.js b/node_modules/optimist/test/whitespace.js new file mode 100644 index 0000000..90b9075 --- /dev/null +++ b/node_modules/optimist/test/whitespace.js @@ -0,0 +1,8 @@ +var optimist = require('../'); +var test = require('tap').test; + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = optimist.parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/os-browserify/.npmignore b/node_modules/os-browserify/.npmignore new file mode 100644 index 0000000..f356293 --- /dev/null +++ b/node_modules/os-browserify/.npmignore @@ -0,0 +1,14 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +npm-debug.log diff --git a/node_modules/os-browserify/LICENSE b/node_modules/os-browserify/LICENSE new file mode 100644 index 0000000..a52e9b8 --- /dev/null +++ b/node_modules/os-browserify/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Drew 
Young + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/node_modules/os-browserify/README.md b/node_modules/os-browserify/README.md new file mode 100644 index 0000000..c772502 --- /dev/null +++ b/node_modules/os-browserify/README.md @@ -0,0 +1,2 @@ +os-browserify +============= \ No newline at end of file diff --git a/node_modules/os-browserify/browser.js b/node_modules/os-browserify/browser.js new file mode 100644 index 0000000..6a1ecb0 --- /dev/null +++ b/node_modules/os-browserify/browser.js @@ -0,0 +1,45 @@ +exports.endianness = function () { return 'LE' }; + +exports.hostname = function () { + if (typeof location !== 'undefined') { + return location.hostname + } + else return ''; +}; + +exports.loadavg = function () { return [] }; + +exports.uptime = function () { return 0 }; + +exports.freemem = function () { + return Number.MAX_VALUE; +}; + +exports.totalmem = function () { + return Number.MAX_VALUE; +}; + +exports.cpus = function () { return [] }; + +exports.type = function () { return 'Browser' }; + +exports.release = function () { + if (typeof navigator !== 'undefined') { + return navigator.appVersion; + } + return ''; +}; + +exports.networkInterfaces += exports.getNetworkInterfaces += function () { return {} }; + +exports.arch = function () { return 'javascript' }; + +exports.platform = function () { return 'browser' }; + +exports.tmpdir = exports.tmpDir = function () { + return '/tmp'; +}; + +exports.EOL = '\n'; diff --git a/node_modules/os-browserify/main.js b/node_modules/os-browserify/main.js new file mode 100644 index 0000000..5910697 --- /dev/null +++ b/node_modules/os-browserify/main.js @@ -0,0 +1 @@ +module.exports = require('os'); diff --git a/node_modules/os-browserify/package.json b/node_modules/os-browserify/package.json new file mode 100644 index 0000000..17dbae0 --- /dev/null +++ b/node_modules/os-browserify/package.json @@ -0,0 +1,12 @@ +{ + "name": "os-browserify", + "version": "0.1.2", + "author": "Drew Young", + "license": "MIT", + "main": "main.js", + "browser": "browser.js", + "repository": { + "type": "git", + "url": "http://github.com/drewyoung1/os-browserify.git" + } +} diff --git a/node_modules/os-homedir/index.js b/node_modules/os-homedir/index.js new file mode 100644 index 0000000..3306616 --- /dev/null +++ b/node_modules/os-homedir/index.js @@ -0,0 +1,24 @@ +'use strict'; +var os = require('os'); + +function homedir() { + var env = process.env; + var home = env.HOME; + var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME; + + if 
(process.platform === 'win32') { + return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null; + } + + if (process.platform === 'darwin') { + return home || (user ? '/Users/' + user : null); + } + + if (process.platform === 'linux') { + return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); + } + + return home || null; +} + +module.exports = typeof os.homedir === 'function' ? os.homedir : homedir; diff --git a/node_modules/os-homedir/license b/node_modules/os-homedir/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/os-homedir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/os-homedir/package.json b/node_modules/os-homedir/package.json new file mode 100644 index 0000000..525b225 --- /dev/null +++ b/node_modules/os-homedir/package.json @@ -0,0 +1,41 @@ +{ + "name": "os-homedir", + "version": "1.0.2", + "description": "Node.js 4 `os.homedir()` ponyfill", + "license": "MIT", + "repository": "sindresorhus/os-homedir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "builtin", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "homedir", + "home", + "dir", + "directory", + "folder", + "user", + "path" + ], + "devDependencies": { + "ava": "*", + "path-exists": "^2.0.0", + "xo": "^0.16.0" + } +} diff --git a/node_modules/os-homedir/readme.md b/node_modules/os-homedir/readme.md new file mode 100644 index 0000000..856ae61 --- /dev/null +++ b/node_modules/os-homedir/readme.md @@ -0,0 +1,31 @@ +# os-homedir [![Build Status](https://travis-ci.org/sindresorhus/os-homedir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-homedir) + +> Node.js 4 [`os.homedir()`](https://nodejs.org/api/os.html#os_os_homedir) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save os-homedir +``` + + +## Usage + +```js +const osHomedir = require('os-homedir'); + +console.log(osHomedir()); +//=> '/Users/sindresorhus' +``` + + +## Related + +- [user-home](https://github.com/sindresorhus/user-home) - Same as this module but caches the result +- [home-or-tmp](https://github.com/sindresorhus/home-or-tmp) - Get the user home directory with fallback to the system temp directory + + +## License + +MIT © [Sindre 
Sorhus](https://sindresorhus.com) diff --git a/node_modules/os-tmpdir/index.js b/node_modules/os-tmpdir/index.js new file mode 100644 index 0000000..2077b1c --- /dev/null +++ b/node_modules/os-tmpdir/index.js @@ -0,0 +1,25 @@ +'use strict'; +var isWindows = process.platform === 'win32'; +var trailingSlashRe = isWindows ? /[^:]\\$/ : /.\/$/; + +// https://github.com/nodejs/node/blob/3e7a14381497a3b73dda68d05b5130563cdab420/lib/os.js#L25-L43 +module.exports = function () { + var path; + + if (isWindows) { + path = process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + '\\temp'; + } else { + path = process.env.TMPDIR || + process.env.TMP || + process.env.TEMP || + '/tmp'; + } + + if (trailingSlashRe.test(path)) { + path = path.slice(0, -1); + } + + return path; +}; diff --git a/node_modules/os-tmpdir/license b/node_modules/os-tmpdir/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/os-tmpdir/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/os-tmpdir/package.json b/node_modules/os-tmpdir/package.json new file mode 100644 index 0000000..180a317 --- /dev/null +++ b/node_modules/os-tmpdir/package.json @@ -0,0 +1,41 @@ +{ + "name": "os-tmpdir", + "version": "1.0.2", + "description": "Node.js os.tmpdir() ponyfill", + "license": "MIT", + "repository": "sindresorhus/os-tmpdir", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "built-in", + "core", + "ponyfill", + "polyfill", + "shim", + "os", + "tmpdir", + "tempdir", + "tmp", + "temp", + "dir", + "directory", + "env", + "environment" + ], + "devDependencies": { + "ava": "*", + "xo": "^0.16.0" + } +} diff --git a/node_modules/os-tmpdir/readme.md b/node_modules/os-tmpdir/readme.md new file mode 100644 index 0000000..c09f7ed --- /dev/null +++ b/node_modules/os-tmpdir/readme.md @@ -0,0 +1,32 @@ +# os-tmpdir [![Build Status](https://travis-ci.org/sindresorhus/os-tmpdir.svg?branch=master)](https://travis-ci.org/sindresorhus/os-tmpdir) + +> Node.js [`os.tmpdir()`](https://nodejs.org/api/os.html#os_os_tmpdir) [ponyfill](https://ponyfill.com) + +Use this instead of `require('os').tmpdir()` to get a consistent behavior on different Node.js versions (even 0.8). 
+ + +## Install + +``` +$ npm install --save os-tmpdir +``` + + +## Usage + +```js +const osTmpdir = require('os-tmpdir'); + +osTmpdir(); +//=> '/var/folders/m3/5574nnhn0yj488ccryqr7tc80000gn/T' +``` + + +## API + +See the [`os.tmpdir()` docs](https://nodejs.org/api/os.html#os_os_tmpdir). + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/pako/CHANGELOG.md b/node_modules/pako/CHANGELOG.md new file mode 100644 index 0000000..f985922 --- /dev/null +++ b/node_modules/pako/CHANGELOG.md @@ -0,0 +1,90 @@ +1.0.2 / 2016-07-21 +------------------ + +- Fixed nasty bug in deflate (wrong `d_buf` offset), which could cause + broken data in some rare cases. +- Also released as 0.2.9 to give chance to old dependents, not updated to 1.x + version. + + +1.0.1 / 2016-04-01 +------------------ + +- Added dictionary support. Thanks to @dignifiedquire. + + +1.0.0 / 2016-02-17 +------------------ + +- Maintenance release (semver, coding style). + + +0.2.8 / 2015-09-14 +------------------ + +- Fixed regression after 0.2.4 for edge conditions in inflate wrapper (#65). + Added more tests to cover possible cases. + + +0.2.7 / 2015-06-09 +------------------ + +- Added Z_SYNC_FLUSH support. Thanks to @TinoLange. + + +0.2.6 / 2015-03-24 +------------------ + +- Allow ArrayBuffer input. + + +0.2.5 / 2014-07-19 +------------------ + +- Workaround for Chrome 38.0.2096.0 script parser bug, #30. + + +0.2.4 / 2014-07-07 +------------------ + +- Fixed bug in inflate wrapper, #29 + + +0.2.3 / 2014-06-09 +------------------ + +- Maintenance release, dependencies update. + + +0.2.2 / 2014-06-04 +------------------ + +- Fixed iOS 5.1 Safary issue with `apply(typed_array)`, #26. + + +0.2.1 / 2014-05-01 +------------------ + +- Fixed collision on switch dynamic/fixed tables. + + +0.2.0 / 2014-04-18 +------------------ + +- Added custom gzip headers support. +- Added strings support. +- Improved memory allocations for small chunks. +- ZStream properties rename/cleanup. +- More coverage tests. + + +0.1.1 / 2014-03-20 +------------------ + +- Bugfixes for inflate/deflate. + + +0.1.0 / 2014-03-15 +------------------ + +- First release. diff --git a/node_modules/pako/LICENSE b/node_modules/pako/LICENSE new file mode 100644 index 0000000..d082ae3 --- /dev/null +++ b/node_modules/pako/LICENSE @@ -0,0 +1,21 @@ +(The MIT License) + +Copyright (C) 2014-2016 by Vitaly Puzrin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
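The 1.0.1 entry in the changelog above mentions dictionary support. A minimal round-trip sketch of that option, as accepted by the bundled `Deflate`/`Inflate` wrappers further down (the dictionary text is purely illustrative):

```js
var pako = require('pako');

// Compressor and decompressor must be given the same dictionary (string or bytes).
var dictionary = 'common boilerplate shared by many payloads';

var compressed = pako.deflate('payload built on common boilerplate', { dictionary: dictionary });
var restored = pako.inflate(compressed, { dictionary: dictionary, to: 'string' });
//=> 'payload built on common boilerplate'
```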
diff --git a/node_modules/pako/README.md b/node_modules/pako/README.md new file mode 100644 index 0000000..ef121e7 --- /dev/null +++ b/node_modules/pako/README.md @@ -0,0 +1,176 @@ +pako - zlib port to javascript, very fast! +========================================== + +[![Build Status](https://travis-ci.org/nodeca/pako.svg?branch=master)](https://travis-ci.org/nodeca/pako) +[![NPM version](https://img.shields.io/npm/v/pako.svg)](https://www.npmjs.org/package/pako) + +__Why pako is cool:__ + +- Almost as fast in modern JS engines as C implementation (see benchmarks). +- Works in browsers, you can browserify any separate component. +- Chunking support for big blobs. +- Results are binary equal to well known [zlib](http://www.zlib.net/) (now v1.2.8 ported). + +This project was done to understand how fast JS can be and is it necessary to +develop native C modules for CPU-intensive tasks. Enjoy the result! + + +__Famous projects, using pako:__ + +- [browserify](http://browserify.org/) (via [browserify-zlib](https://github.com/devongovett/browserify-zlib)) +- [JSZip](http://stuk.github.io/jszip/) +- [mincer](https://github.com/nodeca/mincer) +- [JS-Git](https://github.com/creationix/js-git) and + [Tedit](https://chrome.google.com/webstore/detail/tedit-development-environ/ooekdijbnbbjdfjocaiflnjgoohnblgf) + by [@creatronix](https://github.com/creationix) + + +__Benchmarks:__ + +``` +node v0.10.26, 1mb sample: + + deflate-dankogai x 4.73 ops/sec ±0.82% (15 runs sampled) + deflate-gildas x 4.58 ops/sec ±2.33% (15 runs sampled) + deflate-imaya x 3.22 ops/sec ±3.95% (12 runs sampled) + ! deflate-pako x 6.99 ops/sec ±0.51% (21 runs sampled) + deflate-pako-string x 5.89 ops/sec ±0.77% (18 runs sampled) + deflate-pako-untyped x 4.39 ops/sec ±1.58% (14 runs sampled) + * deflate-zlib x 14.71 ops/sec ±4.23% (59 runs sampled) + inflate-dankogai x 32.16 ops/sec ±0.13% (56 runs sampled) + inflate-imaya x 30.35 ops/sec ±0.92% (53 runs sampled) + ! inflate-pako x 69.89 ops/sec ±1.46% (71 runs sampled) + inflate-pako-string x 19.22 ops/sec ±1.86% (49 runs sampled) + inflate-pako-untyped x 17.19 ops/sec ±0.85% (32 runs sampled) + * inflate-zlib x 70.03 ops/sec ±1.64% (81 runs sampled) + +node v0.11.12, 1mb sample: + + deflate-dankogai x 5.60 ops/sec ±0.49% (17 runs sampled) + deflate-gildas x 5.06 ops/sec ±6.00% (16 runs sampled) + deflate-imaya x 3.52 ops/sec ±3.71% (13 runs sampled) + ! deflate-pako x 11.52 ops/sec ±0.22% (32 runs sampled) + deflate-pako-string x 9.53 ops/sec ±1.12% (27 runs sampled) + deflate-pako-untyped x 5.44 ops/sec ±0.72% (17 runs sampled) + * deflate-zlib x 14.05 ops/sec ±3.34% (63 runs sampled) + inflate-dankogai x 42.19 ops/sec ±0.09% (56 runs sampled) + inflate-imaya x 79.68 ops/sec ±1.07% (68 runs sampled) + ! inflate-pako x 97.52 ops/sec ±0.83% (80 runs sampled) + inflate-pako-string x 45.19 ops/sec ±1.69% (57 runs sampled) + inflate-pako-untyped x 24.35 ops/sec ±2.59% (40 runs sampled) + * inflate-zlib x 60.32 ops/sec ±1.36% (69 runs sampled) +``` + +zlib's test is partialy afferted by marshling (that make sense for inflate only). +You can change deflate level to 0 in benchmark source, to investigate details. +For deflate level 6 results can be considered as correct. + +__Install:__ + +node.js: + +``` +npm install pako +``` + +browser: + +``` +bower install pako +``` + + +Example & API +------------- + +Full docs - http://nodeca.github.io/pako/ + +```javascript +var pako = require('pako'); + +// Deflate +// +var input = new Uint8Array(); +//... 
fill input data here +var output = pako.deflate(input); + +// Inflate (simple wrapper can throw exception on broken stream) +// +var compressed = new Uint8Array(); +//... fill data to uncompress here +try { + var result = pako.inflate(compressed); +} catch (err) { + console.log(err); +} + +// +// Alternate interface for chunking & without exceptions +// + +var inflator = new pako.Inflate(); + +inflator.push(chunk1, false); +inflator.push(chunk2, false); +... +inflator.push(chunkN, true); // true -> last chunk + +if (inflator.err) { + console.log(inflator.msg); +} + +var output = inflator.result; + +``` + +Sometime you can wish to work with strings. For example, to send +big objects as json to server. Pako detects input data type. You can +force output to be string with option `{ to: 'string' }`. + +```javascript +var pako = require('pako'); + +var test = { my: 'super', puper: [456, 567], awesome: 'pako' }; + +var binaryString = pako.deflate(JSON.stringify(test), { to: 'string' }); + +// +// Here you can do base64 encode, make xhr requests and so on. +// + +var restored = JSON.parse(pako.inflate(binaryString, { to: 'string' })); +``` + + +Notes +----- + +Pako does not contain some specific zlib functions: + +- __deflate__ - methods `deflateCopy`, `deflateBound`, `deflateParams`, + `deflatePending`, `deflatePrime`, `deflateTune`. +- __inflate__ - methods `inflateCopy`, `inflateMark`, + `inflatePrime`, `inflateGetDictionary`, `inflateSync`, `inflateSyncPoint`, `inflateUndermine`. +- High level inflate/deflate wrappers (classes) may not support some flush + modes. Those should work: Z_NO_FLUSH, Z_FINISH, Z_SYNC_FLUSH. + + +Authors +------- + +- Andrey Tupitsin [@anrd83](https://github.com/andr83) +- Vitaly Puzrin [@puzrin](https://github.com/puzrin) + +Personal thanks to: + +- Vyacheslav Egorov ([@mraleph](https://github.com/mraleph)) for his awesome + tutorials about optimising JS code for v8, [IRHydra](http://mrale.ph/irhydra/) + tool and his advices. +- David Duponchel ([@dduponchel](https://github.com/dduponchel)) for help with + testing. + + +License +------- + +MIT diff --git a/node_modules/pako/dist/pako.js b/node_modules/pako/dist/pako.js new file mode 100644 index 0000000..6d6db94 --- /dev/null +++ b/node_modules/pako/dist/pako.js @@ -0,0 +1,6606 @@ +/* pako 0.2.9 nodeca/pako */(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.pako = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o Array + * + * Chunks of output data, if [[Deflate#onData]] not overriden. + **/ + +/** + * Deflate.result -> Uint8Array|Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Deflate#push]] with + * `Z_SYNC_FLUSH` param). 
+ **/ + +/** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. + * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ + +/** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + +/** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ +function Deflate(options) { + if (!(this instanceof Deflate)) return new Deflate(options); + + this.options = utils.assign({ + level: Z_DEFAULT_COMPRESSION, + method: Z_DEFLATED, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY, + to: '' + }, options || {}); + + var opt = this.options; + + if (opt.raw && (opt.windowBits > 0)) { + opt.windowBits = -opt.windowBits; + } + + else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { + opt.windowBits += 16; + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_deflate.deflateInit2( + this.strm, + opt.level, + opt.method, + opt.windowBits, + opt.memLevel, + opt.strategy + ); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + if (opt.header) { + zlib_deflate.deflateSetHeader(this.strm, opt.header); + } + + if (opt.dictionary) { + var dict; + // Convert data if needed + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. 
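      // string2buf returns a Uint8Array of UTF-8 bytes, so deflateSetDictionary
      // below always receives raw bytes, mirroring how string input is handled
      // in Deflate#push.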
+ dict = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary); + } else { + dict = opt.dictionary; + } + + status = zlib_deflate.deflateSetDictionary(this.strm, dict); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + this._dict_set = true; + } +} + +/** + * Deflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data. Strings will be + * converted to utf8 byte sequence. + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the compression context. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * array format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Deflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var status, _mode; + + if (this.ended) { return false; } + + _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ + + if (status !== Z_STREAM_END && status !== Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { + if (this.options.to === 'string') { + this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); + + // Finalize on the last chunk. + if (_mode === Z_FINISH) { + status = zlib_deflate.deflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === Z_SYNC_FLUSH) { + this.onEnd(Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. 
+ * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK) { + if (this.options.to === 'string') { + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * deflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ +function deflate(input, options) { + var deflator = new Deflate(options); + + deflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (deflator.err) { throw deflator.msg; } + + return deflator.result; +} + + +/** + * deflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function deflateRaw(input, options) { + options = options || {}; + options.raw = true; + return deflate(input, options); +} + + +/** + * gzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. + **/ +function gzip(input, options) { + options = options || {}; + options.gzip = true; + return deflate(input, options); +} + + +exports.Deflate = Deflate; +exports.deflate = deflate; +exports.deflateRaw = deflateRaw; +exports.gzip = gzip; + +},{"./utils/common":3,"./utils/strings":4,"./zlib/deflate":8,"./zlib/messages":13,"./zlib/zstream":15}],2:[function(require,module,exports){ +'use strict'; + + +var zlib_inflate = require('./zlib/inflate'); +var utils = require('./utils/common'); +var strings = require('./utils/strings'); +var c = require('./zlib/constants'); +var msg = require('./zlib/messages'); +var ZStream = require('./zlib/zstream'); +var GZheader = require('./zlib/gzheader'); + +var toString = Object.prototype.toString; + +/** + * class Inflate + * + * Generic JS-style wrapper for zlib calls. 
If you don't need + * streaming behaviour - use more simple functions: [[inflate]] + * and [[inflateRaw]]. + **/ + +/* internal + * inflate.chunks -> Array + * + * Chunks of output data, if [[Inflate#onData]] not overriden. + **/ + +/** + * Inflate.result -> Uint8Array|Array|String + * + * Uncompressed result, generated by default [[Inflate#onData]] + * and [[Inflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Inflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Inflate.err -> Number + * + * Error code after inflate finished. 0 (Z_OK) on success. + * Should be checked if broken data possible. + **/ + +/** + * Inflate.msg -> String + * + * Error message, if [[Inflate.err]] != 0 + **/ + + +/** + * new Inflate(options) + * - options (Object): zlib inflate options. + * + * Creates new inflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `windowBits` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw inflate + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * By default, when no options set, autodetect deflate/gzip data format via + * wrapper header. + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var inflate = new pako.Inflate({ level: 3}); + * + * inflate.push(chunk1, false); + * inflate.push(chunk2, true); // true -> last chunk + * + * if (inflate.err) { throw new Error(inflate.err); } + * + * console.log(inflate.result); + * ``` + **/ +function Inflate(options) { + if (!(this instanceof Inflate)) return new Inflate(options); + + this.options = utils.assign({ + chunkSize: 16384, + windowBits: 0, + to: '' + }, options || {}); + + var opt = this.options; + + // Force window size for `raw` data, if not set directly, + // because we have no header for autodetect. + if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { + opt.windowBits = -opt.windowBits; + if (opt.windowBits === 0) { opt.windowBits = -15; } + } + + // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate + if ((opt.windowBits >= 0) && (opt.windowBits < 16) && + !(options && options.windowBits)) { + opt.windowBits += 32; + } + + // Gzip header has no info about windows size, we can do autodetect only + // for deflate. 
So, if window size not set, force it to max when gzip possible + if ((opt.windowBits > 15) && (opt.windowBits < 48)) { + // bit 3 (16) -> gzipped data + // bit 4 (32) -> autodetect gzip/deflate + if ((opt.windowBits & 15) === 0) { + opt.windowBits |= 15; + } + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_inflate.inflateInit2( + this.strm, + opt.windowBits + ); + + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + + this.header = new GZheader(); + + zlib_inflate.inflateGetHeader(this.strm, this.header); +} + +/** + * Inflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. + * + * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with + * new output chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the decompression context. + * + * On fail call [[Inflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Inflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var dictionary = this.options.dictionary; + var status, _mode; + var next_out_utf8, tail, utf8str; + var dict; + + // Flag to properly process Z_BUF_ERROR on testing inflate call + // when we check that all output data was flushed. + var allowBufError = false; + + if (this.ended) { return false; } + _mode = (mode === ~~mode) ? mode : ((mode === true) ? 
c.Z_FINISH : c.Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // Only binary strings can be decompressed on practice + strm.input = strings.binstring2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + + status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ + + if (status === c.Z_NEED_DICT && dictionary) { + // Convert data if needed + if (typeof dictionary === 'string') { + dict = strings.string2buf(dictionary); + } else if (toString.call(dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(dictionary); + } else { + dict = dictionary; + } + + status = zlib_inflate.inflateSetDictionary(this.strm, dict); + + } + + if (status === c.Z_BUF_ERROR && allowBufError === true) { + status = c.Z_OK; + allowBufError = false; + } + + if (status !== c.Z_STREAM_END && status !== c.Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + + if (strm.next_out) { + if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { + + if (this.options.to === 'string') { + + next_out_utf8 = strings.utf8border(strm.output, strm.next_out); + + tail = strm.next_out - next_out_utf8; + utf8str = strings.buf2string(strm.output, next_out_utf8); + + // move tail + strm.next_out = tail; + strm.avail_out = chunkSize - tail; + if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } + + this.onData(utf8str); + + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } + + // When no more input data, we should check that internal inflate buffers + // are flushed. The only way to do it when avail_out = 0 - run one more + // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. + // Here we set flag to process this error properly. + // + // NOTE. Deflate does not return error in this case and does not needs such + // logic. + if (strm.avail_in === 0 && strm.avail_out === 0) { + allowBufError = true; + } + + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); + + if (status === c.Z_STREAM_END) { + _mode = c.Z_FINISH; + } + + // Finalize on the last chunk. + if (_mode === c.Z_FINISH) { + status = zlib_inflate.inflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === c.Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === c.Z_SYNC_FLUSH) { + this.onEnd(c.Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Inflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Inflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Inflate#onEnd(status) -> Void + * - status (Number): inflate status. 0 (Z_OK) on success, + * other if not. + * + * Called either after you tell inflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. 
By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Inflate.prototype.onEnd = function (status) { + // On success - join + if (status === c.Z_OK) { + if (this.options.to === 'string') { + // Glue & convert here, until we teach pako to send + // utf8 alligned strings to onData + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * inflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Decompress `data` with inflate/ungzip and `options`. Autodetect + * format via wrapper header by default. That's why we don't provide + * separate `ungzip` method. + * + * Supported options are: + * + * - windowBits + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) + * , output; + * + * try { + * output = pako.inflate(input); + * } catch (err) + * console.log(err); + * } + * ``` + **/ +function inflate(input, options) { + var inflator = new Inflate(options); + + inflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (inflator.err) { throw inflator.msg; } + + return inflator.result; +} + + +/** + * inflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * The same as [[inflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function inflateRaw(input, options) { + options = options || {}; + options.raw = true; + return inflate(input, options); +} + + +/** + * ungzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Just shortcut to [[inflate]], because it autodetects format + * by header.content. Done for convenience. 
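 *
 * ##### Example (a quick round-trip sketch):
 *
 * ```javascript
 * var pako = require('pako');
 * var gzipped = pako.gzip('compress me');
 *
 * console.log(pako.ungzip(gzipped, { to: 'string' }));
 * //=> 'compress me'
 * ```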
+ **/ + + +exports.Inflate = Inflate; +exports.inflate = inflate; +exports.inflateRaw = inflateRaw; +exports.ungzip = inflate; + +},{"./utils/common":3,"./utils/strings":4,"./zlib/constants":6,"./zlib/gzheader":9,"./zlib/inflate":11,"./zlib/messages":13,"./zlib/zstream":15}],3:[function(require,module,exports){ +'use strict'; + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (source.hasOwnProperty(p)) { + obj[p] = source[p]; + } + } + } + + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; + } +}; + +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; + + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); + +},{}],4:[function(require,module,exports){ +// String encode/decode helpers +'use strict'; + + +var utils = require('./common'); + + +// Quick check if we can use fast array to bin string conversion +// +// - apply(Array) can fail on Android 2.2 +// - apply(Uint8Array) can fail on iOS 5.1 Safary +// +var STR_APPLY_OK = true; +var STR_APPLY_UIA_OK = true; + +try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; } +try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; } + + +// Table with utf8 lengths (calculated by first byte of sequence) +// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, +// because max possible codepoint is 0x10ffff +var _utf8len = new utils.Buf8(256); +for (var q = 0; q < 256; q++) { + _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 
2 : 1); +} +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start + + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4; + } + + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } + } + + return buf; +}; + +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // use fallback for big arrays to avoid stack overflow + if (len < 65537) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; +} + + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; +}; + + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? + if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } + } + + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. 
+// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { + var pos; + + max = max || buf.length; + if (max > buf.length) { max = buf.length; } + + // go back from last position, until start of sequence found + pos = max - 1; + while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + + // Fuckup - very small and broken sequence, + // return max, because we should return something anyway. + if (pos < 0) { return max; } + + // If we came to start of buffer - that means vuffer is too small, + // return max too. + if (pos === 0) { return max; } + + return (pos + _utf8len[buf[pos]] > max) ? pos : max; +}; + +},{"./common":3}],5:[function(require,module,exports){ +'use strict'; + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It doesn't worth to make additional optimizationa as in original. +// Small size is preferable. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + +},{}],6:[function(require,module,exports){ +'use strict'; + + +module.exports = { + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; + +},{}],7:[function(require,module,exports){ +'use strict'; + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. 
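// (In practice the table has 256 entries, one per byte value, built from the
// reflected CRC-32 polynomial 0xEDB88320 and reused by every crc32() call below.)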
+var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + +},{}],8:[function(require,module,exports){ +'use strict'; + +var utils = require('../utils/common'); +var trees = require('./trees'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var msg = require('./messages'); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. 
All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). + */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. 
+ */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. + */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). 
+ */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. + */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. 
+ */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
+ */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. 
+ */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. 
If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous 
deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. + */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. 
+ */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. 
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + 
s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
+ */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
+ */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + +},{"../utils/common":3,"./adler32":5,"./crc32":7,"./messages":13,"./trees":14}],9:[function(require,module,exports){ +'use strict'; + + +function GZheader() { + /* true if compressed data believed to be text */ + this.text = 0; + /* modification time */ + this.time = 0; + /* extra flags (not used when writing a gzip file) */ + this.xflags = 0; + /* operating system */ + this.os = 0; + /* pointer to extra field or Z_NULL if none */ + this.extra = null; + /* extra field length (valid if extra != Z_NULL) */ + this.extra_len = 0; // Actually, we don't need it in JS, + // but leave for few code modifications + + // + // Setup limits is not necessary because in js we should not preallocate memory + // for inflate use constant limit in 65536 bytes + // + + /* space at 
extra (only when reading header) */ + // this.extra_max = 0; + /* pointer to zero-terminated file name or Z_NULL */ + this.name = ''; + /* space at name (only when reading header) */ + // this.name_max = 0; + /* pointer to zero-terminated comment or Z_NULL */ + this.comment = ''; + /* space at comment (only when reading header) */ + // this.comm_max = 0; + /* true if there was or will be a header crc */ + this.hcrc = 0; + /* true when done reading gzip header (not used when writing a gzip file) */ + this.done = false; +} + +module.exports = GZheader; + +},{}],10:[function(require,module,exports){ +'use strict'; + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. + + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. + + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. 
+ */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defailts, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + 
output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + +},{}],11:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var inflate_fast = require('./inffast'); +var inflate_table = require('./inftrees'); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested 
window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = 
windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. + If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. + + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. 
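// ---------------------------------------------------------------------------
// Editorial sketch (reviewer illustration, not part of the bundled pako
// source): the circular "keep only the last `size` bytes" bookkeeping that
// updatewindow() below performs on state.window / state.wnext / state.whave,
// shown as a stand-alone helper. The SlidingWindow name is invented for this
// sketch; `src` is assumed to be a Uint8Array.
function SlidingWindow(size) {
  this.size = size;                    // window capacity (32K for zlib)
  this.buf = new Uint8Array(size);
  this.next = 0;                       // write position (wraps around)
  this.have = 0;                       // valid bytes accumulated so far
}
SlidingWindow.prototype.push = function (src) {
  if (src.length >= this.size) {
    // more new data than the window can hold: keep only the tail
    this.buf.set(src.subarray(src.length - this.size));
    this.next = 0;
    this.have = this.size;
    return;
  }
  var tail = Math.min(this.size - this.next, src.length);
  this.buf.set(src.subarray(0, tail), this.next);  // fill up to the end of the buffer
  this.buf.set(src.subarray(tail), 0);             // wrap the remainder to the start
  this.next = (this.next + src.length) % this.size;
  this.have = Math.min(this.size, this.have + src.length);
};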
+ */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + 
state.mode = BAD; + break; + } + if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more conveniend processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited 
to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? + // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 
0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + } +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defailts, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' insdead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
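// ---------------------------------------------------------------------------
// Editorial sketch (reviewer illustration, not part of the bundled pako
// source): the shape of a caller loop around the low-level inflate() function,
// matching the return path described in the comment above. It assumes `strm`
// was prepared with inflateInit2() and already carries the compressed bytes in
// strm.input / strm.next_in / strm.avail_in; the numeric codes follow zlib's
// convention (0 = Z_OK / Z_NO_FLUSH, 1 = Z_STREAM_END). The names inflateAll
// and CHUNK are invented for this sketch.
function inflateAll(strm, CHUNK) {
  var out = [];
  var status;
  do {
    strm.output = new Uint8Array(CHUNK);   // fresh output buffer each round
    strm.next_out = 0;
    strm.avail_out = CHUNK;
    status = inflate(strm, 0 /* Z_NO_FLUSH */);
    if (status !== 0 /* Z_OK */ && status !== 1 /* Z_STREAM_END */) {
      throw new Error('inflate failed with code ' + status);
    }
    out.push(strm.output.subarray(0, strm.next_out));
  } while (status !== 1 /* Z_STREAM_END */ &&
           (strm.avail_in > 0 || strm.avail_out === 0));
  return out;                              // array of Uint8Array chunks
}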
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ + if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + +},{"../utils/common":3,"./adler32":5,"./crc32":7,"./inffast":10,"./inftrees":12}],12:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var 
ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used = 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. 
finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. 
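// ---------------------------------------------------------------------------
// Editorial sketch (reviewer illustration, not part of the bundled pako
// source): the canonical Huffman code assignment described above, in
// isolation -- count codes per length, derive the first code of each length,
// then hand out consecutive codes in symbol order. The helper name
// canonicalCodes is invented for this sketch.
function canonicalCodes(lens) {
  var MAX = 15;
  var count = new Array(MAX + 1).fill(0);   // codes per bit length
  var next = new Array(MAX + 1).fill(0);    // next code to assign for each length
  var codes = new Array(lens.length).fill(0);
  var len, sym, code = 0;

  for (sym = 0; sym < lens.length; sym++) { count[lens[sym]]++; }
  count[0] = 0;                             // length 0 means "symbol unused"
  for (len = 1; len <= MAX; len++) {
    code = (code + count[len - 1]) << 1;    // first code of each length
    next[len] = code;
  }
  for (sym = 0; sym < lens.length; sym++) {
    if (lens[sym] !== 0) { codes[sym] = next[lens[sym]]++; }
  }
  return codes;
}
// e.g. canonicalCodes([2, 1, 3, 3]) -> [2 /*10*/, 0 /*0*/, 6 /*110*/, 7 /*111*/]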
This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. + */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + var i = 0; + /* process all codes and make table entries */ + for (;;) { + i++; + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; } + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += 
used; + opts.bits = root; + return 0; +}; + +},{"../utils/common":3}],13:[function(require,module,exports){ +'use strict'; + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + +},{}],14:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. + */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. 
These are initialized only once. + */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array insdead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. 
+ */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. + */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). 
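// ---------------------------------------------------------------------------
// Editorial sketch (reviewer illustration, not part of the bundled pako
// source): a stand-alone LSB-first bit writer showing the bookkeeping that
// send_bits()/bi_flush() above perform on s.bi_buf / s.bi_valid. The
// BitWriter name is invented for this sketch; it flushes whole bytes eagerly
// instead of keeping a 16-bit buffer, which keeps the arithmetic simpler.
function BitWriter() {
  this.out = [];       // emitted bytes
  this.bitBuf = 0;     // pending bits (analogue of s.bi_buf)
  this.bitCount = 0;   // number of valid pending bits (analogue of s.bi_valid)
}
BitWriter.prototype.write = function (value, length) {
  // assumes 1 <= length <= 16 and value < 2^length, as send_bits() does
  this.bitBuf |= value << this.bitCount;   // append new bits above the pending ones
  this.bitCount += length;
  while (this.bitCount >= 8) {             // emit completed bytes, low byte first
    this.out.push(this.bitBuf & 0xff);
    this.bitBuf >>>= 8;
    this.bitCount -= 8;
  }
};
// Because bytes are filled from the least significant bit, Huffman codes are
// stored pre-reversed (see bi_reverse above), e.g. bi_reverse(0x3, 3) === 0x6.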
+ */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) + */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. + */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. 
+ */ + //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, + //        "inconsistent bit counts"); + //Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); + + for (n = 0; n <= max_code; n++) { + var len = tree[n * 2 + 1]/*.Len*/; + if (len === 0) { continue; } + /* Now reverse the bits */ + tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len); + } +} + + +/* =========================================================================== + * Initialize the various 'constant' tables. + */ +function tr_static_init() { + var n; /* iterates over tree elements */ + var bits; /* bit counter */ + var length; /* length value */ + var code; /* code value */ + var dist; /* distance index */ + var bl_count = new Array(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + /* Initialize the mapping length (0..255) -> length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5); + } + + // Now data ready and we can init static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. */ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested.
+ */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). + */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? 
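+           * (Editor's worked example, assuming the standard DEFLATE tables:
+           * a match of length 10 at distance 100 arrives here as lc = 7,
+           * dist = 100; _length_code[7] maps to length code 264 with no extra
+           * bits, and d_code(99) = 13, so code 13 is sent followed by
+           * 99 - base_dist[13] = 3 in 6 extra distance bits.)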
*/ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. + */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. 
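+   * (Editor's illustrative example: for frequencies A:5 B:2 C:1 D:1 the loop
+   * above first merges C and D into a node of freq 2, then that node with B
+   * into freq 4, then with A into the root of freq 9; gen_bitlen below then
+   * walks the Dad links and assigns lengths A=1, B=2, C=3, D=3.)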
+ */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. + */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. 
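+ * (Editor's note: code lengths are run-length encoded with the three special
+ * symbols whose frequencies scan_tree counted above -- e.g. a run of 30 zero
+ * lengths is sent as one REPZ_11_138 code plus the value 19 (30 - 11) in 7
+ * extra bits, and a run of 5 equal non-zero lengths as the length itself
+ * followed by REP_3_6 with 4 - 3 = 1 in 2 extra bits.)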
+ */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. + */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
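+ * (Editor's illustrative example: for lcodes = 280, dcodes = 20, blcodes = 15
+ * the header below sends HLIT = 23 in 5 bits, HDIST = 19 in 5 bits and
+ * HCLEN = 11 in 4 bits, followed by 15 three-bit code lengths in bl_order.)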
+ */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. */ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. 
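+ * (Editor's note: the static literal and distance tables are shared by every
+ * stream, so they are built only once, on the first call, guarded by the
+ * static_init_done flag declared just above.)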
+ */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. */ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. 
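+     * (Editor's illustrative numbers: with stored_len = 1000 and
+     * opt_len = 7900 bits, opt_lenb = (7900 + 3 + 7) >>> 3 = 988 bytes, so the
+     * test 1000 + 4 <= 988 fails and a compressed block is emitted instead of
+     * a stored one.)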
+ */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) This block is disabled in zlib defailts, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. 
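+   * (Editor's note: in this port's deflate.js the function is called as
+   * _tr_tally(s, 0, literalByte) for a literal and as
+   * _tr_tally(s, strstart - match_start, match_length - MIN_MATCH) for a
+   * match; a true return value tells the caller to flush the current block.)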
+ */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + +},{"../utils/common":3}],15:[function(require,module,exports){ +'use strict'; + + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + +},{}],"/":[function(require,module,exports){ +// Top level file is just a mixin of submodules & constants +'use strict'; + +var assign = require('./lib/utils/common').assign; + +var deflate = require('./lib/deflate'); +var inflate = require('./lib/inflate'); +var constants = require('./lib/zlib/constants'); + +var pako = {}; + +assign(pako, deflate, inflate, constants); + +module.exports = pako; + +},{"./lib/deflate":1,"./lib/inflate":2,"./lib/utils/common":3,"./lib/zlib/constants":6}]},{},[])("/") +}); \ No newline at end of file diff --git a/node_modules/pako/dist/pako.min.js b/node_modules/pako/dist/pako.min.js new file mode 100644 index 0000000..c2b86b6 --- /dev/null +++ b/node_modules/pako/dist/pako.min.js @@ -0,0 +1,3 @@ +/* pako 0.2.9 nodeca/pako */ +!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,e.pako=t()}}(function(){return function t(e,a,i){function n(s,o){if(!a[s]){if(!e[s]){var l="function"==typeof require&&require;if(!o&&l)return l(s,!0);if(r)return r(s,!0);var h=new Error("Cannot find module '"+s+"'");throw h.code="MODULE_NOT_FOUND",h}var d=a[s]={exports:{}};e[s][0].call(d.exports,function(t){var a=e[s][1][t];return n(a?a:t)},d,d.exports,t,e,a,i)}return a[s].exports}for(var r="function"==typeof require&&require,s=0;s0?e.windowBits=-e.windowBits:e.gzip&&e.windowBits>0&&e.windowBits<16&&(e.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new f,this.strm.avail_out=0;var a=o.deflateInit2(this.strm,e.level,e.method,e.windowBits,e.memLevel,e.strategy);if(a!==b)throw new Error(d[a]);if(e.header&&o.deflateSetHeader(this.strm,e.header),e.dictionary){var n;if(n="string"==typeof e.dictionary?h.string2buf(e.dictionary):"[object ArrayBuffer]"===_.call(e.dictionary)?new Uint8Array(e.dictionary):e.dictionary,a=o.deflateSetDictionary(this.strm,n),a!==b)throw new Error(d[a]);this._dict_set=!0}}function n(t,e){var a=new i(e);if(a.push(t,!0),a.err)throw a.msg;return a.result}function r(t,e){return e=e||{},e.raw=!0,n(t,e)}function s(t,e){return e=e||{},e.gzip=!0,n(t,e)}var 
o=t("./zlib/deflate"),l=t("./utils/common"),h=t("./utils/strings"),d=t("./zlib/messages"),f=t("./zlib/zstream"),_=Object.prototype.toString,u=0,c=4,b=0,g=1,m=2,w=-1,p=0,v=8;i.prototype.push=function(t,e){var a,i,n=this.strm,r=this.options.chunkSize;if(this.ended)return!1;i=e===~~e?e:e===!0?c:u,"string"==typeof t?n.input=h.string2buf(t):"[object ArrayBuffer]"===_.call(t)?n.input=new Uint8Array(t):n.input=t,n.next_in=0,n.avail_in=n.input.length;do{if(0===n.avail_out&&(n.output=new l.Buf8(r),n.next_out=0,n.avail_out=r),a=o.deflate(n,i),a!==g&&a!==b)return this.onEnd(a),this.ended=!0,!1;0!==n.avail_out&&(0!==n.avail_in||i!==c&&i!==m)||("string"===this.options.to?this.onData(h.buf2binstring(l.shrinkBuf(n.output,n.next_out))):this.onData(l.shrinkBuf(n.output,n.next_out)))}while((n.avail_in>0||0===n.avail_out)&&a!==g);return i===c?(a=o.deflateEnd(this.strm),this.onEnd(a),this.ended=!0,a===b):i!==m||(this.onEnd(b),n.avail_out=0,!0)},i.prototype.onData=function(t){this.chunks.push(t)},i.prototype.onEnd=function(t){t===b&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=l.flattenChunks(this.chunks)),this.chunks=[],this.err=t,this.msg=this.strm.msg},a.Deflate=i,a.deflate=n,a.deflateRaw=r,a.gzip=s},{"./utils/common":3,"./utils/strings":4,"./zlib/deflate":8,"./zlib/messages":13,"./zlib/zstream":15}],2:[function(t,e,a){"use strict";function i(t){if(!(this instanceof i))return new i(t);this.options=o.assign({chunkSize:16384,windowBits:0,to:""},t||{});var e=this.options;e.raw&&e.windowBits>=0&&e.windowBits<16&&(e.windowBits=-e.windowBits,0===e.windowBits&&(e.windowBits=-15)),!(e.windowBits>=0&&e.windowBits<16)||t&&t.windowBits||(e.windowBits+=32),e.windowBits>15&&e.windowBits<48&&0===(15&e.windowBits)&&(e.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new f,this.strm.avail_out=0;var a=s.inflateInit2(this.strm,e.windowBits);if(a!==h.Z_OK)throw new Error(d[a]);this.header=new _,s.inflateGetHeader(this.strm,this.header)}function n(t,e){var a=new i(e);if(a.push(t,!0),a.err)throw a.msg;return a.result}function r(t,e){return e=e||{},e.raw=!0,n(t,e)}var s=t("./zlib/inflate"),o=t("./utils/common"),l=t("./utils/strings"),h=t("./zlib/constants"),d=t("./zlib/messages"),f=t("./zlib/zstream"),_=t("./zlib/gzheader"),u=Object.prototype.toString;i.prototype.push=function(t,e){var a,i,n,r,d,f,_=this.strm,c=this.options.chunkSize,b=this.options.dictionary,g=!1;if(this.ended)return!1;i=e===~~e?e:e===!0?h.Z_FINISH:h.Z_NO_FLUSH,"string"==typeof t?_.input=l.binstring2buf(t):"[object ArrayBuffer]"===u.call(t)?_.input=new Uint8Array(t):_.input=t,_.next_in=0,_.avail_in=_.input.length;do{if(0===_.avail_out&&(_.output=new o.Buf8(c),_.next_out=0,_.avail_out=c),a=s.inflate(_,h.Z_NO_FLUSH),a===h.Z_NEED_DICT&&b&&(f="string"==typeof b?l.string2buf(b):"[object ArrayBuffer]"===u.call(b)?new Uint8Array(b):b,a=s.inflateSetDictionary(this.strm,f)),a===h.Z_BUF_ERROR&&g===!0&&(a=h.Z_OK,g=!1),a!==h.Z_STREAM_END&&a!==h.Z_OK)return this.onEnd(a),this.ended=!0,!1;_.next_out&&(0!==_.avail_out&&a!==h.Z_STREAM_END&&(0!==_.avail_in||i!==h.Z_FINISH&&i!==h.Z_SYNC_FLUSH)||("string"===this.options.to?(n=l.utf8border(_.output,_.next_out),r=_.next_out-n,d=l.buf2string(_.output,n),_.next_out=r,_.avail_out=c-r,r&&o.arraySet(_.output,_.output,n,r,0),this.onData(d)):this.onData(o.shrinkBuf(_.output,_.next_out)))),0===_.avail_in&&0===_.avail_out&&(g=!0)}while((_.avail_in>0||0===_.avail_out)&&a!==h.Z_STREAM_END);return 
a===h.Z_STREAM_END&&(i=h.Z_FINISH),i===h.Z_FINISH?(a=s.inflateEnd(this.strm),this.onEnd(a),this.ended=!0,a===h.Z_OK):i!==h.Z_SYNC_FLUSH||(this.onEnd(h.Z_OK),_.avail_out=0,!0)},i.prototype.onData=function(t){this.chunks.push(t)},i.prototype.onEnd=function(t){t===h.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=o.flattenChunks(this.chunks)),this.chunks=[],this.err=t,this.msg=this.strm.msg},a.Inflate=i,a.inflate=n,a.inflateRaw=r,a.ungzip=n},{"./utils/common":3,"./utils/strings":4,"./zlib/constants":6,"./zlib/gzheader":9,"./zlib/inflate":11,"./zlib/messages":13,"./zlib/zstream":15}],3:[function(t,e,a){"use strict";var i="undefined"!=typeof Uint8Array&&"undefined"!=typeof Uint16Array&&"undefined"!=typeof Int32Array;a.assign=function(t){for(var e=Array.prototype.slice.call(arguments,1);e.length;){var a=e.shift();if(a){if("object"!=typeof a)throw new TypeError(a+"must be non-object");for(var i in a)a.hasOwnProperty(i)&&(t[i]=a[i])}}return t},a.shrinkBuf=function(t,e){return t.length===e?t:t.subarray?t.subarray(0,e):(t.length=e,t)};var n={arraySet:function(t,e,a,i,n){if(e.subarray&&t.subarray)return void t.set(e.subarray(a,a+i),n);for(var r=0;r=252?6:l>=248?5:l>=240?4:l>=224?3:l>=192?2:1;o[254]=o[254]=1,a.string2buf=function(t){var e,a,i,r,s,o=t.length,l=0;for(r=0;r>>6,e[s++]=128|63&a):a<65536?(e[s++]=224|a>>>12,e[s++]=128|a>>>6&63,e[s++]=128|63&a):(e[s++]=240|a>>>18,e[s++]=128|a>>>12&63,e[s++]=128|a>>>6&63,e[s++]=128|63&a);return e},a.buf2binstring=function(t){return i(t,t.length)},a.binstring2buf=function(t){for(var e=new n.Buf8(t.length),a=0,i=e.length;a4)h[n++]=65533,a+=s-1;else{for(r&=2===s?31:3===s?15:7;s>1&&a1?h[n++]=65533:r<65536?h[n++]=r:(r-=65536,h[n++]=55296|r>>10&1023,h[n++]=56320|1023&r)}return i(h,n)},a.utf8border=function(t,e){var a;for(e=e||t.length,e>t.length&&(e=t.length),a=e-1;a>=0&&128===(192&t[a]);)a--;return a<0?e:0===a?e:a+o[t[a]]>e?a:e}},{"./common":3}],5:[function(t,e,a){"use strict";function i(t,e,a,i){for(var n=65535&t|0,r=t>>>16&65535|0,s=0;0!==a;){s=a>2e3?2e3:a,a-=s;do n=n+e[i++]|0,r=r+n|0;while(--s);n%=65521,r%=65521}return n|r<<16|0}e.exports=i},{}],6:[function(t,e,a){"use strict";e.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],7:[function(t,e,a){"use strict";function i(){for(var t,e=[],a=0;a<256;a++){t=a;for(var i=0;i<8;i++)t=1&t?3988292384^t>>>1:t>>>1;e[a]=t}return e}function n(t,e,a,i){var n=r,s=i+a;t^=-1;for(var o=i;o>>8^n[255&(t^e[o])];return t^-1}var r=i();e.exports=n},{}],8:[function(t,e,a){"use strict";function i(t,e){return t.msg=D[e],e}function n(t){return(t<<1)-(t>4?9:0)}function r(t){for(var e=t.length;--e>=0;)t[e]=0}function s(t){var e=t.state,a=e.pending;a>t.avail_out&&(a=t.avail_out),0!==a&&(R.arraySet(t.output,e.pending_buf,e.pending_out,a,t.next_out),t.next_out+=a,e.pending_out+=a,t.total_out+=a,t.avail_out-=a,e.pending-=a,0===e.pending&&(e.pending_out=0))}function o(t,e){C._tr_flush_block(t,t.block_start>=0?t.block_start:-1,t.strstart-t.block_start,e),t.block_start=t.strstart,s(t.strm)}function l(t,e){t.pending_buf[t.pending++]=e}function h(t,e){t.pending_buf[t.pending++]=e>>>8&255,t.pending_buf[t.pending++]=255&e}function d(t,e,a,i){var n=t.avail_in;return 
n>i&&(n=i),0===n?0:(t.avail_in-=n,R.arraySet(e,t.input,t.next_in,n,a),1===t.state.wrap?t.adler=N(t.adler,e,n,a):2===t.state.wrap&&(t.adler=O(t.adler,e,n,a)),t.next_in+=n,t.total_in+=n,n)}function f(t,e){var a,i,n=t.max_chain_length,r=t.strstart,s=t.prev_length,o=t.nice_match,l=t.strstart>t.w_size-ft?t.strstart-(t.w_size-ft):0,h=t.window,d=t.w_mask,f=t.prev,_=t.strstart+dt,u=h[r+s-1],c=h[r+s];t.prev_length>=t.good_match&&(n>>=2),o>t.lookahead&&(o=t.lookahead);do if(a=e,h[a+s]===c&&h[a+s-1]===u&&h[a]===h[r]&&h[++a]===h[r+1]){r+=2,a++;do;while(h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&h[++r]===h[++a]&&r<_);if(i=dt-(_-r),r=_-dt,i>s){if(t.match_start=e,s=i,i>=o)break;u=h[r+s-1],c=h[r+s]}}while((e=f[e&d])>l&&0!==--n);return s<=t.lookahead?s:t.lookahead}function _(t){var e,a,i,n,r,s=t.w_size;do{if(n=t.window_size-t.lookahead-t.strstart,t.strstart>=s+(s-ft)){R.arraySet(t.window,t.window,s,s,0),t.match_start-=s,t.strstart-=s,t.block_start-=s,a=t.hash_size,e=a;do i=t.head[--e],t.head[e]=i>=s?i-s:0;while(--a);a=s,e=a;do i=t.prev[--e],t.prev[e]=i>=s?i-s:0;while(--a);n+=s}if(0===t.strm.avail_in)break;if(a=d(t.strm,t.window,t.strstart+t.lookahead,n),t.lookahead+=a,t.lookahead+t.insert>=ht)for(r=t.strstart-t.insert,t.ins_h=t.window[r],t.ins_h=(t.ins_h<t.pending_buf_size-5&&(a=t.pending_buf_size-5);;){if(t.lookahead<=1){if(_(t),0===t.lookahead&&e===I)return vt;if(0===t.lookahead)break}t.strstart+=t.lookahead,t.lookahead=0;var i=t.block_start+a;if((0===t.strstart||t.strstart>=i)&&(t.lookahead=t.strstart-i,t.strstart=i,o(t,!1),0===t.strm.avail_out))return vt;if(t.strstart-t.block_start>=t.w_size-ft&&(o(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===F?(o(t,!0),0===t.strm.avail_out?yt:xt):t.strstart>t.block_start&&(o(t,!1),0===t.strm.avail_out)?vt:vt}function c(t,e){for(var a,i;;){if(t.lookahead=ht&&(t.ins_h=(t.ins_h<=ht)if(i=C._tr_tally(t,t.strstart-t.match_start,t.match_length-ht),t.lookahead-=t.match_length,t.match_length<=t.max_lazy_match&&t.lookahead>=ht){t.match_length--;do t.strstart++,t.ins_h=(t.ins_h<=ht&&(t.ins_h=(t.ins_h<4096)&&(t.match_length=ht-1)),t.prev_length>=ht&&t.match_length<=t.prev_length){n=t.strstart+t.lookahead-ht,i=C._tr_tally(t,t.strstart-1-t.prev_match,t.prev_length-ht),t.lookahead-=t.prev_length-1,t.prev_length-=2;do++t.strstart<=n&&(t.ins_h=(t.ins_h<=ht&&t.strstart>0&&(n=t.strstart-1,i=s[n],i===s[++n]&&i===s[++n]&&i===s[++n])){r=t.strstart+dt;do;while(i===s[++n]&&i===s[++n]&&i===s[++n]&&i===s[++n]&&i===s[++n]&&i===s[++n]&&i===s[++n]&&i===s[++n]&&nt.lookahead&&(t.match_length=t.lookahead)}if(t.match_length>=ht?(a=C._tr_tally(t,1,t.match_length-ht),t.lookahead-=t.match_length,t.strstart+=t.match_length,t.match_length=0):(a=C._tr_tally(t,0,t.window[t.strstart]),t.lookahead--,t.strstart++),a&&(o(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===F?(o(t,!0),0===t.strm.avail_out?yt:xt):t.last_lit&&(o(t,!1),0===t.strm.avail_out)?vt:kt}function m(t,e){for(var a;;){if(0===t.lookahead&&(_(t),0===t.lookahead)){if(e===I)return vt;break}if(t.match_length=0,a=C._tr_tally(t,0,t.window[t.strstart]),t.lookahead--,t.strstart++,a&&(o(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===F?(o(t,!0),0===t.strm.avail_out?yt:xt):t.last_lit&&(o(t,!1),0===t.strm.avail_out)?vt:kt}function w(t,e,a,i,n){this.good_length=t,this.max_lazy=e,this.nice_length=a,this.max_chain=i,this.func=n}function 
p(t){t.window_size=2*t.w_size,r(t.head),t.max_lazy_match=Z[t.level].max_lazy,t.good_match=Z[t.level].good_length,t.nice_match=Z[t.level].nice_length,t.max_chain_length=Z[t.level].max_chain,t.strstart=0,t.block_start=0,t.lookahead=0,t.insert=0,t.match_length=t.prev_length=ht-1,t.match_available=0,t.ins_h=0}function v(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=V,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new R.Buf16(2*ot),this.dyn_dtree=new R.Buf16(2*(2*rt+1)),this.bl_tree=new R.Buf16(2*(2*st+1)),r(this.dyn_ltree),r(this.dyn_dtree),r(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new R.Buf16(lt+1),this.heap=new R.Buf16(2*nt+1),r(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new R.Buf16(2*nt+1),r(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}function k(t){var e;return t&&t.state?(t.total_in=t.total_out=0,t.data_type=Q,e=t.state,e.pending=0,e.pending_out=0,e.wrap<0&&(e.wrap=-e.wrap),e.status=e.wrap?ut:wt,t.adler=2===e.wrap?0:1,e.last_flush=I,C._tr_init(e),H):i(t,K)}function y(t){var e=k(t);return e===H&&p(t.state),e}function x(t,e){return t&&t.state?2!==t.state.wrap?K:(t.state.gzhead=e,H):K}function z(t,e,a,n,r,s){if(!t)return K;var o=1;if(e===Y&&(e=6),n<0?(o=0,n=-n):n>15&&(o=2,n-=16),r<1||r>$||a!==V||n<8||n>15||e<0||e>9||s<0||s>W)return i(t,K);8===n&&(n=9);var l=new v;return t.state=l,l.strm=t,l.wrap=o,l.gzhead=null,l.w_bits=n,l.w_size=1<L||e<0)return t?i(t,K):K;if(o=t.state,!t.output||!t.input&&0!==t.avail_in||o.status===pt&&e!==F)return i(t,0===t.avail_out?P:K);if(o.strm=t,a=o.last_flush,o.last_flush=e,o.status===ut)if(2===o.wrap)t.adler=0,l(o,31),l(o,139),l(o,8),o.gzhead?(l(o,(o.gzhead.text?1:0)+(o.gzhead.hcrc?2:0)+(o.gzhead.extra?4:0)+(o.gzhead.name?8:0)+(o.gzhead.comment?16:0)),l(o,255&o.gzhead.time),l(o,o.gzhead.time>>8&255),l(o,o.gzhead.time>>16&255),l(o,o.gzhead.time>>24&255),l(o,9===o.level?2:o.strategy>=G||o.level<2?4:0),l(o,255&o.gzhead.os),o.gzhead.extra&&o.gzhead.extra.length&&(l(o,255&o.gzhead.extra.length),l(o,o.gzhead.extra.length>>8&255)),o.gzhead.hcrc&&(t.adler=O(t.adler,o.pending_buf,o.pending,0)),o.gzindex=0,o.status=ct):(l(o,0),l(o,0),l(o,0),l(o,0),l(o,0),l(o,9===o.level?2:o.strategy>=G||o.level<2?4:0),l(o,zt),o.status=wt);else{var 
_=V+(o.w_bits-8<<4)<<8,u=-1;u=o.strategy>=G||o.level<2?0:o.level<6?1:6===o.level?2:3,_|=u<<6,0!==o.strstart&&(_|=_t),_+=31-_%31,o.status=wt,h(o,_),0!==o.strstart&&(h(o,t.adler>>>16),h(o,65535&t.adler)),t.adler=1}if(o.status===ct)if(o.gzhead.extra){for(d=o.pending;o.gzindex<(65535&o.gzhead.extra.length)&&(o.pending!==o.pending_buf_size||(o.gzhead.hcrc&&o.pending>d&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),s(t),d=o.pending,o.pending!==o.pending_buf_size));)l(o,255&o.gzhead.extra[o.gzindex]),o.gzindex++;o.gzhead.hcrc&&o.pending>d&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),o.gzindex===o.gzhead.extra.length&&(o.gzindex=0,o.status=bt)}else o.status=bt;if(o.status===bt)if(o.gzhead.name){d=o.pending;do{if(o.pending===o.pending_buf_size&&(o.gzhead.hcrc&&o.pending>d&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),s(t),d=o.pending,o.pending===o.pending_buf_size)){f=1;break}f=o.gzindexd&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),0===f&&(o.gzindex=0,o.status=gt)}else o.status=gt;if(o.status===gt)if(o.gzhead.comment){d=o.pending;do{if(o.pending===o.pending_buf_size&&(o.gzhead.hcrc&&o.pending>d&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),s(t),d=o.pending,o.pending===o.pending_buf_size)){f=1;break}f=o.gzindexd&&(t.adler=O(t.adler,o.pending_buf,o.pending-d,d)),0===f&&(o.status=mt)}else o.status=mt;if(o.status===mt&&(o.gzhead.hcrc?(o.pending+2>o.pending_buf_size&&s(t),o.pending+2<=o.pending_buf_size&&(l(o,255&t.adler),l(o,t.adler>>8&255),t.adler=0,o.status=wt)):o.status=wt),0!==o.pending){if(s(t),0===t.avail_out)return o.last_flush=-1,H}else if(0===t.avail_in&&n(e)<=n(a)&&e!==F)return i(t,P);if(o.status===pt&&0!==t.avail_in)return i(t,P);if(0!==t.avail_in||0!==o.lookahead||e!==I&&o.status!==pt){var c=o.strategy===G?m(o,e):o.strategy===X?g(o,e):Z[o.level].func(o,e);if(c!==yt&&c!==xt||(o.status=pt),c===vt||c===yt)return 0===t.avail_out&&(o.last_flush=-1),H;if(c===kt&&(e===U?C._tr_align(o):e!==L&&(C._tr_stored_block(o,0,0,!1),e===T&&(r(o.head),0===o.lookahead&&(o.strstart=0,o.block_start=0,o.insert=0))),s(t),0===t.avail_out))return o.last_flush=-1,H}return e!==F?H:o.wrap<=0?j:(2===o.wrap?(l(o,255&t.adler),l(o,t.adler>>8&255),l(o,t.adler>>16&255),l(o,t.adler>>24&255),l(o,255&t.total_in),l(o,t.total_in>>8&255),l(o,t.total_in>>16&255),l(o,t.total_in>>24&255)):(h(o,t.adler>>>16),h(o,65535&t.adler)),s(t),o.wrap>0&&(o.wrap=-o.wrap),0!==o.pending?H:j)}function E(t){var e;return t&&t.state?(e=t.state.status,e!==ut&&e!==ct&&e!==bt&&e!==gt&&e!==mt&&e!==wt&&e!==pt?i(t,K):(t.state=null,e===wt?i(t,M):H)):K}function A(t,e){var a,i,n,s,o,l,h,d,f=e.length;if(!t||!t.state)return K;if(a=t.state,s=a.wrap,2===s||1===s&&a.status!==ut||a.lookahead)return K;for(1===s&&(t.adler=N(t.adler,e,f,0)),a.wrap=0,f>=a.w_size&&(0===s&&(r(a.head),a.strstart=0,a.block_start=0,a.insert=0),d=new R.Buf8(a.w_size),R.arraySet(d,e,f-a.w_size,a.w_size,0),e=d,f=a.w_size),o=t.avail_in,l=t.next_in,h=t.input,t.avail_in=f,t.next_in=0,t.input=e,_(a);a.lookahead>=ht;){i=a.strstart,n=a.lookahead-(ht-1);do a.ins_h=(a.ins_h<>>24,b>>>=y,g-=y,y=k>>>16&255,0===y)A[o++]=65535&k;else{if(!(16&y)){if(0===(64&y)){k=m[(65535&k)+(b&(1<>>=y,g-=y),g<15&&(b+=E[r++]<>>24,b>>>=y,g-=y,y=k>>>16&255,!(16&y)){if(0===(64&y)){k=w[(65535&k)+(b&(1<d){t.msg="invalid distance too far back",a.mode=i;break t}if(b>>>=y,g-=y,y=o-l,z>y){if(y=z-y,y>_&&a.sane){t.msg="invalid distance too far back",a.mode=i;break t}if(B=0,S=c,0===u){if(B+=f-y,y2;)A[o++]=S[B++],A[o++]=S[B++],A[o++]=S[B++],x-=3;x&&(A[o++]=S[B++],x>1&&(A[o++]=S[B++]))}else{B=o-z;do 
A[o++]=A[B++],A[o++]=A[B++],A[o++]=A[B++],x-=3;while(x>2);x&&(A[o++]=A[B++],x>1&&(A[o++]=A[B++]))}break}}break}}while(r>3,r-=x,g-=x<<3,b&=(1<>>24&255)+(t>>>8&65280)+((65280&t)<<8)+((255&t)<<24)}function n(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new w.Buf16(320),this.work=new w.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function r(t){var e;return t&&t.state?(e=t.state,t.total_in=t.total_out=e.total=0,t.msg="",e.wrap&&(t.adler=1&e.wrap),e.mode=T,e.last=0,e.havedict=0,e.dmax=32768,e.head=null,e.hold=0,e.bits=0,e.lencode=e.lendyn=new w.Buf32(bt),e.distcode=e.distdyn=new w.Buf32(gt),e.sane=1,e.back=-1,Z):N}function s(t){var e;return t&&t.state?(e=t.state,e.wsize=0,e.whave=0,e.wnext=0,r(t)):N}function o(t,e){var a,i;return t&&t.state?(i=t.state,e<0?(a=0,e=-e):(a=(e>>4)+1,e<48&&(e&=15)),e&&(e<8||e>15)?N:(null!==i.window&&i.wbits!==e&&(i.window=null),i.wrap=a,i.wbits=e,s(t))):N}function l(t,e){var a,i;return t?(i=new n,t.state=i,i.window=null,a=o(t,e),a!==Z&&(t.state=null),a):N}function h(t){return l(t,wt)}function d(t){if(pt){var e;for(g=new w.Buf32(512),m=new w.Buf32(32),e=0;e<144;)t.lens[e++]=8;for(;e<256;)t.lens[e++]=9;for(;e<280;)t.lens[e++]=7;for(;e<288;)t.lens[e++]=8;for(y(z,t.lens,0,288,g,0,t.work,{bits:9}),e=0;e<32;)t.lens[e++]=5;y(B,t.lens,0,32,m,0,t.work,{bits:5}),pt=!1}t.lencode=g,t.lenbits=9,t.distcode=m,t.distbits=5}function f(t,e,a,i){var n,r=t.state;return null===r.window&&(r.wsize=1<=r.wsize?(w.arraySet(r.window,e,a-r.wsize,r.wsize,0),r.wnext=0,r.whave=r.wsize):(n=r.wsize-r.wnext,n>i&&(n=i),w.arraySet(r.window,e,a-i,n,r.wnext),i-=n,i?(w.arraySet(r.window,e,a-i,i,0),r.wnext=i,r.whave=r.wsize):(r.wnext+=n,r.wnext===r.wsize&&(r.wnext=0),r.whave>>8&255,a.check=v(a.check,Et,2,0),_=0,u=0,a.mode=F;break}if(a.flags=0,a.head&&(a.head.done=!1),!(1&a.wrap)||(((255&_)<<8)+(_>>8))%31){t.msg="incorrect header check",a.mode=_t;break}if((15&_)!==U){t.msg="unknown compression method",a.mode=_t;break}if(_>>>=4,u-=4,yt=(15&_)+8,0===a.wbits)a.wbits=yt;else if(yt>a.wbits){t.msg="invalid window size",a.mode=_t;break}a.dmax=1<>8&1),512&a.flags&&(Et[0]=255&_,Et[1]=_>>>8&255,a.check=v(a.check,Et,2,0)),_=0,u=0,a.mode=L;case L:for(;u<32;){if(0===l)break t;l--,_+=n[s++]<>>8&255,Et[2]=_>>>16&255,Et[3]=_>>>24&255,a.check=v(a.check,Et,4,0)),_=0,u=0,a.mode=H;case H:for(;u<16;){if(0===l)break t;l--,_+=n[s++]<>8),512&a.flags&&(Et[0]=255&_,Et[1]=_>>>8&255,a.check=v(a.check,Et,2,0)),_=0,u=0,a.mode=j;case j:if(1024&a.flags){for(;u<16;){if(0===l)break t;l--,_+=n[s++]<>>8&255,a.check=v(a.check,Et,2,0)),_=0,u=0}else a.head&&(a.head.extra=null);a.mode=K;case K:if(1024&a.flags&&(g=a.length,g>l&&(g=l),g&&(a.head&&(yt=a.head.extra_len-a.length,a.head.extra||(a.head.extra=new Array(a.head.extra_len)),w.arraySet(a.head.extra,n,s,g,yt)),512&a.flags&&(a.check=v(a.check,n,g,s)),l-=g,s+=g,a.length-=g),a.length))break t;a.length=0,a.mode=M;case M:if(2048&a.flags){if(0===l)break t;g=0;do yt=n[s+g++],a.head&&yt&&a.length<65536&&(a.head.name+=String.fromCharCode(yt));while(yt&&g>9&1,a.head.done=!0),t.adler=a.check=0,a.mode=X;break;case q:for(;u<32;){if(0===l)break t;l--,_+=n[s++]<>>=7&u,u-=7&u,a.mode=ht;break}for(;u<3;){if(0===l)break 
t;l--,_+=n[s++]<>>=1,u-=1,3&_){case 0:a.mode=J;break;case 1:if(d(a),a.mode=at,e===A){_>>>=2,u-=2;break t}break;case 2:a.mode=$;break;case 3:t.msg="invalid block type",a.mode=_t}_>>>=2,u-=2;break;case J:for(_>>>=7&u,u-=7&u;u<32;){if(0===l)break t;l--,_+=n[s++]<>>16^65535)){t.msg="invalid stored block lengths",a.mode=_t;break}if(a.length=65535&_,_=0,u=0,a.mode=Q,e===A)break t;case Q:a.mode=V;case V:if(g=a.length){if(g>l&&(g=l),g>h&&(g=h),0===g)break t;w.arraySet(r,n,s,g,o),l-=g,s+=g,h-=g,o+=g,a.length-=g;break}a.mode=X;break;case $:for(;u<14;){if(0===l)break t; +l--,_+=n[s++]<>>=5,u-=5,a.ndist=(31&_)+1,_>>>=5,u-=5,a.ncode=(15&_)+4,_>>>=4,u-=4,a.nlen>286||a.ndist>30){t.msg="too many length or distance symbols",a.mode=_t;break}a.have=0,a.mode=tt;case tt:for(;a.have>>=3,u-=3}for(;a.have<19;)a.lens[At[a.have++]]=0;if(a.lencode=a.lendyn,a.lenbits=7,zt={bits:a.lenbits},xt=y(x,a.lens,0,19,a.lencode,0,a.work,zt),a.lenbits=zt.bits,xt){t.msg="invalid code lengths set",a.mode=_t;break}a.have=0,a.mode=et;case et:for(;a.have>>24,mt=St>>>16&255,wt=65535&St,!(gt<=u);){if(0===l)break t;l--,_+=n[s++]<>>=gt,u-=gt,a.lens[a.have++]=wt;else{if(16===wt){for(Bt=gt+2;u>>=gt,u-=gt,0===a.have){t.msg="invalid bit length repeat",a.mode=_t;break}yt=a.lens[a.have-1],g=3+(3&_),_>>>=2,u-=2}else if(17===wt){for(Bt=gt+3;u>>=gt,u-=gt,yt=0,g=3+(7&_),_>>>=3,u-=3}else{for(Bt=gt+7;u>>=gt,u-=gt,yt=0,g=11+(127&_),_>>>=7,u-=7}if(a.have+g>a.nlen+a.ndist){t.msg="invalid bit length repeat",a.mode=_t;break}for(;g--;)a.lens[a.have++]=yt}}if(a.mode===_t)break;if(0===a.lens[256]){t.msg="invalid code -- missing end-of-block",a.mode=_t;break}if(a.lenbits=9,zt={bits:a.lenbits},xt=y(z,a.lens,0,a.nlen,a.lencode,0,a.work,zt),a.lenbits=zt.bits,xt){t.msg="invalid literal/lengths set",a.mode=_t;break}if(a.distbits=6,a.distcode=a.distdyn,zt={bits:a.distbits},xt=y(B,a.lens,a.nlen,a.ndist,a.distcode,0,a.work,zt),a.distbits=zt.bits,xt){t.msg="invalid distances set",a.mode=_t;break}if(a.mode=at,e===A)break t;case at:a.mode=it;case it:if(l>=6&&h>=258){t.next_out=o,t.avail_out=h,t.next_in=s,t.avail_in=l,a.hold=_,a.bits=u,k(t,b),o=t.next_out,r=t.output,h=t.avail_out,s=t.next_in,n=t.input,l=t.avail_in,_=a.hold,u=a.bits,a.mode===X&&(a.back=-1);break}for(a.back=0;St=a.lencode[_&(1<>>24,mt=St>>>16&255,wt=65535&St,!(gt<=u);){if(0===l)break t;l--,_+=n[s++]<>pt)],gt=St>>>24,mt=St>>>16&255,wt=65535&St,!(pt+gt<=u);){if(0===l)break t;l--,_+=n[s++]<>>=pt,u-=pt,a.back+=pt}if(_>>>=gt,u-=gt,a.back+=gt,a.length=wt,0===mt){a.mode=lt;break}if(32&mt){a.back=-1,a.mode=X;break}if(64&mt){t.msg="invalid literal/length code",a.mode=_t;break}a.extra=15&mt,a.mode=nt;case nt:if(a.extra){for(Bt=a.extra;u>>=a.extra,u-=a.extra,a.back+=a.extra}a.was=a.length,a.mode=rt;case rt:for(;St=a.distcode[_&(1<>>24,mt=St>>>16&255,wt=65535&St,!(gt<=u);){if(0===l)break t;l--,_+=n[s++]<>pt)],gt=St>>>24,mt=St>>>16&255,wt=65535&St,!(pt+gt<=u);){if(0===l)break t;l--,_+=n[s++]<>>=pt,u-=pt,a.back+=pt}if(_>>>=gt,u-=gt,a.back+=gt,64&mt){t.msg="invalid distance code",a.mode=_t;break}a.offset=wt,a.extra=15&mt,a.mode=st;case st:if(a.extra){for(Bt=a.extra;u>>=a.extra,u-=a.extra,a.back+=a.extra}if(a.offset>a.dmax){t.msg="invalid distance too far back",a.mode=_t;break}a.mode=ot;case ot:if(0===h)break t;if(g=b-h,a.offset>g){if(g=a.offset-g,g>a.whave&&a.sane){t.msg="invalid distance too far back",a.mode=_t;break}g>a.wnext?(g-=a.wnext,m=a.wsize-g):m=a.wnext-g,g>a.length&&(g=a.length),bt=a.window}else bt=r,m=o-a.offset,g=a.length;g>h&&(g=h),h-=g,a.length-=g;do 
r[o++]=bt[m++];while(--g);0===a.length&&(a.mode=it);break;case lt:if(0===h)break t;r[o++]=a.length,h--,a.mode=it;break;case ht:if(a.wrap){for(;u<32;){if(0===l)break t;l--,_|=n[s++]<=1&&0===j[N];N--);if(O>N&&(O=N),0===N)return b[g++]=20971520,b[g++]=20971520,w.bits=1,0;for(C=1;C0&&(t===o||1!==N))return-1;for(K[1]=0,Z=1;Zr||t===h&&T>s)return 1;for(var Y=0;;){Y++,B=Z-I,m[R]z?(S=M[P+m[R]],E=L[H+m[R]]):(S=96,E=0),p=1<>I)+v]=B<<24|S<<16|E|0;while(0!==v);for(p=1<>=1;if(0!==p?(F&=p-1,F+=p):F=0,R++,0===--j[Z]){if(Z===N)break;Z=e[a+m[R]]}if(Z>O&&(F&y)!==k){for(0===I&&(I=O),x+=C,D=Z-I,U=1<r||t===h&&T>s)return 1;k=F&y,b[k]=O<<24|D<<16|x-g|0}}return 0!==F&&(b[x+F]=Z-I<<24|64<<16|0),w.bits=O,0}},{"../utils/common":3}],13:[function(t,e,a){"use strict";e.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible version"}},{}],14:[function(t,e,a){"use strict";function i(t){for(var e=t.length;--e>=0;)t[e]=0}function n(t,e,a,i,n){this.static_tree=t,this.extra_bits=e,this.extra_base=a,this.elems=i,this.max_length=n,this.has_stree=t&&t.length}function r(t,e){this.dyn_tree=t,this.max_code=0,this.stat_desc=e}function s(t){return t<256?lt[t]:lt[256+(t>>>7)]}function o(t,e){t.pending_buf[t.pending++]=255&e,t.pending_buf[t.pending++]=e>>>8&255}function l(t,e,a){t.bi_valid>W-a?(t.bi_buf|=e<>W-t.bi_valid,t.bi_valid+=a-W):(t.bi_buf|=e<>>=1,a<<=1;while(--e>0);return a>>>1}function f(t){16===t.bi_valid?(o(t,t.bi_buf),t.bi_buf=0,t.bi_valid=0):t.bi_valid>=8&&(t.pending_buf[t.pending++]=255&t.bi_buf,t.bi_buf>>=8,t.bi_valid-=8)}function _(t,e){var a,i,n,r,s,o,l=e.dyn_tree,h=e.max_code,d=e.stat_desc.static_tree,f=e.stat_desc.has_stree,_=e.stat_desc.extra_bits,u=e.stat_desc.extra_base,c=e.stat_desc.max_length,b=0;for(r=0;r<=X;r++)t.bl_count[r]=0;for(l[2*t.heap[t.heap_max]+1]=0,a=t.heap_max+1;ac&&(r=c,b++),l[2*i+1]=r,i>h||(t.bl_count[r]++,s=0,i>=u&&(s=_[i-u]),o=l[2*i],t.opt_len+=o*(r+s),f&&(t.static_len+=o*(d[2*i+1]+s)));if(0!==b){do{for(r=c-1;0===t.bl_count[r];)r--;t.bl_count[r]--,t.bl_count[r+1]+=2,t.bl_count[c]--,b-=2}while(b>0);for(r=c;0!==r;r--)for(i=t.bl_count[r];0!==i;)n=t.heap[--a],n>h||(l[2*n+1]!==r&&(t.opt_len+=(r-l[2*n+1])*l[2*n],l[2*n+1]=r),i--)}}function u(t,e,a){var i,n,r=new Array(X+1),s=0;for(i=1;i<=X;i++)r[i]=s=s+a[i-1]<<1;for(n=0;n<=e;n++){var o=t[2*n+1];0!==o&&(t[2*n]=d(r[o]++,o))}}function c(){var t,e,a,i,r,s=new Array(X+1);for(a=0,i=0;i>=7;i8?o(t,t.bi_buf):t.bi_valid>0&&(t.pending_buf[t.pending++]=t.bi_buf),t.bi_buf=0,t.bi_valid=0}function m(t,e,a,i){g(t),i&&(o(t,a),o(t,~a)),N.arraySet(t.pending_buf,t.window,e,a,t.pending),t.pending+=a}function w(t,e,a,i){var n=2*e,r=2*a;return t[n]>1;a>=1;a--)p(t,r,a);n=l;do a=t.heap[1],t.heap[1]=t.heap[t.heap_len--],p(t,r,1),i=t.heap[1],t.heap[--t.heap_max]=a,t.heap[--t.heap_max]=i,r[2*n]=r[2*a]+r[2*i],t.depth[n]=(t.depth[a]>=t.depth[i]?t.depth[a]:t.depth[i])+1,r[2*a+1]=r[2*i+1]=n,t.heap[1]=n++,p(t,r,1);while(t.heap_len>=2);t.heap[--t.heap_max]=t.heap[1],_(t,e),u(r,h,t.bl_count)}function y(t,e,a){var i,n,r=-1,s=e[1],o=0,l=7,h=4;for(0===s&&(l=138,h=3),e[2*(a+1)+1]=65535,i=0;i<=a;i++)n=s,s=e[2*(i+1)+1],++o=3&&0===t.bl_tree[2*nt[e]+1];e--);return t.opt_len+=3*(e+1)+5+5+4,e}function B(t,e,a,i){var n;for(l(t,e-257,5),l(t,a-1,5),l(t,i-4,4),n=0;n>>=1)if(1&a&&0!==t.dyn_ltree[2*e])return D;if(0!==t.dyn_ltree[18]||0!==t.dyn_ltree[20]||0!==t.dyn_ltree[26])return 
I;for(e=32;e0?(t.strm.data_type===U&&(t.strm.data_type=S(t)),k(t,t.l_desc),k(t,t.d_desc),s=z(t),n=t.opt_len+3+7>>>3,r=t.static_len+3+7>>>3,r<=n&&(n=r)):n=r=a+5,a+4<=n&&e!==-1?A(t,e,a,i):t.strategy===O||r===n?(l(t,(F<<1)+(i?1:0),3),v(t,st,ot)):(l(t,(L<<1)+(i?1:0),3),B(t,t.l_desc.max_code+1,t.d_desc.max_code+1,s+1),v(t,t.dyn_ltree,t.dyn_dtree)),b(t),i&&g(t)}function C(t,e,a){return t.pending_buf[t.d_buf+2*t.last_lit]=e>>>8&255,t.pending_buf[t.d_buf+2*t.last_lit+1]=255&e,t.pending_buf[t.l_buf+t.last_lit]=255&a,t.last_lit++,0===e?t.dyn_ltree[2*a]++:(t.matches++,e--,t.dyn_ltree[2*(ht[a]+M+1)]++,t.dyn_dtree[2*s(e)]++),t.last_lit===t.lit_bufsize-1}var N=t("../utils/common"),O=4,D=0,I=1,U=2,T=0,F=1,L=2,H=3,j=258,K=29,M=256,P=M+1+K,Y=30,q=19,G=2*P+1,X=15,W=16,J=7,Q=256,V=16,$=17,tt=18,et=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],at=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],it=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],nt=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],rt=512,st=new Array(2*(P+2));i(st);var ot=new Array(2*Y);i(ot);var lt=new Array(rt);i(lt);var ht=new Array(j-H+1);i(ht);var dt=new Array(K);i(dt);var ft=new Array(Y);i(ft);var _t,ut,ct,bt=!1;a._tr_init=E,a._tr_stored_block=A,a._tr_flush_block=R,a._tr_tally=C,a._tr_align=Z},{"../utils/common":3}],15:[function(t,e,a){"use strict";function i(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}e.exports=i},{}],"/":[function(t,e,a){"use strict";var i=t("./lib/utils/common").assign,n=t("./lib/deflate"),r=t("./lib/inflate"),s=t("./lib/zlib/constants"),o={};i(o,n,r,s),e.exports=o},{"./lib/deflate":1,"./lib/inflate":2,"./lib/utils/common":3,"./lib/zlib/constants":6}]},{},[])("/")}); diff --git a/node_modules/pako/dist/pako_deflate.js b/node_modules/pako/dist/pako_deflate.js new file mode 100644 index 0000000..a5f9572 --- /dev/null +++ b/node_modules/pako/dist/pako_deflate.js @@ -0,0 +1,3879 @@ +/* pako 0.2.9 nodeca/pako */(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.pako = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1); +} +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start + + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 
3 : 4; + } + + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } + } + + return buf; +}; + +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // use fallback for big arrays to avoid stack overflow + if (len < 65537) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; +} + + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; +}; + + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? + if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } + } + + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. +// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { + var pos; + + max = max || buf.length; + if (max > buf.length) { max = buf.length; } + + // go back from last position, until start of sequence found + pos = max - 1; + while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + + // Fuckup - very small and broken sequence, + // return max, because we should return something anyway. + if (pos < 0) { return max; } + + // If we came to start of buffer - that means vuffer is too small, + // return max too. 
+ if (pos === 0) { return max; } + + return (pos + _utf8len[buf[pos]] > max) ? pos : max; +}; + +},{"./common":1}],3:[function(require,module,exports){ +'use strict'; + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It doesn't worth to make additional optimizationa as in original. +// Small size is preferable. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + +},{}],4:[function(require,module,exports){ +'use strict'; + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. +var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + +},{}],5:[function(require,module,exports){ +'use strict'; + +var utils = require('../utils/common'); +var trees = require('./trees'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var msg = require('./messages'); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. 
+ * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). + */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. 
Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. + */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). + */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. 
(Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. + */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. 
+// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. + */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. 
+ */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? 
s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. + */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. 
+ */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous 
deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. + */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. 
+ */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. 
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + 
s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
+ */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
+ */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + +},{"../utils/common":1,"./adler32":3,"./crc32":4,"./messages":6,"./trees":7}],6:[function(require,module,exports){ +'use strict'; + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + +},{}],7:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); + +/* Public constants ==========================================================*/ +/* 
===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. + */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. + */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array insdead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) 
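+ * Each of the D_CODES entries is simply assigned code bi_reverse(n, 5) in
+ * tr_static_init() below, so no heap construction is needed for this tree.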
+ */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. + */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. 
+ * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). + */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) + */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. 
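+ * As a worked example (the sample from RFC 1951, not from the zlib sources):
+ * code lengths {2,3,3,3,3,3,4,4} give bl_count = [0,0,1,5,2], so the loop
+ * below computes next_code = 0b00 for length 2, 0b010 for the first
+ * length-3 code and 0b1110 for the first length-4 code; each code is then
+ * stored bit-reversed, because deflate emits codes LSB first.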
+ */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. + */ + //Assert (code + bl_count[MAX_BITS]-1 == (1< length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5); + } + + // Now data ready and we can init static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. 
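+ * All Freq counters are cleared; only END_BLOCK keeps a frequency of 1 so
+ * that every block can always be terminated.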
*/ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. + */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. 
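+ * For instance, with frequencies A:5, B:2, C:1 the merge loop further below
+ * first combines B and C into an internal node of frequency 3, then combines
+ * that node with A, so A ends up with a 1-bit code and B and C with 2-bit
+ * codes (an illustrative toy alphabet, not data from the stream).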
+ */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
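+ * Runs of equal lengths are counted here so that send_tree() can emit them
+ * compactly: e.g. a run of 13 zero lengths becomes one REPZ_11_138 symbol
+ * whose 7-bit extra field carries 13 - 11 = 2.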
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. + */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. + */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. 
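+ * black_mask is shifted right once per byte value, so on each iteration its
+ * low bit tells whether byte value n is black-listed.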
*/ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
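+ * (For example, an opt_len of 100 bits gives (100 + 3 + 7) >>> 3 = 13 bytes;
+ * the extra 3 bits are the block-type header, the 7 rounds up to a byte.)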
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) 
This block is disabled in zlib defailts, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. + */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + +},{"../utils/common":1}],8:[function(require,module,exports){ +'use strict'; + + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + +},{}],"/lib/deflate.js":[function(require,module,exports){ +'use strict'; + + +var zlib_deflate = require('./zlib/deflate'); +var utils = require('./utils/common'); +var strings = require('./utils/strings'); +var msg = require('./zlib/messages'); +var ZStream = require('./zlib/zstream'); + +var toString = Object.prototype.toString; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + +var Z_NO_FLUSH = 0; +var Z_FINISH = 4; + +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_SYNC_FLUSH = 2; + +var Z_DEFAULT_COMPRESSION = -1; + +var Z_DEFAULT_STRATEGY = 0; + +var Z_DEFLATED = 8; + +/* ===========================================================================*/ + + +/** + * class Deflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[deflate]], + * [[deflateRaw]] and [[gzip]]. + **/ + +/* internal + * Deflate.chunks -> Array + * + * Chunks of output data, if [[Deflate#onData]] not overriden. + **/ + +/** + * Deflate.result -> Uint8Array|Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. 
Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Deflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. + * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ + +/** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + +/** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ +function Deflate(options) { + if (!(this instanceof Deflate)) return new Deflate(options); + + this.options = utils.assign({ + level: Z_DEFAULT_COMPRESSION, + method: Z_DEFLATED, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY, + to: '' + }, options || {}); + + var opt = this.options; + + if (opt.raw && (opt.windowBits > 0)) { + opt.windowBits = -opt.windowBits; + } + + else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { + opt.windowBits += 16; + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_deflate.deflateInit2( + this.strm, + opt.level, + opt.method, + opt.windowBits, + opt.memLevel, + opt.strategy + ); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + if (opt.header) { + zlib_deflate.deflateSetHeader(this.strm, opt.header); + } + + if (opt.dictionary) { + var dict; + // Convert data if needed + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. 
+ dict = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary); + } else { + dict = opt.dictionary; + } + + status = zlib_deflate.deflateSetDictionary(this.strm, dict); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + this._dict_set = true; + } +} + +/** + * Deflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data. Strings will be + * converted to utf8 byte sequence. + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the compression context. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * array format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Deflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var status, _mode; + + if (this.ended) { return false; } + + _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ + + if (status !== Z_STREAM_END && status !== Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { + if (this.options.to === 'string') { + this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); + + // Finalize on the last chunk. + if (_mode === Z_FINISH) { + status = zlib_deflate.deflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === Z_SYNC_FLUSH) { + this.onEnd(Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. 
+ * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK) { + if (this.options.to === 'string') { + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * deflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ +function deflate(input, options) { + var deflator = new Deflate(options); + + deflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (deflator.err) { throw deflator.msg; } + + return deflator.result; +} + + +/** + * deflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function deflateRaw(input, options) { + options = options || {}; + options.raw = true; + return deflate(input, options); +} + + +/** + * gzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. 
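+ *
+ * ##### Example (a minimal sketch, mirroring the [[deflate]] example above):
+ *
+ * ```javascript
+ * var pako = require('pako')
+ *   , data = new Uint8Array([1,2,3,4,5,6,7,8,9]);
+ *
+ * console.log(pako.gzip(data));
+ * ```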
+ **/ +function gzip(input, options) { + options = options || {}; + options.gzip = true; + return deflate(input, options); +} + + +exports.Deflate = Deflate; +exports.deflate = deflate; +exports.deflateRaw = deflateRaw; +exports.gzip = gzip; + +},{"./utils/common":1,"./utils/strings":2,"./zlib/deflate":5,"./zlib/messages":6,"./zlib/zstream":8}]},{},[])("/lib/deflate.js") +}); \ No newline at end of file diff --git a/node_modules/pako/dist/pako_deflate.min.js b/node_modules/pako/dist/pako_deflate.min.js new file mode 100644 index 0000000..b307f85 --- /dev/null +++ b/node_modules/pako/dist/pako_deflate.min.js @@ -0,0 +1,2 @@ +/* pako 0.2.9 nodeca/pako */ +!function(t){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var e;e="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,e.pako=t()}}(function(){return function t(e,a,n){function r(s,h){if(!a[s]){if(!e[s]){var l="function"==typeof require&&require;if(!h&&l)return l(s,!0);if(i)return i(s,!0);var o=new Error("Cannot find module '"+s+"'");throw o.code="MODULE_NOT_FOUND",o}var _=a[s]={exports:{}};e[s][0].call(_.exports,function(t){var a=e[s][1][t];return r(a?a:t)},_,_.exports,t,e,a,n)}return a[s].exports}for(var i="function"==typeof require&&require,s=0;s=252?6:l>=248?5:l>=240?4:l>=224?3:l>=192?2:1;h[254]=h[254]=1,a.string2buf=function(t){var e,a,n,i,s,h=t.length,l=0;for(i=0;i>>6,e[s++]=128|63&a):a<65536?(e[s++]=224|a>>>12,e[s++]=128|a>>>6&63,e[s++]=128|63&a):(e[s++]=240|a>>>18,e[s++]=128|a>>>12&63,e[s++]=128|a>>>6&63,e[s++]=128|63&a);return e},a.buf2binstring=function(t){return n(t,t.length)},a.binstring2buf=function(t){for(var e=new r.Buf8(t.length),a=0,n=e.length;a4)o[r++]=65533,a+=s-1;else{for(i&=2===s?31:3===s?15:7;s>1&&a1?o[r++]=65533:i<65536?o[r++]=i:(i-=65536,o[r++]=55296|i>>10&1023,o[r++]=56320|1023&i)}return n(o,r)},a.utf8border=function(t,e){var a;for(e=e||t.length,e>t.length&&(e=t.length),a=e-1;a>=0&&128===(192&t[a]);)a--;return a<0?e:0===a?e:a+h[t[a]]>e?a:e}},{"./common":1}],3:[function(t,e,a){"use strict";function n(t,e,a,n){for(var r=65535&t|0,i=t>>>16&65535|0,s=0;0!==a;){s=a>2e3?2e3:a,a-=s;do r=r+e[n++]|0,i=i+r|0;while(--s);r%=65521,i%=65521}return r|i<<16|0}e.exports=n},{}],4:[function(t,e,a){"use strict";function n(){for(var t,e=[],a=0;a<256;a++){t=a;for(var n=0;n<8;n++)t=1&t?3988292384^t>>>1:t>>>1;e[a]=t}return e}function r(t,e,a,n){var r=i,s=n+a;t^=-1;for(var h=n;h>>8^r[255&(t^e[h])];return t^-1}var i=n();e.exports=r},{}],5:[function(t,e,a){"use strict";function n(t,e){return t.msg=O[e],e}function r(t){return(t<<1)-(t>4?9:0)}function i(t){for(var e=t.length;--e>=0;)t[e]=0}function s(t){var e=t.state,a=e.pending;a>t.avail_out&&(a=t.avail_out),0!==a&&(j.arraySet(t.output,e.pending_buf,e.pending_out,a,t.next_out),t.next_out+=a,e.pending_out+=a,t.total_out+=a,t.avail_out-=a,e.pending-=a,0===e.pending&&(e.pending_out=0))}function h(t,e){U._tr_flush_block(t,t.block_start>=0?t.block_start:-1,t.strstart-t.block_start,e),t.block_start=t.strstart,s(t.strm)}function l(t,e){t.pending_buf[t.pending++]=e}function o(t,e){t.pending_buf[t.pending++]=e>>>8&255,t.pending_buf[t.pending++]=255&e}function _(t,e,a,n){var r=t.avail_in;return r>n&&(r=n),0===r?0:(t.avail_in-=r,j.arraySet(e,t.input,t.next_in,r,a),1===t.state.wrap?t.adler=D(t.adler,e,r,a):2===t.state.wrap&&(t.adler=I(t.adler,e,r,a)),t.next_in+=r,t.total_in+=r,r)}function d(t,e){var 
a,n,r=t.max_chain_length,i=t.strstart,s=t.prev_length,h=t.nice_match,l=t.strstart>t.w_size-dt?t.strstart-(t.w_size-dt):0,o=t.window,_=t.w_mask,d=t.prev,u=t.strstart+_t,f=o[i+s-1],c=o[i+s];t.prev_length>=t.good_match&&(r>>=2),h>t.lookahead&&(h=t.lookahead);do if(a=e,o[a+s]===c&&o[a+s-1]===f&&o[a]===o[i]&&o[++a]===o[i+1]){i+=2,a++;do;while(o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&o[++i]===o[++a]&&is){if(t.match_start=e,s=n,n>=h)break;f=o[i+s-1],c=o[i+s]}}while((e=d[e&_])>l&&0!==--r);return s<=t.lookahead?s:t.lookahead}function u(t){var e,a,n,r,i,s=t.w_size;do{if(r=t.window_size-t.lookahead-t.strstart,t.strstart>=s+(s-dt)){j.arraySet(t.window,t.window,s,s,0),t.match_start-=s,t.strstart-=s,t.block_start-=s,a=t.hash_size,e=a;do n=t.head[--e],t.head[e]=n>=s?n-s:0;while(--a);a=s,e=a;do n=t.prev[--e],t.prev[e]=n>=s?n-s:0;while(--a);r+=s}if(0===t.strm.avail_in)break;if(a=_(t.strm,t.window,t.strstart+t.lookahead,r),t.lookahead+=a,t.lookahead+t.insert>=ot)for(i=t.strstart-t.insert,t.ins_h=t.window[i],t.ins_h=(t.ins_h<t.pending_buf_size-5&&(a=t.pending_buf_size-5);;){if(t.lookahead<=1){if(u(t),0===t.lookahead&&e===q)return vt;if(0===t.lookahead)break}t.strstart+=t.lookahead,t.lookahead=0;var n=t.block_start+a;if((0===t.strstart||t.strstart>=n)&&(t.lookahead=t.strstart-n,t.strstart=n,h(t,!1),0===t.strm.avail_out))return vt;if(t.strstart-t.block_start>=t.w_size-dt&&(h(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===N?(h(t,!0),0===t.strm.avail_out?kt:zt):t.strstart>t.block_start&&(h(t,!1),0===t.strm.avail_out)?vt:vt}function c(t,e){for(var a,n;;){if(t.lookahead=ot&&(t.ins_h=(t.ins_h<=ot)if(n=U._tr_tally(t,t.strstart-t.match_start,t.match_length-ot),t.lookahead-=t.match_length,t.match_length<=t.max_lazy_match&&t.lookahead>=ot){t.match_length--;do t.strstart++,t.ins_h=(t.ins_h<=ot&&(t.ins_h=(t.ins_h<4096)&&(t.match_length=ot-1)),t.prev_length>=ot&&t.match_length<=t.prev_length){r=t.strstart+t.lookahead-ot,n=U._tr_tally(t,t.strstart-1-t.prev_match,t.prev_length-ot),t.lookahead-=t.prev_length-1,t.prev_length-=2;do++t.strstart<=r&&(t.ins_h=(t.ins_h<=ot&&t.strstart>0&&(r=t.strstart-1,n=s[r],n===s[++r]&&n===s[++r]&&n===s[++r])){i=t.strstart+_t;do;while(n===s[++r]&&n===s[++r]&&n===s[++r]&&n===s[++r]&&n===s[++r]&&n===s[++r]&&n===s[++r]&&n===s[++r]&&rt.lookahead&&(t.match_length=t.lookahead)}if(t.match_length>=ot?(a=U._tr_tally(t,1,t.match_length-ot),t.lookahead-=t.match_length,t.strstart+=t.match_length,t.match_length=0):(a=U._tr_tally(t,0,t.window[t.strstart]),t.lookahead--,t.strstart++),a&&(h(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===N?(h(t,!0),0===t.strm.avail_out?kt:zt):t.last_lit&&(h(t,!1),0===t.strm.avail_out)?vt:yt}function m(t,e){for(var a;;){if(0===t.lookahead&&(u(t),0===t.lookahead)){if(e===q)return vt;break}if(t.match_length=0,a=U._tr_tally(t,0,t.window[t.strstart]),t.lookahead--,t.strstart++,a&&(h(t,!1),0===t.strm.avail_out))return vt}return t.insert=0,e===N?(h(t,!0),0===t.strm.avail_out?kt:zt):t.last_lit&&(h(t,!1),0===t.strm.avail_out)?vt:yt}function b(t,e,a,n,r){this.good_length=t,this.max_lazy=e,this.nice_length=a,this.max_chain=n,this.func=r}function w(t){t.window_size=2*t.w_size,i(t.head),t.max_lazy_match=E[t.level].max_lazy,t.good_match=E[t.level].good_length,t.nice_match=E[t.level].nice_length,t.max_chain_length=E[t.level].max_chain,t.strstart=0,t.block_start=0,t.lookahead=0,t.insert=0,t.match_length=t.prev_length=ot-1,t.match_available=0,t.ins_h=0}function 
v(){this.strm=null,this.status=0,this.pending_buf=null,this.pending_buf_size=0,this.pending_out=0,this.pending=0,this.wrap=0,this.gzhead=null,this.gzindex=0,this.method=Z,this.last_flush=-1,this.w_size=0,this.w_bits=0,this.w_mask=0,this.window=null,this.window_size=0,this.prev=null,this.head=null,this.ins_h=0,this.hash_size=0,this.hash_bits=0,this.hash_mask=0,this.hash_shift=0,this.block_start=0,this.match_length=0,this.prev_match=0,this.match_available=0,this.strstart=0,this.match_start=0,this.lookahead=0,this.prev_length=0,this.max_chain_length=0,this.max_lazy_match=0,this.level=0,this.strategy=0,this.good_match=0,this.nice_match=0,this.dyn_ltree=new j.Buf16(2*ht),this.dyn_dtree=new j.Buf16(2*(2*it+1)),this.bl_tree=new j.Buf16(2*(2*st+1)),i(this.dyn_ltree),i(this.dyn_dtree),i(this.bl_tree),this.l_desc=null,this.d_desc=null,this.bl_desc=null,this.bl_count=new j.Buf16(lt+1),this.heap=new j.Buf16(2*rt+1),i(this.heap),this.heap_len=0,this.heap_max=0,this.depth=new j.Buf16(2*rt+1),i(this.depth),this.l_buf=0,this.lit_bufsize=0,this.last_lit=0,this.d_buf=0,this.opt_len=0,this.static_len=0,this.matches=0,this.insert=0,this.bi_buf=0,this.bi_valid=0}function y(t){var e;return t&&t.state?(t.total_in=t.total_out=0,t.data_type=Y,e=t.state,e.pending=0,e.pending_out=0,e.wrap<0&&(e.wrap=-e.wrap),e.status=e.wrap?ft:bt,t.adler=2===e.wrap?0:1,e.last_flush=q,U._tr_init(e),H):n(t,K)}function k(t){var e=y(t);return e===H&&w(t.state),e}function z(t,e){return t&&t.state?2!==t.state.wrap?K:(t.state.gzhead=e,H):K}function x(t,e,a,r,i,s){if(!t)return K;var h=1;if(e===G&&(e=6),r<0?(h=0,r=-r):r>15&&(h=2,r-=16),i<1||i>$||a!==Z||r<8||r>15||e<0||e>9||s<0||s>W)return n(t,K);8===r&&(r=9);var l=new v;return t.state=l,l.strm=t,l.wrap=h,l.gzhead=null,l.w_bits=r,l.w_size=1<R||e<0)return t?n(t,K):K;if(h=t.state,!t.output||!t.input&&0!==t.avail_in||h.status===wt&&e!==N)return n(t,0===t.avail_out?P:K);if(h.strm=t,a=h.last_flush,h.last_flush=e,h.status===ft)if(2===h.wrap)t.adler=0,l(h,31),l(h,139),l(h,8),h.gzhead?(l(h,(h.gzhead.text?1:0)+(h.gzhead.hcrc?2:0)+(h.gzhead.extra?4:0)+(h.gzhead.name?8:0)+(h.gzhead.comment?16:0)),l(h,255&h.gzhead.time),l(h,h.gzhead.time>>8&255),l(h,h.gzhead.time>>16&255),l(h,h.gzhead.time>>24&255),l(h,9===h.level?2:h.strategy>=Q||h.level<2?4:0),l(h,255&h.gzhead.os),h.gzhead.extra&&h.gzhead.extra.length&&(l(h,255&h.gzhead.extra.length),l(h,h.gzhead.extra.length>>8&255)),h.gzhead.hcrc&&(t.adler=I(t.adler,h.pending_buf,h.pending,0)),h.gzindex=0,h.status=ct):(l(h,0),l(h,0),l(h,0),l(h,0),l(h,0),l(h,9===h.level?2:h.strategy>=Q||h.level<2?4:0),l(h,xt),h.status=bt);else{var u=Z+(h.w_bits-8<<4)<<8,f=-1;f=h.strategy>=Q||h.level<2?0:h.level<6?1:6===h.level?2:3,u|=f<<6,0!==h.strstart&&(u|=ut),u+=31-u%31,h.status=bt,o(h,u),0!==h.strstart&&(o(h,t.adler>>>16),o(h,65535&t.adler)),t.adler=1}if(h.status===ct)if(h.gzhead.extra){for(_=h.pending;h.gzindex<(65535&h.gzhead.extra.length)&&(h.pending!==h.pending_buf_size||(h.gzhead.hcrc&&h.pending>_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),s(t),_=h.pending,h.pending!==h.pending_buf_size));)l(h,255&h.gzhead.extra[h.gzindex]),h.gzindex++;h.gzhead.hcrc&&h.pending>_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),h.gzindex===h.gzhead.extra.length&&(h.gzindex=0,h.status=pt)}else 
h.status=pt;if(h.status===pt)if(h.gzhead.name){_=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),s(t),_=h.pending,h.pending===h.pending_buf_size)){d=1;break}d=h.gzindex_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),0===d&&(h.gzindex=0,h.status=gt)}else h.status=gt;if(h.status===gt)if(h.gzhead.comment){_=h.pending;do{if(h.pending===h.pending_buf_size&&(h.gzhead.hcrc&&h.pending>_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),s(t),_=h.pending,h.pending===h.pending_buf_size)){d=1;break}d=h.gzindex_&&(t.adler=I(t.adler,h.pending_buf,h.pending-_,_)),0===d&&(h.status=mt)}else h.status=mt;if(h.status===mt&&(h.gzhead.hcrc?(h.pending+2>h.pending_buf_size&&s(t),h.pending+2<=h.pending_buf_size&&(l(h,255&t.adler),l(h,t.adler>>8&255),t.adler=0,h.status=bt)):h.status=bt),0!==h.pending){if(s(t),0===t.avail_out)return h.last_flush=-1,H}else if(0===t.avail_in&&r(e)<=r(a)&&e!==N)return n(t,P);if(h.status===wt&&0!==t.avail_in)return n(t,P);if(0!==t.avail_in||0!==h.lookahead||e!==q&&h.status!==wt){var c=h.strategy===Q?m(h,e):h.strategy===V?g(h,e):E[h.level].func(h,e);if(c!==kt&&c!==zt||(h.status=wt),c===vt||c===kt)return 0===t.avail_out&&(h.last_flush=-1),H;if(c===yt&&(e===T?U._tr_align(h):e!==R&&(U._tr_stored_block(h,0,0,!1),e===L&&(i(h.head),0===h.lookahead&&(h.strstart=0,h.block_start=0,h.insert=0))),s(t),0===t.avail_out))return h.last_flush=-1,H}return e!==N?H:h.wrap<=0?F:(2===h.wrap?(l(h,255&t.adler),l(h,t.adler>>8&255),l(h,t.adler>>16&255),l(h,t.adler>>24&255),l(h,255&t.total_in),l(h,t.total_in>>8&255),l(h,t.total_in>>16&255),l(h,t.total_in>>24&255)):(o(h,t.adler>>>16),o(h,65535&t.adler)),s(t),h.wrap>0&&(h.wrap=-h.wrap),0!==h.pending?H:F)}function C(t){var e;return t&&t.state?(e=t.state.status,e!==ft&&e!==ct&&e!==pt&&e!==gt&&e!==mt&&e!==bt&&e!==wt?n(t,K):(t.state=null,e===bt?n(t,M):H)):K}function S(t,e){var a,n,r,s,h,l,o,_,d=e.length;if(!t||!t.state)return K;if(a=t.state,s=a.wrap,2===s||1===s&&a.status!==ft||a.lookahead)return K;for(1===s&&(t.adler=D(t.adler,e,d,0)),a.wrap=0,d>=a.w_size&&(0===s&&(i(a.head),a.strstart=0,a.block_start=0,a.insert=0),_=new j.Buf8(a.w_size),j.arraySet(_,e,d-a.w_size,a.w_size,0),e=_,d=a.w_size),h=t.avail_in,l=t.next_in,o=t.input,t.avail_in=d,t.next_in=0,t.input=e,u(a);a.lookahead>=ot;){n=a.strstart,r=a.lookahead-(ot-1);do a.ins_h=(a.ins_h<=0;)t[e]=0}function r(t,e,a,n,r){this.static_tree=t,this.extra_bits=e,this.extra_base=a,this.elems=n,this.max_length=r,this.has_stree=t&&t.length}function i(t,e){this.dyn_tree=t,this.max_code=0,this.stat_desc=e}function s(t){return t<256?lt[t]:lt[256+(t>>>7)]}function h(t,e){t.pending_buf[t.pending++]=255&e,t.pending_buf[t.pending++]=e>>>8&255}function l(t,e,a){t.bi_valid>W-a?(t.bi_buf|=e<>W-t.bi_valid,t.bi_valid+=a-W):(t.bi_buf|=e<>>=1,a<<=1;while(--e>0);return a>>>1}function d(t){16===t.bi_valid?(h(t,t.bi_buf),t.bi_buf=0,t.bi_valid=0):t.bi_valid>=8&&(t.pending_buf[t.pending++]=255&t.bi_buf,t.bi_buf>>=8,t.bi_valid-=8)}function u(t,e){var 
a,n,r,i,s,h,l=e.dyn_tree,o=e.max_code,_=e.stat_desc.static_tree,d=e.stat_desc.has_stree,u=e.stat_desc.extra_bits,f=e.stat_desc.extra_base,c=e.stat_desc.max_length,p=0;for(i=0;i<=V;i++)t.bl_count[i]=0;for(l[2*t.heap[t.heap_max]+1]=0,a=t.heap_max+1;ac&&(i=c,p++),l[2*n+1]=i,n>o||(t.bl_count[i]++,s=0,n>=f&&(s=u[n-f]),h=l[2*n],t.opt_len+=h*(i+s),d&&(t.static_len+=h*(_[2*n+1]+s)));if(0!==p){do{for(i=c-1;0===t.bl_count[i];)i--;t.bl_count[i]--,t.bl_count[i+1]+=2,t.bl_count[c]--,p-=2}while(p>0);for(i=c;0!==i;i--)for(n=t.bl_count[i];0!==n;)r=t.heap[--a],r>o||(l[2*r+1]!==i&&(t.opt_len+=(i-l[2*r+1])*l[2*r],l[2*r+1]=i),n--)}}function f(t,e,a){var n,r,i=new Array(V+1),s=0;for(n=1;n<=V;n++)i[n]=s=s+a[n-1]<<1;for(r=0;r<=e;r++){var h=t[2*r+1];0!==h&&(t[2*r]=_(i[h]++,h))}}function c(){var t,e,a,n,i,s=new Array(V+1);for(a=0,n=0;n>=7;n8?h(t,t.bi_buf):t.bi_valid>0&&(t.pending_buf[t.pending++]=t.bi_buf),t.bi_buf=0,t.bi_valid=0}function m(t,e,a,n){g(t),n&&(h(t,a),h(t,~a)),D.arraySet(t.pending_buf,t.window,e,a,t.pending),t.pending+=a}function b(t,e,a,n){var r=2*e,i=2*a;return t[r]>1;a>=1;a--)w(t,i,a);r=l;do a=t.heap[1],t.heap[1]=t.heap[t.heap_len--],w(t,i,1),n=t.heap[1],t.heap[--t.heap_max]=a,t.heap[--t.heap_max]=n,i[2*r]=i[2*a]+i[2*n],t.depth[r]=(t.depth[a]>=t.depth[n]?t.depth[a]:t.depth[n])+1,i[2*a+1]=i[2*n+1]=r,t.heap[1]=r++,w(t,i,1);while(t.heap_len>=2);t.heap[--t.heap_max]=t.heap[1],u(t,e),f(i,o,t.bl_count)}function k(t,e,a){var n,r,i=-1,s=e[1],h=0,l=7,o=4;for(0===s&&(l=138,o=3),e[2*(a+1)+1]=65535,n=0;n<=a;n++)r=s,s=e[2*(n+1)+1],++h=3&&0===t.bl_tree[2*rt[e]+1];e--);return t.opt_len+=3*(e+1)+5+5+4,e}function B(t,e,a,n){var r;for(l(t,e-257,5),l(t,a-1,5),l(t,n-4,4),r=0;r>>=1)if(1&a&&0!==t.dyn_ltree[2*e])return O;if(0!==t.dyn_ltree[18]||0!==t.dyn_ltree[20]||0!==t.dyn_ltree[26])return q;for(e=32;e0?(t.strm.data_type===T&&(t.strm.data_type=A(t)),y(t,t.l_desc),y(t,t.d_desc),s=x(t),r=t.opt_len+3+7>>>3,i=t.static_len+3+7>>>3,i<=r&&(r=i)):r=i=a+5,a+4<=r&&e!==-1?S(t,e,a,n):t.strategy===I||i===r?(l(t,(N<<1)+(n?1:0),3),v(t,st,ht)):(l(t,(R<<1)+(n?1:0),3),B(t,t.l_desc.max_code+1,t.d_desc.max_code+1,s+1),v(t,t.dyn_ltree,t.dyn_dtree)),p(t),n&&g(t)}function U(t,e,a){return t.pending_buf[t.d_buf+2*t.last_lit]=e>>>8&255,t.pending_buf[t.d_buf+2*t.last_lit+1]=255&e,t.pending_buf[t.l_buf+t.last_lit]=255&a,t.last_lit++,0===e?t.dyn_ltree[2*a]++:(t.matches++,e--,t.dyn_ltree[2*(ot[a]+M+1)]++,t.dyn_dtree[2*s(e)]++),t.last_lit===t.lit_bufsize-1}var D=t("../utils/common"),I=4,O=0,q=1,T=2,L=0,N=1,R=2,H=3,F=258,K=29,M=256,P=M+1+K,G=30,J=19,Q=2*P+1,V=15,W=16,X=7,Y=256,Z=16,$=17,tt=18,et=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0],at=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13],nt=[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7],rt=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],it=512,st=new Array(2*(P+2));n(st);var ht=new Array(2*G);n(ht);var lt=new Array(it);n(lt);var ot=new Array(F-H+1);n(ot);var _t=new Array(K);n(_t);var dt=new Array(G);n(dt);var ut,ft,ct,pt=!1;a._tr_init=C,a._tr_stored_block=S,a._tr_flush_block=j,a._tr_tally=U,a._tr_align=E},{"../utils/common":1}],8:[function(t,e,a){"use strict";function n(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}e.exports=n},{}],"/lib/deflate.js":[function(t,e,a){"use strict";function n(t){if(!(this instanceof n))return new 
n(t);this.options=l.assign({level:b,method:v,chunkSize:16384,windowBits:15,memLevel:8,strategy:w,to:""},t||{});var e=this.options;e.raw&&e.windowBits>0?e.windowBits=-e.windowBits:e.gzip&&e.windowBits>0&&e.windowBits<16&&(e.windowBits+=16),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new d,this.strm.avail_out=0;var a=h.deflateInit2(this.strm,e.level,e.method,e.windowBits,e.memLevel,e.strategy);if(a!==p)throw new Error(_[a]);if(e.header&&h.deflateSetHeader(this.strm,e.header),e.dictionary){var r;if(r="string"==typeof e.dictionary?o.string2buf(e.dictionary):"[object ArrayBuffer]"===u.call(e.dictionary)?new Uint8Array(e.dictionary):e.dictionary,a=h.deflateSetDictionary(this.strm,r),a!==p)throw new Error(_[a]);this._dict_set=!0}}function r(t,e){var a=new n(e);if(a.push(t,!0),a.err)throw a.msg;return a.result}function i(t,e){return e=e||{},e.raw=!0,r(t,e)}function s(t,e){return e=e||{},e.gzip=!0,r(t,e)}var h=t("./zlib/deflate"),l=t("./utils/common"),o=t("./utils/strings"),_=t("./zlib/messages"),d=t("./zlib/zstream"),u=Object.prototype.toString,f=0,c=4,p=0,g=1,m=2,b=-1,w=0,v=8;n.prototype.push=function(t,e){var a,n,r=this.strm,i=this.options.chunkSize;if(this.ended)return!1;n=e===~~e?e:e===!0?c:f,"string"==typeof t?r.input=o.string2buf(t):"[object ArrayBuffer]"===u.call(t)?r.input=new Uint8Array(t):r.input=t,r.next_in=0,r.avail_in=r.input.length;do{if(0===r.avail_out&&(r.output=new l.Buf8(i),r.next_out=0,r.avail_out=i),a=h.deflate(r,n),a!==g&&a!==p)return this.onEnd(a),this.ended=!0,!1;0!==r.avail_out&&(0!==r.avail_in||n!==c&&n!==m)||("string"===this.options.to?this.onData(o.buf2binstring(l.shrinkBuf(r.output,r.next_out))):this.onData(l.shrinkBuf(r.output,r.next_out)))}while((r.avail_in>0||0===r.avail_out)&&a!==g);return n===c?(a=h.deflateEnd(this.strm),this.onEnd(a),this.ended=!0,a===p):n!==m||(this.onEnd(p),r.avail_out=0,!0)},n.prototype.onData=function(t){this.chunks.push(t)},n.prototype.onEnd=function(t){t===p&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=l.flattenChunks(this.chunks)),this.chunks=[],this.err=t,this.msg=this.strm.msg},a.Deflate=n,a.deflate=r,a.deflateRaw=i,a.gzip=s},{"./utils/common":1,"./utils/strings":2,"./zlib/deflate":5,"./zlib/messages":6,"./zlib/zstream":8}]},{},[])("/lib/deflate.js")}); diff --git a/node_modules/pako/dist/pako_inflate.js b/node_modules/pako/dist/pako_inflate.js new file mode 100644 index 0000000..c3e2e0e --- /dev/null +++ b/node_modules/pako/dist/pako_inflate.js @@ -0,0 +1,3127 @@ +/* pako 0.2.9 nodeca/pako */(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.pako = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 
2 : 1); +} +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start + + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4; + } + + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } + } + + return buf; +}; + +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // use fallback for big arrays to avoid stack overflow + if (len < 65537) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; +} + + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; +}; + + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? + if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } + } + + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. 
+// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { +  var pos; + +  max = max || buf.length; +  if (max > buf.length) { max = buf.length; } + +  // go back from last position, until start of sequence found +  pos = max - 1; +  while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + +  // Edge case - very small and broken sequence, +  // return max, because we should return something anyway. +  if (pos < 0) { return max; } + +  // If we came to start of buffer - that means buffer is too small, +  // return max too. +  if (pos === 0) { return max; } + +  return (pos + _utf8len[buf[pos]] > max) ? pos : max; +}; + +},{"./common":1}],3:[function(require,module,exports){ +'use strict'; + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It isn't worth making additional optimizations as in the original. +// Small size is preferable. + +function adler32(adler, buf, len, pos) { +  var s1 = (adler & 0xffff) |0, +      s2 = ((adler >>> 16) & 0xffff) |0, +      n = 0; + +  while (len !== 0) { +    // Set limit to about half of 5552, to keep +    // s2 in 31-bits, because we force signed ints. +    // Otherwise %= will fail. +    n = len > 2000 ? 2000 : len; +    len -= n; + +    do { +      s1 = (s1 + buf[pos++]) |0; +      s2 = (s2 + s1) |0; +    } while (--n); + +    s1 %= 65521; +    s2 %= 65521; +  } + +  return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + +},{}],4:[function(require,module,exports){ +'use strict'; + + +module.exports = { + +  /* Allowed flush values; see deflate() and inflate() below for details */ +  Z_NO_FLUSH: 0, +  Z_PARTIAL_FLUSH: 1, +  Z_SYNC_FLUSH: 2, +  Z_FULL_FLUSH: 3, +  Z_FINISH: 4, +  Z_BLOCK: 5, +  Z_TREES: 6, + +  /* Return codes for the compression/decompression functions. Negative values +   * are errors, positive values are used for special but normal events. +   */ +  Z_OK: 0, +  Z_STREAM_END: 1, +  Z_NEED_DICT: 2, +  Z_ERRNO: -1, +  Z_STREAM_ERROR: -2, +  Z_DATA_ERROR: -3, +  //Z_MEM_ERROR: -4, +  Z_BUF_ERROR: -5, +  //Z_VERSION_ERROR: -6, + +  /* compression levels */ +  Z_NO_COMPRESSION: 0, +  Z_BEST_SPEED: 1, +  Z_BEST_COMPRESSION: 9, +  Z_DEFAULT_COMPRESSION: -1, + + +  Z_FILTERED: 1, +  Z_HUFFMAN_ONLY: 2, +  Z_RLE: 3, +  Z_FIXED: 4, +  Z_DEFAULT_STRATEGY: 0, + +  /* Possible values of the data_type field (though see inflate()) */ +  Z_BINARY: 0, +  Z_TEXT: 1, +  //Z_ASCII: 1, // = Z_TEXT (deprecated) +  Z_UNKNOWN: 2, + +  /* The deflate compression method */ +  Z_DEFLATED: 8 +  //Z_NULL: null // Use -1 or null inline, depending on var type +}; + +},{}],5:[function(require,module,exports){ +'use strict'; + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + +// Use an ordinary array, since a typed array gives no boost here +function makeTable() { +  var c, table = []; + +  for (var n = 0; n < 256; n++) { +    c = n; +    for (var k = 0; k < 8; k++) { +      c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); +    } +    table[n] = c; +  } + +  return table; +} + +// Create table on load. Just 256 signed longs. Not a problem.
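// A small usage sketch, assuming `buf` is a Uint8Array of raw bytes (this sketch is not part of pako's own source):
// each checksum module here exports one function with the same calling convention,
// seeded with 0 for crc32 and 1 for adler32:
//   var gzipCheck = crc32(0, buf, buf.length, 0);   // gzip trailer check
//   var zlibCheck = adler32(1, buf, buf.length, 0); // zlib trailer check
// inflate() further below chooses between the two based on state.flags.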
+var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + +},{}],6:[function(require,module,exports){ +'use strict'; + + +function GZheader() { + /* true if compressed data believed to be text */ + this.text = 0; + /* modification time */ + this.time = 0; + /* extra flags (not used when writing a gzip file) */ + this.xflags = 0; + /* operating system */ + this.os = 0; + /* pointer to extra field or Z_NULL if none */ + this.extra = null; + /* extra field length (valid if extra != Z_NULL) */ + this.extra_len = 0; // Actually, we don't need it in JS, + // but leave for few code modifications + + // + // Setup limits is not necessary because in js we should not preallocate memory + // for inflate use constant limit in 65536 bytes + // + + /* space at extra (only when reading header) */ + // this.extra_max = 0; + /* pointer to zero-terminated file name or Z_NULL */ + this.name = ''; + /* space at name (only when reading header) */ + // this.name_max = 0; + /* pointer to zero-terminated comment or Z_NULL */ + this.comment = ''; + /* space at comment (only when reading header) */ + // this.comm_max = 0; + /* true if there was or will be a header crc */ + this.hcrc = 0; + /* true when done reading gzip header (not used when writing a gzip file) */ + this.done = false; +} + +module.exports = GZheader; + +},{}],7:[function(require,module,exports){ +'use strict'; + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. + + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. + + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. 
+ */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defailts, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + 
output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + +},{}],8:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var inflate_fast = require('./inffast'); +var inflate_table = require('./inftrees'); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested 
window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = 
windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. + If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. + + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. 
+ */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + 
state.mode = BAD; + break; + } + if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more conveniend processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited 
to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? + // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 
0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + } +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defailts, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' insdead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ + if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + +},{"../utils/common":1,"./adler32":3,"./crc32":5,"./inffast":7,"./inftrees":9}],9:[function(require,module,exports){ +'use strict'; + + +var utils = require('../utils/common'); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var 
ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used = 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. 
finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. 
This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. + */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + var i = 0; + /* process all codes and make table entries */ + for (;;) { + i++; + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; } + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += 
used; + opts.bits = root; + return 0; +}; + +},{"../utils/common":1}],10:[function(require,module,exports){ +'use strict'; + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + +},{}],11:[function(require,module,exports){ +'use strict'; + + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + +},{}],"/lib/inflate.js":[function(require,module,exports){ +'use strict'; + + +var zlib_inflate = require('./zlib/inflate'); +var utils = require('./utils/common'); +var strings = require('./utils/strings'); +var c = require('./zlib/constants'); +var msg = require('./zlib/messages'); +var ZStream = require('./zlib/zstream'); +var GZheader = require('./zlib/gzheader'); + +var toString = Object.prototype.toString; + +/** + * class Inflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[inflate]] + * and [[inflateRaw]]. + **/ + +/* internal + * inflate.chunks -> Array + * + * Chunks of output data, if [[Inflate#onData]] not overriden. + **/ + +/** + * Inflate.result -> Uint8Array|Array|String + * + * Uncompressed result, generated by default [[Inflate#onData]] + * and [[Inflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Inflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Inflate.err -> Number + * + * Error code after inflate finished. 0 (Z_OK) on success. + * Should be checked if broken data possible. + **/ + +/** + * Inflate.msg -> String + * + * Error message, if [[Inflate.err]] != 0 + **/ + + +/** + * new Inflate(options) + * - options (Object): zlib inflate options. + * + * Creates new inflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `windowBits` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw inflate + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. 
+ * + * By default, when no options set, autodetect deflate/gzip data format via + * wrapper header. + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var inflate = new pako.Inflate({ level: 3}); + * + * inflate.push(chunk1, false); + * inflate.push(chunk2, true); // true -> last chunk + * + * if (inflate.err) { throw new Error(inflate.err); } + * + * console.log(inflate.result); + * ``` + **/ +function Inflate(options) { + if (!(this instanceof Inflate)) return new Inflate(options); + + this.options = utils.assign({ + chunkSize: 16384, + windowBits: 0, + to: '' + }, options || {}); + + var opt = this.options; + + // Force window size for `raw` data, if not set directly, + // because we have no header for autodetect. + if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { + opt.windowBits = -opt.windowBits; + if (opt.windowBits === 0) { opt.windowBits = -15; } + } + + // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate + if ((opt.windowBits >= 0) && (opt.windowBits < 16) && + !(options && options.windowBits)) { + opt.windowBits += 32; + } + + // Gzip header has no info about windows size, we can do autodetect only + // for deflate. So, if window size not set, force it to max when gzip possible + if ((opt.windowBits > 15) && (opt.windowBits < 48)) { + // bit 3 (16) -> gzipped data + // bit 4 (32) -> autodetect gzip/deflate + if ((opt.windowBits & 15) === 0) { + opt.windowBits |= 15; + } + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_inflate.inflateInit2( + this.strm, + opt.windowBits + ); + + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + + this.header = new GZheader(); + + zlib_inflate.inflateGetHeader(this.strm, this.header); +} + +/** + * Inflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. + * + * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with + * new output chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the decompression context. + * + * On fail call [[Inflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... 
+ * push(chunk, true); // push last chunk + * ``` + **/ +Inflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var dictionary = this.options.dictionary; + var status, _mode; + var next_out_utf8, tail, utf8str; + var dict; + + // Flag to properly process Z_BUF_ERROR on testing inflate call + // when we check that all output data was flushed. + var allowBufError = false; + + if (this.ended) { return false; } + _mode = (mode === ~~mode) ? mode : ((mode === true) ? c.Z_FINISH : c.Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // Only binary strings can be decompressed on practice + strm.input = strings.binstring2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + + status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ + + if (status === c.Z_NEED_DICT && dictionary) { + // Convert data if needed + if (typeof dictionary === 'string') { + dict = strings.string2buf(dictionary); + } else if (toString.call(dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(dictionary); + } else { + dict = dictionary; + } + + status = zlib_inflate.inflateSetDictionary(this.strm, dict); + + } + + if (status === c.Z_BUF_ERROR && allowBufError === true) { + status = c.Z_OK; + allowBufError = false; + } + + if (status !== c.Z_STREAM_END && status !== c.Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + + if (strm.next_out) { + if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { + + if (this.options.to === 'string') { + + next_out_utf8 = strings.utf8border(strm.output, strm.next_out); + + tail = strm.next_out - next_out_utf8; + utf8str = strings.buf2string(strm.output, next_out_utf8); + + // move tail + strm.next_out = tail; + strm.avail_out = chunkSize - tail; + if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } + + this.onData(utf8str); + + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } + + // When no more input data, we should check that internal inflate buffers + // are flushed. The only way to do it when avail_out = 0 - run one more + // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. + // Here we set flag to process this error properly. + // + // NOTE. Deflate does not return error in this case and does not needs such + // logic. + if (strm.avail_in === 0 && strm.avail_out === 0) { + allowBufError = true; + } + + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); + + if (status === c.Z_STREAM_END) { + _mode = c.Z_FINISH; + } + + // Finalize on the last chunk. + if (_mode === c.Z_FINISH) { + status = zlib_inflate.inflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === c.Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === c.Z_SYNC_FLUSH) { + this.onEnd(c.Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Inflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. 
+ * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Inflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Inflate#onEnd(status) -> Void + * - status (Number): inflate status. 0 (Z_OK) on success, + * other if not. + * + * Called either after you tell inflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Inflate.prototype.onEnd = function (status) { + // On success - join + if (status === c.Z_OK) { + if (this.options.to === 'string') { + // Glue & convert here, until we teach pako to send + // utf8 alligned strings to onData + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * inflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Decompress `data` with inflate/ungzip and `options`. Autodetect + * format via wrapper header by default. That's why we don't provide + * separate `ungzip` method. + * + * Supported options are: + * + * - windowBits + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) + * , output; + * + * try { + * output = pako.inflate(input); + * } catch (err) + * console.log(err); + * } + * ``` + **/ +function inflate(input, options) { + var inflator = new Inflate(options); + + inflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (inflator.err) { throw inflator.msg; } + + return inflator.result; +} + + +/** + * inflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * The same as [[inflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function inflateRaw(input, options) { + options = options || {}; + options.raw = true; + return inflate(input, options); +} + + +/** + * ungzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Just shortcut to [[inflate]], because it autodetects format + * by header.content. Done for convenience. 
+ **/ + + +exports.Inflate = Inflate; +exports.inflate = inflate; +exports.inflateRaw = inflateRaw; +exports.ungzip = inflate; + +},{"./utils/common":1,"./utils/strings":2,"./zlib/constants":4,"./zlib/gzheader":6,"./zlib/inflate":8,"./zlib/messages":10,"./zlib/zstream":11}]},{},[])("/lib/inflate.js") +}); \ No newline at end of file diff --git a/node_modules/pako/dist/pako_inflate.min.js b/node_modules/pako/dist/pako_inflate.min.js new file mode 100644 index 0000000..d423de9 --- /dev/null +++ b/node_modules/pako/dist/pako_inflate.min.js @@ -0,0 +1,2 @@ +/* pako 0.2.9 nodeca/pako */ +!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var t;t="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,t.pako=e()}}(function(){return function e(t,i,n){function a(o,s){if(!i[o]){if(!t[o]){var f="function"==typeof require&&require;if(!s&&f)return f(o,!0);if(r)return r(o,!0);var l=new Error("Cannot find module '"+o+"'");throw l.code="MODULE_NOT_FOUND",l}var d=i[o]={exports:{}};t[o][0].call(d.exports,function(e){var i=t[o][1][e];return a(i?i:e)},d,d.exports,e,t,i,n)}return i[o].exports}for(var r="function"==typeof require&&require,o=0;o=252?6:f>=248?5:f>=240?4:f>=224?3:f>=192?2:1;s[254]=s[254]=1,i.string2buf=function(e){var t,i,n,r,o,s=e.length,f=0;for(r=0;r>>6,t[o++]=128|63&i):i<65536?(t[o++]=224|i>>>12,t[o++]=128|i>>>6&63,t[o++]=128|63&i):(t[o++]=240|i>>>18,t[o++]=128|i>>>12&63,t[o++]=128|i>>>6&63,t[o++]=128|63&i);return t},i.buf2binstring=function(e){return n(e,e.length)},i.binstring2buf=function(e){for(var t=new a.Buf8(e.length),i=0,n=t.length;i4)l[a++]=65533,i+=o-1;else{for(r&=2===o?31:3===o?15:7;o>1&&i1?l[a++]=65533:r<65536?l[a++]=r:(r-=65536,l[a++]=55296|r>>10&1023,l[a++]=56320|1023&r)}return n(l,a)},i.utf8border=function(e,t){var i;for(t=t||e.length,t>e.length&&(t=e.length),i=t-1;i>=0&&128===(192&e[i]);)i--;return i<0?t:0===i?t:i+s[e[i]]>t?i:t}},{"./common":1}],3:[function(e,t,i){"use strict";function n(e,t,i,n){for(var a=65535&e|0,r=e>>>16&65535|0,o=0;0!==i;){o=i>2e3?2e3:i,i-=o;do a=a+t[n++]|0,r=r+a|0;while(--o);a%=65521,r%=65521}return a|r<<16|0}t.exports=n},{}],4:[function(e,t,i){"use strict";t.exports={Z_NO_FLUSH:0,Z_PARTIAL_FLUSH:1,Z_SYNC_FLUSH:2,Z_FULL_FLUSH:3,Z_FINISH:4,Z_BLOCK:5,Z_TREES:6,Z_OK:0,Z_STREAM_END:1,Z_NEED_DICT:2,Z_ERRNO:-1,Z_STREAM_ERROR:-2,Z_DATA_ERROR:-3,Z_BUF_ERROR:-5,Z_NO_COMPRESSION:0,Z_BEST_SPEED:1,Z_BEST_COMPRESSION:9,Z_DEFAULT_COMPRESSION:-1,Z_FILTERED:1,Z_HUFFMAN_ONLY:2,Z_RLE:3,Z_FIXED:4,Z_DEFAULT_STRATEGY:0,Z_BINARY:0,Z_TEXT:1,Z_UNKNOWN:2,Z_DEFLATED:8}},{}],5:[function(e,t,i){"use strict";function n(){for(var e,t=[],i=0;i<256;i++){e=i;for(var n=0;n<8;n++)e=1&e?3988292384^e>>>1:e>>>1;t[i]=e}return t}function a(e,t,i,n){var a=r,o=n+i;e^=-1;for(var s=n;s>>8^a[255&(e^t[s])];return e^-1}var r=n();t.exports=a},{}],6:[function(e,t,i){"use strict";function n(){this.text=0,this.time=0,this.xflags=0,this.os=0,this.extra=null,this.extra_len=0,this.name="",this.comment="",this.hcrc=0,this.done=!1}t.exports=n},{}],7:[function(e,t,i){"use strict";var n=30,a=12;t.exports=function(e,t){var 
i,r,o,s,f,l,d,u,c,h,b,w,m,k,_,g,v,p,x,y,S,E,B,Z,A;i=e.state,r=e.next_in,Z=e.input,o=r+(e.avail_in-5),s=e.next_out,A=e.output,f=s-(t-e.avail_out),l=s+(e.avail_out-257),d=i.dmax,u=i.wsize,c=i.whave,h=i.wnext,b=i.window,w=i.hold,m=i.bits,k=i.lencode,_=i.distcode,g=(1<>>24,w>>>=x,m-=x,x=p>>>16&255,0===x)A[s++]=65535&p;else{if(!(16&x)){if(0===(64&x)){p=k[(65535&p)+(w&(1<>>=x,m-=x),m<15&&(w+=Z[r++]<>>24,w>>>=x,m-=x,x=p>>>16&255,!(16&x)){if(0===(64&x)){p=_[(65535&p)+(w&(1<d){e.msg="invalid distance too far back",i.mode=n;break e}if(w>>>=x,m-=x,x=s-f,S>x){if(x=S-x,x>c&&i.sane){e.msg="invalid distance too far back",i.mode=n;break e}if(E=0,B=b,0===h){if(E+=u-x,x2;)A[s++]=B[E++],A[s++]=B[E++],A[s++]=B[E++],y-=3;y&&(A[s++]=B[E++],y>1&&(A[s++]=B[E++]))}else{E=s-S;do A[s++]=A[E++],A[s++]=A[E++],A[s++]=A[E++],y-=3;while(y>2);y&&(A[s++]=A[E++],y>1&&(A[s++]=A[E++]))}break}}break}}while(r>3,r-=y,m-=y<<3,w&=(1<>>24&255)+(e>>>8&65280)+((65280&e)<<8)+((255&e)<<24)}function a(){this.mode=0,this.last=!1,this.wrap=0,this.havedict=!1,this.flags=0,this.dmax=0,this.check=0,this.total=0,this.head=null,this.wbits=0,this.wsize=0,this.whave=0,this.wnext=0,this.window=null,this.hold=0,this.bits=0,this.length=0,this.offset=0,this.extra=0,this.lencode=null,this.distcode=null,this.lenbits=0,this.distbits=0,this.ncode=0,this.nlen=0,this.ndist=0,this.have=0,this.next=null,this.lens=new _.Buf16(320),this.work=new _.Buf16(288),this.lendyn=null,this.distdyn=null,this.sane=0,this.back=0,this.was=0}function r(e){var t;return e&&e.state?(t=e.state,e.total_in=e.total_out=t.total=0,e.msg="",t.wrap&&(e.adler=1&t.wrap),t.mode=D,t.last=0,t.havedict=0,t.dmax=32768,t.head=null,t.hold=0,t.bits=0,t.lencode=t.lendyn=new _.Buf32(we),t.distcode=t.distdyn=new _.Buf32(me),t.sane=1,t.back=-1,z):C}function o(e){var t;return e&&e.state?(t=e.state,t.wsize=0,t.whave=0,t.wnext=0,r(e)):C}function s(e,t){var i,n;return e&&e.state?(n=e.state,t<0?(i=0,t=-t):(i=(t>>4)+1,t<48&&(t&=15)),t&&(t<8||t>15)?C:(null!==n.window&&n.wbits!==t&&(n.window=null),n.wrap=i,n.wbits=t,o(e))):C}function f(e,t){var i,n;return e?(n=new a,e.state=n,n.window=null,i=s(e,t),i!==z&&(e.state=null),i):C}function l(e){return f(e,_e)}function d(e){if(ge){var t;for(m=new _.Buf32(512),k=new _.Buf32(32),t=0;t<144;)e.lens[t++]=8;for(;t<256;)e.lens[t++]=9;for(;t<280;)e.lens[t++]=7;for(;t<288;)e.lens[t++]=8;for(x(S,e.lens,0,288,m,0,e.work,{bits:9}),t=0;t<32;)e.lens[t++]=5;x(E,e.lens,0,32,k,0,e.work,{bits:5}),ge=!1}e.lencode=m,e.lenbits=9,e.distcode=k,e.distbits=5}function u(e,t,i,n){var a,r=e.state;return null===r.window&&(r.wsize=1<=r.wsize?(_.arraySet(r.window,t,i-r.wsize,r.wsize,0),r.wnext=0,r.whave=r.wsize):(a=r.wsize-r.wnext,a>n&&(a=n),_.arraySet(r.window,t,i-n,a,r.wnext),n-=a,n?(_.arraySet(r.window,t,i-n,n,0),r.wnext=n,r.whave=r.wsize):(r.wnext+=a,r.wnext===r.wsize&&(r.wnext=0),r.whave>>8&255,i.check=v(i.check,Ze,2,0),c=0,h=0,i.mode=F;break}if(i.flags=0,i.head&&(i.head.done=!1),!(1&i.wrap)||(((255&c)<<8)+(c>>8))%31){e.msg="incorrect header check",i.mode=ce;break}if((15&c)!==U){e.msg="unknown compression method",i.mode=ce;break}if(c>>>=4,h-=4,xe=(15&c)+8,0===i.wbits)i.wbits=xe;else if(xe>i.wbits){e.msg="invalid window size",i.mode=ce;break}i.dmax=1<>8&1),512&i.flags&&(Ze[0]=255&c,Ze[1]=c>>>8&255,i.check=v(i.check,Ze,2,0)),c=0,h=0,i.mode=L;case L:for(;h<32;){if(0===f)break e;f--,c+=a[o++]<>>8&255,Ze[2]=c>>>16&255,Ze[3]=c>>>24&255,i.check=v(i.check,Ze,4,0)),c=0,h=0,i.mode=H;case H:for(;h<16;){if(0===f)break 
e;f--,c+=a[o++]<>8),512&i.flags&&(Ze[0]=255&c,Ze[1]=c>>>8&255,i.check=v(i.check,Ze,2,0)),c=0,h=0,i.mode=M;case M:if(1024&i.flags){for(;h<16;){if(0===f)break e;f--,c+=a[o++]<>>8&255,i.check=v(i.check,Ze,2,0)),c=0,h=0}else i.head&&(i.head.extra=null);i.mode=j;case j:if(1024&i.flags&&(m=i.length,m>f&&(m=f),m&&(i.head&&(xe=i.head.extra_len-i.length,i.head.extra||(i.head.extra=new Array(i.head.extra_len)),_.arraySet(i.head.extra,a,o,m,xe)),512&i.flags&&(i.check=v(i.check,a,m,o)),f-=m,o+=m,i.length-=m),i.length))break e;i.length=0,i.mode=K;case K:if(2048&i.flags){if(0===f)break e;m=0;do xe=a[o+m++],i.head&&xe&&i.length<65536&&(i.head.name+=String.fromCharCode(xe));while(xe&&m>9&1,i.head.done=!0),e.adler=i.check=0,i.mode=X;break;case q:for(;h<32;){if(0===f)break e;f--,c+=a[o++]<>>=7&h,h-=7&h,i.mode=le;break}for(;h<3;){if(0===f)break e;f--,c+=a[o++]<>>=1,h-=1,3&c){case 0:i.mode=J;break;case 1:if(d(i),i.mode=ie,t===A){c>>>=2,h-=2;break e}break;case 2:i.mode=$;break;case 3:e.msg="invalid block type",i.mode=ce}c>>>=2,h-=2;break;case J:for(c>>>=7&h,h-=7&h;h<32;){if(0===f)break e;f--,c+=a[o++]<>>16^65535)){e.msg="invalid stored block lengths",i.mode=ce;break}if(i.length=65535&c,c=0,h=0,i.mode=Q,t===A)break e;case Q:i.mode=V;case V:if(m=i.length){if(m>f&&(m=f),m>l&&(m=l),0===m)break e;_.arraySet(r,a,o,m,s),f-=m,o+=m,l-=m,s+=m,i.length-=m;break}i.mode=X;break;case $:for(;h<14;){if(0===f)break e;f--,c+=a[o++]<>>=5,h-=5,i.ndist=(31&c)+1,c>>>=5,h-=5,i.ncode=(15&c)+4,c>>>=4,h-=4,i.nlen>286||i.ndist>30){e.msg="too many length or distance symbols",i.mode=ce;break}i.have=0,i.mode=ee;case ee:for(;i.have>>=3,h-=3}for(;i.have<19;)i.lens[Ae[i.have++]]=0;if(i.lencode=i.lendyn,i.lenbits=7,Se={bits:i.lenbits},ye=x(y,i.lens,0,19,i.lencode,0,i.work,Se),i.lenbits=Se.bits,ye){e.msg="invalid code lengths set",i.mode=ce;break}i.have=0,i.mode=te;case te:for(;i.have>>24,ke=Be>>>16&255,_e=65535&Be,!(me<=h);){if(0===f)break e;f--,c+=a[o++]<>>=me,h-=me,i.lens[i.have++]=_e;else{if(16===_e){for(Ee=me+2;h>>=me,h-=me,0===i.have){e.msg="invalid bit length repeat",i.mode=ce;break}xe=i.lens[i.have-1],m=3+(3&c),c>>>=2,h-=2}else if(17===_e){for(Ee=me+3;h>>=me,h-=me,xe=0,m=3+(7&c),c>>>=3,h-=3}else{for(Ee=me+7;h>>=me,h-=me,xe=0,m=11+(127&c),c>>>=7,h-=7}if(i.have+m>i.nlen+i.ndist){e.msg="invalid bit length repeat",i.mode=ce;break}for(;m--;)i.lens[i.have++]=xe}}if(i.mode===ce)break;if(0===i.lens[256]){e.msg="invalid code -- missing end-of-block",i.mode=ce;break}if(i.lenbits=9,Se={bits:i.lenbits},ye=x(S,i.lens,0,i.nlen,i.lencode,0,i.work,Se),i.lenbits=Se.bits,ye){e.msg="invalid literal/lengths set",i.mode=ce;break}if(i.distbits=6,i.distcode=i.distdyn,Se={bits:i.distbits},ye=x(E,i.lens,i.nlen,i.ndist,i.distcode,0,i.work,Se),i.distbits=Se.bits,ye){e.msg="invalid distances set",i.mode=ce;break}if(i.mode=ie,t===A)break e;case ie:i.mode=ne;case ne:if(f>=6&&l>=258){e.next_out=s,e.avail_out=l,e.next_in=o,e.avail_in=f,i.hold=c,i.bits=h,p(e,w),s=e.next_out,r=e.output,l=e.avail_out,o=e.next_in,a=e.input,f=e.avail_in,c=i.hold,h=i.bits,i.mode===X&&(i.back=-1);break}for(i.back=0;Be=i.lencode[c&(1<>>24,ke=Be>>>16&255,_e=65535&Be,!(me<=h);){if(0===f)break e;f--,c+=a[o++]<>ge)],me=Be>>>24,ke=Be>>>16&255,_e=65535&Be,!(ge+me<=h);){if(0===f)break e;f--,c+=a[o++]<>>=ge,h-=ge,i.back+=ge}if(c>>>=me,h-=me,i.back+=me,i.length=_e,0===ke){i.mode=fe;break}if(32&ke){i.back=-1,i.mode=X;break}if(64&ke){e.msg="invalid literal/length code",i.mode=ce;break}i.extra=15&ke,i.mode=ae;case 
ae:if(i.extra){for(Ee=i.extra;h>>=i.extra,h-=i.extra,i.back+=i.extra}i.was=i.length,i.mode=re;case re:for(;Be=i.distcode[c&(1<>>24,ke=Be>>>16&255,_e=65535&Be,!(me<=h);){if(0===f)break e;f--,c+=a[o++]<>ge)],me=Be>>>24,ke=Be>>>16&255,_e=65535&Be,!(ge+me<=h);){if(0===f)break e;f--,c+=a[o++]<>>=ge,h-=ge,i.back+=ge}if(c>>>=me,h-=me,i.back+=me,64&ke){e.msg="invalid distance code",i.mode=ce;break}i.offset=_e,i.extra=15&ke,i.mode=oe;case oe:if(i.extra){for(Ee=i.extra;h>>=i.extra,h-=i.extra,i.back+=i.extra}if(i.offset>i.dmax){e.msg="invalid distance too far back",i.mode=ce;break}i.mode=se;case se:if(0===l)break e;if(m=w-l,i.offset>m){if(m=i.offset-m,m>i.whave&&i.sane){e.msg="invalid distance too far back",i.mode=ce;break}m>i.wnext?(m-=i.wnext,k=i.wsize-m):k=i.wnext-m,m>i.length&&(m=i.length),we=i.window}else we=r,k=s-i.offset,m=i.length;m>l&&(m=l),l-=m,i.length-=m;do r[s++]=we[k++];while(--m);0===i.length&&(i.mode=ne);break;case fe:if(0===l)break e;r[s++]=i.length,l--,i.mode=ne;break;case le:if(i.wrap){for(;h<32;){if(0===f)break e;f--,c|=a[o++]<=1&&0===M[C];C--);if(O>C&&(O=C),0===C)return w[m++]=20971520,w[m++]=20971520,_.bits=1,0;for(N=1;N0&&(e===s||1!==C))return-1;for(j[1]=0,z=1;zr||e===l&&D>o)return 1;for(var Y=0;;){Y++,E=z-T,k[R]S?(B=K[P+k[R]],Z=L[H+k[R]]):(B=96,Z=0),g=1<>T)+v]=E<<24|B<<16|Z|0;while(0!==v);for(g=1<>=1;if(0!==g?(F&=g-1,F+=g):F=0,R++,0===--M[z]){if(z===C)break;z=t[i+k[R]]}if(z>O&&(F&x)!==p){for(0===T&&(T=O),y+=N,I=z-T,U=1<r||e===l&&D>o)return 1;p=F&x,w[p]=O<<24|I<<16|y-m|0}}return 0!==F&&(w[y+F]=z-T<<24|64<<16|0),_.bits=O,0}},{"../utils/common":1}],10:[function(e,t,i){"use strict";t.exports={2:"need dictionary",1:"stream end",0:"","-1":"file error","-2":"stream error","-3":"data error","-4":"insufficient memory","-5":"buffer error","-6":"incompatible version"}},{}],11:[function(e,t,i){"use strict";function n(){this.input=null,this.next_in=0,this.avail_in=0,this.total_in=0,this.output=null,this.next_out=0,this.avail_out=0,this.total_out=0,this.msg="",this.state=null,this.data_type=2,this.adler=0}t.exports=n},{}],"/lib/inflate.js":[function(e,t,i){"use strict";function n(e){if(!(this instanceof n))return new n(e);this.options=s.assign({chunkSize:16384,windowBits:0,to:""},e||{});var t=this.options;t.raw&&t.windowBits>=0&&t.windowBits<16&&(t.windowBits=-t.windowBits,0===t.windowBits&&(t.windowBits=-15)),!(t.windowBits>=0&&t.windowBits<16)||e&&e.windowBits||(t.windowBits+=32),t.windowBits>15&&t.windowBits<48&&0===(15&t.windowBits)&&(t.windowBits|=15),this.err=0,this.msg="",this.ended=!1,this.chunks=[],this.strm=new u,this.strm.avail_out=0;var i=o.inflateInit2(this.strm,t.windowBits);if(i!==l.Z_OK)throw new Error(d[i]);this.header=new c,o.inflateGetHeader(this.strm,this.header)}function a(e,t){var i=new n(t);if(i.push(e,!0),i.err)throw i.msg;return i.result}function r(e,t){return t=t||{},t.raw=!0,a(e,t)}var o=e("./zlib/inflate"),s=e("./utils/common"),f=e("./utils/strings"),l=e("./zlib/constants"),d=e("./zlib/messages"),u=e("./zlib/zstream"),c=e("./zlib/gzheader"),h=Object.prototype.toString;n.prototype.push=function(e,t){var i,n,a,r,d,u,c=this.strm,b=this.options.chunkSize,w=this.options.dictionary,m=!1;if(this.ended)return!1;n=t===~~t?t:t===!0?l.Z_FINISH:l.Z_NO_FLUSH,"string"==typeof e?c.input=f.binstring2buf(e):"[object ArrayBuffer]"===h.call(e)?c.input=new Uint8Array(e):c.input=e,c.next_in=0,c.avail_in=c.input.length;do{if(0===c.avail_out&&(c.output=new s.Buf8(b),c.next_out=0,c.avail_out=b),i=o.inflate(c,l.Z_NO_FLUSH),i===l.Z_NEED_DICT&&w&&(u="string"==typeof 
w?f.string2buf(w):"[object ArrayBuffer]"===h.call(w)?new Uint8Array(w):w,i=o.inflateSetDictionary(this.strm,u)),i===l.Z_BUF_ERROR&&m===!0&&(i=l.Z_OK,m=!1),i!==l.Z_STREAM_END&&i!==l.Z_OK)return this.onEnd(i),this.ended=!0,!1;c.next_out&&(0!==c.avail_out&&i!==l.Z_STREAM_END&&(0!==c.avail_in||n!==l.Z_FINISH&&n!==l.Z_SYNC_FLUSH)||("string"===this.options.to?(a=f.utf8border(c.output,c.next_out),r=c.next_out-a,d=f.buf2string(c.output,a),c.next_out=r,c.avail_out=b-r,r&&s.arraySet(c.output,c.output,a,r,0),this.onData(d)):this.onData(s.shrinkBuf(c.output,c.next_out)))),0===c.avail_in&&0===c.avail_out&&(m=!0)}while((c.avail_in>0||0===c.avail_out)&&i!==l.Z_STREAM_END);return i===l.Z_STREAM_END&&(n=l.Z_FINISH),n===l.Z_FINISH?(i=o.inflateEnd(this.strm),this.onEnd(i),this.ended=!0,i===l.Z_OK):n!==l.Z_SYNC_FLUSH||(this.onEnd(l.Z_OK),c.avail_out=0,!0)},n.prototype.onData=function(e){this.chunks.push(e)},n.prototype.onEnd=function(e){e===l.Z_OK&&("string"===this.options.to?this.result=this.chunks.join(""):this.result=s.flattenChunks(this.chunks)),this.chunks=[],this.err=e,this.msg=this.strm.msg},i.Inflate=n,i.inflate=a,i.inflateRaw=r,i.ungzip=a},{"./utils/common":1,"./utils/strings":2,"./zlib/constants":4,"./zlib/gzheader":6,"./zlib/inflate":8,"./zlib/messages":10,"./zlib/zstream":11}]},{},[])("/lib/inflate.js")}); diff --git a/node_modules/pako/index.js b/node_modules/pako/index.js new file mode 100644 index 0000000..cd07251 --- /dev/null +++ b/node_modules/pako/index.js @@ -0,0 +1,14 @@ +// Top level file is just a mixin of submodules & constants +'use strict'; + +var assign = require('./lib/utils/common').assign; + +var deflate = require('./lib/deflate'); +var inflate = require('./lib/inflate'); +var constants = require('./lib/zlib/constants'); + +var pako = {}; + +assign(pako, deflate, inflate, constants); + +module.exports = pako; diff --git a/node_modules/pako/lib/deflate.js b/node_modules/pako/lib/deflate.js new file mode 100644 index 0000000..11040d4 --- /dev/null +++ b/node_modules/pako/lib/deflate.js @@ -0,0 +1,400 @@ +'use strict'; + + +var zlib_deflate = require('./zlib/deflate'); +var utils = require('./utils/common'); +var strings = require('./utils/strings'); +var msg = require('./zlib/messages'); +var ZStream = require('./zlib/zstream'); + +var toString = Object.prototype.toString; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + +var Z_NO_FLUSH = 0; +var Z_FINISH = 4; + +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_SYNC_FLUSH = 2; + +var Z_DEFAULT_COMPRESSION = -1; + +var Z_DEFAULT_STRATEGY = 0; + +var Z_DEFLATED = 8; + +/* ===========================================================================*/ + + +/** + * class Deflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[deflate]], + * [[deflateRaw]] and [[gzip]]. + **/ + +/* internal + * Deflate.chunks -> Array + * + * Chunks of output data, if [[Deflate#onData]] not overriden. + **/ + +/** + * Deflate.result -> Uint8Array|Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Deflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. 
+ * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ + +/** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + +/** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ +function Deflate(options) { + if (!(this instanceof Deflate)) return new Deflate(options); + + this.options = utils.assign({ + level: Z_DEFAULT_COMPRESSION, + method: Z_DEFLATED, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY, + to: '' + }, options || {}); + + var opt = this.options; + + if (opt.raw && (opt.windowBits > 0)) { + opt.windowBits = -opt.windowBits; + } + + else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { + opt.windowBits += 16; + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_deflate.deflateInit2( + this.strm, + opt.level, + opt.method, + opt.windowBits, + opt.memLevel, + opt.strategy + ); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + if (opt.header) { + zlib_deflate.deflateSetHeader(this.strm, opt.header); + } + + if (opt.dictionary) { + var dict; + // Convert data if needed + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. + dict = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary); + } else { + dict = opt.dictionary; + } + + status = zlib_deflate.deflateSetDictionary(this.strm, dict); + + if (status !== Z_OK) { + throw new Error(msg[status]); + } + + this._dict_set = true; + } +} + +/** + * Deflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data. 
Strings will be + * converted to utf8 byte sequence. + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the compression context. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * array format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Deflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var status, _mode; + + if (this.ended) { return false; } + + _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ + + if (status !== Z_STREAM_END && status !== Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { + if (this.options.to === 'string') { + this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); + + // Finalize on the last chunk. + if (_mode === Z_FINISH) { + status = zlib_deflate.deflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === Z_SYNC_FLUSH) { + this.onEnd(Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. 
By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK) { + if (this.options.to === 'string') { + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * deflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ +function deflate(input, options) { + var deflator = new Deflate(options); + + deflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (deflator.err) { throw deflator.msg; } + + return deflator.result; +} + + +/** + * deflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function deflateRaw(input, options) { + options = options || {}; + options.raw = true; + return deflate(input, options); +} + + +/** + * gzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. + **/ +function gzip(input, options) { + options = options || {}; + options.gzip = true; + return deflate(input, options); +} + + +exports.Deflate = Deflate; +exports.deflate = deflate; +exports.deflateRaw = deflateRaw; +exports.gzip = gzip; diff --git a/node_modules/pako/lib/inflate.js b/node_modules/pako/lib/inflate.js new file mode 100644 index 0000000..0ed4999 --- /dev/null +++ b/node_modules/pako/lib/inflate.js @@ -0,0 +1,418 @@ +'use strict'; + + +var zlib_inflate = require('./zlib/inflate'); +var utils = require('./utils/common'); +var strings = require('./utils/strings'); +var c = require('./zlib/constants'); +var msg = require('./zlib/messages'); +var ZStream = require('./zlib/zstream'); +var GZheader = require('./zlib/gzheader'); + +var toString = Object.prototype.toString; + +/** + * class Inflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[inflate]] + * and [[inflateRaw]]. + **/ + +/* internal + * inflate.chunks -> Array + * + * Chunks of output data, if [[Inflate#onData]] not overriden. + **/ + +/** + * Inflate.result -> Uint8Array|Array|String + * + * Uncompressed result, generated by default [[Inflate#onData]] + * and [[Inflate#onEnd]] handlers. 
Filled after you push last chunk + * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Inflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Inflate.err -> Number + * + * Error code after inflate finished. 0 (Z_OK) on success. + * Should be checked if broken data possible. + **/ + +/** + * Inflate.msg -> String + * + * Error message, if [[Inflate.err]] != 0 + **/ + + +/** + * new Inflate(options) + * - options (Object): zlib inflate options. + * + * Creates new inflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `windowBits` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw inflate + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * By default, when no options set, autodetect deflate/gzip data format via + * wrapper header. + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var inflate = new pako.Inflate({ level: 3}); + * + * inflate.push(chunk1, false); + * inflate.push(chunk2, true); // true -> last chunk + * + * if (inflate.err) { throw new Error(inflate.err); } + * + * console.log(inflate.result); + * ``` + **/ +function Inflate(options) { + if (!(this instanceof Inflate)) return new Inflate(options); + + this.options = utils.assign({ + chunkSize: 16384, + windowBits: 0, + to: '' + }, options || {}); + + var opt = this.options; + + // Force window size for `raw` data, if not set directly, + // because we have no header for autodetect. + if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { + opt.windowBits = -opt.windowBits; + if (opt.windowBits === 0) { opt.windowBits = -15; } + } + + // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate + if ((opt.windowBits >= 0) && (opt.windowBits < 16) && + !(options && options.windowBits)) { + opt.windowBits += 32; + } + + // Gzip header has no info about windows size, we can do autodetect only + // for deflate. So, if window size not set, force it to max when gzip possible + if ((opt.windowBits > 15) && (opt.windowBits < 48)) { + // bit 3 (16) -> gzipped data + // bit 4 (32) -> autodetect gzip/deflate + if ((opt.windowBits & 15) === 0) { + opt.windowBits |= 15; + } + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_inflate.inflateInit2( + this.strm, + opt.windowBits + ); + + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + + this.header = new GZheader(); + + zlib_inflate.inflateGetHeader(this.strm, this.header); +} + +/** + * Inflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` meansh Z_FINISH. 
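+ *   For reference, in `zlib/constants.js` from this same patch Z_NO_FLUSH is 0,
+ *   Z_SYNC_FLUSH is 2 and Z_FINISH is 4, so `push(chunk, 2)` requests an
+ *   interim flush while `push(chunk, 4)` behaves like `push(chunk, true)`.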
+ * + * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with + * new output chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the decompression context. + * + * On fail call [[Inflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Inflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var dictionary = this.options.dictionary; + var status, _mode; + var next_out_utf8, tail, utf8str; + var dict; + + // Flag to properly process Z_BUF_ERROR on testing inflate call + // when we check that all output data was flushed. + var allowBufError = false; + + if (this.ended) { return false; } + _mode = (mode === ~~mode) ? mode : ((mode === true) ? c.Z_FINISH : c.Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // Only binary strings can be decompressed on practice + strm.input = strings.binstring2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + + status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ + + if (status === c.Z_NEED_DICT && dictionary) { + // Convert data if needed + if (typeof dictionary === 'string') { + dict = strings.string2buf(dictionary); + } else if (toString.call(dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(dictionary); + } else { + dict = dictionary; + } + + status = zlib_inflate.inflateSetDictionary(this.strm, dict); + + } + + if (status === c.Z_BUF_ERROR && allowBufError === true) { + status = c.Z_OK; + allowBufError = false; + } + + if (status !== c.Z_STREAM_END && status !== c.Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + + if (strm.next_out) { + if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { + + if (this.options.to === 'string') { + + next_out_utf8 = strings.utf8border(strm.output, strm.next_out); + + tail = strm.next_out - next_out_utf8; + utf8str = strings.buf2string(strm.output, next_out_utf8); + + // move tail + strm.next_out = tail; + strm.avail_out = chunkSize - tail; + if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } + + this.onData(utf8str); + + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } + + // When no more input data, we should check that internal inflate buffers + // are flushed. The only way to do it when avail_out = 0 - run one more + // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. + // Here we set flag to process this error properly. 
+ // + // NOTE. Deflate does not return error in this case and does not needs such + // logic. + if (strm.avail_in === 0 && strm.avail_out === 0) { + allowBufError = true; + } + + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); + + if (status === c.Z_STREAM_END) { + _mode = c.Z_FINISH; + } + + // Finalize on the last chunk. + if (_mode === c.Z_FINISH) { + status = zlib_inflate.inflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === c.Z_OK; + } + + // callback interim results if Z_SYNC_FLUSH. + if (_mode === c.Z_SYNC_FLUSH) { + this.onEnd(c.Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Inflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): ouput data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Inflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Inflate#onEnd(status) -> Void + * - status (Number): inflate status. 0 (Z_OK) on success, + * other if not. + * + * Called either after you tell inflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Inflate.prototype.onEnd = function (status) { + // On success - join + if (status === c.Z_OK) { + if (this.options.to === 'string') { + // Glue & convert here, until we teach pako to send + // utf8 alligned strings to onData + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * inflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Decompress `data` with inflate/ungzip and `options`. Autodetect + * format via wrapper header by default. That's why we don't provide + * separate `ungzip` method. + * + * Supported options are: + * + * - windowBits + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) + * , output; + * + * try { + * output = pako.inflate(input); + * } catch (err) + * console.log(err); + * } + * ``` + **/ +function inflate(input, options) { + var inflator = new Inflate(options); + + inflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (inflator.err) { throw inflator.msg; } + + return inflator.result; +} + + +/** + * inflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * The same as [[inflate]], but creates raw data, without wrapper + * (header and adler32 crc). 
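+ *
+ * ##### Example (illustrative round-trip, using the exports defined in this patch):
+ *
+ * ```javascript
+ * var pako = require('pako')
+ * , raw = pako.deflateRaw([1,2,3,4,5,6,7,8,9]);
+ *
+ * // raw output carries no zlib header / adler32 trailer, so it should be
+ * // decompressed with inflateRaw rather than plain inflate
+ * console.log(pako.inflateRaw(raw));
+ * ```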
+ **/ +function inflateRaw(input, options) { + options = options || {}; + options.raw = true; + return inflate(input, options); +} + + +/** + * ungzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Just shortcut to [[inflate]], because it autodetects format + * by header.content. Done for convenience. + **/ + + +exports.Inflate = Inflate; +exports.inflate = inflate; +exports.inflateRaw = inflateRaw; +exports.ungzip = inflate; diff --git a/node_modules/pako/lib/utils/common.js b/node_modules/pako/lib/utils/common.js new file mode 100644 index 0000000..1e159fe --- /dev/null +++ b/node_modules/pako/lib/utils/common.js @@ -0,0 +1,102 @@ +'use strict'; + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (source.hasOwnProperty(p)) { + obj[p] = source[p]; + } + } + } + + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; + } +}; + +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. 
+ flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; + + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); diff --git a/node_modules/pako/lib/utils/strings.js b/node_modules/pako/lib/utils/strings.js new file mode 100644 index 0000000..1a0499c --- /dev/null +++ b/node_modules/pako/lib/utils/strings.js @@ -0,0 +1,185 @@ +// String encode/decode helpers +'use strict'; + + +var utils = require('./common'); + + +// Quick check if we can use fast array to bin string conversion +// +// - apply(Array) can fail on Android 2.2 +// - apply(Uint8Array) can fail on iOS 5.1 Safary +// +var STR_APPLY_OK = true; +var STR_APPLY_UIA_OK = true; + +try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; } +try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; } + + +// Table with utf8 lengths (calculated by first byte of sequence) +// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, +// because max possible codepoint is 0x10ffff +var _utf8len = new utils.Buf8(256); +for (var q = 0; q < 256; q++) { + _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1); +} +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start + + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 
3 : 4; + } + + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } + } + + return buf; +}; + +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // use fallback for big arrays to avoid stack overflow + if (len < 65537) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; +} + + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); + } + return buf; +}; + + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? + if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } + } + + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. +// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { + var pos; + + max = max || buf.length; + if (max > buf.length) { max = buf.length; } + + // go back from last position, until start of sequence found + pos = max - 1; + while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + + // Fuckup - very small and broken sequence, + // return max, because we should return something anyway. + if (pos < 0) { return max; } + + // If we came to start of buffer - that means vuffer is too small, + // return max too. 
+ if (pos === 0) { return max; } + + return (pos + _utf8len[buf[pos]] > max) ? pos : max; +}; diff --git a/node_modules/pako/lib/zlib/adler32.js b/node_modules/pako/lib/zlib/adler32.js new file mode 100644 index 0000000..dcefe93 --- /dev/null +++ b/node_modules/pako/lib/zlib/adler32.js @@ -0,0 +1,32 @@ +'use strict'; + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It doesn't worth to make additional optimizationa as in original. +// Small size is preferable. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; diff --git a/node_modules/pako/lib/zlib/constants.js b/node_modules/pako/lib/zlib/constants.js new file mode 100644 index 0000000..ece6700 --- /dev/null +++ b/node_modules/pako/lib/zlib/constants.js @@ -0,0 +1,50 @@ +'use strict'; + + +module.exports = { + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; diff --git a/node_modules/pako/lib/zlib/crc32.js b/node_modules/pako/lib/zlib/crc32.js new file mode 100644 index 0000000..0768b1c --- /dev/null +++ b/node_modules/pako/lib/zlib/crc32.js @@ -0,0 +1,41 @@ +'use strict'; + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. 
+var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; diff --git a/node_modules/pako/lib/zlib/deflate.js b/node_modules/pako/lib/zlib/deflate.js new file mode 100644 index 0000000..e461fb8 --- /dev/null +++ b/node_modules/pako/lib/zlib/deflate.js @@ -0,0 +1,1855 @@ +'use strict'; + +var utils = require('../utils/common'); +var trees = require('./trees'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var msg = require('./messages'); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 
9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). + */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? 
+ s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. + */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). 
+ */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. + */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. 
+ */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
+ */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. 
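+ * (Equivalently: the match found at the current position is emitted only after
+ * the search at the next position fails to produce a longer one.)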
+ */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. 
If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous 
deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. + */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. 
+ */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. 
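+ // [editor's note - illustrative, not part of upstream pako] Layout of the
+ // flat pending_buf allocated just below, which stands in for C zlib's
+ // "overlay" pointer arithmetic shown in the commented-out lines:
+ //
+ //   pending_buf (4 * lit_bufsize bytes total)
+ //   +-- offset 0               : pending compressed output bytes
+ //   +-- offset 1 * lit_bufsize : s.d_buf  (recorded match distances)
+ //   +-- offset 3 * lit_bufsize : s.l_buf  (recorded literals / lengths)
+ //
+ // The offsets themselves are assigned a few lines below; this sketch only
+ // restates them in one place.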
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + 
s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
+ */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
+ */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ diff --git a/node_modules/pako/lib/zlib/gzheader.js b/node_modules/pako/lib/zlib/gzheader.js new file mode 100644 index 0000000..300bdee --- /dev/null +++ b/node_modules/pako/lib/zlib/gzheader.js @@ -0,0 +1,40 @@ +'use strict'; + + +function GZheader() { + /* true if compressed data believed to be text */ + this.text = 0; + /* modification time */ + this.time = 0; + /* extra flags (not used when writing a gzip file) */ + this.xflags = 0; + /* operating system */ + this.os = 0; + /* pointer to extra field or Z_NULL if none */ + this.extra = null; + /* extra field length (valid if extra != Z_NULL) */ + this.extra_len = 0; // Actually, we don't need it in JS, + // but leave for few code modifications + + // + // Setup limits is not necessary because in js we should not 
preallocate memory + // for inflate use constant limit in 65536 bytes + // + + /* space at extra (only when reading header) */ + // this.extra_max = 0; + /* pointer to zero-terminated file name or Z_NULL */ + this.name = ''; + /* space at name (only when reading header) */ + // this.name_max = 0; + /* pointer to zero-terminated comment or Z_NULL */ + this.comment = ''; + /* space at comment (only when reading header) */ + // this.comm_max = 0; + /* true if there was or will be a header crc */ + this.hcrc = 0; + /* true when done reading gzip header (not used when writing a gzip file) */ + this.done = false; +} + +module.exports = GZheader; diff --git a/node_modules/pako/lib/zlib/inffast.js b/node_modules/pako/lib/zlib/inffast.js new file mode 100644 index 0000000..a419f65 --- /dev/null +++ b/node_modules/pako/lib/zlib/inffast.js @@ -0,0 +1,326 @@ +'use strict'; + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. + + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. + + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. 
+ */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defailts, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + 
output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; diff --git a/node_modules/pako/lib/zlib/inflate.js b/node_modules/pako/lib/zlib/inflate.js new file mode 100644 index 0000000..3be8b62 --- /dev/null +++ b/node_modules/pako/lib/zlib/inflate.js @@ -0,0 +1,1538 @@ +'use strict'; + + +var utils = require('../utils/common'); +var adler32 = require('./adler32'); +var crc32 = require('./crc32'); +var inflate_fast = require('./inffast'); +var inflate_table = require('./inftrees'); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested 
window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = 
windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. + If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. + + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. 
+ */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + 
state.mode = BAD; + break; + } + if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more conveniend processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited 
to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? + // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 
0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + } +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. 
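+ // [editor's note - illustrative, not part of upstream pako] Every entry in
+ // the Buf32 code tables filled by inflate_table is a packed 32-bit word,
+ // unpacked in the decode loops below as:
+ //   here_bits = here >>> 24;           // bits consumed by this entry
+ //   here_op   = (here >>> 16) & 0xff;  // operation
+ //   here_val  = here & 0xffff;         // symbol value or sub-table offset
+ // the JS analogue of the { op, bits, val } code struct in C zlib's
+ // inftrees.h.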
+ //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' instead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ + if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ diff --git a/node_modules/pako/lib/zlib/inftrees.js b/node_modules/pako/lib/zlib/inftrees.js new file mode 100644 index 0000000..16e4d52 --- /dev/null +++ b/node_modules/pako/lib/zlib/inftrees.js @@ -0,0 +1,327 @@ +'use strict'; + + +var 
utils = require('../utils/common'); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used = 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. 
The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. 
+ + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. + */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + var i = 0; + /* process all codes and make table entries */ + for (;;) { + i++; + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; } + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + 
huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += used; + opts.bits = root; + return 0; +}; diff --git a/node_modules/pako/lib/zlib/messages.js b/node_modules/pako/lib/zlib/messages.js new file mode 100644 index 0000000..d022f0f --- /dev/null +++ b/node_modules/pako/lib/zlib/messages.js @@ -0,0 +1,13 @@ +'use strict'; + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; diff --git a/node_modules/pako/lib/zlib/trees.js b/node_modules/pako/lib/zlib/trees.js new file mode 100644 index 0000000..cef5f15 --- /dev/null +++ b/node_modules/pako/lib/zlib/trees.js @@ -0,0 +1,1202 @@ +'use strict'; + + +var utils = require('../utils/common'); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. 
+ */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. + */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. 
+ */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. + */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. 
+ * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). + */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) + */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. 
+ */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. + */ + //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, + // "inconsistent bit counts"); + + for (n = 0; n <= max_code; n++) { + var len = tree[n * 2 + 1]/*.Len*/; + if (len === 0) { continue; } + /* Now reverse the bits */ + tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len); + } +} + + +/* =========================================================================== + * Initialize the various 'constant' tables. + */ +function tr_static_init() { + var n; /* iterates over tree elements */ + var bits; /* bit counter */ + var length; /* length value */ + var code; /* code value */ + var dist; /* distance index */ + var bl_count = new Array(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + /* Initialize the mapping length (0..255) -> length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5); + } + + // Now data ready and we can init static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. 
*/ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. + */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares two subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. 
+ */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. + */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. + */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. 
*/ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) 
This block is disabled in zlib defailts, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. + */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; diff --git a/node_modules/pako/lib/zlib/zstream.js b/node_modules/pako/lib/zlib/zstream.js new file mode 100644 index 0000000..2d93a39 --- /dev/null +++ b/node_modules/pako/lib/zlib/zstream.js @@ -0,0 +1,29 @@ +'use strict'; + + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; diff --git a/node_modules/pako/package.json b/node_modules/pako/package.json new file mode 100644 index 0000000..83ff566 --- /dev/null +++ b/node_modules/pako/package.json @@ -0,0 +1,40 @@ +{ + "name": "pako", + "description": "zlib port to javascript - fast, modularized, with browser support", + "version": "0.2.9", + "keywords": [ + "zlib", + "deflate", + "inflate", + "gzip" + ], + "homepage": "https://github.com/nodeca/pako", + "contributors": [ + "Andrei Tuputcyn (https://github.com/andr83)", + "Vitaly Puzrin (https://github.com/puzrin)" + ], + "files": [ + "index.js", + "dist/", + "lib/" + ], + "license": "MIT", + "repository": "nodeca/pako", + "devDependencies": { + "mocha": "1.21.5", + "benchmark": "*", + "ansi": "*", + "browserify": "*", + "eslint": "^2.1.0", + "eslint-plugin-nodeca": "~1.0.3", + "uglify-js": "*", + "istanbul": "*", + "ndoc": "*", + "lodash": "*", + "async": "*", + "grunt": "~0.4.4", + "grunt-cli": "~0.1.13", + "grunt-saucelabs": "~8.6.0", + "grunt-contrib-connect": "~0.9.0" + } +} diff --git a/node_modules/parse-glob/LICENSE b/node_modules/parse-glob/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/parse-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/parse-glob/README.md b/node_modules/parse-glob/README.md new file mode 100644 index 0000000..000ccd9 --- /dev/null +++ b/node_modules/parse-glob/README.md @@ -0,0 +1,115 @@ +# parse-glob [![NPM version](https://badge.fury.io/js/parse-glob.svg)](http://badge.fury.io/js/parse-glob) [![Build Status](https://travis-ci.org/jonschlinkert/parse-glob.svg)](https://travis-ci.org/jonschlinkert/parse-glob) + +> Parse a glob pattern into an object of tokens. + +**Changes from v1.0.0 to v3.0.4** + +* all path-related properties are now on the `path` object +* all boolean properties are now on the `is` object +* adds `base` property + +See the [properties](#properties) section for details. + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i parse-glob --save +``` + +* parses 1,000+ glob patterns in 29ms (2.3 GHz Intel Core i7) +* Extensive [unit tests](./test.js) (more than 1,000 lines), covering wildcards, globstars, character classes, brace patterns, extglobs, dotfiles and other complex patterns. + +See the tests for [hundreds of examples](./test.js). 
+ +## Usage + +```js +var parseGlob = require('parse-glob'); +``` + +**Example** + +```js +parseGlob('a/b/c/**/*.{yml,json}'); +``` + +**Returns:** + +```js +{ orig: 'a/b/c/**/*.{yml,json}', + is: + { glob: true, + negated: false, + extglob: false, + braces: true, + brackets: false, + globstar: true, + dotfile: false, + dotdir: false }, + glob: '**/*.{yml,json}', + base: 'a/b/c', + path: + { dirname: 'a/b/c/**/', + basename: '*.{yml,json}', + filename: '*', + extname: '.{yml,json}', + ext: '{yml,json}' } } +``` + +## Properties + +The object returned by parseGlob has the following properties: + +* `orig`: a copy of the original, unmodified glob pattern +* `is`: an object with boolean information about the glob: + - `glob`: true if the pattern actually a glob pattern + - `negated`: true if it's a negation pattern (`!**/foo.js`) + - `extglob`: true if it has extglobs (`@(foo|bar)`) + - `braces`: true if it has braces (`{1..2}` or `.{txt,md}`) + - `brackets`: true if it has POSIX brackets (`[[:alpha:]]`) + - `globstar`: true if the pattern has a globstar (double star, `**`) + - `dotfile`: true if the pattern should match dotfiles + - `dotdir`: true if the pattern should match dot-directories (like `.git`) +* `glob`: the glob pattern part of the string, if any +* `base`: the non-glob part of the string, if any +* `path`: file path segments + - `dirname`: directory + - `basename`: file name with extension + - `filename`: file name without extension + - `extname`: file extension with dot + - `ext`: file extension without dot + +## Related +* [glob-base](https://www.npmjs.com/package/glob-base): Returns an object with the (non-glob) base path and the actual pattern. | [homepage](https://github.com/jonschlinkert/glob-base) +* [glob-parent](https://www.npmjs.com/package/glob-parent): Strips glob magic from a string to provide the parent path | [homepage](https://github.com/es128/glob-parent) +* [glob-path-regex](https://www.npmjs.com/package/glob-path-regex): Regular expression for matching the parts of glob pattern. | [homepage](https://github.com/regexps/glob-path-regex) +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern. | [homepage](https://github.com/jonschlinkert/is-glob) +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. Just… [more](https://www.npmjs.com/package/micromatch) | [homepage](https://github.com/jonschlinkert/micromatch) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/parse-glob/issues/new). + +## Tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2014-2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on September 22, 2015._ \ No newline at end of file diff --git a/node_modules/parse-glob/index.js b/node_modules/parse-glob/index.js new file mode 100644 index 0000000..4ab691a --- /dev/null +++ b/node_modules/parse-glob/index.js @@ -0,0 +1,156 @@ +/*! + * parse-glob + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +'use strict'; + +var isGlob = require('is-glob'); +var findBase = require('glob-base'); +var extglob = require('is-extglob'); +var dotfile = require('is-dotfile'); + +/** + * Expose `cache` + */ + +var cache = module.exports.cache = {}; + +/** + * Parse a glob pattern into tokens. + * + * When no paths or '**' are in the glob, we use a + * different strategy for parsing the filename, since + * file names can contain braces and other difficult + * patterns. such as: + * + * - `*.{a,b}` + * - `(**|*.js)` + */ + +module.exports = function parseGlob(glob) { + if (cache.hasOwnProperty(glob)) { + return cache[glob]; + } + + var tok = {}; + tok.orig = glob; + tok.is = {}; + + // unescape dots and slashes in braces/brackets + glob = escape(glob); + + var parsed = findBase(glob); + tok.is.glob = parsed.isGlob; + + tok.glob = parsed.glob; + tok.base = parsed.base; + var segs = /([^\/]*)$/.exec(glob); + + tok.path = {}; + tok.path.dirname = ''; + tok.path.basename = segs[1] || ''; + tok.path.dirname = glob.split(tok.path.basename).join('') || ''; + var basename = (tok.path.basename || '').split('.') || ''; + tok.path.filename = basename[0] || ''; + tok.path.extname = basename.slice(1).join('.') || ''; + tok.path.ext = ''; + + if (isGlob(tok.path.dirname) && !tok.path.basename) { + if (!/\/$/.test(tok.glob)) { + tok.path.basename = tok.glob; + } + tok.path.dirname = tok.base; + } + + if (glob.indexOf('/') === -1 && !tok.is.globstar) { + tok.path.dirname = ''; + tok.path.basename = tok.orig; + } + + var dot = tok.path.basename.indexOf('.'); + if (dot !== -1) { + tok.path.filename = tok.path.basename.slice(0, dot); + tok.path.extname = tok.path.basename.slice(dot); + } + + if (tok.path.extname.charAt(0) === '.') { + var exts = tok.path.extname.split('.'); + tok.path.ext = exts[exts.length - 1]; + } + + // unescape dots and slashes in braces/brackets + tok.glob = unescape(tok.glob); + tok.path.dirname = unescape(tok.path.dirname); + tok.path.basename = unescape(tok.path.basename); + tok.path.filename = unescape(tok.path.filename); + tok.path.extname = unescape(tok.path.extname); + + // Booleans + var is = (glob && tok.is.glob); + tok.is.negated = glob && glob.charAt(0) === '!'; + tok.is.extglob = glob && extglob(glob); + tok.is.braces = has(is, glob, '{'); + tok.is.brackets = has(is, glob, '[:'); + tok.is.globstar = has(is, glob, '**'); + tok.is.dotfile = dotfile(tok.path.basename) || dotfile(tok.path.filename); + tok.is.dotdir = dotdir(tok.path.dirname); + return (cache[glob] = tok); +} + +/** + * Returns true if the glob matches dot-directories. + * + * @param {Object} `tok` The tokens object + * @param {Object} `path` The path object + * @return {Object} + */ + +function dotdir(base) { + if (base.indexOf('/.') !== -1) { + return true; + } + if (base.charAt(0) === '.' && base.charAt(1) !== '/') { + return true; + } + return false; +} + +/** + * Returns true if the pattern has the given `ch`aracter(s) + * + * @param {Object} `glob` The glob pattern. 
+ * @param {Object} `ch` The character to test for + * @return {Object} + */ + +function has(is, glob, ch) { + return is && glob.indexOf(ch) !== -1; +} + +/** + * Escape/unescape utils + */ + +function escape(str) { + var re = /\{([^{}]*?)}|\(([^()]*?)\)|\[([^\[\]]*?)\]/g; + return str.replace(re, function (outter, braces, parens, brackets) { + var inner = braces || parens || brackets; + if (!inner) { return outter; } + return outter.split(inner).join(esc(inner)); + }); +} + +function esc(str) { + str = str.split('/').join('__SLASH__'); + str = str.split('.').join('__DOT__'); + return str; +} + +function unescape(str) { + str = str.split('__SLASH__').join('/'); + str = str.split('__DOT__').join('.'); + return str; +} diff --git a/node_modules/parse-glob/package.json b/node_modules/parse-glob/package.json new file mode 100644 index 0000000..a4acb52 --- /dev/null +++ b/node_modules/parse-glob/package.json @@ -0,0 +1,62 @@ +{ + "name": "parse-glob", + "description": "Parse a glob pattern into an object of tokens.", + "version": "3.0.4", + "homepage": "https://github.com/jonschlinkert/parse-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/parse-glob", + "bugs": { + "url": "https://github.com/jonschlinkert/parse-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "prepublish": "browserify -o browser.js -e index.js" + }, + "dependencies": { + "glob-base": "^0.3.0", + "is-dotfile": "^1.0.0", + "is-extglob": "^1.0.0", + "is-glob": "^2.0.0" + }, + "devDependencies": { + "browserify": "^9.0.3", + "lodash": "^3.3.1", + "mocha": "*" + }, + "keywords": [ + "glob", + "match", + "bash", + "expand", + "expansion", + "expression", + "file", + "files", + "filter", + "find", + "glob", + "globbing", + "globs", + "globstar", + "match", + "matcher", + "matches", + "matching", + "path", + "pattern", + "patterns", + "regex", + "regexp", + "regular", + "shell", + "wildcard" + ] +} diff --git a/node_modules/parseurl/HISTORY.md b/node_modules/parseurl/HISTORY.md new file mode 100644 index 0000000..395041e --- /dev/null +++ b/node_modules/parseurl/HISTORY.md @@ -0,0 +1,47 @@ +1.3.1 / 2016-01-17 +================== + + * perf: enable strict mode + +1.3.0 / 2014-08-09 +================== + + * Add `parseurl.original` for parsing `req.originalUrl` with fallback + * Return `undefined` if `req.url` is `undefined` + +1.2.0 / 2014-07-21 +================== + + * Cache URLs based on original value + * Remove no-longer-needed URL mis-parse work-around + * Simplify the "fast-path" `RegExp` + +1.1.3 / 2014-07-08 +================== + + * Fix typo + +1.1.2 / 2014-07-08 +================== + + * Seriously fix Node.js 0.8 compatibility + +1.1.1 / 2014-07-08 +================== + + * Fix Node.js 0.8 compatibility + +1.1.0 / 2014-07-08 +================== + + * Incorporate URL href-only parse fast-path + +1.0.1 / 2014-03-08 +================== + + * Add missing `require` + +1.0.0 / 2014-03-08 +================== + + * Genesis from `connect` diff --git a/node_modules/parseurl/LICENSE b/node_modules/parseurl/LICENSE new file mode 100644 index 0000000..ec7dfe7 --- /dev/null +++ b/node_modules/parseurl/LICENSE @@ -0,0 +1,24 @@ + +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the 
+'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/parseurl/README.md b/node_modules/parseurl/README.md new file mode 100644 index 0000000..f4796eb --- /dev/null +++ b/node_modules/parseurl/README.md @@ -0,0 +1,120 @@ +# parseurl + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Parse a URL with memoization. + +## Install + +```bash +$ npm install parseurl +``` + +## API + +```js +var parseurl = require('parseurl') +``` + +### parseurl(req) + +Parse the URL of the given request object (looks at the `req.url` property) +and return the result. The result is the same as `url.parse` in Node.js core. +Calling this function multiple times on the same `req` where `req.url` does +not change will return a cached parsed object, rather than parsing again. + +### parseurl.original(req) + +Parse the original URL of the given request object and return the result. +This works by trying to parse `req.originalUrl` if it is a string, otherwise +parses `req.url`. The result is the same as `url.parse` in Node.js core. +Calling this function multiple times on the same `req` where `req.originalUrl` +does not change will return a cached parsed object, rather than parsing again. + +## Benchmark + +```bash +$ npm run-script bench + +> parseurl@1.3.1 bench nodejs-parseurl +> node benchmark/index.js + +> node benchmark/fullurl.js + + Parsing URL "http://localhost:8888/foo/bar?user=tj&pet=fluffy" + + 1 test completed. + 2 tests completed. + 3 tests completed. + + fasturl x 1,290,780 ops/sec ±0.46% (195 runs sampled) + nativeurl x 56,401 ops/sec ±0.22% (196 runs sampled) + parseurl x 55,231 ops/sec ±0.22% (194 runs sampled) + +> node benchmark/pathquery.js + + Parsing URL "/foo/bar?user=tj&pet=fluffy" + + 1 test completed. + 2 tests completed. + 3 tests completed. + + fasturl x 1,986,668 ops/sec ±0.27% (190 runs sampled) + nativeurl x 98,740 ops/sec ±0.21% (195 runs sampled) + parseurl x 2,628,171 ops/sec ±0.36% (195 runs sampled) + +> node benchmark/samerequest.js + + Parsing URL "/foo/bar?user=tj&pet=fluffy" on same request object + + 1 test completed. + 2 tests completed. + 3 tests completed. + + fasturl x 2,184,468 ops/sec ±0.40% (194 runs sampled) + nativeurl x 99,437 ops/sec ±0.71% (194 runs sampled) + parseurl x 10,498,005 ops/sec ±0.61% (186 runs sampled) + +> node benchmark/simplepath.js + + Parsing URL "/foo/bar" + + 1 test completed. + 2 tests completed. + 3 tests completed. 
+ + fasturl x 4,535,825 ops/sec ±0.27% (191 runs sampled) + nativeurl x 98,769 ops/sec ±0.54% (191 runs sampled) + parseurl x 4,164,865 ops/sec ±0.34% (192 runs sampled) + +> node benchmark/slash.js + + Parsing URL "/" + + 1 test completed. + 2 tests completed. + 3 tests completed. + + fasturl x 4,908,405 ops/sec ±0.42% (191 runs sampled) + nativeurl x 100,945 ops/sec ±0.59% (188 runs sampled) + parseurl x 4,333,208 ops/sec ±0.27% (194 runs sampled) +``` + +## License + + [MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/parseurl.svg +[npm-url]: https://npmjs.org/package/parseurl +[node-version-image]: https://img.shields.io/node/v/parseurl.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/pillarjs/parseurl/master.svg +[travis-url]: https://travis-ci.org/pillarjs/parseurl +[coveralls-image]: https://img.shields.io/coveralls/pillarjs/parseurl/master.svg +[coveralls-url]: https://coveralls.io/r/pillarjs/parseurl?branch=master +[downloads-image]: https://img.shields.io/npm/dm/parseurl.svg +[downloads-url]: https://npmjs.org/package/parseurl diff --git a/node_modules/parseurl/index.js b/node_modules/parseurl/index.js new file mode 100644 index 0000000..56cc6ec --- /dev/null +++ b/node_modules/parseurl/index.js @@ -0,0 +1,138 @@ +/*! + * parseurl + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var url = require('url') +var parse = url.parse +var Url = url.Url + +/** + * Pattern for a simple path case. + * See: https://github.com/joyent/node/pull/7878 + */ + +var simplePathRegExp = /^(\/\/?(?!\/)[^\?#\s]*)(\?[^#\s]*)?$/ + +/** + * Exports. + */ + +module.exports = parseurl +module.exports.original = originalurl + +/** + * Parse the `req` url with memoization. + * + * @param {ServerRequest} req + * @return {Object} + * @api public + */ + +function parseurl(req) { + var url = req.url + + if (url === undefined) { + // URL is undefined + return undefined + } + + var parsed = req._parsedUrl + + if (fresh(url, parsed)) { + // Return cached URL parse + return parsed + } + + // Parse the URL + parsed = fastparse(url) + parsed._raw = url + + return req._parsedUrl = parsed +}; + +/** + * Parse the `req` original url with fallback and memoization. + * + * @param {ServerRequest} req + * @return {Object} + * @api public + */ + +function originalurl(req) { + var url = req.originalUrl + + if (typeof url !== 'string') { + // Fallback + return parseurl(req) + } + + var parsed = req._parsedOriginalUrl + + if (fresh(url, parsed)) { + // Return cached URL parse + return parsed + } + + // Parse the URL + parsed = fastparse(url) + parsed._raw = url + + return req._parsedOriginalUrl = parsed +}; + +/** + * Parse the `str` url with fast-path short-cut. + * + * @param {string} str + * @return {Object} + * @api private + */ + +function fastparse(str) { + // Try fast path regexp + // See: https://github.com/joyent/node/pull/7878 + var simplePath = typeof str === 'string' && simplePathRegExp.exec(str) + + // Construct simple URL + if (simplePath) { + var pathname = simplePath[1] + var search = simplePath[2] || null + var url = Url !== undefined + ? new Url() + : {} + url.path = str + url.href = str + url.pathname = pathname + url.search = search + url.query = search && search.substr(1) + + return url + } + + return parse(str) +} + +/** + * Determine if parsed is still fresh for url. 
+ * + * @param {string} url + * @param {object} parsedUrl + * @return {boolean} + * @api private + */ + +function fresh(url, parsedUrl) { + return typeof parsedUrl === 'object' + && parsedUrl !== null + && (Url === undefined || parsedUrl instanceof Url) + && parsedUrl._raw === url +} diff --git a/node_modules/parseurl/package.json b/node_modules/parseurl/package.json new file mode 100644 index 0000000..6bca64c --- /dev/null +++ b/node_modules/parseurl/package.json @@ -0,0 +1,33 @@ +{ + "name": "parseurl", + "description": "parse a url with memoization", + "version": "1.3.1", + "author": "Jonathan Ong (http://jongleberry.com)", + "contributors": [ + "Douglas Christopher Wilson " + ], + "repository": "pillarjs/parseurl", + "license": "MIT", + "devDependencies": { + "benchmark": "2.0.0", + "beautify-benchmark": "0.2.4", + "fast-url-parser": "1.1.3", + "istanbul": "0.4.2", + "mocha": "~1.21.5" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "bench": "node benchmark/index.js", + "test": "mocha --check-leaks --bail --reporter spec test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --check-leaks --reporter dot test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --check-leaks --reporter spec test/" + } +} diff --git a/node_modules/path-browserify/LICENSE b/node_modules/path-browserify/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/path-browserify/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/path-browserify/index.js b/node_modules/path-browserify/index.js new file mode 100644 index 0000000..285e32d --- /dev/null +++ b/node_modules/path-browserify/index.js @@ -0,0 +1,224 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length - 1; i >= 0; i--) { + var last = parts[i]; + if (last === '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// Split a filename into [root, dir, basename, ext], unix version +// 'root' is just a slash, or nothing. +var splitPathRe = + /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; +var splitPath = function(filename) { + return splitPathRe.exec(filename).slice(1); +}; + +// path.resolve([from ...], to) +// posix version +exports.resolve = function() { + var resolvedPath = '', + resolvedAbsolute = false; + + for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + var path = (i >= 0) ? arguments[i] : process.cwd(); + + // Skip empty and invalid entries + if (typeof path !== 'string') { + throw new TypeError('Arguments to path.resolve must be strings'); + } else if (!path) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charAt(0) === '/'; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { + return !!p; + }), !resolvedAbsolute).join('/'); + + return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.'; +}; + +// path.normalize(path) +// posix version +exports.normalize = function(path) { + var isAbsolute = exports.isAbsolute(path), + trailingSlash = substr(path, -1) === '/'; + + // Normalize the path + path = normalizeArray(filter(path.split('/'), function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? 
'/' : '') + path; +}; + +// posix version +exports.isAbsolute = function(path) { + return path.charAt(0) === '/'; +}; + +// posix version +exports.join = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalize(filter(paths, function(p, index) { + if (typeof p !== 'string') { + throw new TypeError('Arguments to path.join must be strings'); + } + return p; + }).join('/')); +}; + + +// path.relative(from, to) +// posix version +exports.relative = function(from, to) { + from = exports.resolve(from).substr(1); + to = exports.resolve(to).substr(1); + + function trim(arr) { + var start = 0; + for (; start < arr.length; start++) { + if (arr[start] !== '') break; + } + + var end = arr.length - 1; + for (; end >= 0; end--) { + if (arr[end] !== '') break; + } + + if (start > end) return []; + return arr.slice(start, end - start + 1); + } + + var fromParts = trim(from.split('/')); + var toParts = trim(to.split('/')); + + var length = Math.min(fromParts.length, toParts.length); + var samePartsLength = length; + for (var i = 0; i < length; i++) { + if (fromParts[i] !== toParts[i]) { + samePartsLength = i; + break; + } + } + + var outputParts = []; + for (var i = samePartsLength; i < fromParts.length; i++) { + outputParts.push('..'); + } + + outputParts = outputParts.concat(toParts.slice(samePartsLength)); + + return outputParts.join('/'); +}; + +exports.sep = '/'; +exports.delimiter = ':'; + +exports.dirname = function(path) { + var result = splitPath(path), + root = result[0], + dir = result[1]; + + if (!root && !dir) { + // No dirname whatsoever + return '.'; + } + + if (dir) { + // It has a dirname, strip trailing slash + dir = dir.substr(0, dir.length - 1); + } + + return root + dir; +}; + + +exports.basename = function(path, ext) { + var f = splitPath(path)[2]; + // TODO: make this comparison case-insensitive on windows? + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +}; + + +exports.extname = function(path) { + return splitPath(path)[3]; +}; + +function filter (xs, f) { + if (xs.filter) return xs.filter(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + if (f(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// String.prototype.substr - negative index don't work in IE8 +var substr = 'ab'.substr(-1) === 'b' + ? 
function (str, start, len) { return str.substr(start, len) } + : function (str, start, len) { + if (start < 0) start = str.length + start; + return str.substr(start, len); + } +; diff --git a/node_modules/path-browserify/package.json b/node_modules/path-browserify/package.json new file mode 100644 index 0000000..7c91a20 --- /dev/null +++ b/node_modules/path-browserify/package.json @@ -0,0 +1,29 @@ +{ + "name": "path-browserify", + "version": "0.0.0", + "description": "the path module from node core for browsers", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~1.0.4" + }, + "scripts": { + "test": "tape test/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/path-browserify.git" + }, + "homepage": "https://github.com/substack/path-browserify", + "keywords": [ + "path", + "browser", + "browserify" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/path-browserify/readme.markdown b/node_modules/path-browserify/readme.markdown new file mode 100644 index 0000000..8ae1dd8 --- /dev/null +++ b/node_modules/path-browserify/readme.markdown @@ -0,0 +1,3 @@ +# path-browserify + +the path module from node core for browsers diff --git a/node_modules/path-is-absolute/index.js b/node_modules/path-is-absolute/index.js new file mode 100644 index 0000000..22aa6c3 --- /dev/null +++ b/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/path-is-absolute/license b/node_modules/path-is-absolute/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
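The `path-browserify` module vendored above ships with only a three-line readme, so here is a minimal usage sketch (an editorial addition, not part of the package; it assumes the module is required directly or aliased in place of Node's built-in `path` by a bundler such as browserify):

```js
var path = require('path-browserify');

// Pure-string path manipulation, mirroring Node's POSIX `path` module.
console.log(path.join('a', 'b', '..', 'c'));         // => 'a/c'
console.log(path.normalize('/foo//bar/./baz/'));     // => '/foo/bar/baz/'
console.log(path.relative('/a/b/c', '/a/d'));        // => '../../d'
console.log(path.dirname('/foo/bar/baz'));           // => '/foo/bar'
console.log(path.basename('/foo/bar.txt', '.txt'));  // => 'bar'
console.log(path.extname('index.min.js'));           // => '.js'
console.log(path.isAbsolute('home/foo'));            // => false
```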
diff --git a/node_modules/path-is-absolute/package.json b/node_modules/path-is-absolute/package.json new file mode 100644 index 0000000..91196d5 --- /dev/null +++ b/node_modules/path-is-absolute/package.json @@ -0,0 +1,43 @@ +{ + "name": "path-is-absolute", + "version": "1.0.1", + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "license": "MIT", + "repository": "sindresorhus/path-is-absolute", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "paths", + "file", + "dir", + "absolute", + "isabsolute", + "is-absolute", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim", + "is", + "detect", + "check" + ], + "devDependencies": { + "xo": "^0.16.0" + } +} diff --git a/node_modules/path-is-absolute/readme.md b/node_modules/path-is-absolute/readme.md new file mode 100644 index 0000000..8dbdf5f --- /dev/null +++ b/node_modules/path-is-absolute/readme.md @@ -0,0 +1,59 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +const pathIsAbsolute = require('path-is-absolute'); + +// Running on Linux +pathIsAbsolute('/home/foo'); +//=> true +pathIsAbsolute('C:/Users/foo'); +//=> false + +// Running on Windows +pathIsAbsolute('C:/Users/foo'); +//=> true +pathIsAbsolute('/home/foo'); +//=> false + +// Running on any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +pathIsAbsolute.posix('C:/Users/foo'); +//=> false +pathIsAbsolute.win32('C:/Users/foo'); +//=> true +pathIsAbsolute.win32('/home/foo'); +//=> false +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). + +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +POSIX specific version. + +### pathIsAbsolute.win32(path) + +Windows specific version. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/path-to-regexp/History.md b/node_modules/path-to-regexp/History.md new file mode 100644 index 0000000..7f65878 --- /dev/null +++ b/node_modules/path-to-regexp/History.md @@ -0,0 +1,36 @@ +0.1.7 / 2015-07-28 +================== + + * Fixed regression with escaped round brackets and matching groups. + +0.1.6 / 2015-06-19 +================== + + * Replace `index` feature by outputting all parameters, unnamed and named. + +0.1.5 / 2015-05-08 +================== + + * Add an index property for position in match result. 
+ +0.1.4 / 2015-03-05 +================== + + * Add license information + +0.1.3 / 2014-07-06 +================== + + * Better array support + * Improved support for trailing slash in non-ending mode + +0.1.0 / 2014-03-06 +================== + + * add options.end + +0.0.2 / 2013-02-10 +================== + + * Update to match current express + * add .license property to component.json diff --git a/node_modules/path-to-regexp/LICENSE b/node_modules/path-to-regexp/LICENSE new file mode 100644 index 0000000..983fbe8 --- /dev/null +++ b/node_modules/path-to-regexp/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/path-to-regexp/Readme.md b/node_modules/path-to-regexp/Readme.md new file mode 100644 index 0000000..95452a6 --- /dev/null +++ b/node_modules/path-to-regexp/Readme.md @@ -0,0 +1,35 @@ +# Path-to-RegExp + +Turn an Express-style path string such as `/user/:name` into a regular expression. + +**Note:** This is a legacy branch. You should upgrade to `1.x`. + +## Usage + +```javascript +var pathToRegexp = require('path-to-regexp'); +``` + +### pathToRegexp(path, keys, options) + + - **path** A string in the express format, an array of such strings, or a regular expression + - **keys** An array to be populated with the keys present in the url. Once the function completes, this will be an array of strings. + - **options** + - **options.sensitive** Defaults to false, set this to true to make routes case sensitive + - **options.strict** Defaults to false, set this to true to make the trailing slash matter. + - **options.end** Defaults to true, set this to false to only match the prefix of the URL. + +```javascript +var keys = []; +var exp = pathToRegexp('/foo/:bar', keys); +//keys = ['bar'] +//exp = /^\/foo\/(?:([^\/]+?))\/?$/i +``` + +## Live Demo + +You can see a live demo of this library in use at [express-route-tester](http://forbeslindesay.github.com/express-route-tester/). + +## License + + MIT diff --git a/node_modules/path-to-regexp/index.js b/node_modules/path-to-regexp/index.js new file mode 100644 index 0000000..500d1da --- /dev/null +++ b/node_modules/path-to-regexp/index.js @@ -0,0 +1,129 @@ +/** + * Expose `pathtoRegexp`. + */ + +module.exports = pathtoRegexp; + +/** + * Match matching groups in a regular expression. + */ +var MATCHING_GROUP_REGEXP = /\((?!\?)/g; + +/** + * Normalize the given path string, + * returning a regular expression. 
+ * + * An empty array should be passed, + * which will contain the placeholder + * key names. For example "/user/:id" will + * then contain ["id"]. + * + * @param {String|RegExp|Array} path + * @param {Array} keys + * @param {Object} options + * @return {RegExp} + * @api private + */ + +function pathtoRegexp(path, keys, options) { + options = options || {}; + keys = keys || []; + var strict = options.strict; + var end = options.end !== false; + var flags = options.sensitive ? '' : 'i'; + var extraOffset = 0; + var keysOffset = keys.length; + var i = 0; + var name = 0; + var m; + + if (path instanceof RegExp) { + while (m = MATCHING_GROUP_REGEXP.exec(path.source)) { + keys.push({ + name: name++, + optional: false, + offset: m.index + }); + } + + return path; + } + + if (Array.isArray(path)) { + // Map array parts into regexps and return their source. We also pass + // the same keys and options instance into every generation to get + // consistent matching groups before we join the sources together. + path = path.map(function (value) { + return pathtoRegexp(value, keys, options).source; + }); + + return new RegExp('(?:' + path.join('|') + ')', flags); + } + + path = ('^' + path + (strict ? '' : path[path.length - 1] === '/' ? '?' : '/?')) + .replace(/\/\(/g, '/(?:') + .replace(/([\/\.])/g, '\\$1') + .replace(/(\\\/)?(\\\.)?:(\w+)(\(.*?\))?(\*)?(\?)?/g, function (match, slash, format, key, capture, star, optional, offset) { + slash = slash || ''; + format = format || ''; + capture = capture || '([^\\/' + format + ']+?)'; + optional = optional || ''; + + keys.push({ + name: key, + optional: !!optional, + offset: offset + extraOffset + }); + + var result = '' + + (optional ? '' : slash) + + '(?:' + + format + (optional ? slash : '') + capture + + (star ? '((?:[\\/' + format + '].+?)?)' : '') + + ')' + + optional; + + extraOffset += result.length - match.length; + + return result; + }) + .replace(/\*/g, function (star, index) { + var len = keys.length + + while (len-- > keysOffset && keys[len].offset > index) { + keys[len].offset += 3; // Replacement length minus asterisk length. + } + + return '(.*)'; + }); + + // This is a workaround for handling unnamed matching groups. + while (m = MATCHING_GROUP_REGEXP.exec(path)) { + var escapeCount = 0; + var index = m.index; + + while (path.charAt(--index) === '\\') { + escapeCount++; + } + + // It's possible to escape the bracket. + if (escapeCount % 2 === 1) { + continue; + } + + if (keysOffset + i === keys.length || keys[keysOffset + i].offset > m.index) { + keys.splice(keysOffset + i, 0, { + name: name++, // Unnamed matching groups must be consistently linear. + optional: false, + offset: m.index + }); + } + + i++; + } + + // If the path is non-ending, match until the end or a slash. + path += (end ? '$' : (path[path.length - 1] === '/' ? 
'' : '(?=\\/|$)')); + + return new RegExp(path, flags); +}; diff --git a/node_modules/path-to-regexp/package.json b/node_modules/path-to-regexp/package.json new file mode 100644 index 0000000..d4e51b5 --- /dev/null +++ b/node_modules/path-to-regexp/package.json @@ -0,0 +1,30 @@ +{ + "name": "path-to-regexp", + "description": "Express style path to RegExp utility", + "version": "0.1.7", + "files": [ + "index.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "keywords": [ + "express", + "regexp" + ], + "component": { + "scripts": { + "path-to-regexp": "index.js" + } + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/component/path-to-regexp.git" + }, + "devDependencies": { + "mocha": "^1.17.1", + "istanbul": "^0.2.6" + } +} diff --git a/node_modules/pbkdf2-compat/.npmignore b/node_modules/pbkdf2-compat/.npmignore new file mode 100644 index 0000000..da23d0d --- /dev/null +++ b/node_modules/pbkdf2-compat/.npmignore @@ -0,0 +1,25 @@ +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# Deployed apps should consider commenting this line out: +# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git +node_modules diff --git a/node_modules/pbkdf2-compat/.travis.yml b/node_modules/pbkdf2-compat/.travis.yml new file mode 100644 index 0000000..4d69563 --- /dev/null +++ b/node_modules/pbkdf2-compat/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +before_install: + - "npm install npm -g" +node_js: + - "0.10" +env: + - TEST_SUITE=test + - TEST_SUITE=coveralls +script: "npm run-script $TEST_SUITE" diff --git a/node_modules/pbkdf2-compat/LICENSE b/node_modules/pbkdf2-compat/LICENSE new file mode 100644 index 0000000..a115b52 --- /dev/null +++ b/node_modules/pbkdf2-compat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Daniel Cousens + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/pbkdf2-compat/README.md b/node_modules/pbkdf2-compat/README.md new file mode 100644 index 0000000..e03cef1 --- /dev/null +++ b/node_modules/pbkdf2-compat/README.md @@ -0,0 +1,22 @@ +# pbkdf2-compat + +[![build status](https://secure.travis-ci.org/dcousens/pbkdf2-compat.png)](http://travis-ci.org/dcousens/pbkdf2-compat) +[![Coverage Status](https://img.shields.io/coveralls/dcousens/pbkdf2-compat.svg)](https://coveralls.io/r/dcousens/pbkdf2-compat) +[![Version](http://img.shields.io/npm/v/pbkdf2-compat.svg)](https://www.npmjs.org/package/pbkdf2-compat) + +This library provides the functionality of PBKDF2 with the ability to use any supported hashing algorithm returned from `crypto.getHashes()` + + +## Usage + +``` +var compat = require('pbkd2f-compat') +var derivedKey = compat.pbkdf2Sync('password', 'salt', 1, 32, 'sha512') + +... +``` + + +## Credits + +This module is a derivative of https://github.com/cryptocoinjs/pbkdf2-sha256/, so thanks to [JP Richardson](https://github.com/cryptocoinjs/pbkdf2-sha256/) for laying the ground work. diff --git a/node_modules/pbkdf2-compat/index.js b/node_modules/pbkdf2-compat/index.js new file mode 100644 index 0000000..2af1931 --- /dev/null +++ b/node_modules/pbkdf2-compat/index.js @@ -0,0 +1,12 @@ +var crypto = require('crypto') + +var exportFn = require('./pbkdf2') +var exported = exportFn(crypto) + +module.exports = { + pbkdf2: exported.pbkdf2, + pbkdf2Sync: exported.pbkdf2Sync, + + // for crypto-browserify + __pbkdf2Export: exportFn +} diff --git a/node_modules/pbkdf2-compat/package.json b/node_modules/pbkdf2-compat/package.json new file mode 100644 index 0000000..5f4e91c --- /dev/null +++ b/node_modules/pbkdf2-compat/package.json @@ -0,0 +1,33 @@ +{ + "name": "pbkdf2-compat", + "version": "2.0.1", + "description": "Provides the functionality of PBKDF2 with the ability to use any natively supported Node crypto hashing algorithm.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mocha": "^1.21.4", + "istanbul": "^0.3.2" + }, + "scripts": { + "coverage": "istanbul cover mocha -- test/*.js", + "test": "istanbul test mocha -- --reporter list test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/dcousens/pbkdf2-compat.git" + }, + "keywords": [ + "pbkdf2", + "kdf", + "salt", + "hash" + ], + "author": "Daniel Cousens", + "license": "MIT", + "bugs": { + "url": "https://github.com/dcousens/pbkdf2-compat/issues" + }, + "homepage": "https://github.com/dcousens/pbkdf2-compat" +} diff --git a/node_modules/pbkdf2-compat/pbkdf2.js b/node_modules/pbkdf2-compat/pbkdf2.js new file mode 100644 index 0000000..cf8b2d8 --- /dev/null +++ b/node_modules/pbkdf2-compat/pbkdf2.js @@ -0,0 +1,84 @@ +module.exports = function(crypto) { + function pbkdf2(password, salt, iterations, keylen, digest, callback) { + if ('function' === typeof digest) { + callback = digest + digest = undefined + } + + if ('function' !== typeof callback) + throw new Error('No callback provided to pbkdf2') + + setTimeout(function() { + var result + + try { + result = pbkdf2Sync(password, salt, iterations, keylen, digest) + } catch (e) { + return callback(e) + } + + callback(undefined, result) + }) + } + + function pbkdf2Sync(password, salt, iterations, keylen, digest) { + if ('number' !== typeof iterations) + throw new TypeError('Iterations not a number') + + if (iterations < 0) + throw new TypeError('Bad iterations') + + if ('number' !== typeof keylen) + throw new TypeError('Key length not a number') + + if (keylen 
< 0) + throw new TypeError('Bad key length') + + digest = digest || 'sha1' + + if (!Buffer.isBuffer(password)) password = new Buffer(password) + if (!Buffer.isBuffer(salt)) salt = new Buffer(salt) + + var hLen, l = 1, r, T + var DK = new Buffer(keylen) + var block1 = new Buffer(salt.length + 4) + salt.copy(block1, 0, 0, salt.length) + + for (var i = 1; i <= l; i++) { + block1.writeUInt32BE(i, salt.length) + + var U = crypto.createHmac(digest, password).update(block1).digest() + + if (!hLen) { + hLen = U.length + T = new Buffer(hLen) + l = Math.ceil(keylen / hLen) + r = keylen - (l - 1) * hLen + + if (keylen > (Math.pow(2, 32) - 1) * hLen) + throw new TypeError('keylen exceeds maximum length') + } + + U.copy(T, 0, 0, hLen) + + for (var j = 1; j < iterations; j++) { + U = crypto.createHmac(digest, password).update(U).digest() + + for (var k = 0; k < hLen; k++) { + T[k] ^= U[k] + } + } + + var destPos = (i - 1) * hLen + var len = (i == l ? r : hLen) + T.copy(DK, destPos, 0, len) + } + + return DK + } + + return { + pbkdf2: pbkdf2, + pbkdf2Sync: pbkdf2Sync + } +} diff --git a/node_modules/pbkdf2-compat/test/fixtures.json b/node_modules/pbkdf2-compat/test/fixtures.json new file mode 100644 index 0000000..ff053ef --- /dev/null +++ b/node_modules/pbkdf2-compat/test/fixtures.json @@ -0,0 +1,111 @@ +{ + "valid": [ + { + "key": "password", + "salt": "salt", + "iterations": 1, + "dkLen": 32, + "results": { + "sha1": "0c60c80f961f0e71f3a9b524af6012062fe037a6e0f0eb94fe8fc46bdc637164", + "sha256": "120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b", + "sha512": "867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5d513554e1c8cf252" + } + }, + { + "key": "password", + "salt": "salt", + "iterations": 2, + "dkLen": 32, + "results": { + "sha1": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957cae93136266537a8d7bf4b76", + "sha256": "ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43", + "sha512": "e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f0040713f18aefdb866d53c" + } + }, + { + "key": "password", + "salt": "salt", + "iterations": 1, + "dkLen": 64, + "results": { + "sha1": "0c60c80f961f0e71f3a9b524af6012062fe037a6e0f0eb94fe8fc46bdc637164ac2e7a8e3f9d2e83ace57e0d50e5e1071367c179bc86c767fc3f78ddb561363f", + "sha256": "120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b4dbf3a2f3dad3377264bb7b8e8330d4efc7451418617dabef683735361cdc18c", + "sha512": "867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5d513554e1c8cf252c02d470a285a0501bad999bfe943c08f050235d7d68b1da55e63f73b60a57fce" + } + }, + { + "key": "password", + "salt": "salt", + "iterations": 2, + "dkLen": 64, + "results": { + "sha1": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957cae93136266537a8d7bf4b76c51094cc1ae010b19923ddc4395cd064acb023ffd1edd5ef4be8ffe61426c28e", + "sha256": "ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43830651afcb5c862f0b249bd031f7a67520d136470f5ec271ece91c07773253d9", + "sha512": "e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f0040713f18aefdb866d53cf76cab2868a39b9f7840edce4fef5a82be67335c77a6068e04112754f27ccf4e" + } + }, + { + "key": "password", + "salt": "salt", + "iterations": 4096, + "dkLen": 32, + "results": { + "sha1": "4b007901b765489abead49d926f721d065a429c12e463f6c4cd79401085b03db", + "sha256": "c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a", + "sha512": "d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f87f6902e072f457b5" + } + }, + { + "key": "passwordPASSWORDpassword", + "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt", + "iterations": 4096, + "dkLen": 40, + "results": { + 
"sha1": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038b6b89a48612c5a25284e6605e12329", + "sha256": "348c89dbcbd32b2f32d814b8116e84cf2b17347ebc1800181c4e2a1fb8dd53e1c635518c7dac47e9", + "sha512": "8c0511f4c6e597c6ac6315d8f0362e225f3c501495ba23b868c005174dc4ee71115b59f9e60cd953" + } + }, + { + "key": "pass\u00000word", + "salt": "sa\u00000lt", + "iterations": 4096, + "dkLen": 16, + "results": { + "sha1": "345cbad979dfccb90cac5257bea6ea46", + "sha256": "1df6274d3c0bd2fc7f54fb46f149dda4", + "sha512": "336d14366099e8aac2c46c94a8f178d2" + } + } + ], + "invalid": [ + { + "key": "password", + "salt": "salt", + "iterations": "NaN", + "dkLen": 16, + "exception": "Iterations not a number" + }, + { + "key": "password", + "salt": "salt", + "iterations": -1, + "dkLen": 16, + "exception": "Bad iterations" + }, + { + "key": "password", + "salt": "salt", + "iterations": 1, + "dkLen": "NaN", + "exception": "Key length not a number" + }, + { + "key": "password", + "salt": "salt", + "iterations": 1, + "dkLen": -1, + "exception": "Bad key length" + } + ] +} diff --git a/node_modules/pbkdf2-compat/test/index.js b/node_modules/pbkdf2-compat/test/index.js new file mode 100644 index 0000000..f61ad2d --- /dev/null +++ b/node_modules/pbkdf2-compat/test/index.js @@ -0,0 +1,75 @@ +var assert = require('assert') +var compat = require('../') + +var fixtures = require('./fixtures') + +// SHA-1 vectors generated by Node.js +// SHA-256/SHA-512 test vectors from: +// https://stackoverflow.com/questions/5130513/pbkdf2-hmac-sha2-test-vectors +// https://stackoverflow.com/questions/15593184/pbkdf2-hmac-sha-512-test-vectors + +describe('pbkdf2-compat', function() { + describe('pbkdf2', function() { + ;['sha1', 'sha256', 'sha512'].forEach(function(algorithm) { + describe(algorithm, function() { + fixtures.valid.forEach(function(f, i) { + var expected = f.results[algorithm] + + it('Async test case ' + i + ' for ' + algorithm + ' matches ' + expected, function(done) { + compat.pbkdf2(f.key, f.salt, f.iterations, f.dkLen, algorithm, function(err, result) { + assert.equal(result.toString('hex'), expected) + + done() + }) + }) + }) + + fixtures.invalid.forEach(function(f) { + it('should throw ' + f.exception, function(done) { + compat.pbkdf2(f.key, f.salt, f.iterations, f.dkLen, f.algo, function(err) { + assert(new RegExp(f.exception).test(err)) + + done() + }) + }) + }) + }) + }) + + it('should throw if no callback is provided', function() { + assert.throws(function() { + compat.pbkdf2('password', 'salt', 1, 32, 'sha1') + }, /No callback provided to pbkdf2/) + }) + }) + + describe('pbkdf2Sync', function() { + it('defaults to sha1', function() { + var result = compat.pbkdf2Sync('password', 'salt', 1, 32) + + assert.equal(result.toString('hex'), "0c60c80f961f0e71f3a9b524af6012062fe037a6e0f0eb94fe8fc46bdc637164") + }) + + ;['sha1', 'sha256', 'sha512'].forEach(function(algorithm) { + describe(algorithm, function() { + fixtures.valid.forEach(function(f, i) { + var expected = f.results[algorithm] + + it('Test case ' + i + ' for ' + algorithm + ' matches ' + expected, function() { + var result = compat.pbkdf2Sync(f.key, f.salt, f.iterations, f.dkLen, algorithm) + + assert.equal(result.toString('hex'), expected) + }) + }) + + fixtures.invalid.forEach(function(f) { + it('should throw ' + f.exception, function() { + assert.throws(function() { + compat.pbkdf2Sync(f.key, f.salt, f.iterations, f.dkLen, f.algo) + }, new RegExp(f.exception)) + }) + }) + }) + }) + }) +}) diff --git a/node_modules/pinkie-promise/index.js 
b/node_modules/pinkie-promise/index.js new file mode 100644 index 0000000..777377a --- /dev/null +++ b/node_modules/pinkie-promise/index.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = typeof Promise === 'function' ? Promise : require('pinkie'); diff --git a/node_modules/pinkie-promise/license b/node_modules/pinkie-promise/license new file mode 100644 index 0000000..1aeb74f --- /dev/null +++ b/node_modules/pinkie-promise/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/pinkie-promise/package.json b/node_modules/pinkie-promise/package.json new file mode 100644 index 0000000..3515f8f --- /dev/null +++ b/node_modules/pinkie-promise/package.json @@ -0,0 +1,35 @@ +{ + "name": "pinkie-promise", + "version": "2.0.1", + "description": "ES2015 Promise ponyfill", + "license": "MIT", + "repository": "floatdrop/pinkie-promise", + "author": { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com", + "url": "github.com/floatdrop" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "promises", + "es2015", + "es6", + "polyfill", + "ponyfill" + ], + "dependencies": { + "pinkie": "^2.0.0" + }, + "devDependencies": { + "mocha": "*" + } +} diff --git a/node_modules/pinkie-promise/readme.md b/node_modules/pinkie-promise/readme.md new file mode 100644 index 0000000..78477f4 --- /dev/null +++ b/node_modules/pinkie-promise/readme.md @@ -0,0 +1,28 @@ +# pinkie-promise [![Build Status](https://travis-ci.org/floatdrop/pinkie-promise.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie-promise) + +> [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) ponyfill + +Module exports global Promise object (if available) or [`pinkie`](http://github.com/floatdrop/pinkie) Promise polyfill. 
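A minimal, hedged sketch of how the ponyfill is typically consumed (the `delay` helper below is purely illustrative and not part of the package): because the Promise implementation is required locally rather than patched onto the global object, the same code runs on runtimes with or without a native `Promise`.

```js
// Use the ponyfill: native Promise when available, pinkie otherwise.
var Promise = require('pinkie-promise');

// Hypothetical helper for illustration only.
function delay(ms, value) {
  return new Promise(function (resolve) {
    setTimeout(function () {
      resolve(value);
    }, ms);
  });
}

delay(100, 'ready').then(function (result) {
  console.log(result); //=> 'ready'
});
```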
+ +## Install + +``` +$ npm install --save pinkie-promise +``` + +## Usage + +```js +var Promise = require('pinkie-promise'); + +new Promise(function (resolve) { resolve('unicorns'); }); +//=> Promise { 'unicorns' } +``` + +## Related + +- [pify](https://github.com/sindresorhus/pify) - Promisify a callback-style function + +## License + +MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop) diff --git a/node_modules/pinkie/index.js b/node_modules/pinkie/index.js new file mode 100644 index 0000000..14ce1bf --- /dev/null +++ b/node_modules/pinkie/index.js @@ -0,0 +1,292 @@ +'use strict'; + +var PENDING = 'pending'; +var SETTLED = 'settled'; +var FULFILLED = 'fulfilled'; +var REJECTED = 'rejected'; +var NOOP = function () {}; +var isNode = typeof global !== 'undefined' && typeof global.process !== 'undefined' && typeof global.process.emit === 'function'; + +var asyncSetTimer = typeof setImmediate === 'undefined' ? setTimeout : setImmediate; +var asyncQueue = []; +var asyncTimer; + +function asyncFlush() { + // run promise callbacks + for (var i = 0; i < asyncQueue.length; i++) { + asyncQueue[i][0](asyncQueue[i][1]); + } + + // reset async asyncQueue + asyncQueue = []; + asyncTimer = false; +} + +function asyncCall(callback, arg) { + asyncQueue.push([callback, arg]); + + if (!asyncTimer) { + asyncTimer = true; + asyncSetTimer(asyncFlush, 0); + } +} + +function invokeResolver(resolver, promise) { + function resolvePromise(value) { + resolve(promise, value); + } + + function rejectPromise(reason) { + reject(promise, reason); + } + + try { + resolver(resolvePromise, rejectPromise); + } catch (e) { + rejectPromise(e); + } +} + +function invokeCallback(subscriber) { + var owner = subscriber.owner; + var settled = owner._state; + var value = owner._data; + var callback = subscriber[settled]; + var promise = subscriber.then; + + if (typeof callback === 'function') { + settled = FULFILLED; + try { + value = callback(value); + } catch (e) { + reject(promise, e); + } + } + + if (!handleThenable(promise, value)) { + if (settled === FULFILLED) { + resolve(promise, value); + } + + if (settled === REJECTED) { + reject(promise, value); + } + } +} + +function handleThenable(promise, value) { + var resolved; + + try { + if (promise === value) { + throw new TypeError('A promises callback cannot return that same promise.'); + } + + if (value && (typeof value === 'function' || typeof value === 'object')) { + // then should be retrieved only once + var then = value.then; + + if (typeof then === 'function') { + then.call(value, function (val) { + if (!resolved) { + resolved = true; + + if (value === val) { + fulfill(promise, val); + } else { + resolve(promise, val); + } + } + }, function (reason) { + if (!resolved) { + resolved = true; + + reject(promise, reason); + } + }); + + return true; + } + } + } catch (e) { + if (!resolved) { + reject(promise, e); + } + + return true; + } + + return false; +} + +function resolve(promise, value) { + if (promise === value || !handleThenable(promise, value)) { + fulfill(promise, value); + } +} + +function fulfill(promise, value) { + if (promise._state === PENDING) { + promise._state = SETTLED; + promise._data = value; + + asyncCall(publishFulfillment, promise); + } +} + +function reject(promise, reason) { + if (promise._state === PENDING) { + promise._state = SETTLED; + promise._data = reason; + + asyncCall(publishRejection, promise); + } +} + +function publish(promise) { + promise._then = promise._then.forEach(invokeCallback); +} + +function publishFulfillment(promise) { 
+ promise._state = FULFILLED; + publish(promise); +} + +function publishRejection(promise) { + promise._state = REJECTED; + publish(promise); + if (!promise._handled && isNode) { + global.process.emit('unhandledRejection', promise._data, promise); + } +} + +function notifyRejectionHandled(promise) { + global.process.emit('rejectionHandled', promise); +} + +/** + * @class + */ +function Promise(resolver) { + if (typeof resolver !== 'function') { + throw new TypeError('Promise resolver ' + resolver + ' is not a function'); + } + + if (this instanceof Promise === false) { + throw new TypeError('Failed to construct \'Promise\': Please use the \'new\' operator, this object constructor cannot be called as a function.'); + } + + this._then = []; + + invokeResolver(resolver, this); +} + +Promise.prototype = { + constructor: Promise, + + _state: PENDING, + _then: null, + _data: undefined, + _handled: false, + + then: function (onFulfillment, onRejection) { + var subscriber = { + owner: this, + then: new this.constructor(NOOP), + fulfilled: onFulfillment, + rejected: onRejection + }; + + if ((onRejection || onFulfillment) && !this._handled) { + this._handled = true; + if (this._state === REJECTED && isNode) { + asyncCall(notifyRejectionHandled, this); + } + } + + if (this._state === FULFILLED || this._state === REJECTED) { + // already resolved, call callback async + asyncCall(invokeCallback, subscriber); + } else { + // subscribe + this._then.push(subscriber); + } + + return subscriber.then; + }, + + catch: function (onRejection) { + return this.then(null, onRejection); + } +}; + +Promise.all = function (promises) { + if (!Array.isArray(promises)) { + throw new TypeError('You must pass an array to Promise.all().'); + } + + return new Promise(function (resolve, reject) { + var results = []; + var remaining = 0; + + function resolver(index) { + remaining++; + return function (value) { + results[index] = value; + if (!--remaining) { + resolve(results); + } + }; + } + + for (var i = 0, promise; i < promises.length; i++) { + promise = promises[i]; + + if (promise && typeof promise.then === 'function') { + promise.then(resolver(i), reject); + } else { + results[i] = promise; + } + } + + if (!remaining) { + resolve(results); + } + }); +}; + +Promise.race = function (promises) { + if (!Array.isArray(promises)) { + throw new TypeError('You must pass an array to Promise.race().'); + } + + return new Promise(function (resolve, reject) { + for (var i = 0, promise; i < promises.length; i++) { + promise = promises[i]; + + if (promise && typeof promise.then === 'function') { + promise.then(resolve, reject); + } else { + resolve(promise); + } + } + }); +}; + +Promise.resolve = function (value) { + if (value && typeof value === 'object' && value.constructor === Promise) { + return value; + } + + return new Promise(function (resolve) { + resolve(value); + }); +}; + +Promise.reject = function (reason) { + return new Promise(function (resolve, reject) { + reject(reason); + }); +}; + +module.exports = Promise; diff --git a/node_modules/pinkie/license b/node_modules/pinkie/license new file mode 100644 index 0000000..1aeb74f --- /dev/null +++ b/node_modules/pinkie/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Vsevolod Strukchinsky (github.com/floatdrop) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, 
modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/pinkie/package.json b/node_modules/pinkie/package.json new file mode 100644 index 0000000..cb9057b --- /dev/null +++ b/node_modules/pinkie/package.json @@ -0,0 +1,36 @@ +{ + "name": "pinkie", + "version": "2.0.4", + "description": "Itty bitty little widdle twinkie pinkie ES2015 Promise implementation", + "license": "MIT", + "repository": "floatdrop/pinkie", + "author": { + "name": "Vsevolod Strukchinsky", + "email": "floatdrop@gmail.com", + "url": "github.com/floatdrop" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && nyc mocha", + "coverage": "nyc report --reporter=text-lcov | coveralls" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promise", + "promises", + "es2015", + "es6" + ], + "devDependencies": { + "core-assert": "^0.1.1", + "coveralls": "^2.11.4", + "mocha": "*", + "nyc": "^3.2.2", + "promises-aplus-tests": "*", + "xo": "^0.10.1" + } +} diff --git a/node_modules/pinkie/readme.md b/node_modules/pinkie/readme.md new file mode 100644 index 0000000..1565f95 --- /dev/null +++ b/node_modules/pinkie/readme.md @@ -0,0 +1,83 @@ +

+# pinkie

+ +> Itty bitty little widdle twinkie pinkie [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation + +[![Build Status](https://travis-ci.org/floatdrop/pinkie.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie) [![Coverage Status](https://coveralls.io/repos/floatdrop/pinkie/badge.svg?branch=master&service=github)](https://coveralls.io/github/floatdrop/pinkie?branch=master) + +There are [tons of Promise implementations](https://github.com/promises-aplus/promises-spec/blob/master/implementations.md#standalone) out there, but all of them focus on browser compatibility and are often bloated with functionality. + +This module is an exact Promise specification polyfill (like [native-promise-only](https://github.com/getify/native-promise-only)), but in Node.js land (it should be browserify-able though). + + +## Install + +``` +$ npm install --save pinkie +``` + + +## Usage + +```js +var fs = require('fs'); +var Promise = require('pinkie'); + +new Promise(function (resolve, reject) { + fs.readFile('foo.json', 'utf8', function (err, data) { + if (err) { + reject(err); + return; + } + + resolve(data); + }); +}); +//=> Promise +``` + + +### API + +`pinkie` exports bare [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation and polyfills [Node.js rejection events](https://nodejs.org/api/process.html#process_event_unhandledrejection). In case you forgot: + +#### new Promise(executor) + +Returns new instance of `Promise`. + +##### executor + +*Required* +Type: `function` + +Function with two arguments `resolve` and `reject`. The first argument fulfills the promise, the second argument rejects it. + +#### pinkie.all(promises) + +Returns a promise that resolves when all of the promises in the `promises` Array argument have resolved. + +#### pinkie.race(promises) + +Returns a promise that resolves or rejects as soon as one of the promises in the `promises` Array resolves or rejects, with the value or reason from that promise. + +#### pinkie.reject(reason) + +Returns a Promise object that is rejected with the given `reason`. + +#### pinkie.resolve(value) + +Returns a Promise object that is resolved with the given `value`. If the `value` is a thenable (i.e. has a then method), the returned promise will "follow" that thenable, adopting its eventual state; otherwise the returned promise will be fulfilled with the `value`. 
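To make the API section above concrete, here is a short sketch (values and timings are illustrative only) exercising `Promise.all`, `Promise.race`, `Promise.resolve` and `Promise.reject` from `pinkie`:

```js
var Promise = require('pinkie');

var fast = Promise.resolve('fast');
var slow = new Promise(function (resolve) {
  setTimeout(function () {
    resolve('slow');
  }, 50);
});

Promise.all([fast, slow]).then(function (results) {
  console.log(results); //=> ['fast', 'slow'] once both have settled
});

Promise.race([fast, slow]).then(function (winner) {
  console.log(winner); //=> 'fast', since it is already resolved
});

Promise.reject(new Error('nope')).catch(function (err) {
  console.log(err.message); //=> 'nope'
});
```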
+ + +## Related + +- [pinkie-promise](https://github.com/floatdrop/pinkie-promise) - Returns the native Promise or this module + + +## License + +MIT © [Vsevolod Strukchinsky](http://github.com/floatdrop) diff --git a/node_modules/preserve/.gitattributes b/node_modules/preserve/.gitattributes new file mode 100644 index 0000000..759c2c5 --- /dev/null +++ b/node_modules/preserve/.gitattributes @@ -0,0 +1,14 @@ +# Enforce Unix newlines +*.* text eol=lf +*.css text eol=lf +*.html text eol=lf +*.js text eol=lf +*.json text eol=lf +*.less text eol=lf +*.md text eol=lf +*.yml text eol=lf + +*.jpg binary +*.gif binary +*.png binary +*.jpeg binary \ No newline at end of file diff --git a/node_modules/preserve/.jshintrc b/node_modules/preserve/.jshintrc new file mode 100644 index 0000000..e72045d --- /dev/null +++ b/node_modules/preserve/.jshintrc @@ -0,0 +1,24 @@ +{ + "asi": false, + "boss": true, + "curly": true, + "eqeqeq": true, + "eqnull": true, + "esnext": true, + "immed": true, + "latedef": true, + "laxcomma": false, + "newcap": true, + "noarg": true, + "node": true, + "sub": true, + "undef": true, + "unused": true, + "globals": { + "define": true, + "before": true, + "after": true, + "describe": true, + "it": true + } +} \ No newline at end of file diff --git a/node_modules/preserve/.npmignore b/node_modules/preserve/.npmignore new file mode 100644 index 0000000..1a2e47f --- /dev/null +++ b/node_modules/preserve/.npmignore @@ -0,0 +1,53 @@ +# Numerous always-ignore extensions +*.csv +*.dat +*.diff +*.err +*.gz +*.log +*.orig +*.out +*.pid +*.rar +*.rej +*.seed +*.swo +*.swp +*.vi +*.yo-rc.json +*.zip +*~ +.ruby-version +lib-cov +npm-debug.log + +# Always-ignore dirs +/bower_components/ +/node_modules/ +/temp/ +/tmp/ +/vendor/ +_gh_pages + +# OS or Editor folders +*.esproj +*.komodoproject +.komodotools +*.sublime-* +._* +.cache +.DS_Store +.idea +.project +.settings +.tmproj +nbproject +Thumbs.db + +# grunt-html-validation +validation-status.json +validation-report.json + +# misc +TODO.md +benchmark \ No newline at end of file diff --git a/node_modules/preserve/.travis.yml b/node_modules/preserve/.travis.yml new file mode 100644 index 0000000..ff74a05 --- /dev/null +++ b/node_modules/preserve/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - '0.10' \ No newline at end of file diff --git a/node_modules/preserve/.verb.md b/node_modules/preserve/.verb.md new file mode 100644 index 0000000..72344a9 --- /dev/null +++ b/node_modules/preserve/.verb.md @@ -0,0 +1,59 @@ +# {%= name %} {%= badge("fury") %} + +> {%= description %} + +Useful for protecting tokens, like templates in HTML, from being mutated when the string is transformed in some way, like from a formatter/beautifier. + +**Example without `preserve`** + +Let's say you want to use [js-beautify] on a string of html with Lo-Dash/Underscore templates, such as: `
<ul><li><%= name %></li></ul>
`: + +js-beautify will render the template unusable (and apply incorrect formatting because of the unfamiliar syntax from the Lo-Dash template): + +```html +
+<ul> +  <li> +    <%=n ame %> +  </li> +</ul>
+``` + +**Example with `preserve`** + +Correct. + +```html +
+<ul> +  <li><%= name %></li> +</ul>
+``` + +For the record, this is just a random example, I've had very few issues with js-beautify in general. But with or without js-beautify, this kind of token mangling does happen sometimes when you use formatters, beautifiers or similar tools. + +## Install +{%= include("install-npm", {save: true}) %} + +## Run tests + +```bash +npm test +``` + +## API +{%= apidocs("index.js") %} + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue]({%= bugs.url %}) + +## Author +{%= include("author") %} + +## License +{%= copyright() %} +{%= license() %} + +*** + +{%= include("footer") %} + +[js-beautify]: https://github.com/beautify-web/js-beautify \ No newline at end of file diff --git a/node_modules/preserve/LICENSE b/node_modules/preserve/LICENSE new file mode 100644 index 0000000..5a9956a --- /dev/null +++ b/node_modules/preserve/LICENSE @@ -0,0 +1,24 @@ +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/preserve/README.md b/node_modules/preserve/README.md new file mode 100644 index 0000000..75000b9 --- /dev/null +++ b/node_modules/preserve/README.md @@ -0,0 +1,90 @@ +# preserve [![NPM version](https://badge.fury.io/js/preserve.svg)](http://badge.fury.io/js/preserve) + +> Temporarily substitute tokens in the given `string` with placeholders, then put them back after transforming the string. + +Useful for protecting tokens, like templates in HTML, from being mutated when the string is transformed in some way, like from a formatter/beautifier. + +**Example without `preserve`** + +Let's say you want to use [js-beautify] on a string of html with Lo-Dash/Underscore templates, such as: `
<ul><li><%= name %></li></ul>
`: + +js-beautify will render the template unusable (and apply incorrect formatting because of the unfamiliar syntax from the Lo-Dash template): + +```html +
+<ul> +  <li> +    <%=n ame %> +  </li> +</ul>
+``` + +**Example with `preserve`** + +Correct. + +```html +
+<ul> +  <li><%= name %></li> +</ul>
+``` + +For the record, this is just a random example, I've had very few issues with js-beautify in general. But with or without js-beautify, this kind of token mangling does happen sometimes when you use formatters, beautifiers or similar tools. + +## Install +## Install with [npm](npmjs.org) + +```bash +npm i preserve --save +``` + +## Run tests + +```bash +npm test +``` + +## API +### [.before](index.js#L23) + +Replace tokens in `str` with a temporary, heuristic placeholder. + +* `str` **{String}** +* `returns` **{String}**: String with placeholders. + +```js +tokens.before('{a\\,b}'); +//=> '{__ID1__}' +``` + +### [.after](index.js#L44) + +Replace placeholders in `str` with original tokens. + +* `str` **{String}**: String with placeholders +* `returns` **{String}** `str`: String with original tokens. + +```js +tokens.after('{__ID1__}'); +//=> '{a\\,b}' +``` + + +## Contributing +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/preserve/issues) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License +Copyright (c) 2015-2015, Jon Schlinkert. +Released under the MIT license + +*** + +_This file was generated by [verb](https://github.com/assemble/verb) on January 10, 2015._ + +[js-beautify]: https://github.com/beautify-web/js-beautify \ No newline at end of file diff --git a/node_modules/preserve/index.js b/node_modules/preserve/index.js new file mode 100644 index 0000000..a6c5d48 --- /dev/null +++ b/node_modules/preserve/index.js @@ -0,0 +1,54 @@ +/*! + * preserve + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +/** + * Replace tokens in `str` with a temporary, heuristic placeholder. + * + * ```js + * tokens.before('{a\\,b}'); + * //=> '{__ID1__}' + * ``` + * + * @param {String} `str` + * @return {String} String with placeholders. + * @api public + */ + +exports.before = function before(str, re) { + return str.replace(re, function (match) { + var id = randomize(); + cache[id] = match; + return '__ID' + id + '__'; + }); +}; + +/** + * Replace placeholders in `str` with original tokens. + * + * ```js + * tokens.after('{__ID1__}'); + * //=> '{a\\,b}' + * ``` + * + * @param {String} `str` String with placeholders + * @return {String} `str` String with original tokens. 
+ * @api public + */ + +exports.after = function after(str) { + return str.replace(/__ID(.{5})__/g, function (_, id) { + return cache[id]; + }); +}; + +function randomize() { + return Math.random().toString().slice(2, 7); +} + +var cache = {}; \ No newline at end of file diff --git a/node_modules/preserve/package.json b/node_modules/preserve/package.json new file mode 100644 index 0000000..056c0fa --- /dev/null +++ b/node_modules/preserve/package.json @@ -0,0 +1,48 @@ +{ + "name": "preserve", + "description": "Temporarily substitute tokens in the given `string` with placeholders, then put them back after transforming the string.", + "version": "0.2.0", + "homepage": "https://github.com/jonschlinkert/preserve", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/preserve.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/preserve/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/preserve/blob/master/LICENSE-MIT" + }, + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha -R spec" + }, + "devDependencies": { + "benchmarked": "^0.1.3", + "chalk": "^0.5.1", + "js-beautify": "^1.5.4", + "mocha": "*", + "should": "*" + }, + "keywords": [ + "escape", + "format", + "placeholder", + "placeholders", + "prettify", + "regex", + "replace", + "template", + "templates", + "token", + "tokens" + ] +} \ No newline at end of file diff --git a/node_modules/preserve/test.js b/node_modules/preserve/test.js new file mode 100644 index 0000000..9bf174f --- /dev/null +++ b/node_modules/preserve/test.js @@ -0,0 +1,48 @@ +/*! + * preserve + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License + */ + +'use strict'; + +var should = require('should'); +var tokens = require('./'); + +var re = /<%=\s*[^>]+%>/g; +var pretty = function(str) { + return require('js-beautify').html(str, { + indent_char: ' ', + indent_size: 2, + }); +}; + +describe('preserve tokens', function () { + var testRe = /__ID.{5}__\n__ID.{5}__\n__ID.{5}__/; + var re = /<%=\s*[^>]+%>/g; + + it('should (e.g. shouldn\'t, but will) mangle tokens in the given string', function () { + var html = pretty('
<ul><li><%= name %></li></ul>
'); + html.should.equal('
<ul>\n  <li>\n    <%=n ame %>\n  </li>\n</ul>
'); + }); + + it('should preserve tokens in the given string', function () { + var html = tokens.after(pretty(tokens.before('
<ul><li><%= name %></li></ul>
', re))); + html.should.equal('
<ul>\n  <li><%= name %></li>\n</ul>
'); + }); + + describe('.before()', function () { + it('should replace matches with placeholder tokens:', function () { + tokens.before('<%= a %>\n<%= b %>\n<%= c %>', re).should.match(testRe); + }); + }); + + describe('tokens.after()', function () { + it('should replace placeholder tokens with original values:', function () { + var before = tokens.before('<%= a %>\n<%= b %>\n<%= c %>', re); + before.should.match(testRe); + tokens.after(before).should.equal('<%= a %>\n<%= b %>\n<%= c %>'); + }); + }); +}); diff --git a/node_modules/private/.travis.yml b/node_modules/private/.travis.yml new file mode 100644 index 0000000..ed05f88 --- /dev/null +++ b/node_modules/private/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - "0.11" + - "0.10" + - "0.8" + - "0.6" diff --git a/node_modules/private/LICENSE b/node_modules/private/LICENSE new file mode 100644 index 0000000..6c2f144 --- /dev/null +++ b/node_modules/private/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2014 Ben Newman + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/private/README.md b/node_modules/private/README.md new file mode 100644 index 0000000..a8990aa --- /dev/null +++ b/node_modules/private/README.md @@ -0,0 +1,246 @@ +private [![Build Status](https://travis-ci.org/benjamn/private.png?branch=master)](https://travis-ci.org/benjamn/private) +=== + +A general-purpose utility for associating truly private state with any JavaScript object. + +Installation +--- + +From NPM: + + npm install private + +From GitHub: + + cd path/to/node_modules + git clone git://github.com/benjamn/private.git + cd private + npm install . + +Usage +--- +**Get or create a secret object associated with any (non-frozen) object:** +```js +var getSecret = require("private").makeAccessor(); +var obj = Object.create(null); // any kind of object works +getSecret(obj).totallySafeProperty = "p455w0rd"; + +console.log(Object.keys(obj)); // [] +console.log(Object.getOwnPropertyNames(obj)); // [] +console.log(getSecret(obj)); // { totallySafeProperty: "p455w0rd" } +``` +Now, only code that has a reference to both `getSecret` and `obj` can possibly access `.totallySafeProperty`. + +*Importantly, no global references to the secret object are retained by the `private` package, so as soon as `obj` gets garbage collected, the secret will be reclaimed as well. 
In other words, you don't have to worry about memory leaks.* + +**Create a unique property name that cannot be enumerated or guessed:** +```js +var secretKey = require("private").makeUniqueKey(); +var obj = Object.create(null); // any kind of object works + +Object.defineProperty(obj, secretKey, { + value: { totallySafeProperty: "p455w0rd" }, + enumerable: false // optional; non-enumerability is the default +}); + +Object.defineProperty(obj, "nonEnumerableProperty", { + value: "anyone can guess my name", + enumerable: false +}); + +console.log(obj[secretKey].totallySafeProperty); // p455w0rd +console.log(obj.nonEnumerableProperty); // "anyone can guess my name" +console.log(Object.keys(obj)); // [] +console.log(Object.getOwnPropertyNames(obj)); // ["nonEnumerableProperty"] + +for (var key in obj) { + console.log(key); // never called +} +``` +Because these keys are non-enumerable, you can't discover them using a `for`-`in` loop. Because `secretKey` is a long string of random characters, you would have a lot of trouble guessing it. And because the `private` module wraps `Object.getOwnPropertyNames` to exclude the keys it generates, you can't even use that interface to discover it. + +Unless you have access to the value of the `secretKey` property name, there is no way to access the value associated with it. So your only responsibility as secret-keeper is to avoid handing out the value of `secretKey` to untrusted code. + +Think of this style as a home-grown version of the first style. Note, however, that it requires a full implementation of ES5's `Object.defineProperty` method in order to make any safety guarantees, whereas the first example will provide safety even in environments that do not support `Object.defineProperty`. + +Rationale +--- + +In JavaScript, the only data that are truly private are local variables +whose values do not *leak* from the scope in which they were defined. + +This notion of *closure privacy* is powerful, and it readily provides some +of the benefits of traditional data privacy, a la Java or C++: +```js +function MyClass(secret) { + this.increment = function() { + return ++secret; + }; +} + +var mc = new MyClass(3); +console.log(mc.increment()); // 4 +``` +You can learn something about `secret` by calling `.increment()`, and you +can increase its value by one as many times as you like, but you can never +decrease its value, because it is completely inaccessible except through +the `.increment` method. And if the `.increment` method were not +available, it would be as if no `secret` variable had ever been declared, +as far as you could tell. + +This style breaks down as soon as you want to inherit methods from the +prototype of a class: +```js +function MyClass(secret) { + this.secret = secret; +} + +MyClass.prototype.increment = function() { + return ++this.secret; +}; +``` +The only way to communicate between the `MyClass` constructor and the +`.increment` method in this example is to manipulate shared properties of +`this`. Unfortunately `this.secret` is now exposed to unlicensed +modification: +```js +var mc = new MyClass(6); +console.log(mc.increment()); // 7 +mc.secret -= Infinity; +console.log(mc.increment()); // -Infinity +mc.secret = "Go home JavaScript, you're drunk."; +mc.increment(); // NaN +``` +Another problem with closure privacy is that it only lends itself to +per-instance privacy, whereas the `private` keyword in most +object-oriented languages indicates that the data member in question is +visible to all instances of the same class. 
+ +Suppose you have a `Node` class with a notion of parents and children: +```js +function Node() { + var parent; + var children = []; + + this.getParent = function() { + return parent; + }; + + this.appendChild = function(child) { + children.push(child); + child.parent = this; // Can this be made to work? + }; +} +``` +The desire here is to allow other `Node` objects to manipulate the value +returned by `.getParent()`, but otherwise disallow any modification of the +`parent` variable. You could expose a `.setParent` function, but then +anyone could call it, and you might as well give up on the getter/setter +pattern. + +This module solves both of these problems. + +Usage +--- + +Let's revisit the `Node` example from above: +```js +var p = require("private").makeAccessor(); + +function Node() { + var privates = p(this); + var children = []; + + this.getParent = function() { + return privates.parent; + }; + + this.appendChild = function(child) { + children.push(child); + var cp = p(child); + if (cp.parent) + cp.parent.removeChild(child); + cp.parent = this; + return child; + }; +} +``` +Now, in order to access the private data of a `Node` object, you need to +have access to the unique `p` function that is being used here. This is +already an improvement over the previous example, because it allows +restricted access by other `Node` instances, but can it help with the +`Node.prototype` problem too? + +Yes it can! +```js +var p = require("private").makeAccessor(); + +function Node() { + p(this).children = []; +} + +var Np = Node.prototype; + +Np.getParent = function() { + return p(this).parent; +}; + +Np.appendChild = function(child) { + p(this).children.push(child); + var cp = p(child); + if (cp.parent) + cp.parent.removeChild(child); + cp.parent = this; + return child; +}; +``` +Because `p` is in scope not only within the `Node` constructor but also +within `Node` methods, we can finally avoid redefining methods every time +the `Node` constructor is called. + +Now, you might be wondering how you can restrict access to `p` so that no +untrusted code is able to call it. The answer is to use your favorite +module pattern, be it CommonJS, AMD `define`, or even the old +Immediately-Invoked Function Expression: +```js +var Node = (function() { + var p = require("private").makeAccessor(); + + function Node() { + p(this).children = []; + } + + var Np = Node.prototype; + + Np.getParent = function() { + return p(this).parent; + }; + + Np.appendChild = function(child) { + p(this).children.push(child); + var cp = p(child); + if (cp.parent) + cp.parent.removeChild(child); + cp.parent = this; + return child; + }; + + return Node; +}()); + +var parent = new Node; +var child = new Node; +parent.appendChild(child); +assert.strictEqual(child.getParent(), parent); +``` +Because this version of `p` never leaks from the enclosing function scope, +only `Node` objects have access to it. + +So, you see, the claim I made at the beginning of this README remains +true: + +> In JavaScript, the only data that are truly private are local variables +> whose values do not *leak* from the scope in which they were defined. + +It just so happens that closure privacy is sufficient to implement a +privacy model similar to that provided by other languages. 
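As one further illustration of the accessor pattern described above, here is a hedged sketch; the `Counter` class is hypothetical and not part of the package, but it shows per-instance state that is reachable only through the accessor:

```js
var p = require("private").makeAccessor();

function Counter(start) {
  p(this).count = start || 0; // stored on the secret object, not on `this`
}

Counter.prototype.increment = function () {
  return ++p(this).count;
};

var c = new Counter(3);
console.log(c.increment());  // 4
console.log(c.count);        // undefined -- no own property leaks the state
console.log(Object.keys(c)); // []
```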
diff --git a/node_modules/private/package.json b/node_modules/private/package.json new file mode 100644 index 0000000..fdd0769 --- /dev/null +++ b/node_modules/private/package.json @@ -0,0 +1,33 @@ +{ + "author": { + "name": "Ben Newman", + "email": "bn@cs.stanford.edu" + }, + "name": "private", + "description": "Utility for associating truly private state with any JavaScript object", + "keywords": [ + "private", + "access control", + "access modifiers", + "encapsulation", + "secret", + "state", + "privilege", + "scope", + "es5" + ], + "version": "0.1.6", + "homepage": "http://github.com/benjamn/private", + "repository": { + "type": "git", + "url": "git://github.com/benjamn/private.git" + }, + "license": "MIT", + "main": "private.js", + "scripts": { + "test": "node test/run.js" + }, + "engines": { + "node": ">= 0.6" + } +} diff --git a/node_modules/private/private.js b/node_modules/private/private.js new file mode 100644 index 0000000..20bed57 --- /dev/null +++ b/node_modules/private/private.js @@ -0,0 +1,129 @@ +"use strict"; + +var originalObject = Object; +var originalDefProp = Object.defineProperty; +var originalCreate = Object.create; + +function defProp(obj, name, value) { + if (originalDefProp) try { + originalDefProp.call(originalObject, obj, name, { value: value }); + } catch (definePropertyIsBrokenInIE8) { + obj[name] = value; + } else { + obj[name] = value; + } +} + +// For functions that will be invoked using .call or .apply, we need to +// define those methods on the function objects themselves, rather than +// inheriting them from Function.prototype, so that a malicious or clumsy +// third party cannot interfere with the functionality of this module by +// redefining Function.prototype.call or .apply. +function makeSafeToCall(fun) { + if (fun) { + defProp(fun, "call", fun.call); + defProp(fun, "apply", fun.apply); + } + return fun; +} + +makeSafeToCall(originalDefProp); +makeSafeToCall(originalCreate); + +var hasOwn = makeSafeToCall(Object.prototype.hasOwnProperty); +var numToStr = makeSafeToCall(Number.prototype.toString); +var strSlice = makeSafeToCall(String.prototype.slice); + +var cloner = function(){}; +function create(prototype) { + if (originalCreate) { + return originalCreate.call(originalObject, prototype); + } + cloner.prototype = prototype || null; + return new cloner; +} + +var rand = Math.random; +var uniqueKeys = create(null); + +function makeUniqueKey() { + // Collisions are highly unlikely, but this module is in the business of + // making guarantees rather than safe bets. + do var uniqueKey = internString(strSlice.call(numToStr.call(rand(), 36), 2)); + while (hasOwn.call(uniqueKeys, uniqueKey)); + return uniqueKeys[uniqueKey] = uniqueKey; +} + +function internString(str) { + var obj = {}; + obj[str] = true; + return Object.keys(obj)[0]; +} + +// External users might find this function useful, but it is not necessary +// for the typical use of this module. +defProp(exports, "makeUniqueKey", makeUniqueKey); + +// Object.getOwnPropertyNames is the only way to enumerate non-enumerable +// properties, so if we wrap it to ignore our secret keys, there should be +// no way (except guessing) to access those properties. 
+var originalGetOPNs = Object.getOwnPropertyNames; +Object.getOwnPropertyNames = function getOwnPropertyNames(object) { + for (var names = originalGetOPNs(object), + src = 0, + dst = 0, + len = names.length; + src < len; + ++src) { + if (!hasOwn.call(uniqueKeys, names[src])) { + if (src > dst) { + names[dst] = names[src]; + } + ++dst; + } + } + names.length = dst; + return names; +}; + +function defaultCreatorFn(object) { + return create(null); +} + +function makeAccessor(secretCreatorFn) { + var brand = makeUniqueKey(); + var passkey = create(null); + + secretCreatorFn = secretCreatorFn || defaultCreatorFn; + + function register(object) { + var secret; // Created lazily. + + function vault(key, forget) { + // Only code that has access to the passkey can retrieve (or forget) + // the secret object. + if (key === passkey) { + return forget + ? secret = null + : secret || (secret = secretCreatorFn(object)); + } + } + + defProp(object, brand, vault); + } + + function accessor(object) { + if (!hasOwn.call(object, brand)) + register(object); + return object[brand](passkey); + } + + accessor.forget = function(object) { + if (hasOwn.call(object, brand)) + object[brand](passkey, true); + }; + + return accessor; +} + +defProp(exports, "makeAccessor", makeAccessor); diff --git a/node_modules/private/test/run.js b/node_modules/private/test/run.js new file mode 100644 index 0000000..dc3c888 --- /dev/null +++ b/node_modules/private/test/run.js @@ -0,0 +1,68 @@ +var assert = require("assert"); +var makeAccessor = require("../private").makeAccessor; +var acc1 = makeAccessor(); +var obj = {}; +var hasOwn = obj.hasOwnProperty; + +acc1(obj).foo = 42; +assert.deepEqual(obj, {}); +assert.deepEqual(acc1(obj), { foo: 42 }); +assert.deepEqual(acc1(acc1(obj)), {}); +assert.deepEqual(acc1(obj), { foo: 42 }); +assert.deepEqual(Object.keys(acc1(obj)), ["foo"]); +assert.strictEqual(Object.getOwnPropertyNames(acc1(obj)).length, 1); +assert.strictEqual(Object.getOwnPropertyNames(acc1(acc1(obj))).length, 0); +acc1(obj).bar = "baz"; +assert.deepEqual(acc1(obj), { foo: 42, bar: "baz" }); +delete acc1(obj).foo; +assert.deepEqual(acc1(obj), { bar: "baz" }); + +try { + acc1(42); + throw new Error("threw wrong error"); +} catch (err) { + assert.ok(err); +} + +var acc2 = makeAccessor(); +assert.notStrictEqual(acc1, acc2); +assert.notStrictEqual(acc1(obj), acc2(obj)); +assert.deepEqual(acc2(obj), {}); +assert.strictEqual(Object.getOwnPropertyNames(obj).length, 0); +assert.strictEqual(Object.keys(obj).length, 0); +acc2(obj).bar = "asdf"; +assert.deepEqual(acc2(obj), { bar: "asdf" }); + +acc2.forget(obj); +acc2(obj).bar = "asdf"; +var oldSecret = acc2(obj); +assert.strictEqual(oldSecret.bar, "asdf"); +acc2.forget(obj); +var newSecret = acc2(obj); +assert.notStrictEqual(oldSecret, newSecret); +assert.ok(hasOwn.call(oldSecret, "bar")); +assert.ok(!hasOwn.call(newSecret, "bar")); +newSecret.bar = "zxcv"; +assert.strictEqual(oldSecret.bar, "asdf"); +assert.strictEqual(acc2(obj).bar, "zxcv"); + +function creatorFn(object) { + return { self: object }; +} + +var acc3 = makeAccessor(creatorFn); + +acc3(obj).xxx = "yyy"; +assert.deepEqual(acc3(obj), { + self: obj, + xxx: "yyy" +}); + +acc3.forget(obj); +assert.deepEqual(acc3(obj), { + self: obj +}); + +var green = "\033[32m"; +var reset = "\033[0m"; +console.log(green + "ALL PASS" + reset); diff --git a/node_modules/process-nextick-args/.travis.yml b/node_modules/process-nextick-args/.travis.yml new file mode 100644 index 0000000..36201b1 --- /dev/null +++ 
b/node_modules/process-nextick-args/.travis.yml @@ -0,0 +1,12 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.11" + - "0.12" + - "1.7.1" + - 1 + - 2 + - 3 + - 4 + - 5 diff --git a/node_modules/process-nextick-args/index.js b/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..a4f40f8 --- /dev/null +++ b/node_modules/process-nextick-args/index.js @@ -0,0 +1,43 @@ +'use strict'; + +if (!process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = nextTick; +} else { + module.exports = process.nextTick; +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} diff --git a/node_modules/process-nextick-args/license.md b/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/process-nextick-args/package.json b/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..e5c6c56 --- /dev/null +++ b/node_modules/process-nextick-args/package.json @@ -0,0 +1,22 @@ +{ + "name": "process-nextick-args", + "version": "1.0.7", + "description": "process.nextTick but always with args", + "main": "index.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "devDependencies": { + "tap": "~0.2.6" + } +} diff --git a/node_modules/process-nextick-args/readme.md b/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..78e7cfa --- /dev/null +++ b/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var nextTick = require('process-nextick-args'); + +nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/process-nextick-args/test.js b/node_modules/process-nextick-args/test.js new file mode 100644 index 0000000..ef15721 --- /dev/null +++ b/node_modules/process-nextick-args/test.js @@ -0,0 +1,24 @@ +var test = require("tap").test; +var nextTick = require('./'); + +test('should work', function (t) { + t.plan(5); + nextTick(function (a) { + t.ok(a); + nextTick(function (thing) { + t.equals(thing, 7); + }, 7); + }, true); + nextTick(function (a, b, c) { + t.equals(a, 'step'); + t.equals(b, 3); + t.equals(c, 'profit'); + }, 'step', 3, 'profit'); +}); + +test('correct number of arguments', function (t) { + t.plan(1); + nextTick(function () { + t.equals(2, arguments.length, 'correct number'); + }, 1, 2); +}); diff --git a/node_modules/process/.eslintrc b/node_modules/process/.eslintrc new file mode 100644 index 0000000..1e7aab7 --- /dev/null +++ b/node_modules/process/.eslintrc @@ -0,0 +1,21 @@ +{ +extends: "eslint:recommended", + "env": { + "node": true, + "browser": true, + "es6" : true, + "mocha": true + }, + "rules": { + "indent": [2, 4], + "brace-style": [2, "1tbs"], + "quotes": [2, "single"], + "no-console": 0, + "no-shadow": 0, + "no-use-before-define": [2, "nofunc"], + "no-underscore-dangle": 0, + "no-constant-condition": 0, + "space-after-function-name": 0, + "consistent-return": 0 + } +} diff --git a/node_modules/process/LICENSE b/node_modules/process/LICENSE new file mode 100644 index 0000000..b8c1246 --- /dev/null +++ b/node_modules/process/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2013 Roman Shtylman + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to 
use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/process/README.md b/node_modules/process/README.md new file mode 100644 index 0000000..6570729 --- /dev/null +++ b/node_modules/process/README.md @@ -0,0 +1,26 @@ +# process + +```require('process');``` just like any other module. + +Works in node.js and browsers via the browser.js shim provided with the module. + +## browser implementation + +The goal of this module is not to be a full-fledged alternative to the builtin process module. This module mostly exists to provide the nextTick functionality and little more. We keep this module lean because it will often be included by default by tools like browserify when it detects a module has used the `process` global. + +It also exposes a "browser" member (i.e. `process.browser`) which is `true` in this implementation but `undefined` in node. This can be used in isomorphic code that adjusts it's behavior depending on which environment it's running in. + +If you are looking to provide other process methods, I suggest you monkey patch them onto the process global in your app. A list of user created patches is below. + +* [hrtime](https://github.com/kumavis/browser-process-hrtime) +* [stdout](https://github.com/kumavis/browser-stdout) + +## package manager notes + +If you are writing a bundler to package modules for client side use, make sure you use the ```browser``` field hint in package.json. + +See https://gist.github.com/4339901 for details. + +The [browserify](https://github.com/substack/node-browserify) module will properly handle this field when bundling your files. + + diff --git a/node_modules/process/browser.js b/node_modules/process/browser.js new file mode 100644 index 0000000..d3e30e9 --- /dev/null +++ b/node_modules/process/browser.js @@ -0,0 +1,180 @@ +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. 
+ +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; diff --git a/node_modules/process/index.js b/node_modules/process/index.js new file mode 100644 index 0000000..8d8ed7d --- /dev/null +++ b/node_modules/process/index.js @@ -0,0 +1,2 @@ +// for now just expose the builtin process global from node.js +module.exports = global.process; diff --git a/node_modules/process/package.json b/node_modules/process/package.json new file mode 100644 index 0000000..907a3b9 --- /dev/null +++ b/node_modules/process/package.json @@ -0,0 +1,27 @@ +{ + "author": "Roman Shtylman ", + "name": "process", + "description": "process information for node.js and browsers", + "keywords": [ + "process" + ], + "scripts": { + "test": "mocha test.js", + "browser": "zuul --no-coverage --ui mocha-bdd --local 8080 -- test.js" + }, + "version": "0.11.9", + "repository": { + "type": "git", + "url": "git://github.com/shtylman/node-process.git" + }, + "license": "MIT", + "browser": "./browser.js", + "main": "./index.js", + "engines": { + "node": ">= 0.6.0" + }, + "devDependencies": { + "mocha": "2.2.1", + "zuul": "^3.10.3" + } +} diff --git a/node_modules/process/test.js b/node_modules/process/test.js new file mode 100644 index 0000000..8ba579c --- /dev/null +++ b/node_modules/process/test.js @@ -0,0 +1,199 @@ +var assert = require('assert'); +var ourProcess = require('./browser'); +describe('test against our process', function () { + test(ourProcess); +}); +if (!process.browser) { + describe('test against node', function () { + test(process); + }); + vmtest(); +} +function test (ourProcess) { + 
describe('test arguments', function () { + it ('works', function (done) { + var order = 0; + + + ourProcess.nextTick(function (num) { + assert.equal(num, order++, 'first one works'); + ourProcess.nextTick(function (num) { + assert.equal(num, order++, 'recursive one is 4th'); + }, 3); + }, 0); + ourProcess.nextTick(function (num) { + assert.equal(num, order++, 'second one starts'); + ourProcess.nextTick(function (num) { + assert.equal(num, order++, 'this is third'); + ourProcess.nextTick(function (num) { + assert.equal(num, order++, 'this is last'); + done(); + }, 5); + }, 4); + }, 1); + ourProcess.nextTick(function (num) { + + assert.equal(num, order++, '3rd schedualed happens after the error'); + }, 2); + }); + }); +if (!process.browser) { + describe('test errors', function (t) { + it ('works', function (done) { + var order = 0; + process.removeAllListeners('uncaughtException'); + process.once('uncaughtException', function(err) { + assert.equal(2, order++, 'error is third'); + ourProcess.nextTick(function () { + assert.equal(5, order++, 'schedualed in error is last'); + done(); + }); + }); + ourProcess.nextTick(function () { + assert.equal(0, order++, 'first one works'); + ourProcess.nextTick(function () { + assert.equal(4, order++, 'recursive one is 4th'); + }); + }); + ourProcess.nextTick(function () { + assert.equal(1, order++, 'second one starts'); + throw(new Error('an error is thrown')); + }); + ourProcess.nextTick(function () { + assert.equal(3, order++, '3rd schedualed happens after the error'); + }); + }); + }); +} + describe('rename globals', function (t) { + var oldTimeout = setTimeout; + var oldClear = clearTimeout; + + it('clearTimeout', function (done){ + + var ok = true; + clearTimeout = function () { + ok = false; + } + var ran = false; + function cleanup() { + clearTimeout = oldClear; + var err; + try { + assert.ok(ok, 'fake clearTimeout ran'); + assert.ok(ran, 'should have run'); + } catch (e) { + err = e; + } + done(err); + } + setTimeout(cleanup, 1000); + ourProcess.nextTick(function () { + ran = true; + }); + }); + it('just setTimeout', function (done){ + + + setTimeout = function () { + setTimeout = oldTimeout; + try { + assert.ok(false, 'fake setTimeout called') + } catch (e) { + done(e); + } + + } + + ourProcess.nextTick(function () { + setTimeout = oldTimeout; + done(); + }); + }); + }); +} +function vmtest() { + var vm = require('vm'); + var fs = require('fs'); + var process = fs.readFileSync('./browser.js', {encoding: 'utf8'}); + + + describe('should work in vm in strict mode with no globals', function () { + it('should parse', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'this.works = process.browser;'; + var script = new vm.Script(str); + var context = { + works: false + }; + script.runInNewContext(context); + assert.ok(context.works); + done(); + }); + it('setTimeout throws error', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'try {process.nextTick(function () {})} catch (e){this.works = e;}'; + var script = new vm.Script(str); + var context = { + works: false + }; + script.runInNewContext(context); + assert.ok(context.works); + done(); + }); + it('should generally work', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'process.nextTick(function () {assert.ok(true);done();})'; + var script = new vm.Script(str); + var context = { + clearTimeout: clearTimeout, + setTimeout: setTimeout, + done: done, + 
assert: assert + }; + script.runInNewContext(context); + }); + it('late defs setTimeout', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {assert.ok(true);done();})'; + var script = new vm.Script(str); + var context = { + clearTimeout: clearTimeout, + hiddenSetTimeout: setTimeout, + done: done, + assert: assert + }; + script.runInNewContext(context); + }); + it('late defs clearTimeout', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'var clearTimeout = hiddenClearTimeout;process.nextTick(function () {assert.ok(true);done();})'; + var script = new vm.Script(str); + var context = { + hiddenClearTimeout: clearTimeout, + setTimeout: setTimeout, + done: done, + assert: assert + }; + script.runInNewContext(context); + }); + it('late defs setTimeout and then redefine', function (done) { + var str = '"use strict";var module = {exports:{}};'; + str += process; + str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {setTimeout = function (){throw new Error("foo")};hiddenSetTimeout(function(){process.nextTick(function (){assert.ok(true);done();});});});'; + var script = new vm.Script(str); + var context = { + clearTimeout: clearTimeout, + hiddenSetTimeout: setTimeout, + done: done, + assert: assert + }; + script.runInNewContext(context); + }); + }); +} diff --git a/node_modules/proxy-addr/HISTORY.md b/node_modules/proxy-addr/HISTORY.md new file mode 100644 index 0000000..a7389db --- /dev/null +++ b/node_modules/proxy-addr/HISTORY.md @@ -0,0 +1,99 @@ +1.1.2 / 2016-05-29 +================== + + * deps: ipaddr.js@1.1.1 + - Fix IPv6-mapped IPv4 validation edge cases + +1.1.1 / 2016-05-03 +================== + + * Fix regression matching mixed versions against multiple subnets + +1.1.0 / 2016-05-01 +================== + + * Fix accepting various invalid netmasks + - IPv4 netmasks must be contingous + - IPv6 addresses cannot be used as a netmask + * deps: ipaddr.js@1.1.0 + +1.0.10 / 2015-12-09 +=================== + + * deps: ipaddr.js@1.0.5 + - Fix regression in `isValid` with non-string arguments + +1.0.9 / 2015-12-01 +================== + + * deps: ipaddr.js@1.0.4 + - Fix accepting some invalid IPv6 addresses + - Reject CIDRs with negative or overlong masks + * perf: enable strict mode + +1.0.8 / 2015-05-10 +================== + + * deps: ipaddr.js@1.0.1 + +1.0.7 / 2015-03-16 +================== + + * deps: ipaddr.js@0.1.9 + - Fix OOM on certain inputs to `isValid` + +1.0.6 / 2015-02-01 +================== + + * deps: ipaddr.js@0.1.8 + +1.0.5 / 2015-01-08 +================== + + * deps: ipaddr.js@0.1.6 + +1.0.4 / 2014-11-23 +================== + + * deps: ipaddr.js@0.1.5 + - Fix edge cases with `isValid` + +1.0.3 / 2014-09-21 +================== + + * Use `forwarded` npm module + +1.0.2 / 2014-09-18 +================== + + * Fix a global leak when multiple subnets are trusted + * Support Node.js 0.6 + * deps: ipaddr.js@0.1.3 + +1.0.1 / 2014-06-03 +================== + + * Fix links in npm package + +1.0.0 / 2014-05-08 +================== + + * Add `trust` argument to determine proxy trust on + * Accepts custom function + * Accepts IPv4/IPv6 address(es) + * Accepts subnets + * Accepts pre-defined names + * Add optional `trust` argument to `proxyaddr.all` to + stop at first untrusted + * Add `proxyaddr.compile` to pre-compile `trust` function + to make subsequent calls faster + +0.0.1 / 2014-05-04 +================== + + 
* Fix bad npm publish + +0.0.0 / 2014-05-04 +================== + + * Initial release diff --git a/node_modules/proxy-addr/LICENSE b/node_modules/proxy-addr/LICENSE new file mode 100644 index 0000000..cab251c --- /dev/null +++ b/node_modules/proxy-addr/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/proxy-addr/README.md b/node_modules/proxy-addr/README.md new file mode 100644 index 0000000..1bffc76 --- /dev/null +++ b/node_modules/proxy-addr/README.md @@ -0,0 +1,136 @@ +# proxy-addr + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Determine address of proxied request + +## Install + +```sh +$ npm install proxy-addr +``` + +## API + +```js +var proxyaddr = require('proxy-addr') +``` + +### proxyaddr(req, trust) + +Return the address of the request, using the given `trust` parameter. + +The `trust` argument is a function that returns `true` if you trust +the address, `false` if you don't. The closest untrusted address is +returned. + +```js +proxyaddr(req, function(addr){ return addr === '127.0.0.1' }) +proxyaddr(req, function(addr, i){ return i < 1 }) +``` + +The `trust` arugment may also be a single IP address string or an +array of trusted addresses, as plain IP addresses, CIDR-formatted +strings, or IP/netmask strings. + +```js +proxyaddr(req, '127.0.0.1') +proxyaddr(req, ['127.0.0.0/8', '10.0.0.0/8']) +proxyaddr(req, ['127.0.0.0/255.0.0.0', '192.168.0.0/255.255.0.0']) +``` + +This module also supports IPv6. Your IPv6 addresses will be normalized +automatically (i.e. `fe80::00ed:1` equals `fe80:0:0:0:0:0:ed:1`). + +```js +proxyaddr(req, '::1') +proxyaddr(req, ['::1/128', 'fe80::/10']) +``` + +This module will automatically work with IPv4-mapped IPv6 addresses +as well to support node.js in IPv6-only mode. This means that you do +not have to specify both `::ffff:a00:1` and `10.0.0.1`. + +As a convenience, this module also takes certain pre-defined names +in addition to IP addresses, which expand into IP addresses: + +```js +proxyaddr(req, 'loopback') +proxyaddr(req, ['loopback', 'fc00:ac:1ab5:fff::1/64']) +``` + + * `loopback`: IPv4 and IPv6 loopback addresses (like `::1` and + `127.0.0.1`). + * `linklocal`: IPv4 and IPv6 link-local addresses (like + `fe80::1:1:1:1` and `169.254.0.1`). 
+ * `uniquelocal`: IPv4 private addresses and IPv6 unique-local + addresses (like `fc00:ac:1ab5:fff::1` and `192.168.0.1`). + +When `trust` is specified as a function, it will be called for each +address to determine if it is a trusted address. The function is +given two arguments: `addr` and `i`, where `addr` is a string of +the address to check and `i` is a number that represents the distance +from the socket address. + +### proxyaddr.all(req, [trust]) + +Return all the addresses of the request, optionally stopping at the +first untrusted. This array is ordered from closest to furthest +(i.e. `arr[0] === req.connection.remoteAddress`). + +```js +proxyaddr.all(req) +``` + +The optional `trust` argument takes the same arguments as `trust` +does in `proxyaddr(req, trust)`. + +```js +proxyaddr.all(req, 'loopback') +``` + +### proxyaddr.compile(val) + +Compiles argument `val` into a `trust` function. This function takes +the same arguments as `trust` does in `proxyaddr(req, trust)` and +returns a function suitable for `proxyaddr(req, trust)`. + +```js +var trust = proxyaddr.compile('localhost') +var addr = proxyaddr(req, trust) +``` + +This function is meant to be optimized for use against every request. +It is recommend to compile a trust function up-front for the trusted +configuration and pass that to `proxyaddr(req, trust)` for each request. + +## Testing + +```sh +$ npm test +``` + +## Benchmarks + +```sh +$ npm run-script bench +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/proxy-addr.svg +[npm-url]: https://npmjs.org/package/proxy-addr +[node-version-image]: https://img.shields.io/node/v/proxy-addr.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/proxy-addr/master.svg +[travis-url]: https://travis-ci.org/jshttp/proxy-addr +[coveralls-image]: https://img.shields.io/coveralls/jshttp/proxy-addr/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/proxy-addr?branch=master +[downloads-image]: https://img.shields.io/npm/dm/proxy-addr.svg +[downloads-url]: https://npmjs.org/package/proxy-addr diff --git a/node_modules/proxy-addr/index.js b/node_modules/proxy-addr/index.js new file mode 100644 index 0000000..0b3cf0b --- /dev/null +++ b/node_modules/proxy-addr/index.js @@ -0,0 +1,321 @@ +/*! + * proxy-addr + * Copyright(c) 2014-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + */ + +module.exports = proxyaddr; +module.exports.all = alladdrs; +module.exports.compile = compile; + +/** + * Module dependencies. + */ + +var forwarded = require('forwarded'); +var ipaddr = require('ipaddr.js'); + +/** + * Variables. + */ + +var digitre = /^[0-9]+$/; +var isip = ipaddr.isValid; +var parseip = ipaddr.parse; + +/** + * Pre-defined IP ranges. + */ + +var ipranges = { + linklocal: ['169.254.0.0/16', 'fe80::/10'], + loopback: ['127.0.0.1/8', '::1/128'], + uniquelocal: ['10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16', 'fc00::/7'] +}; + +/** + * Get all addresses in the request, optionally stopping + * at the first untrusted. 
+ * + * @param {Object} request + * @param {Function|Array|String} [trust] + * @api public + */ + +function alladdrs(req, trust) { + // get addresses + var addrs = forwarded(req); + + if (!trust) { + // Return all addresses + return addrs; + } + + if (typeof trust !== 'function') { + trust = compile(trust); + } + + for (var i = 0; i < addrs.length - 1; i++) { + if (trust(addrs[i], i)) continue; + + addrs.length = i + 1; + } + + return addrs; +} + +/** + * Compile argument into trust function. + * + * @param {Array|String} val + * @api private + */ + +function compile(val) { + if (!val) { + throw new TypeError('argument is required'); + } + + var trust = typeof val === 'string' + ? [val] + : val; + + if (!Array.isArray(trust)) { + throw new TypeError('unsupported trust argument'); + } + + for (var i = 0; i < trust.length; i++) { + val = trust[i]; + + if (!ipranges.hasOwnProperty(val)) { + continue; + } + + // Splice in pre-defined range + val = ipranges[val]; + trust.splice.apply(trust, [i, 1].concat(val)); + i += val.length - 1; + } + + return compileTrust(compileRangeSubnets(trust)); +} + +/** + * Compile `arr` elements into range subnets. + * + * @param {Array} arr + * @api private + */ + +function compileRangeSubnets(arr) { + var rangeSubnets = new Array(arr.length); + + for (var i = 0; i < arr.length; i++) { + rangeSubnets[i] = parseipNotation(arr[i]); + } + + return rangeSubnets; +} + +/** + * Compile range subnet array into trust function. + * + * @param {Array} rangeSubnets + * @api private + */ + +function compileTrust(rangeSubnets) { + // Return optimized function based on length + var len = rangeSubnets.length; + return len === 0 + ? trustNone + : len === 1 + ? trustSingle(rangeSubnets[0]) + : trustMulti(rangeSubnets); +} + +/** + * Parse IP notation string into range subnet. + * + * @param {String} note + * @api private + */ + +function parseipNotation(note) { + var pos = note.lastIndexOf('/'); + var str = pos !== -1 + ? note.substring(0, pos) + : note; + + if (!isip(str)) { + throw new TypeError('invalid IP address: ' + str); + } + + var ip = parseip(str); + + if (pos === -1 && ip.kind() === 'ipv6' && ip.isIPv4MappedAddress()) { + // Store as IPv4 + ip = ip.toIPv4Address(); + } + + var max = ip.kind() === 'ipv6' + ? 128 + : 32; + + var range = pos !== -1 + ? note.substring(pos + 1, note.length) + : null; + + if (range === null) { + range = max; + } else if (digitre.test(range)) { + range = parseInt(range, 10); + } else if (ip.kind() === 'ipv4' && isip(range)) { + range = parseNetmask(range); + } else { + range = null; + } + + if (range <= 0 || range > max) { + throw new TypeError('invalid range on address: ' + note); + } + + return [ip, range]; +} + +/** + * Parse netmask string into CIDR range. + * + * @param {String} netmask + * @api private + */ + +function parseNetmask(netmask) { + var ip = parseip(netmask); + var kind = ip.kind(); + + return kind === 'ipv4' + ? ip.prefixLengthFromSubnetMask() + : null; +} + +/** + * Determine address of proxied request. + * + * @param {Object} request + * @param {Function|Array|String} trust + * @api public + */ + +function proxyaddr(req, trust) { + if (!req) { + throw new TypeError('req argument is required'); + } + + if (!trust) { + throw new TypeError('trust argument is required'); + } + + var addrs = alladdrs(req, trust); + var addr = addrs[addrs.length - 1]; + + return addr; +} + +/** + * Static trust function to trust nothing. 
+ * + * @api private + */ + +function trustNone() { + return false; +} + +/** + * Compile trust function for multiple subnets. + * + * @param {Array} subnets + * @api private + */ + +function trustMulti(subnets) { + return function trust(addr) { + if (!isip(addr)) return false; + + var ip = parseip(addr); + var ipconv; + var kind = ip.kind(); + + for (var i = 0; i < subnets.length; i++) { + var subnet = subnets[i]; + var subnetip = subnet[0]; + var subnetkind = subnetip.kind(); + var subnetrange = subnet[1]; + var trusted = ip; + + if (kind !== subnetkind) { + if (subnetkind === 'ipv4' && !ip.isIPv4MappedAddress()) { + // Incompatible IP addresses + continue; + } + + if (!ipconv) { + // Convert IP to match subnet IP kind + ipconv = subnetkind === 'ipv4' + ? ip.toIPv4Address() + : ip.toIPv4MappedAddress(); + } + + trusted = ipconv; + } + + if (trusted.match(subnetip, subnetrange)) { + return true; + } + } + + return false; + }; +} + +/** + * Compile trust function for single subnet. + * + * @param {Object} subnet + * @api private + */ + +function trustSingle(subnet) { + var subnetip = subnet[0]; + var subnetkind = subnetip.kind(); + var subnetisipv4 = subnetkind === 'ipv4'; + var subnetrange = subnet[1]; + + return function trust(addr) { + if (!isip(addr)) return false; + + var ip = parseip(addr); + var kind = ip.kind(); + + if (kind !== subnetkind) { + if (subnetisipv4 && !ip.isIPv4MappedAddress()) { + // Incompatible IP addresses + return false; + } + + // Convert IP to match subnet IP kind + ip = subnetisipv4 + ? ip.toIPv4Address() + : ip.toIPv4MappedAddress(); + } + + return ip.match(subnetip, subnetrange); + }; +} diff --git a/node_modules/proxy-addr/package.json b/node_modules/proxy-addr/package.json new file mode 100644 index 0000000..3709121 --- /dev/null +++ b/node_modules/proxy-addr/package.json @@ -0,0 +1,38 @@ +{ + "name": "proxy-addr", + "description": "Determine address of proxied request", + "version": "1.1.2", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "ip", + "proxy", + "x-forwarded-for" + ], + "repository": "jshttp/proxy-addr", + "dependencies": { + "forwarded": "~0.1.0", + "ipaddr.js": "1.1.1" + }, + "devDependencies": { + "benchmark": "2.1.0", + "beautify-benchmark": "0.2.4", + "istanbul": "0.4.3", + "mocha": "~1.21.5" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "bench": "node benchmark/index.js", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/prr/.jshintrc b/node_modules/prr/.jshintrc new file mode 100644 index 0000000..6a7a956 --- /dev/null +++ b/node_modules/prr/.jshintrc @@ -0,0 +1,61 @@ +{ + "predef": [ ] + , "bitwise": false + , "camelcase": false + , "curly": false + , "eqeqeq": false + , "forin": false + , "immed": false + , "latedef": false + , "newcap": true + , "noarg": true + , "noempty": true + , "nonew": true + , "plusplus": false + , "quotmark": true + , "regexp": false + , "undef": true + , "unused": true + , "strict": false + , "trailing": true + , "maxlen": 120 + , "asi": true + , "boss": true + , "debug": true + , "eqnull": true + , "es5": true + , "esnext": true + , "evil": true + , "expr": true + , "funcscope": false + , "globalstrict": false + , "iterator": 
false + , "lastsemic": true + , "laxbreak": true + , "laxcomma": true + , "loopfunc": true + , "multistr": false + , "onecase": false + , "proto": false + , "regexdash": false + , "scripturl": true + , "smarttabs": false + , "shadow": false + , "sub": true + , "supernew": false + , "validthis": true + , "browser": true + , "couch": false + , "devel": false + , "dojo": false + , "mootools": false + , "node": true + , "nonstandard": true + , "prototypejs": false + , "rhino": false + , "worker": true + , "wsh": false + , "nomen": false + , "onevar": true + , "passfail": false +} \ No newline at end of file diff --git a/node_modules/prr/.npmignore b/node_modules/prr/.npmignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/node_modules/prr/.npmignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/node_modules/prr/.travis.yml b/node_modules/prr/.travis.yml new file mode 100644 index 0000000..fe3f4eb --- /dev/null +++ b/node_modules/prr/.travis.yml @@ -0,0 +1,10 @@ +language: node_js +node_js: + - 0.8 + - "0.10" +branches: + only: + - master +notifications: + email: + - rod@vagg.org diff --git a/node_modules/prr/LICENSE b/node_modules/prr/LICENSE new file mode 100644 index 0000000..f6a0029 --- /dev/null +++ b/node_modules/prr/LICENSE @@ -0,0 +1,39 @@ +Copyright 2013, Rod Vagg (the "Original Author") +All rights reserved. + +MIT +no-false-attribs License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +Except where noted, this license applies to any and all software +programs and associated documentation files created by the +Original Author, when distributed with the Software. \ No newline at end of file diff --git a/node_modules/prr/README.md b/node_modules/prr/README.md new file mode 100644 index 0000000..3e709e3 --- /dev/null +++ b/node_modules/prr/README.md @@ -0,0 +1,45 @@ +# prr [![Build Status](https://secure.travis-ci.org/rvagg/prr.png)](http://travis-ci.org/rvagg/prr) + +An sensible alternative to `Object.defineProperty()`. 
Available in npm and Ender as **prr**. + +## Usage + +Set the property `'foo'` (`obj.foo`) to have the value `'bar'` with default options (*enumerable, configurable and writable are all false*): + +```js +prr(obj, 'foo', 'bar') +``` + +Adjust the default options: + +```js +prr(obj, 'foo', 'bar', { enumerable: true, writable: true }) +``` + +Do the same operation for multiple properties: + +```js +prr(obj, { one: 'one', two: 'two' }) +// or with options: +prr(obj, { one: 'one', two: 'two' }, { enumerable: true, writable: true }) +``` + +But obviously, having to write out the full options object makes it nearly as bad as the original `Object.defineProperty()` so we can **simplify**. + +As an alternative method we can use an options string where each character represents a option: `'e'=='enumerable'`, `'c'=='configurable'` and `'w'=='writable'`: + +```js +prr(obj, 'foo', 'bar', 'ew') // enumerable and writable but not configurable +// muliple properties: +prr(obj, { one: 'one', two: 'two' }, 'ewc') // configurable too +``` + +## Where can I use it? + +Anywhere! For pre-ES5 environments *prr* will simply fall-back to an `object[property] = value` so you can get close to what you want. + +*prr* is Ender-compatible so you can include it in your Ender build and `$.prr(...)` or `var prr = require('prr'); prr(...)`. + +## Licence + +prr is Copyright (c) 2013 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licensed under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. \ No newline at end of file diff --git a/node_modules/prr/package.json b/node_modules/prr/package.json new file mode 100644 index 0000000..0adab2b --- /dev/null +++ b/node_modules/prr/package.json @@ -0,0 +1,23 @@ +{ + "name" : "prr" + , "description" : "A better Object.defineProperty()" + , "version" : "0.0.0" + , "homepage" : "https://github.com/rvagg/prr" + , "authors" : [ + "Rod Vagg (https://github.com/rvagg)" + ] + , "keywords" : [ "property", "properties", "defineProperty", "ender" ] + , "main" : "./prr.js" + , "repository" : { + "type" : "git" + , "url" : "https://github.com/rvagg/prr.git" + } + , "dependencies" : {} + , "devDependencies" : { + "tap" : "*" + } + , "scripts": { + "test" : "node ./test.js" + } + , "license" : "MIT" +} \ No newline at end of file diff --git a/node_modules/prr/prr.js b/node_modules/prr/prr.js new file mode 100644 index 0000000..94f5862 --- /dev/null +++ b/node_modules/prr/prr.js @@ -0,0 +1,63 @@ +/*! + * prr + * (c) 2013 Rod Vagg + * https://github.com/rvagg/prr + * License: MIT + */ + +(function (name, context, definition) { + if (typeof module != 'undefined' && module.exports) + module.exports = definition() + else + context[name] = definition() +})('prr', this, function() { + + var setProperty = typeof Object.defineProperty == 'function' + ? function (obj, key, options) { + Object.defineProperty(obj, key, options) + return obj + } + : function (obj, key, options) { // < es5 + obj[key] = options.value + return obj + } + + , makeOptions = function (value, options) { + var oo = typeof options == 'object' + , os = !oo && typeof options == 'string' + , op = function (p) { + return oo + ? !!options[p] + : os + ? 
options.indexOf(p[0]) > -1 + : false + } + + return { + enumerable : op('enumerable') + , configurable : op('configurable') + , writable : op('writable') + , value : value + } + } + + , prr = function (obj, key, value, options) { + var k + + options = makeOptions(value, options) + + if (typeof key == 'object') { + for (k in key) { + if (Object.hasOwnProperty.call(key, k)) { + options.value = key[k] + setProperty(obj, k, options) + } + } + return obj + } + + return setProperty(obj, key, options) + } + + return prr +}) \ No newline at end of file diff --git a/node_modules/prr/test.js b/node_modules/prr/test.js new file mode 100644 index 0000000..5222e30 --- /dev/null +++ b/node_modules/prr/test.js @@ -0,0 +1,169 @@ +const test = require('tap').test + , prr = require('./') + +test('test prr(o, key, value) form', function (t) { + t.plan(2) + + var o = {} + prr(o, 'foo', 'bar') + t.equal(o.foo, 'bar', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo') + , { + enumerable : false + , configurable : false + , writable : false + , value : 'bar' + } + , 'correct property descriptor' + ) + t.end() +}) + +test('test prr(o, { key: value }) form', function (t) { + t.plan(2) + + var o = {} + prr(o, { foo: 'bar' }) + + t.equal(o.foo, 'bar', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo') + , { + enumerable : false + , configurable : false + , writable : false + , value : 'bar' + } + , 'correct property descriptor' + ) + t.end() +}) + +test('test multiple key:value pairs', function (t) { + var o = { foo: 'bar' } + + prr(o, { one: 'ONE', two: 'TWO', obj: { o: 'o' }}) + + t.deepEqual(o, { foo: 'bar' }, 'properties are not enumerable') + t.equal(o.one, 'ONE', 'correctly set property') + t.equal(o.two, 'TWO', 'correctly set property') + t.deepEqual(o.obj, { o: 'o' }, 'correctly set property') + + ;[ 'one', 'two', 'obj' ].forEach(function (p) { + t.deepEqual( + Object.getOwnPropertyDescriptor(o, p) + , { + enumerable : false + , configurable : false + , writable : false + , value : p == 'obj' ? 
{ o: 'o' } : p.toUpperCase() + } + , 'correct property descriptor' + ) + }) + + t.end() +}) + +test('test descriptor options', function (t) { + var o = {} + + prr(o, 'foo', 'bar', { + enumerable : true + , configurable : false + }) + t.equal(o.foo, 'bar', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo') + , { + enumerable : true + , configurable : false + , writable : false + , value : 'bar' + } + , 'correct property descriptor' + ) + + prr(o, 'foo2', 'bar2', { + enumerable : true + , configurable : true + , writable : false + }) + t.equal(o.foo2, 'bar2', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo2') + , { + enumerable : true + , configurable : true + , writable : false + , value : 'bar2' + } + , 'correct property descriptor' + ) + + prr(o, 'foo3', 'bar3', { + enumerable : true + , configurable : true + , writable : true + }) + t.equal(o.foo3, 'bar3', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo3') + , { + enumerable : true + , configurable : true + , writable : true + , value : 'bar3' + } + , 'correct property descriptor' + ) + + t.end() +}) + + +test('test descriptor options, string form', function (t) { + var o = {} + + prr(o, 'foo', 'bar', 'e') + t.equal(o.foo, 'bar', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo') + , { + enumerable : true + , configurable : false + , writable : false + , value : 'bar' + } + , 'correct property descriptor' + ) + + prr(o, 'foo2', 'bar2', 'ec') + t.equal(o.foo2, 'bar2', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo2') + , { + enumerable : true + , configurable : true + , writable : false + , value : 'bar2' + } + , 'correct property descriptor' + ) + + prr(o, 'foo3', 'bar3', 'ecw') + t.equal(o.foo3, 'bar3', 'correct value') + t.deepEqual( + Object.getOwnPropertyDescriptor(o, 'foo3') + , { + enumerable : true + , configurable : true + , writable : true + , value : 'bar3' + } + , 'correct property descriptor' + ) + + t.end() +}) diff --git a/node_modules/punycode/LICENSE-MIT.txt b/node_modules/punycode/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/punycode/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/punycode/README.md b/node_modules/punycode/README.md new file mode 100644 index 0000000..7ad7d1f --- /dev/null +++ b/node_modules/punycode/README.md @@ -0,0 +1,176 @@ +# Punycode.js [![Build status](https://travis-ci.org/bestiejs/punycode.js.svg?branch=master)](https://travis-ci.org/bestiejs/punycode.js) [![Code coverage status](http://img.shields.io/coveralls/bestiejs/punycode.js/master.svg)](https://coveralls.io/r/bestiejs/punycode.js) [![Dependency status](https://gemnasium.com/bestiejs/punycode.js.svg)](https://gemnasium.com/bestiejs/punycode.js) + +A robust Punycode converter that fully complies to [RFC 3492](https://tools.ietf.org/html/rfc3492) and [RFC 5891](https://tools.ietf.org/html/rfc5891), and works on nearly all JavaScript platforms. + +This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm: + +* [The C example code from RFC 3492](https://tools.ietf.org/html/rfc3492#appendix-C) +* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c) +* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c) +* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287) +* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072)) + +This project is [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with [Node.js v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc) and [io.js v1.0.0+](https://github.com/iojs/io.js/blob/v1.x/lib/punycode.js). + +## Installation + +Via [npm](https://www.npmjs.com/) (only required for Node.js releases older than v0.6.2): + +```bash +npm install punycode +``` + +Via [Bower](http://bower.io/): + +```bash +bower install punycode +``` + +Via [Component](https://github.com/component/component): + +```bash +component install bestiejs/punycode.js +``` + +In a browser: + +```html + +``` + +In [Node.js](https://nodejs.org/), [io.js](https://iojs.org/), [Narwhal](http://narwhaljs.org/), and [RingoJS](http://ringojs.org/): + +```js +var punycode = require('punycode'); +``` + +In [Rhino](http://www.mozilla.org/rhino/): + +```js +load('punycode.js'); +``` + +Using an AMD loader like [RequireJS](http://requirejs.org/): + +```js +require( + { + 'paths': { + 'punycode': 'path/to/punycode' + } + }, + ['punycode'], + function(punycode) { + console.log(punycode); + } +); +``` + +## API + +### `punycode.decode(string)` + +Converts a Punycode string of ASCII symbols to a string of Unicode symbols. + +```js +// decode domain name parts +punycode.decode('maana-pta'); // 'mañana' +punycode.decode('--dqo34k'); // '☃-⌘' +``` + +### `punycode.encode(string)` + +Converts a string of Unicode symbols to a Punycode string of ASCII symbols. + +```js +// encode domain name parts +punycode.encode('mañana'); // 'maana-pta' +punycode.encode('☃-⌘'); // '--dqo34k' +``` + +### `punycode.toUnicode(input)` + +Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode. 
+ +```js +// decode domain names +punycode.toUnicode('xn--maana-pta.com'); +// → 'mañana.com' +punycode.toUnicode('xn----dqo34k.com'); +// → '☃-⌘.com' + +// decode email addresses +punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'); +// → 'джумла@джpумлатест.bрфa' +``` + +### `punycode.toASCII(input)` + +Converts a lowercased Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that’s already in ASCII. + +```js +// encode domain names +punycode.toASCII('mañana.com'); +// → 'xn--maana-pta.com' +punycode.toASCII('☃-⌘.com'); +// → 'xn----dqo34k.com' + +// encode email addresses +punycode.toASCII('джумла@джpумлатест.bрфa'); +// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq' +``` + +### `punycode.ucs2` + +#### `punycode.ucs2.decode(string)` + +Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16. + +```js +punycode.ucs2.decode('abc'); +// → [0x61, 0x62, 0x63] +// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE: +punycode.ucs2.decode('\uD834\uDF06'); +// → [0x1D306] +``` + +#### `punycode.ucs2.encode(codePoints)` + +Creates a string based on an array of numeric code point values. + +```js +punycode.ucs2.encode([0x61, 0x62, 0x63]); +// → 'abc' +punycode.ucs2.encode([0x1D306]); +// → '\uD834\uDF06' +``` + +### `punycode.version` + +A string representing the current Punycode.js version number. + +## Unit tests & code coverage + +After cloning this repository, run `npm install --dev` to install the dependencies needed for Punycode.js development and testing. You may want to install Istanbul _globally_ using `npm install istanbul -g`. + +Once that’s done, you can run the unit tests in Node using `npm test` or `node tests/tests.js`. To run the tests in Rhino, Ringo, Narwhal, PhantomJS, and web browsers as well, use `grunt test`. + +To generate the code coverage report, use `grunt cover`. + +Feel free to fork if you see possible improvements! + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## Contributors + +| [![twitter/jdalton](https://gravatar.com/avatar/299a3d891ff1920b69c364d061007043?s=70)](https://twitter.com/jdalton "Follow @jdalton on Twitter") | +|---| +| [John-David Dalton](http://allyoucanleet.com/) | + +## License + +Punycode.js is available under the [MIT](https://mths.be/mit) license. 
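A minimal usage sketch tying together the punycode API documented in the README above. This is illustrative only and not part of the committed file: it assumes the vendored punycode@1.4.1 module resolves via `require('punycode')` from this project, and the variable names (`unicodeHost`, `asciiHost`, `backAgain`) are placeholders of mine; the expected values are the ones the README itself documents.

```js
// Illustrative sketch: exercises the API described in node_modules/punycode/README.md.
// Assumes the vendored module is resolvable as require('punycode').
var punycode = require('punycode');

// Round-trip an internationalized domain name.
var unicodeHost = 'mañana.com';
var asciiHost = punycode.toASCII(unicodeHost);      // 'xn--maana-pta.com'
var backAgain = punycode.toUnicode(asciiHost);      // 'mañana.com'

console.log(asciiHost);
console.log(backAgain === unicodeHost);             // true

// Lower-level helpers operate on single labels and raw code points.
console.log(punycode.encode('mañana'));             // 'maana-pta'
console.log(punycode.ucs2.decode('\uD834\uDF06'));  // [0x1D306]
```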
diff --git a/node_modules/punycode/package.json b/node_modules/punycode/package.json new file mode 100644 index 0000000..accb530 --- /dev/null +++ b/node_modules/punycode/package.json @@ -0,0 +1,60 @@ +{ + "name": "punycode", + "version": "1.4.1", + "description": "A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, and works on nearly all JavaScript platforms.", + "homepage": "https://mths.be/punycode", + "main": "punycode.js", + "keywords": [ + "punycode", + "unicode", + "idn", + "idna", + "dns", + "url", + "domain" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "contributors": [ + { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + { + "name": "John-David Dalton", + "url": "http://allyoucanleet.com/" + } + ], + "repository": { + "type": "git", + "url": "https://github.com/bestiejs/punycode.js.git" + }, + "bugs": "https://github.com/bestiejs/punycode.js/issues", + "files": [ + "LICENSE-MIT.txt", + "punycode.js" + ], + "scripts": { + "test": "node tests/tests.js" + }, + "devDependencies": { + "coveralls": "^2.11.4", + "grunt": "^0.4.5", + "grunt-contrib-uglify": "^0.11.0", + "grunt-shell": "^1.1.2", + "istanbul": "^0.4.1", + "qunit-extras": "^1.4.4", + "qunitjs": "~1.11.0", + "requirejs": "^2.1.22" + }, + "jspm": { + "map": { + "./punycode.js": { + "node": "@node/punycode" + } + } + } +} diff --git a/node_modules/punycode/punycode.js b/node_modules/punycode/punycode.js new file mode 100644 index 0000000..2c87f6c --- /dev/null +++ b/node_modules/punycode/punycode.js @@ -0,0 +1,533 @@ +/*! https://mths.be/punycode v1.4.1 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw new RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. 
+ * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. 
+ */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. 
+ * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.4.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define('punycode', function() { + return punycode; + }); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { + // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { + // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { + // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); diff --git a/node_modules/qs/.eslintignore b/node_modules/qs/.eslintignore new file mode 100644 index 0000000..1521c8b --- /dev/null +++ b/node_modules/qs/.eslintignore @@ -0,0 +1 @@ +dist diff --git a/node_modules/qs/.eslintrc b/node_modules/qs/.eslintrc new file mode 100644 index 0000000..1faac27 --- /dev/null +++ b/node_modules/qs/.eslintrc @@ -0,0 +1,19 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "complexity": [2, 22], + "consistent-return": [1], + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-params": [2, 9], + "max-statements": [2, 36], + "no-extra-parens": [1], + "no-continue": [1], + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + "operator-linebreak": 1 + } +} diff --git a/node_modules/qs/.jscs.json b/node_modules/qs/.jscs.json new file mode 100644 index 0000000..3d099c4 --- /dev/null +++ b/node_modules/qs/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", 
"else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + "requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 1 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", 
"String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/qs/CHANGELOG.md b/node_modules/qs/CHANGELOG.md new file mode 100644 index 0000000..e318a05 --- /dev/null +++ b/node_modules/qs/CHANGELOG.md @@ -0,0 +1,120 @@ +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- Revert ES6 requirement and restore support for node down to v0.8. + +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. 
+- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". + +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? + +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? 
+ +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? +- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/qs/CONTRIBUTING.md b/node_modules/qs/CONTRIBUTING.md new file mode 100644 index 0000000..8928361 --- /dev/null +++ b/node_modules/qs/CONTRIBUTING.md @@ -0,0 +1 @@ +Please view our [hapijs contributing guide](https://github.com/hapijs/hapi/blob/master/CONTRIBUTING.md). diff --git a/node_modules/qs/LICENSE b/node_modules/qs/LICENSE new file mode 100644 index 0000000..d456948 --- /dev/null +++ b/node_modules/qs/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2014 Nathan LaFreniere and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + * * * + +The complete list of contributors can be found at: https://github.com/hapijs/qs/graphs/contributors diff --git a/node_modules/qs/dist/qs.js b/node_modules/qs/dist/qs.js new file mode 100644 index 0000000..4cc6f30 --- /dev/null +++ b/node_modules/qs/dist/qs.js @@ -0,0 +1,487 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || Utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? 
options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = Utils.merge(obj, newObj, options); + } + + return Utils.compact(obj); +}; + +},{"./utils":4}],3:[function(require,module,exports){ +'use strict'; + +var Utils = require('./utils'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var defaults = { + delimiter: '&', + strictNullHandling: false, + skipNulls: false, + encode: true, + encoder: Utils.encode +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = obj.toISOString(); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || Utils.isBuffer(obj)) { + if (encoder) { + return [encoder(prefix) + '=' + encoder(obj)]; + } + return [prefix + '=' + String(obj)]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } else { + values = values.concat(stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? 
defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? (typeof options.encoder === 'function' ? options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + objKeys = filter = options.filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + + return keys.join(delimiter); +}; + +},{"./utils":4}],4:[function(require,module,exports){ +'use strict'; + +var hexTable = (function () { + var array = new Array(256); + for (var i = 0; i < 256; ++i) { + array[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. 
+ // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . + c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + obj[key] = exports.compact(obj[key], refs); + } + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/node_modules/qs/lib/index.js b/node_modules/qs/lib/index.js new file mode 100755 index 0000000..1901959 --- /dev/null +++ b/node_modules/qs/lib/index.js @@ -0,0 +1,9 @@ +'use strict'; + +var Stringify = require('./stringify'); +var Parse = require('./parse'); + +module.exports = { + stringify: Stringify, + parse: Parse +}; diff --git a/node_modules/qs/lib/parse.js b/node_modules/qs/lib/parse.js new file mode 100755 index 0000000..bf70fd8 --- /dev/null +++ b/node_modules/qs/lib/parse.js @@ -0,0 +1,167 @@ +'use strict'; + +var Utils = require('./utils'); + +var defaults = { + delimiter: '&', + depth: 5, + arrayLimit: 20, + parameterLimit: 1000, + strictNullHandling: false, + plainObjects: false, + allowPrototypes: false, + allowDots: false, + decoder: Utils.decode +}; + +var parseValues = function parseValues(str, options) { + var obj = {}; + var parts = str.split(options.delimiter, options.parameterLimit === Infinity ? undefined : options.parameterLimit); + + for (var i = 0; i < parts.length; ++i) { + var part = parts[i]; + var pos = part.indexOf(']=') === -1 ? 
part.indexOf('=') : part.indexOf(']=') + 1; + + if (pos === -1) { + obj[options.decoder(part)] = ''; + + if (options.strictNullHandling) { + obj[options.decoder(part)] = null; + } + } else { + var key = options.decoder(part.slice(0, pos)); + var val = options.decoder(part.slice(pos + 1)); + + if (Object.prototype.hasOwnProperty.call(obj, key)) { + obj[key] = [].concat(obj[key]).concat(val); + } else { + obj[key] = val; + } + } + } + + return obj; +}; + +var parseObject = function parseObject(chain, val, options) { + if (!chain.length) { + return val; + } + + var root = chain.shift(); + + var obj; + if (root === '[]') { + obj = []; + obj = obj.concat(parseObject(chain, val, options)); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root[0] === '[' && root[root.length - 1] === ']' ? root.slice(1, root.length - 1) : root; + var index = parseInt(cleanRoot, 10); + if ( + !isNaN(index) && + root !== cleanRoot && + String(index) === cleanRoot && + index >= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && Object.prototype.hasOwnProperty(segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || Utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? 
options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = Utils.merge(obj, newObj, options); + } + + return Utils.compact(obj); +}; diff --git a/node_modules/qs/lib/stringify.js b/node_modules/qs/lib/stringify.js new file mode 100755 index 0000000..6e1c9a2 --- /dev/null +++ b/node_modules/qs/lib/stringify.js @@ -0,0 +1,137 @@ +'use strict'; + +var Utils = require('./utils'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var defaults = { + delimiter: '&', + strictNullHandling: false, + skipNulls: false, + encode: true, + encoder: Utils.encode +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = obj.toISOString(); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || Utils.isBuffer(obj)) { + if (encoder) { + return [encoder(prefix) + '=' + encoder(obj)]; + } + return [prefix + '=' + String(obj)]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } else { + values = values.concat(stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? (typeof options.encoder === 'function' ? 
options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + objKeys = filter = options.filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots)); + } + + return keys.join(delimiter); +}; diff --git a/node_modules/qs/lib/utils.js b/node_modules/qs/lib/utils.js new file mode 100755 index 0000000..2c5c8ee --- /dev/null +++ b/node_modules/qs/lib/utils.js @@ -0,0 +1,164 @@ +'use strict'; + +var hexTable = (function () { + var array = new Array(256); + for (var i = 0; i < 256; ++i) { + array[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options.plainObjects ? Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . 
+ c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + obj[key] = exports.compact(obj[key], refs); + } + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; diff --git a/node_modules/qs/package.json b/node_modules/qs/package.json new file mode 100644 index 0000000..3eb8123 --- /dev/null +++ b/node_modules/qs/package.json @@ -0,0 +1,48 @@ +{ + "name": "qs", + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "homepage": "https://github.com/ljharb/qs", + "version": "6.2.0", + "repository": { + "type": "git", + "url": "https://github.com/ljharb/qs.git" + }, + "main": "lib/index.js", + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "keywords": [ + "querystring", + "qs" + ], + "engines": { + "node": ">=0.6" + }, + "dependencies": {}, + "devDependencies": { + "browserify": "^13.0.1", + "tape": "^4.5.1", + "covert": "^1.1.0", + "mkdirp": "^0.5.1", + "eslint": "^2.9.0", + "@ljharb/eslint-config": "^4.0.0", + "parallelshell": "^2.0.0", + "iconv-lite": "^0.4.13", + "evalmd": "^0.0.17" + }, + "scripts": { + "pretest": "parallelshell 'npm run --silent readme' 'npm run --silent lint'", + "test": "npm run --silent coverage", + "tests-only": "node test", + "readme": "evalmd README.md", + "lint": "eslint lib/*.js text/*.js", + "coverage": "covert test", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js", + "prepublish": "npm run dist" + }, + "license": "BSD-3-Clause" +} diff --git a/node_modules/qs/test/index.js b/node_modules/qs/test/index.js new file mode 100644 index 0000000..b6a7d95 --- /dev/null +++ b/node_modules/qs/test/index.js @@ -0,0 +1,5 @@ +require('./parse'); + +require('./stringify'); + +require('./utils'); diff --git a/node_modules/qs/test/parse.js b/node_modules/qs/test/parse.js new file mode 100755 index 
0000000..1b79daf --- /dev/null +++ b/node_modules/qs/test/parse.js @@ -0,0 +1,423 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { '0': 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[1]=c'), { a: ['b', 'c'] }); + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to 20', function (st) { + 
st.deepEqual(qs.parse('a[20]=a'), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a'), { a: { '21': 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c'), { a: { '0': 'b', t: 'u', c: true } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y'), { a: { '0': 'b', '1': 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects (dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { '0': 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', '0': 'bar', '1': 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c&a=d'), { a: { b: 'c', d: true } }, 'can add keys to objects'); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { '2': 'b', '99999999': 'c' } }); + st.end(); + }); + + t.test('supports malformed 
uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { '_r': '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true }), { a: ['b', null, 'c', ''] }); + st.deepEqual(qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true }), { a: ['b', '', 'c', null] }); + st.deepEqual(qs.parse('a[]=&a[]=b&a[]=c'), { a: ['', 'b', 'c'] }); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2'), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1'), { a: [{ b: [{ c: '1' }] }] }); + st.deepEqual(qs.parse('a[1][2][3][c]=1'), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1'), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = new Buffer('test'); + st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { '0': '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { '0': null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { qs.parse(str); }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', { parallel: false }, function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter 
limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { '0': 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { '0': 'b', '1': 'c' } }); + st.end(); + }); + + t.test('allows disabling array parsing', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { parseArrays: false }), { a: { '0': 'b', '1': 'c' } }); + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object and not child values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { 'test': 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { 'test': 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('parses plain objects correctly', function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }, { prototype: false }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }, { prototype: false }); + st.end(); + }); + + t.test('can return plain objects', function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 
'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a['0'] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /\%([0-9A-F]{2})/ig; + var result = []; + var parts; + var last = 0; + while (parts = reg.exec(str)) { + result.push(parseInt(parts[1], 16)); + last = parts.index + parts[0].length; + } + return iconv.decode(new Buffer(result), 'shift_jis').toString(); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st.throws(function () { + qs.parse({}, { + decoder: 'string' + }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); +}); diff --git a/node_modules/qs/test/stringify.js b/node_modules/qs/test/stringify.js new file mode 100755 index 0000000..699397e --- /dev/null +++ b/node_modules/qs/test/stringify.js @@ -0,0 +1,305 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }), 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d'); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encode: false }), 'a.b[0]=c&a.b[1]=d'); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }), 
'a%5B0%5D%5Bb%5D=c'); + st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }), 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1'); + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal(qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), 'a[0][b]=1&a[1]=2&a[2]=3'); + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false }), 'a[0].b=c'); + st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false }), 'a[0].b.c[0]=1'); + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: '', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies an empty object', function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with an empty object as a child', function (st) { + var obj = { + a: Object.create(null) + }; + + obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + 
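
Editor's note: the stringify tests above (and the parse tests earlier in this diff) exercise each qs option in isolation. The following is an illustrative sketch only, not part of the vendored qs sources; it combines a few of those options, with expected outputs taken from the assertions in these tests and assuming the qs version vendored here:

```js
var qs = require('qs');

// arrayFormat switches between indices, brackets and repeat notation
qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets', encode: false }); // 'a[]=b&a[]=c'
qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' });                  // 'a=b&a=c'

// skipNulls drops null-valued keys; strictNullHandling keeps the bare key without '='
qs.stringify({ a: null, b: 'c' }, { skipNulls: true });          // 'b=c'
qs.stringify({ a: null, b: 'c' }, { strictNullHandling: true }); // 'a&b=c'

// allowDots applies in both directions
qs.stringify({ a: { b: 'c' } }, { allowDots: true, encode: false }); // 'a.b=c'
qs.parse('a.b=c', { allowDots: true });                              // { a: { b: 'c' } }
```
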
t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' }), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: new Buffer('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: new Buffer('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('doesn\'t blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + st.equal(qs.stringify({ a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, { filter: ['a', 'b', 0, 2] }), 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3'); + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) { + calls++; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { return a.localeCompare(b); }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { return a.localeCompare(b); }; + st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', 
zia: 'zia'} }, b: 'b' }, { sort: sort, encode: false }), 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb'); + st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', zia: 'zia'} }, b: 'b' }, { sort: null, encode: false }), 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b'); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + st.equal(qs.stringify({ 県: '大阪府', '': ''}, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i=0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st.throws(function () { + qs.stringify({}, { + encoder: 'string' + }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { + skip: typeof Buffer === 'undefined' + }, function (st) { + st.equal(qs.stringify({ a: new Buffer([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + st.end(); + }); +}); diff --git a/node_modules/qs/test/utils.js b/node_modules/qs/test/utils.js new file mode 100755 index 0000000..4a8d824 --- /dev/null +++ b/node_modules/qs/test/utils.js @@ -0,0 +1,9 @@ +'use strict'; + +var test = require('tape'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' }), { a: ['b', 'c'] }, 'merges two objects with the same key'); + t.end(); +}); diff --git a/node_modules/querystring-es3/.travis.yml b/node_modules/querystring-es3/.travis.yml new file mode 100644 index 0000000..895dbd3 --- /dev/null +++ b/node_modules/querystring-es3/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/node_modules/querystring-es3/History.md b/node_modules/querystring-es3/History.md new file mode 100644 index 0000000..4fddbaf --- /dev/null +++ b/node_modules/querystring-es3/History.md @@ -0,0 +1,20 @@ +# 0.2.0 / 2013-02-21 + + - Refactor into function per-module idiomatic style. + - Improved test coverage. + +# 0.1.0 / 2011-12-13 + + - Minor project reorganization + +# 0.0.3 / 2011-04-16 + - Support for AMD module loaders + +# 0.0.2 / 2011-04-16 + + - Ported unit tests + - Removed functionality that depended on Buffers + +# 0.0.1 / 2011-04-15 + + - Initial release diff --git a/node_modules/querystring-es3/License.md b/node_modules/querystring-es3/License.md new file mode 100644 index 0000000..fc80e85 --- /dev/null +++ b/node_modules/querystring-es3/License.md @@ -0,0 +1,19 @@ + +Copyright 2012 Irakli Gozalishvili. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/querystring-es3/Readme.md b/node_modules/querystring-es3/Readme.md new file mode 100644 index 0000000..be1668d --- /dev/null +++ b/node_modules/querystring-es3/Readme.md @@ -0,0 +1,15 @@ +# querystring + +[![Build Status](https://secure.travis-ci.org/mike-spainhower/querystring.png)](http://travis-ci.org/mike-spainhower/querystring) + + +[![Browser support](http://ci.testling.com/mike-spainhower/querystring.png)](http://ci.testling.com/mike-spainhower/querystring) + + + +Node's querystring module for all engines. + +## Install ## + + npm install querystring + diff --git a/node_modules/querystring-es3/decode.js b/node_modules/querystring-es3/decode.js new file mode 100644 index 0000000..b5825c0 --- /dev/null +++ b/node_modules/querystring-es3/decode.js @@ -0,0 +1,84 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +// If obj.hasOwnProperty has been overridden, then calling +// obj.hasOwnProperty(prop) will break. 
+// See: https://github.com/joyent/node/issues/1707 +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +module.exports = function(qs, sep, eq, options) { + sep = sep || '&'; + eq = eq || '='; + var obj = {}; + + if (typeof qs !== 'string' || qs.length === 0) { + return obj; + } + + var regexp = /\+/g; + qs = qs.split(sep); + + var maxKeys = 1000; + if (options && typeof options.maxKeys === 'number') { + maxKeys = options.maxKeys; + } + + var len = qs.length; + // maxKeys <= 0 means that we should not limit keys count + if (maxKeys > 0 && len > maxKeys) { + len = maxKeys; + } + + for (var i = 0; i < len; ++i) { + var x = qs[i].replace(regexp, '%20'), + idx = x.indexOf(eq), + kstr, vstr, k, v; + + if (idx >= 0) { + kstr = x.substr(0, idx); + vstr = x.substr(idx + 1); + } else { + kstr = x; + vstr = ''; + } + + k = decodeURIComponent(kstr); + v = decodeURIComponent(vstr); + + if (!hasOwnProperty(obj, k)) { + obj[k] = v; + } else if (isArray(obj[k])) { + obj[k].push(v); + } else { + obj[k] = [obj[k], v]; + } + } + + return obj; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/querystring-es3/encode.js b/node_modules/querystring-es3/encode.js new file mode 100644 index 0000000..76e4cfb --- /dev/null +++ b/node_modules/querystring-es3/encode.js @@ -0,0 +1,85 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +var stringifyPrimitive = function(v) { + switch (typeof v) { + case 'string': + return v; + + case 'boolean': + return v ? 'true' : 'false'; + + case 'number': + return isFinite(v) ? 
v : ''; + + default: + return ''; + } +}; + +module.exports = function(obj, sep, eq, name) { + sep = sep || '&'; + eq = eq || '='; + if (obj === null) { + obj = undefined; + } + + if (typeof obj === 'object') { + return map(objectKeys(obj), function(k) { + var ks = encodeURIComponent(stringifyPrimitive(k)) + eq; + if (isArray(obj[k])) { + return map(obj[k], function(v) { + return ks + encodeURIComponent(stringifyPrimitive(v)); + }).join(sep); + } else { + return ks + encodeURIComponent(stringifyPrimitive(obj[k])); + } + }).join(sep); + + } + + if (!name) return ''; + return encodeURIComponent(stringifyPrimitive(name)) + eq + + encodeURIComponent(stringifyPrimitive(obj)); +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + +function map (xs, f) { + if (xs.map) return xs.map(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + res.push(f(xs[i], i)); + } + return res; +} + +var objectKeys = Object.keys || function (obj) { + var res = []; + for (var key in obj) { + if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key); + } + return res; +}; diff --git a/node_modules/querystring-es3/index.js b/node_modules/querystring-es3/index.js new file mode 100644 index 0000000..99826ea --- /dev/null +++ b/node_modules/querystring-es3/index.js @@ -0,0 +1,4 @@ +'use strict'; + +exports.decode = exports.parse = require('./decode'); +exports.encode = exports.stringify = require('./encode'); diff --git a/node_modules/querystring-es3/package.json b/node_modules/querystring-es3/package.json new file mode 100644 index 0000000..695d8a0 --- /dev/null +++ b/node_modules/querystring-es3/package.json @@ -0,0 +1,65 @@ +{ + "name": "querystring-es3", + "id": "querystring-es3", + "version": "0.2.1", + "description": "Node's querystring module for all engines. 
(ES3 compat fork)", + "keywords": [ "commonjs", "query", "querystring" ], + "author": "Irakli Gozalishvili ", + "repository": { + "type": "git", + "url": "git://github.com/mike-spainhower/querystring.git", + "web": "https://github.com/mike-spainhower/querystring" + }, + "bugs": { + "url": "http://github.com/mike-spainhower/querystring/issues/" + }, + "devDependencies": { + "test": "~0.x.0", + "phantomify": "~0.x.0", + "retape": "~0.x.0", + "tape": "~0.1.5" + }, + "engines": { + "node": ">=0.4.x" + }, + "scripts": { + "test": "npm run test-node && npm run test-browser && npm run test-tap", + "test-browser": "node ./node_modules/phantomify/bin/cmd.js ./test/common-index.js", + "test-node": "node ./test/common-index.js", + "test-tap": "node ./test/tap-index.js" + }, + "testling": { + "files": "test/tap-index.js", + "browsers": { + "iexplore": [ + 9, + 10 + ], + "chrome": [ + 16, + 20, + 25, + "canary" + ], + "firefox": [ + 10, + 15, + 16, + 17, + 18, + "nightly" + ], + "safari": [ + 5, + 6 + ], + "opera": [ + 12 + ] + } + }, + "licenses": [{ + "type" : "MIT", + "url" : "https://github.com/Gozala/enchain/License.md" + }] +} diff --git a/node_modules/querystring-es3/test/common-index.js b/node_modules/querystring-es3/test/common-index.js new file mode 100644 index 0000000..f356f98 --- /dev/null +++ b/node_modules/querystring-es3/test/common-index.js @@ -0,0 +1,3 @@ +"use strict"; + +require("test").run(require("./index")) \ No newline at end of file diff --git a/node_modules/querystring-es3/test/index.js b/node_modules/querystring-es3/test/index.js new file mode 100644 index 0000000..62eb2ac --- /dev/null +++ b/node_modules/querystring-es3/test/index.js @@ -0,0 +1,210 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +"use strict"; + +// test using assert +var qs = require('../'); + +// folding block, commented to pass gjslint +// {{{ +// [ wonkyQS, canonicalQS, obj ] +var qsTestCases = [ + ['foo=918854443121279438895193', + 'foo=918854443121279438895193', + {'foo': '918854443121279438895193'}], + ['foo=bar', 'foo=bar', {'foo': 'bar'}], + ['foo=bar&foo=quux', 'foo=bar&foo=quux', {'foo': ['bar', 'quux']}], + ['foo=1&bar=2', 'foo=1&bar=2', {'foo': '1', 'bar': '2'}], + ['my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F', + 'my%20weird%20field=q1!2%22\'w%245%267%2Fz8)%3F', + {'my weird field': 'q1!2"\'w$5&7/z8)?' 
}], + ['foo%3Dbaz=bar', 'foo%3Dbaz=bar', {'foo=baz': 'bar'}], + ['foo=baz=bar', 'foo=baz%3Dbar', {'foo': 'baz=bar'}], + ['str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + 'str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + { 'str': 'foo', + 'arr': ['1', '2', '3'], + 'somenull': '', + 'undef': ''}], + [' foo = bar ', '%20foo%20=%20bar%20', {' foo ': ' bar '}], + // disable test that fails ['foo=%zx', 'foo=%25zx', {'foo': '%zx'}], + ['foo=%EF%BF%BD', 'foo=%EF%BF%BD', {'foo': '\ufffd' }], + // See: https://github.com/joyent/node/issues/1707 + ['hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + 'hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + { hasOwnProperty: 'x', + toString: 'foo', + valueOf: 'bar', + __defineGetter__: 'baz' }], + // See: https://github.com/joyent/node/issues/3058 + ['foo&bar=baz', 'foo=&bar=baz', { foo: '', bar: 'baz' }] +]; + +// [ wonkyQS, canonicalQS, obj ] +var qsColonTestCases = [ + ['foo:bar', 'foo:bar', {'foo': 'bar'}], + ['foo:bar;foo:quux', 'foo:bar;foo:quux', {'foo': ['bar', 'quux']}], + ['foo:1&bar:2;baz:quux', + 'foo:1%26bar%3A2;baz:quux', + {'foo': '1&bar:2', 'baz': 'quux'}], + ['foo%3Abaz:bar', 'foo%3Abaz:bar', {'foo:baz': 'bar'}], + ['foo:baz:bar', 'foo:baz%3Abar', {'foo': 'baz:bar'}] +]; + +// [wonkyObj, qs, canonicalObj] +var extendedFunction = function() {}; +extendedFunction.prototype = {a: 'b'}; +var qsWeirdObjects = [ + [{regexp: /./g}, 'regexp=', {'regexp': ''}], + [{regexp: new RegExp('.', 'g')}, 'regexp=', {'regexp': ''}], + [{fn: function() {}}, 'fn=', {'fn': ''}], + [{fn: new Function('')}, 'fn=', {'fn': ''}], + [{math: Math}, 'math=', {'math': ''}], + [{e: extendedFunction}, 'e=', {'e': ''}], + [{d: new Date()}, 'd=', {'d': ''}], + [{d: Date}, 'd=', {'d': ''}], + [{f: new Boolean(false), t: new Boolean(true)}, 'f=&t=', {'f': '', 't': ''}], + [{f: false, t: true}, 'f=false&t=true', {'f': 'false', 't': 'true'}], + [{n: null}, 'n=', {'n': ''}], + [{nan: NaN}, 'nan=', {'nan': ''}], + [{inf: Infinity}, 'inf=', {'inf': ''}] +]; +// }}} + +var qsNoMungeTestCases = [ + ['', {}], + ['foo=bar&foo=baz', {'foo': ['bar', 'baz']}], + ['blah=burp', {'blah': 'burp'}], + ['gragh=1&gragh=3&goo=2', {'gragh': ['1', '3'], 'goo': '2'}], + ['frappucino=muffin&goat%5B%5D=scone&pond=moose', + {'frappucino': 'muffin', 'goat[]': 'scone', 'pond': 'moose'}], + ['trololol=yes&lololo=no', {'trololol': 'yes', 'lololo': 'no'}] +]; + +exports['test basic'] = function(assert) { + assert.strictEqual('918854443121279438895193', + qs.parse('id=918854443121279438895193').id, + 'prase id=918854443121279438895193'); +}; + +exports['test that the canonical qs is parsed properly'] = function(assert) { + qsTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[0]), + 'parse ' + testCase[0]); + }); +}; + + +exports['test that the colon test cases can do the same'] = function(assert) { + qsColonTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[0], ';', ':'), + 'parse ' + testCase[0] + ' -> ; :'); + }); +}; + +exports['test the weird objects, that they get parsed properly'] = function(assert) { + qsWeirdObjects.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[1]), + 'parse ' + testCase[1]); + }); +}; + +exports['test non munge test cases'] = function(assert) { + qsNoMungeTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[0], qs.stringify(testCase[1], '&', '=', false), + 'stringify ' + JSON.stringify(testCase[1]) + ' -> & ='); + }); +}; + 
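
Editor's note: the test cases above also cover the third and fourth arguments of this querystring implementation, which override the pair separator and the key/value delimiter. A minimal illustrative sketch (not part of the vendored test file; results mirror the qsTestCases and qsColonTestCases above):

```js
var qs = require('querystring-es3');

// defaults: '&' between pairs, '=' between key and value
qs.parse('foo=bar&foo=quux');          // { foo: ['bar', 'quux'] }
qs.stringify({ foo: '1', bar: '2' });  // 'foo=1&bar=2'

// custom separators, as in the colon test cases above
qs.parse('foo:bar;baz:quux', ';', ':');               // { foo: 'bar', baz: 'quux' }
qs.stringify({ foo: 'bar', baz: 'quux' }, ';', ':');  // 'foo:bar;baz:quux'
```
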
+exports['test the nested qs-in-qs case'] = function(assert) { + var f = qs.parse('a=b&q=x%3Dy%26y%3Dz'); + f.q = qs.parse(f.q); + assert.deepEqual(f, { a: 'b', q: { x: 'y', y: 'z' } }, + 'parse a=b&q=x%3Dy%26y%3Dz'); +}; + +exports['test nested in colon'] = function(assert) { + var f = qs.parse('a:b;q:x%3Ay%3By%3Az', ';', ':'); + f.q = qs.parse(f.q, ';', ':'); + assert.deepEqual(f, { a: 'b', q: { x: 'y', y: 'z' } }, + 'parse a:b;q:x%3Ay%3By%3Az -> ; :'); +}; + +exports['test stringifying'] = function(assert) { + qsTestCases.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[2]), + 'stringify ' + JSON.stringify(testCase[2])); + }); + + qsColonTestCases.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[2], ';', ':'), + 'stringify ' + JSON.stringify(testCase[2]) + ' -> ; :'); + }); + + qsWeirdObjects.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[0]), + 'stringify ' + JSON.stringify(testCase[0])); + }); +}; + +exports['test stringifying nested'] = function(assert) { + var f = qs.stringify({ + a: 'b', + q: qs.stringify({ + x: 'y', + y: 'z' + }) + }); + assert.equal(f, 'a=b&q=x%3Dy%26y%3Dz', + JSON.stringify({ + a: 'b', + 'qs.stringify -> q': { + x: 'y', + y: 'z' + } + })); + + var threw = false; + try { qs.parse(undefined); } catch(error) { threw = true; } + assert.ok(!threw, "does not throws on undefined"); +}; + +exports['test nested in colon'] = function(assert) { + var f = qs.stringify({ + a: 'b', + q: qs.stringify({ + x: 'y', + y: 'z' + }, ';', ':') + }, ';', ':'); + assert.equal(f, 'a:b;q:x%3Ay%3By%3Az', + 'stringify ' + JSON.stringify({ + a: 'b', + 'qs.stringify -> q': { + x: 'y', + y: 'z' + } + }) + ' -> ; : '); + + + assert.deepEqual({}, qs.parse(), 'parse undefined'); +}; diff --git a/node_modules/querystring-es3/test/tap-index.js b/node_modules/querystring-es3/test/tap-index.js new file mode 100644 index 0000000..70679b3 --- /dev/null +++ b/node_modules/querystring-es3/test/tap-index.js @@ -0,0 +1,3 @@ +"use strict"; + +require("retape")(require("./index")) \ No newline at end of file diff --git a/node_modules/querystring/.History.md.un~ b/node_modules/querystring/.History.md.un~ new file mode 100644 index 0000000..c96a7dd Binary files /dev/null and b/node_modules/querystring/.History.md.un~ differ diff --git a/node_modules/querystring/.Readme.md.un~ b/node_modules/querystring/.Readme.md.un~ new file mode 100644 index 0000000..71613b5 Binary files /dev/null and b/node_modules/querystring/.Readme.md.un~ differ diff --git a/node_modules/querystring/.package.json.un~ b/node_modules/querystring/.package.json.un~ new file mode 100644 index 0000000..d86fe31 Binary files /dev/null and b/node_modules/querystring/.package.json.un~ differ diff --git a/node_modules/querystring/.travis.yml b/node_modules/querystring/.travis.yml new file mode 100644 index 0000000..895dbd3 --- /dev/null +++ b/node_modules/querystring/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/node_modules/querystring/History.md b/node_modules/querystring/History.md new file mode 100644 index 0000000..4fddbaf --- /dev/null +++ b/node_modules/querystring/History.md @@ -0,0 +1,20 @@ +# 0.2.0 / 2013-02-21 + + - Refactor into function per-module idiomatic style. + - Improved test coverage. 
+ +# 0.1.0 / 2011-12-13 + + - Minor project reorganization + +# 0.0.3 / 2011-04-16 + - Support for AMD module loaders + +# 0.0.2 / 2011-04-16 + + - Ported unit tests + - Removed functionality that depended on Buffers + +# 0.0.1 / 2011-04-15 + + - Initial release diff --git a/node_modules/querystring/License.md b/node_modules/querystring/License.md new file mode 100644 index 0000000..fc80e85 --- /dev/null +++ b/node_modules/querystring/License.md @@ -0,0 +1,19 @@ + +Copyright 2012 Irakli Gozalishvili. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/querystring/Readme.md b/node_modules/querystring/Readme.md new file mode 100644 index 0000000..a4fe252 --- /dev/null +++ b/node_modules/querystring/Readme.md @@ -0,0 +1,15 @@ +# querystring + +[![Build Status](https://secure.travis-ci.org/Gozala/querystring.png)](http://travis-ci.org/Gozala/querystring) + + +[![Browser support](http://ci.testling.com/Gozala/querystring.png)](http://ci.testling.com/Gozala/querystring) + + + +Node's querystring module for all engines. + +## Install ## + + npm install querystring + diff --git a/node_modules/querystring/decode.js b/node_modules/querystring/decode.js new file mode 100644 index 0000000..a6518b8 --- /dev/null +++ b/node_modules/querystring/decode.js @@ -0,0 +1,80 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +// If obj.hasOwnProperty has been overridden, then calling +// obj.hasOwnProperty(prop) will break. +// See: https://github.com/joyent/node/issues/1707 +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} + +module.exports = function(qs, sep, eq, options) { + sep = sep || '&'; + eq = eq || '='; + var obj = {}; + + if (typeof qs !== 'string' || qs.length === 0) { + return obj; + } + + var regexp = /\+/g; + qs = qs.split(sep); + + var maxKeys = 1000; + if (options && typeof options.maxKeys === 'number') { + maxKeys = options.maxKeys; + } + + var len = qs.length; + // maxKeys <= 0 means that we should not limit keys count + if (maxKeys > 0 && len > maxKeys) { + len = maxKeys; + } + + for (var i = 0; i < len; ++i) { + var x = qs[i].replace(regexp, '%20'), + idx = x.indexOf(eq), + kstr, vstr, k, v; + + if (idx >= 0) { + kstr = x.substr(0, idx); + vstr = x.substr(idx + 1); + } else { + kstr = x; + vstr = ''; + } + + k = decodeURIComponent(kstr); + v = decodeURIComponent(vstr); + + if (!hasOwnProperty(obj, k)) { + obj[k] = v; + } else if (Array.isArray(obj[k])) { + obj[k].push(v); + } else { + obj[k] = [obj[k], v]; + } + } + + return obj; +}; diff --git a/node_modules/querystring/encode.js b/node_modules/querystring/encode.js new file mode 100644 index 0000000..4f2b561 --- /dev/null +++ b/node_modules/querystring/encode.js @@ -0,0 +1,64 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +var stringifyPrimitive = function(v) { + switch (typeof v) { + case 'string': + return v; + + case 'boolean': + return v ? 'true' : 'false'; + + case 'number': + return isFinite(v) ? 
v : ''; + + default: + return ''; + } +}; + +module.exports = function(obj, sep, eq, name) { + sep = sep || '&'; + eq = eq || '='; + if (obj === null) { + obj = undefined; + } + + if (typeof obj === 'object') { + return Object.keys(obj).map(function(k) { + var ks = encodeURIComponent(stringifyPrimitive(k)) + eq; + if (Array.isArray(obj[k])) { + return obj[k].map(function(v) { + return ks + encodeURIComponent(stringifyPrimitive(v)); + }).join(sep); + } else { + return ks + encodeURIComponent(stringifyPrimitive(obj[k])); + } + }).join(sep); + + } + + if (!name) return ''; + return encodeURIComponent(stringifyPrimitive(name)) + eq + + encodeURIComponent(stringifyPrimitive(obj)); +}; diff --git a/node_modules/querystring/index.js b/node_modules/querystring/index.js new file mode 100644 index 0000000..99826ea --- /dev/null +++ b/node_modules/querystring/index.js @@ -0,0 +1,4 @@ +'use strict'; + +exports.decode = exports.parse = require('./decode'); +exports.encode = exports.stringify = require('./encode'); diff --git a/node_modules/querystring/package.json b/node_modules/querystring/package.json new file mode 100644 index 0000000..bdbc970 --- /dev/null +++ b/node_modules/querystring/package.json @@ -0,0 +1,65 @@ +{ + "name": "querystring", + "id": "querystring", + "version": "0.2.0", + "description": "Node's querystring module for all engines.", + "keywords": [ "commonjs", "query", "querystring" ], + "author": "Irakli Gozalishvili ", + "repository": { + "type": "git", + "url": "git://github.com/Gozala/querystring.git", + "web": "https://github.com/Gozala/querystring" + }, + "bugs": { + "url": "http://github.com/Gozala/querystring/issues/" + }, + "devDependencies": { + "test": "~0.x.0", + "phantomify": "~0.x.0", + "retape": "~0.x.0", + "tape": "~0.1.5" + }, + "engines": { + "node": ">=0.4.x" + }, + "scripts": { + "test": "npm run test-node && npm run test-browser && npm run test-tap", + "test-browser": "node ./node_modules/phantomify/bin/cmd.js ./test/common-index.js", + "test-node": "node ./test/common-index.js", + "test-tap": "node ./test/tap-index.js" + }, + "testling": { + "files": "test/tap-index.js", + "browsers": { + "iexplore": [ + 9, + 10 + ], + "chrome": [ + 16, + 20, + 25, + "canary" + ], + "firefox": [ + 10, + 15, + 16, + 17, + 18, + "nightly" + ], + "safari": [ + 5, + 6 + ], + "opera": [ + 12 + ] + } + }, + "licenses": [{ + "type" : "MIT", + "url" : "https://github.com/Gozala/enchain/License.md" + }] +} diff --git a/node_modules/querystring/test/.index.js.un~ b/node_modules/querystring/test/.index.js.un~ new file mode 100644 index 0000000..898eced Binary files /dev/null and b/node_modules/querystring/test/.index.js.un~ differ diff --git a/node_modules/querystring/test/common-index.js b/node_modules/querystring/test/common-index.js new file mode 100644 index 0000000..f356f98 --- /dev/null +++ b/node_modules/querystring/test/common-index.js @@ -0,0 +1,3 @@ +"use strict"; + +require("test").run(require("./index")) \ No newline at end of file diff --git a/node_modules/querystring/test/index.js b/node_modules/querystring/test/index.js new file mode 100644 index 0000000..62eb2ac --- /dev/null +++ b/node_modules/querystring/test/index.js @@ -0,0 +1,210 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +"use strict"; + +// test using assert +var qs = require('../'); + +// folding block, commented to pass gjslint +// {{{ +// [ wonkyQS, canonicalQS, obj ] +var qsTestCases = [ + ['foo=918854443121279438895193', + 'foo=918854443121279438895193', + {'foo': '918854443121279438895193'}], + ['foo=bar', 'foo=bar', {'foo': 'bar'}], + ['foo=bar&foo=quux', 'foo=bar&foo=quux', {'foo': ['bar', 'quux']}], + ['foo=1&bar=2', 'foo=1&bar=2', {'foo': '1', 'bar': '2'}], + ['my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F', + 'my%20weird%20field=q1!2%22\'w%245%267%2Fz8)%3F', + {'my weird field': 'q1!2"\'w$5&7/z8)?' }], + ['foo%3Dbaz=bar', 'foo%3Dbaz=bar', {'foo=baz': 'bar'}], + ['foo=baz=bar', 'foo=baz%3Dbar', {'foo': 'baz=bar'}], + ['str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + 'str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + { 'str': 'foo', + 'arr': ['1', '2', '3'], + 'somenull': '', + 'undef': ''}], + [' foo = bar ', '%20foo%20=%20bar%20', {' foo ': ' bar '}], + // disable test that fails ['foo=%zx', 'foo=%25zx', {'foo': '%zx'}], + ['foo=%EF%BF%BD', 'foo=%EF%BF%BD', {'foo': '\ufffd' }], + // See: https://github.com/joyent/node/issues/1707 + ['hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + 'hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + { hasOwnProperty: 'x', + toString: 'foo', + valueOf: 'bar', + __defineGetter__: 'baz' }], + // See: https://github.com/joyent/node/issues/3058 + ['foo&bar=baz', 'foo=&bar=baz', { foo: '', bar: 'baz' }] +]; + +// [ wonkyQS, canonicalQS, obj ] +var qsColonTestCases = [ + ['foo:bar', 'foo:bar', {'foo': 'bar'}], + ['foo:bar;foo:quux', 'foo:bar;foo:quux', {'foo': ['bar', 'quux']}], + ['foo:1&bar:2;baz:quux', + 'foo:1%26bar%3A2;baz:quux', + {'foo': '1&bar:2', 'baz': 'quux'}], + ['foo%3Abaz:bar', 'foo%3Abaz:bar', {'foo:baz': 'bar'}], + ['foo:baz:bar', 'foo:baz%3Abar', {'foo': 'baz:bar'}] +]; + +// [wonkyObj, qs, canonicalObj] +var extendedFunction = function() {}; +extendedFunction.prototype = {a: 'b'}; +var qsWeirdObjects = [ + [{regexp: /./g}, 'regexp=', {'regexp': ''}], + [{regexp: new RegExp('.', 'g')}, 'regexp=', {'regexp': ''}], + [{fn: function() {}}, 'fn=', {'fn': ''}], + [{fn: new Function('')}, 'fn=', {'fn': ''}], + [{math: Math}, 'math=', {'math': ''}], + [{e: extendedFunction}, 'e=', {'e': ''}], + [{d: new Date()}, 'd=', {'d': ''}], + [{d: Date}, 'd=', {'d': ''}], + [{f: new Boolean(false), t: new Boolean(true)}, 'f=&t=', {'f': '', 't': ''}], + [{f: 
false, t: true}, 'f=false&t=true', {'f': 'false', 't': 'true'}], + [{n: null}, 'n=', {'n': ''}], + [{nan: NaN}, 'nan=', {'nan': ''}], + [{inf: Infinity}, 'inf=', {'inf': ''}] +]; +// }}} + +var qsNoMungeTestCases = [ + ['', {}], + ['foo=bar&foo=baz', {'foo': ['bar', 'baz']}], + ['blah=burp', {'blah': 'burp'}], + ['gragh=1&gragh=3&goo=2', {'gragh': ['1', '3'], 'goo': '2'}], + ['frappucino=muffin&goat%5B%5D=scone&pond=moose', + {'frappucino': 'muffin', 'goat[]': 'scone', 'pond': 'moose'}], + ['trololol=yes&lololo=no', {'trololol': 'yes', 'lololo': 'no'}] +]; + +exports['test basic'] = function(assert) { + assert.strictEqual('918854443121279438895193', + qs.parse('id=918854443121279438895193').id, + 'prase id=918854443121279438895193'); +}; + +exports['test that the canonical qs is parsed properly'] = function(assert) { + qsTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[0]), + 'parse ' + testCase[0]); + }); +}; + + +exports['test that the colon test cases can do the same'] = function(assert) { + qsColonTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[0], ';', ':'), + 'parse ' + testCase[0] + ' -> ; :'); + }); +}; + +exports['test the weird objects, that they get parsed properly'] = function(assert) { + qsWeirdObjects.forEach(function(testCase) { + assert.deepEqual(testCase[2], qs.parse(testCase[1]), + 'parse ' + testCase[1]); + }); +}; + +exports['test non munge test cases'] = function(assert) { + qsNoMungeTestCases.forEach(function(testCase) { + assert.deepEqual(testCase[0], qs.stringify(testCase[1], '&', '=', false), + 'stringify ' + JSON.stringify(testCase[1]) + ' -> & ='); + }); +}; + +exports['test the nested qs-in-qs case'] = function(assert) { + var f = qs.parse('a=b&q=x%3Dy%26y%3Dz'); + f.q = qs.parse(f.q); + assert.deepEqual(f, { a: 'b', q: { x: 'y', y: 'z' } }, + 'parse a=b&q=x%3Dy%26y%3Dz'); +}; + +exports['test nested in colon'] = function(assert) { + var f = qs.parse('a:b;q:x%3Ay%3By%3Az', ';', ':'); + f.q = qs.parse(f.q, ';', ':'); + assert.deepEqual(f, { a: 'b', q: { x: 'y', y: 'z' } }, + 'parse a:b;q:x%3Ay%3By%3Az -> ; :'); +}; + +exports['test stringifying'] = function(assert) { + qsTestCases.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[2]), + 'stringify ' + JSON.stringify(testCase[2])); + }); + + qsColonTestCases.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[2], ';', ':'), + 'stringify ' + JSON.stringify(testCase[2]) + ' -> ; :'); + }); + + qsWeirdObjects.forEach(function(testCase) { + assert.equal(testCase[1], qs.stringify(testCase[0]), + 'stringify ' + JSON.stringify(testCase[0])); + }); +}; + +exports['test stringifying nested'] = function(assert) { + var f = qs.stringify({ + a: 'b', + q: qs.stringify({ + x: 'y', + y: 'z' + }) + }); + assert.equal(f, 'a=b&q=x%3Dy%26y%3Dz', + JSON.stringify({ + a: 'b', + 'qs.stringify -> q': { + x: 'y', + y: 'z' + } + })); + + var threw = false; + try { qs.parse(undefined); } catch(error) { threw = true; } + assert.ok(!threw, "does not throws on undefined"); +}; + +exports['test nested in colon'] = function(assert) { + var f = qs.stringify({ + a: 'b', + q: qs.stringify({ + x: 'y', + y: 'z' + }, ';', ':') + }, ';', ':'); + assert.equal(f, 'a:b;q:x%3Ay%3By%3Az', + 'stringify ' + JSON.stringify({ + a: 'b', + 'qs.stringify -> q': { + x: 'y', + y: 'z' + } + }) + ' -> ; : '); + + + assert.deepEqual({}, qs.parse(), 'parse undefined'); +}; diff --git a/node_modules/querystring/test/tap-index.js 
b/node_modules/querystring/test/tap-index.js new file mode 100644 index 0000000..70679b3 --- /dev/null +++ b/node_modules/querystring/test/tap-index.js @@ -0,0 +1,3 @@ +"use strict"; + +require("retape")(require("./index")) \ No newline at end of file diff --git a/node_modules/randomatic/LICENSE b/node_modules/randomatic/LICENSE new file mode 100755 index 0000000..6d53705 --- /dev/null +++ b/node_modules/randomatic/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/randomatic/README.md b/node_modules/randomatic/README.md new file mode 100644 index 0000000..28b1ae0 --- /dev/null +++ b/node_modules/randomatic/README.md @@ -0,0 +1,132 @@ +# randomatic [![NPM version](https://img.shields.io/npm/v/randomatic.svg)](https://www.npmjs.com/package/randomatic) + +> Generate randomized strings of a specified length, fast. Only the length is necessary, but you can optionally generate patterns using any combination of numeric, alpha-numeric, alphabetical, special or custom characters. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i randomatic --save +``` + +Install with [bower](http://bower.io/) + +```sh +$ bower install randomatic --save +``` + +## Usage + +```js +var randomize = require('randomatic'); +``` + +## API + +```js +randomize(pattern, length, options); +``` + +* `pattern` **{String}**: The pattern to use for randomizing +* `length` **{Object}**: The length of the string to generate + +### pattern + +> The pattern to use for randomizing + +Patterns can contain any combination of the below characters, specified in any order. + +**Example:** + +To generate a 10-character randomized string using all available characters: + +```js +randomize('*', 10); +//=> + +randomize('Aa0!', 10); +//=> +``` + +* `a`: Lowercase alpha characters (`abcdefghijklmnopqrstuvwxyz'`) +* `A`: Uppercase alpha characters (`ABCDEFGHIJKLMNOPQRSTUVWXYZ'`) +* `0`: Numeric characters (`0123456789'`) +* `!`: Special characters (`~!@#$%^&()_+-={}[];\',.`) +* `*`: All characters (all of the above combined) +* `?`: Custom characters (pass a string of custom characters to the options) + +### length + +> the length of the string to generate + +**Examples:** + +* `randomize('A', 5)` will generate a 5-character, uppercase, alphabetical, randomized string, e.g. `KDJWJ`. 
+* `randomize('0', 2)` will generate a 2-digit random number +* `randomize('0', 3)` will generate a 3-digit random number +* `randomize('0', 12)` will generate a 12-digit random number +* `randomize('A0', 16)` will generate a 16-character, alpha-numeric randomized string + +If `length` is left undefined, the length of the pattern in the first parameter will be used. For example: + ++ `randomize('00')` will generate a 2-digit random number +* `randomize('000')` will generate a 3-digit random number +* `randomize('0000')` will generate a 4-digit random number... +* `randomize('AAAAA')` will generate a 5-character, uppercase alphabetical random string... + +These are just examples, [see the tests](./test.js) for more use cases and examples. + +## options + +#### chars + +Type: `String` + +Default: `undefined` + +Define a custom string to be randomized. + +**Example:** + +* `randomize('?', 20, {chars: 'jonschlinkert'})` will generate a 20-character randomized string from the letters contained in `jonschlinkert`. +* `randomize('?', {chars: 'jonschlinkert'})` will generate a 13-character randomized string from the letters contained in `jonschlinkert`. + +## Usage Examples + +* `randomize('A', 4)` (_whitespace insenstive_) would result in randomized 4-digit uppercase letters, like, `ZAKH`, `UJSL`... etc. +* `randomize('AAAA')` is equivelant to `randomize('A', 4)` +* `randomize('AAA0')` and `randomize('AA00')` and `randomize('A0A0')` are equivelant to `randomize('A0', 4)` +* `randomize('aa')`: results in double-digit, randomized, lower-case letters (`abcdefghijklmnopqrstuvwxyz`) +* `randomize('AAA')`: results in triple-digit, randomized, upper-case letters (`ABCDEFGHIJKLMNOPQRSTUVWXYZ`) +* `randomize('0', 6)`: results in six-digit, randomized nubmers (`0123456789`) +* `randomize('!', 5)`: results in single-digit randomized, _valid_ non-letter characters (`~!@#$%^&()_+-={}[];\',.`) +* `randomize('A!a0', 9)`: results in nine-digit, randomized characters (any of the above) + +_The order in which the characters are defined is insignificant._ + +## Related + +* [pad-left](https://www.npmjs.com/package/pad-left): Left pad a string with zeros or a specified string. Fastest implementation. | [homepage](https://github.com/jonschlinkert/pad-left) +* [pad-right](https://www.npmjs.com/package/pad-right): Right pad a string with zeros or a specified string. Fastest implementation. | [homepage](https://github.com/jonschlinkert/pad-right) +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string) + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/randomatic/issues/new). + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 [Jon Schlinkert](https://github.com/jonschlinkert) +Released under the MIT license. + +*** + +_This file was generated by [verb](https://github.com/verbose/verb) on December 10, 2015._ \ No newline at end of file diff --git a/node_modules/randomatic/index.js b/node_modules/randomatic/index.js new file mode 100644 index 0000000..85dbe21 --- /dev/null +++ b/node_modules/randomatic/index.js @@ -0,0 +1,83 @@ +/*! + * randomatic + * + * This was originally inspired by + * Copyright (c) 2014-2015, Jon Schlinkert. 
+ * Licensed under the MIT License (MIT) + */ + +'use strict'; + +var isNumber = require('is-number'); +var typeOf = require('kind-of'); + +/** + * Expose `randomatic` + */ + +module.exports = randomatic; + +/** + * Available mask characters + */ + +var type = { + lower: 'abcdefghijklmnopqrstuvwxyz', + upper: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + number: '0123456789', + special: '~!@#$%^&()_+-={}[];\',.' +}; + +type.all = type.lower + type.upper + type.number; + +/** + * Generate random character sequences of a specified `length`, + * based on the given `pattern`. + * + * @param {String} `pattern` The pattern to use for generating the random string. + * @param {String} `length` The length of the string to generate. + * @param {String} `options` + * @return {String} + * @api public + */ + +function randomatic(pattern, length, options) { + if (typeof pattern === 'undefined') { + throw new Error('randomatic expects a string or number.'); + } + + var custom = false; + if (arguments.length === 1) { + if (typeof pattern === 'string') { + length = pattern.length; + + } else if (isNumber(pattern)) { + options = {}; length = pattern; pattern = '*'; + } + } + + if (typeOf(length) === 'object' && length.hasOwnProperty('chars')) { + options = length; + pattern = options.chars; + length = pattern.length; + custom = true; + } + + var opts = options || {}; + var mask = ''; + var res = ''; + + // Characters to be used + if (pattern.indexOf('?') !== -1) mask += opts.chars; + if (pattern.indexOf('a') !== -1) mask += type.lower; + if (pattern.indexOf('A') !== -1) mask += type.upper; + if (pattern.indexOf('0') !== -1) mask += type.number; + if (pattern.indexOf('!') !== -1) mask += type.special; + if (pattern.indexOf('*') !== -1) mask += type.all; + if (custom) mask += pattern; + + while (length--) { + res += mask.charAt(parseInt(Math.random() * mask.length, 10)); + } + return res; +}; diff --git a/node_modules/randomatic/package.json b/node_modules/randomatic/package.json new file mode 100644 index 0000000..24872b1 --- /dev/null +++ b/node_modules/randomatic/package.json @@ -0,0 +1,54 @@ +{ + "name": "randomatic", + "description": "Generate randomized strings of a specified length, fast. 
Only the length is necessary, but you can optionally generate patterns using any combination of numeric, alpha-numeric, alphabetical, special or custom characters.", + "version": "1.1.5", + "homepage": "https://github.com/jonschlinkert/randomatic", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/randomatic", + "bugs": { + "url": "https://github.com/jonschlinkert/randomatic/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^2.0.2", + "kind-of": "^3.0.2" + }, + "devDependencies": { + "ansi-bold": "^0.1.1", + "benchmarked": "^0.1.4", + "glob": "^5.0.15", + "mocha": "*", + "should": "*" + }, + "keywords": [ + "alpha", + "alpha-numeric", + "alphanumeric", + "characters", + "chars", + "numeric", + "rand", + "random", + "randomize", + "randomized" + ], + "verb": { + "related": { + "list": [ + "repeat-string", + "pad-left", + "pad-right" + ] + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/range-parser/HISTORY.md b/node_modules/range-parser/HISTORY.md new file mode 100644 index 0000000..5e01eef --- /dev/null +++ b/node_modules/range-parser/HISTORY.md @@ -0,0 +1,51 @@ +1.2.0 / 2016-06-01 +================== + + * Add `combine` option to combine overlapping ranges + +1.1.0 / 2016-05-13 +================== + + * Fix incorrectly returning -1 when there is at least one valid range + * perf: remove internal function + +1.0.3 / 2015-10-29 +================== + + * perf: enable strict mode + +1.0.2 / 2014-09-08 +================== + + * Support Node.js 0.6 + +1.0.1 / 2014-09-07 +================== + + * Move repository to jshttp + +1.0.0 / 2013-12-11 +================== + + * Add repository to package.json + * Add MIT license + +0.0.4 / 2012-06-17 +================== + + * Change ret -1 for unsatisfiable and -2 when invalid + +0.0.3 / 2012-06-17 +================== + + * Fix last-byte-pos default to len - 1 + +0.0.2 / 2012-06-14 +================== + + * Add `.type` + +0.0.1 / 2012-06-11 +================== + + * Initial release diff --git a/node_modules/range-parser/LICENSE b/node_modules/range-parser/LICENSE new file mode 100644 index 0000000..3599954 --- /dev/null +++ b/node_modules/range-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015-2016 Douglas Christopher Wilson [ +// { start: 0, end: 10 }, +// { start: 50, end: 60 } +// ] +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/range-parser.svg +[npm-url]: https://npmjs.org/package/range-parser +[node-version-image]: https://img.shields.io/node/v/range-parser.svg +[node-version-url]: https://nodejs.org/endownload +[travis-image]: https://img.shields.io/travis/jshttp/range-parser.svg +[travis-url]: https://travis-ci.org/jshttp/range-parser +[coveralls-image]: https://img.shields.io/coveralls/jshttp/range-parser.svg +[coveralls-url]: https://coveralls.io/r/jshttp/range-parser +[downloads-image]: https://img.shields.io/npm/dm/range-parser.svg +[downloads-url]: https://npmjs.org/package/range-parser diff --git a/node_modules/range-parser/index.js b/node_modules/range-parser/index.js new file mode 100644 index 0000000..83b2eb6 --- /dev/null +++ b/node_modules/range-parser/index.js @@ -0,0 +1,158 @@ +/*! + * range-parser + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015-2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. 
+ * @public + */ + +module.exports = rangeParser + +/** + * Parse "Range" header `str` relative to the given file `size`. + * + * @param {Number} size + * @param {String} str + * @param {Object} [options] + * @return {Array} + * @public + */ + +function rangeParser (size, str, options) { + var index = str.indexOf('=') + + if (index === -1) { + return -2 + } + + // split the range string + var arr = str.slice(index + 1).split(',') + var ranges = [] + + // add ranges type + ranges.type = str.slice(0, index) + + // parse all ranges + for (var i = 0; i < arr.length; i++) { + var range = arr[i].split('-') + var start = parseInt(range[0], 10) + var end = parseInt(range[1], 10) + + // -nnn + if (isNaN(start)) { + start = size - end + end = size - 1 + // nnn- + } else if (isNaN(end)) { + end = size - 1 + } + + // limit last-byte-pos to current length + if (end > size - 1) { + end = size - 1 + } + + // invalid or unsatisifiable + if (isNaN(start) || isNaN(end) || start > end || start < 0) { + continue + } + + // add range + ranges.push({ + start: start, + end: end + }) + } + + if (ranges.length < 1) { + // unsatisifiable + return -1 + } + + return options && options.combine + ? combineRanges(ranges) + : ranges +} + +/** + * Combine overlapping & adjacent ranges. + * @private + */ + +function combineRanges (ranges) { + var ordered = ranges.map(mapWithIndex).sort(sortByRangeStart) + + for (var j = 0, i = 1; i < ordered.length; i++) { + var range = ordered[i] + var current = ordered[j] + + if (range.start > current.end + 1) { + // next range + ordered[++j] = range + } else if (range.end > current.end) { + // extend range + current.end = range.end + current.index = Math.min(current.index, range.index) + } + } + + // trim ordered array + ordered.length = j + 1 + + // generate combined range + var combined = ordered.sort(sortByRangeIndex).map(mapWithoutIndex) + + // copy ranges type + combined.type = ranges.type + + return combined +} + +/** + * Map function to add index value to ranges. + * @private + */ + +function mapWithIndex (range, index) { + return { + start: range.start, + end: range.end, + index: index + } +} + +/** + * Map function to remove index value from ranges. + * @private + */ + +function mapWithoutIndex (range) { + return { + start: range.start, + end: range.end + } +} + +/** + * Sort function to sort ranges by index. + * @private + */ + +function sortByRangeIndex (a, b) { + return a.index - b.index +} + +/** + * Sort function to sort ranges by start position. 
+ * @private + */ + +function sortByRangeStart (a, b) { + return a.start - b.start +} diff --git a/node_modules/range-parser/package.json b/node_modules/range-parser/package.json new file mode 100644 index 0000000..0c28848 --- /dev/null +++ b/node_modules/range-parser/package.json @@ -0,0 +1,40 @@ +{ + "name": "range-parser", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "description": "Range header field string parser", + "version": "1.2.0", + "contributors": [ + "Douglas Christopher Wilson ", + "James Wyatt Cready ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "range", + "parser", + "http" + ], + "repository": "jshttp/range-parser", + "devDependencies": { + "eslint": "2.11.1", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.1.0", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "1.21.5" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --reporter spec", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot" + } +} diff --git a/node_modules/raw-body/HISTORY.md b/node_modules/raw-body/HISTORY.md new file mode 100644 index 0000000..720c860 --- /dev/null +++ b/node_modules/raw-body/HISTORY.md @@ -0,0 +1,209 @@ +2.1.7 / 2016-06-19 +================== + + * deps: bytes@2.4.0 + * perf: remove double-cleanup on happy path + +2.1.6 / 2016-03-07 +================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + +2.1.5 / 2015-11-30 +================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + +2.1.4 / 2015-09-27 +================== + + * Fix masking critical errors from `iconv-lite` + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + +2.1.3 / 2015-09-12 +================== + + * Fix sync callback when attaching data listener causes sync read + - Node.js 0.10 compatibility issue + +2.1.2 / 2015-07-05 +================== + + * Fix error stack traces to skip `makeError` + * deps: iconv-lite@0.4.11 + - Add encoding CESU-8 + +2.1.1 / 2015-06-14 +================== + + * Use `unpipe` module for unpiping requests + +2.1.0 / 2015-05-28 +================== + + * deps: iconv-lite@0.4.10 + - Improved UTF-16 endianness detection + - Leading BOM is now removed when decoding + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + +2.0.2 / 2015-05-21 +================== + + * deps: bytes@2.1.0 + - Slight optimizations + +2.0.1 / 2015-05-10 +================== + + * Fix a false-positive when unpiping in Node.js 0.8 + +2.0.0 / 2015-05-08 +================== + + * Return a promise without callback instead of thunk + * deps: bytes@2.0.1 + - units no longer case sensitive when parsing + +1.3.4 / 2015-04-15 +================== + + * Fix hanging callback if request aborts during read + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + +1.3.3 / 2015-02-08 +================== + + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + +1.3.2 / 2015-01-20 +================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + +1.3.1 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + +1.3.0 / 2014-07-20 +================== + + * Fully unpipe 
the stream on error + - Fixes `Cannot switch to old mode now` error on Node.js 0.10+ + +1.2.3 / 2014-07-20 +================== + + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + +1.2.2 / 2014-06-19 +================== + + * Send invalid encoding error to callback + +1.2.1 / 2014-06-15 +================== + + * deps: iconv-lite@0.4.3 + - Added encodings UTF-16BE and UTF-16 with BOM + +1.2.0 / 2014-06-13 +================== + + * Passing string as `options` interpreted as encoding + * Support all encodings from `iconv-lite` + +1.1.7 / 2014-06-12 +================== + + * use `string_decoder` module from npm + +1.1.6 / 2014-05-27 +================== + + * check encoding for old streams1 + * support node.js < 0.10.6 + +1.1.5 / 2014-05-14 +================== + + * bump bytes + +1.1.4 / 2014-04-19 +================== + + * allow true as an option + * bump bytes + +1.1.3 / 2014-03-02 +================== + + * fix case when length=null + +1.1.2 / 2013-12-01 +================== + + * be less strict on state.encoding check + +1.1.1 / 2013-11-27 +================== + + * add engines + +1.1.0 / 2013-11-27 +================== + + * add err.statusCode and err.type + * allow for encoding option to be true + * pause the stream instead of dumping on error + * throw if the stream's encoding is set + +1.0.1 / 2013-11-19 +================== + + * dont support streams1, throw if dev set encoding + +1.0.0 / 2013-11-17 +================== + + * rename `expected` option to `length` + +0.2.0 / 2013-11-15 +================== + + * republish + +0.1.1 / 2013-11-15 +================== + + * use bytes + +0.1.0 / 2013-11-11 +================== + + * generator support + +0.0.3 / 2013-10-10 +================== + + * update repo + +0.0.2 / 2013-09-14 +================== + + * dump stream on bad headers + * listen to events after defining received and buffers + +0.0.1 / 2013-09-14 +================== + + * Initial release diff --git a/node_modules/raw-body/LICENSE b/node_modules/raw-body/LICENSE new file mode 100644 index 0000000..d695c8f --- /dev/null +++ b/node_modules/raw-body/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2013-2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
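A quick aside on the `range-parser` module added earlier in this diff: its README above is truncated, so here is a minimal, hedged usage sketch based on the behaviour of its `index.js` (the resource size and header strings are illustrative only, not taken from the package docs).

```js
// Minimal sketch of range-parser usage; values are illustrative.
var parseRange = require('range-parser')

// Two satisfiable ranges within a 1000-byte resource.
var ranges = parseRange(1000, 'bytes=0-10,50-60')
// ranges.type => 'bytes'
// ranges      => [ { start: 0, end: 10 }, { start: 50, end: 60 } ]

// Overlapping or adjacent ranges can be merged with the `combine` option.
parseRange(1000, 'bytes=0-10,5-20', { combine: true })
// => [ { start: 0, end: 20 } ]

// Error signalling: -2 for a malformed header, -1 when nothing is satisfiable.
parseRange(1000, 'malformed')     // -2
parseRange(1000, 'bytes=500-20')  // -1
```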
diff --git a/node_modules/raw-body/README.md b/node_modules/raw-body/README.md new file mode 100644 index 0000000..5799c82 --- /dev/null +++ b/node_modules/raw-body/README.md @@ -0,0 +1,126 @@ +# raw-body + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +Gets the entire buffer of a stream either as a `Buffer` or a string. +Validates the stream's length against an expected length and maximum limit. +Ideal for parsing request bodies. + +## API + +```js +var getRawBody = require('raw-body') +``` + +### getRawBody(stream, [options], [callback]) + +**Returns a promise if no callback specified and global `Promise` exists.** + +Options: + +- `length` - The length of the stream. + If the contents of the stream do not add up to this length, + an `400` error code is returned. +- `limit` - The byte limit of the body. + If the body ends up being larger than this limit, + a `413` error code is returned. +- `encoding` - The requested encoding. + By default, a `Buffer` instance will be returned. + Most likely, you want `utf8`. + You can use any type of encoding supported by [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme). + +You can also pass a string in place of options to just specify the encoding. + +`callback(err, res)`: + +- `err` - the following attributes will be defined if applicable: + + - `limit` - the limit in bytes + - `length` and `expected` - the expected length of the stream + - `received` - the received bytes + - `encoding` - the invalid encoding + - `status` and `statusCode` - the corresponding status code for the error + - `type` - either `entity.too.large`, `request.aborted`, `request.size.invalid`, `stream.encoding.set`, or `encoding.unsupported` + +- `res` - the result, either as a `String` if an encoding was set or a `Buffer` otherwise. + +If an error occurs, the stream will be paused, everything unpiped, +and you are responsible for correctly disposing the stream. +For HTTP requests, no handling is required if you send a response. +For streams that use file descriptors, you should `stream.destroy()` or `stream.close()` to prevent leaks. + +## Examples + +### Simple Express example + +```js +var getRawBody = require('raw-body') +var typer = require('media-typer') + +app.use(function (req, res, next) { + getRawBody(req, { + length: req.headers['content-length'], + limit: '1mb', + encoding: typer.parse(req.headers['content-type']).parameters.charset + }, function (err, string) { + if (err) return next(err) + req.text = string + next() + }) +}) +``` + +### Simple Koa example + +```js +app.use(function* (next) { + var string = yield getRawBody(this.req, { + length: this.length, + limit: '1mb', + encoding: this.charset + }) +}) +``` + +### Using as a promise + +To use this library as a promise, simply omit the `callback` and a promise is +returned, provided that a global `Promise` is defined. 
+ +```js +var getRawBody = require('raw-body') +var http = require('http') + +var server = http.createServer(function (req, res) { + getRawBody(req) + .then(function (buf) { + res.statusCode = 200 + res.end(buf.length + ' bytes submitted') + }) + .catch(function (err) { + res.statusCode = 500 + res.end(err.message) + }) +}) + +server.listen(3000) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/raw-body.svg +[npm-url]: https://npmjs.org/package/raw-body +[node-version-image]: https://img.shields.io/node/v/raw-body.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/stream-utils/raw-body/master.svg +[travis-url]: https://travis-ci.org/stream-utils/raw-body +[coveralls-image]: https://img.shields.io/coveralls/stream-utils/raw-body/master.svg +[coveralls-url]: https://coveralls.io/r/stream-utils/raw-body?branch=master +[downloads-image]: https://img.shields.io/npm/dm/raw-body.svg +[downloads-url]: https://npmjs.org/package/raw-body diff --git a/node_modules/raw-body/index.js b/node_modules/raw-body/index.js new file mode 100644 index 0000000..57b9c11 --- /dev/null +++ b/node_modules/raw-body/index.js @@ -0,0 +1,320 @@ +/*! + * raw-body + * Copyright(c) 2013-2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var iconv = require('iconv-lite') +var unpipe = require('unpipe') + +/** + * Module exports. + * @public + */ + +module.exports = getRawBody + +/** + * Module variables. + * @private + */ + +var iconvEncodingMessageRegExp = /^Encoding not recognized: / + +/** + * Get the decoder for a given encoding. + * + * @param {string} encoding + * @private + */ + +function getDecoder (encoding) { + if (!encoding) return null + + try { + return iconv.getDecoder(encoding) + } catch (e) { + // error getting decoder + if (!iconvEncodingMessageRegExp.test(e.message)) throw e + + // the encoding was not found + throw createError(415, 'specified encoding unsupported', 'encoding.unsupported', { + encoding: encoding + }) + } +} + +/** + * Get the raw body of a stream (typically HTTP). + * + * @param {object} stream + * @param {object|string|function} [options] + * @param {function} [callback] + * @public + */ + +function getRawBody (stream, options, callback) { + var done = callback + var opts = options || {} + + if (options === true || typeof options === 'string') { + // short cut for encoding + opts = { + encoding: options + } + } + + if (typeof options === 'function') { + done = options + opts = {} + } + + // validate callback is a function, if provided + if (done !== undefined && typeof done !== 'function') { + throw new TypeError('argument callback must be a function') + } + + // require the callback without promises + if (!done && !global.Promise) { + throw new TypeError('argument callback is required') + } + + // get encoding + var encoding = opts.encoding !== true + ? opts.encoding + : 'utf-8' + + // convert the limit to an integer + var limit = bytes.parse(opts.limit) + + // convert the expected length to an integer + var length = opts.length != null && !isNaN(opts.length) + ? 
parseInt(opts.length, 10) + : null + + if (done) { + // classic callback style + return readStream(stream, encoding, length, limit, done) + } + + return new Promise(function executor (resolve, reject) { + readStream(stream, encoding, length, limit, function onRead (err, buf) { + if (err) return reject(err) + resolve(buf) + }) + }) +} + +/** + * Halt a stream. + * + * @param {Object} stream + * @private + */ + +function halt (stream) { + // unpipe everything from the stream + unpipe(stream) + + // pause stream + if (typeof stream.pause === 'function') { + stream.pause() + } +} + +/** + * Make a serializable error object. + * + * To create serializable errors you must re-set message so + * that it is enumerable and you must re configure the type + * property so that is writable and enumerable. + * + * @param {number} status + * @param {string} message + * @param {string} type + * @param {object} props + * @private + */ + +function createError (status, message, type, props) { + var error = new Error() + + // capture stack trace + Error.captureStackTrace(error, createError) + + // set free-form properties + for (var prop in props) { + error[prop] = props[prop] + } + + // set message + error.message = message + + // set status + error.status = status + error.statusCode = status + + // set type + Object.defineProperty(error, 'type', { + value: type, + enumerable: true, + writable: true, + configurable: true + }) + + return error +} + +/** + * Read the data from the stream. + * + * @param {object} stream + * @param {string} encoding + * @param {number} length + * @param {number} limit + * @param {function} callback + * @public + */ + +function readStream (stream, encoding, length, limit, callback) { + var complete = false + var sync = true + + // check the length and limit options. + // note: we intentionally leave the stream paused, + // so users should handle the stream themselves. + if (limit !== null && length !== null && length > limit) { + return done(createError(413, 'request entity too large', 'entity.too.large', { + expected: length, + length: length, + limit: limit + })) + } + + // streams1: assert request encoding is buffer. + // streams2+: assert the stream encoding is buffer. + // stream._decoder: streams1 + // state.encoding: streams2 + // state.decoder: streams2, specifically < 0.10.6 + var state = stream._readableState + if (stream._decoder || (state && (state.encoding || state.decoder))) { + // developer error + return done(createError(500, 'stream encoding should not be set', 'stream.encoding.set')) + } + + var received = 0 + var decoder + + try { + decoder = getDecoder(encoding) + } catch (err) { + return done(err) + } + + var buffer = decoder + ? 
'' + : [] + + // attach listeners + stream.on('aborted', onAborted) + stream.on('close', cleanup) + stream.on('data', onData) + stream.on('end', onEnd) + stream.on('error', onEnd) + + // mark sync section complete + sync = false + + function done () { + var args = new Array(arguments.length) + + // copy arguments + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + + // mark complete + complete = true + + if (sync) { + process.nextTick(invokeCallback) + } else { + invokeCallback() + } + + function invokeCallback () { + cleanup() + + if (args[0]) { + // halt the stream on error + halt(stream) + } + + callback.apply(null, args) + } + } + + function onAborted () { + if (complete) return + + done(createError(400, 'request aborted', 'request.aborted', { + code: 'ECONNABORTED', + expected: length, + length: length, + received: received + })) + } + + function onData (chunk) { + if (complete) return + + received += chunk.length + decoder + ? buffer += decoder.write(chunk) + : buffer.push(chunk) + + if (limit !== null && received > limit) { + done(createError(413, 'request entity too large', 'entity.too.large', { + limit: limit, + received: received + })) + } + } + + function onEnd (err) { + if (complete) return + if (err) return done(err) + + if (length !== null && received !== length) { + done(createError(400, 'request size did not match content length', 'request.size.invalid', { + expected: length, + length: length, + received: received + })) + } else { + var string = decoder + ? buffer + (decoder.end() || '') + : Buffer.concat(buffer) + done(null, string) + } + } + + function cleanup () { + buffer = null + + stream.removeListener('aborted', onAborted) + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onEnd) + stream.removeListener('close', cleanup) + } +} diff --git a/node_modules/raw-body/package.json b/node_modules/raw-body/package.json new file mode 100644 index 0000000..dbf7a6c --- /dev/null +++ b/node_modules/raw-body/package.json @@ -0,0 +1,43 @@ +{ + "name": "raw-body", + "description": "Get and validate the raw body of a readable stream.", + "version": "2.1.7", + "author": "Jonathan Ong (http://jongleberry.com)", + "contributors": [ + "Douglas Christopher Wilson ", + "Raynos " + ], + "license": "MIT", + "repository": "stream-utils/raw-body", + "dependencies": { + "bytes": "2.4.0", + "iconv-lite": "0.4.13", + "unpipe": "1.0.0" + }, + "devDependencies": { + "bluebird": "3.4.1", + "eslint": "2.13.0", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.3.2", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "2.5.3", + "readable-stream": "2.1.2", + "through2": "2.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --trace-deprecation --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --trace-deprecation --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --trace-deprecation --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/rc/.npmignore b/node_modules/rc/.npmignore new file mode 100644 index 0000000..13abef4 --- /dev/null +++ b/node_modules/rc/.npmignore @@ -0,0 +1,3 @@ +node_modules +node_modules/* +npm_debug.log diff --git a/node_modules/rc/LICENSE.APACHE2 b/node_modules/rc/LICENSE.APACHE2 new file 
mode 100644 index 0000000..6366c04 --- /dev/null +++ b/node_modules/rc/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/rc/LICENSE.BSD b/node_modules/rc/LICENSE.BSD new file mode 100644 index 0000000..96bb796 --- /dev/null +++ b/node_modules/rc/LICENSE.BSD @@ -0,0 +1,26 @@ +Copyright (c) 2013, Dominic Tarr +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/node_modules/rc/LICENSE.MIT b/node_modules/rc/LICENSE.MIT new file mode 100644 index 0000000..6eafbd7 --- /dev/null +++ b/node_modules/rc/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/rc/README.md b/node_modules/rc/README.md new file mode 100644 index 0000000..65a5f06 --- /dev/null +++ b/node_modules/rc/README.md @@ -0,0 +1,149 @@ +# rc + +The non-configurable configuration loader for lazy people. + +## Usage + +The only option is to pass rc the name of your app, and your default configuration. + +```javascript +var conf = require('rc')(appname, { + //defaults go here. + port: 2468, + + //defaults which are objects will be merged, not replaced + views: { + engine: 'jade' + } +}); +``` + +`rc` will return your configuration options merged with the defaults you specify. +If you pass in a predefined defaults object, it will be mutated: + +```javascript +var conf = {}; +require('rc')(appname, conf); +``` + +If `rc` finds any config files for your app, the returned config object will have +a `configs` array containing their paths: + +```javascript +var appCfg = require('rc')(appname, conf); +appCfg.configs[0] // /etc/appnamerc +appCfg.configs[1] // /home/dominictarr/.config/appname +appCfg.config // same as appCfg.configs[appCfg.configs.length - 1] +``` + +## Standards + +Given your application name (`appname`), rc will look in all the obvious places for configuration. + + * command line arguments (parsed by minimist) + * environment variables prefixed with `${appname}_` + * or use "\_\_" to indicate nested properties
_(e.g. `appname_foo__bar__baz` => `foo.bar.baz`)_ + * if you passed an option `--config file` then from that file + * a local `.${appname}rc` or the first found looking in `./ ../ ../../ ../../../` etc. + * `$HOME/.${appname}rc` + * `$HOME/.${appname}/config` + * `$HOME/.config/${appname}` + * `$HOME/.config/${appname}/config` + * `/etc/${appname}rc` + * `/etc/${appname}/config` + * the defaults object you passed in. + +All configuration sources that were found will be flattened into one object, +so that sources **earlier** in this list override later ones. + + +## Configuration File Formats + +Configuration files (e.g. `.appnamerc`) may be in either [json](http://json.org/example) or [ini](http://en.wikipedia.org/wiki/INI_file) format. The example configurations below are equivalent: + + +#### Formatted as `ini` + +``` +; You can include comments in `ini` format if you want. + +dependsOn=0.10.0 + + +; `rc` has built-in support for ini sections, see? + +[commands] + www = ./commands/www + console = ./commands/repl + + +; You can even do nested sections + +[generators.options] + engine = ejs + +[generators.modules] + new = generate-new + engine = generate-backend + +``` + +#### Formatted as `json` + +```javascript +{ + // You can even comment your JSON, if you want + "dependsOn": "0.10.0", + "commands": { + "www": "./commands/www", + "console": "./commands/repl" + }, + "generators": { + "options": { + "engine": "ejs" + }, + "modules": { + "new": "generate-new", + "backend": "generate-backend" + } + } +} +``` + +Comments are stripped from JSON config via [strip-json-comments](https://github.com/sindresorhus/strip-json-comments). + +> Since ini, and env variables do not have a standard for types, your application needs be prepared for strings. + + + +## Advanced Usage + +#### Pass in your own `argv` + +You may pass in your own `argv` as the third argument to `rc`. This is in case you want to [use your own command-line opts parser](https://github.com/dominictarr/rc/pull/12). + +```javascript +require('rc')(appname, defaults, customArgvParser); +``` + +## Pass in your own parser + +If you have a special need to use a non-standard parser, +you can do so by passing in the parser as the 4th argument. +(leave the 3rd as null to get the default args parser) + +```javascript +require('rc')(appname, defaults, null, parser); +``` + +This may also be used to force a more strict format, +such as strict, valid JSON only. + +## Note on Performance + +`rc` is running `fs.statSync`-- so make sure you don't use it in a hot code path (e.g. a request handler) + + +## License + +Multi-licensed under the two-clause BSD License, MIT License, or Apache License, version 2.0 diff --git a/node_modules/rc/browser.js b/node_modules/rc/browser.js new file mode 100644 index 0000000..8c230c5 --- /dev/null +++ b/node_modules/rc/browser.js @@ -0,0 +1,7 @@ + +// when this is loaded into the browser, +// just use the defaults... + +module.exports = function (name, defaults) { + return defaults +} diff --git a/node_modules/rc/index.js b/node_modules/rc/index.js new file mode 100755 index 0000000..6f8f113 --- /dev/null +++ b/node_modules/rc/index.js @@ -0,0 +1,60 @@ +#! /usr/bin/env node +var cc = require('./lib/utils') +var join = require('path').join +var deepExtend = require('deep-extend') +var etc = '/etc' +var win = process.platform === "win32" +var home = win + ? 
process.env.USERPROFILE + : process.env.HOME + +module.exports = function (name, defaults, argv, parse) { + if('string' !== typeof name) + throw new Error('rc(name): name *must* be string') + if(!argv) + argv = require('minimist')(process.argv.slice(2)) + defaults = ( + 'string' === typeof defaults + ? cc.json(defaults) : defaults + ) || {} + + parse = parse || cc.parse + + var env = cc.env(name + '_') + + var configs = [defaults] + var configFiles = [] + function addConfigFile (file) { + if (configFiles.indexOf(file) >= 0) return + var fileConfig = cc.file(file) + if (fileConfig) { + configs.push(parse(fileConfig)) + configFiles.push(file) + } + } + + // which files do we look at? + if (!win) + [join(etc, name, 'config'), + join(etc, name + 'rc')].forEach(addConfigFile) + if (home) + [join(home, '.config', name, 'config'), + join(home, '.config', name), + join(home, '.' + name, 'config'), + join(home, '.' + name + 'rc')].forEach(addConfigFile) + addConfigFile(cc.find('.'+name+'rc')) + if (env.config) addConfigFile(env.config) + if (argv.config) addConfigFile(argv.config) + + return deepExtend.apply(null, configs.concat([ + env, + argv, + configFiles.length ? {configs: configFiles, config: configFiles[configFiles.length - 1]} : undefined, + ])) +} + +if(!module.parent) { + console.log( + JSON.stringify(module.exports(process.argv[2]), false, 2) + ) +} diff --git a/node_modules/rc/lib/utils.js b/node_modules/rc/lib/utils.js new file mode 100644 index 0000000..ae6dec0 --- /dev/null +++ b/node_modules/rc/lib/utils.js @@ -0,0 +1,103 @@ +'use strict'; +var fs = require('fs') +var ini = require('ini') +var path = require('path') +var stripJsonComments = require('strip-json-comments') + +var parse = exports.parse = function (content) { + + //if it ends in .json or starts with { then it must be json. + //must be done this way, because ini accepts everything. + //can't just try and parse it and let it throw if it's not ini. + //everything is ini. even json with a syntax error. + + if(/^\s*{/.test(content)) + return JSON.parse(stripJsonComments(content)) + return ini.parse(content) + +} + +var file = exports.file = function () { + var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) + + //path.join breaks if it's a not a string, so just skip this. + for(var i in args) + if('string' !== typeof args[i]) + return + + var file = path.join.apply(null, args) + var content + try { + return fs.readFileSync(file,'utf-8') + } catch (err) { + return + } +} + +var json = exports.json = function () { + var content = file.apply(null, arguments) + return content ? 
parse(content) : null +} + +var env = exports.env = function (prefix, env) { + env = env || process.env + var obj = {} + var l = prefix.length + for(var k in env) { + if((k.indexOf(prefix)) === 0) { + + var keypath = k.substring(l).split('__') + + // Trim empty strings from keypath array + var _emptyStringIndex + while ((_emptyStringIndex=keypath.indexOf('')) > -1) { + keypath.splice(_emptyStringIndex, 1) + } + + var cursor = obj + keypath.forEach(function _buildSubObj(_subkey,i){ + + // (check for _subkey first so we ignore empty strings) + // (check for cursor to avoid assignment to primitive objects) + if (!_subkey || typeof cursor !== 'object') + return + + // If this is the last key, just stuff the value in there + // Assigns actual value from env variable to final key + // (unless it's just an empty string- in that case use the last valid key) + if (i === keypath.length-1) + cursor[_subkey] = env[k] + + + // Build sub-object if nothing already exists at the keypath + if (cursor[_subkey] === undefined) + cursor[_subkey] = {} + + // Increment cursor used to track the object at the current depth + cursor = cursor[_subkey] + + }) + + } + + } + + return obj +} + +var find = exports.find = function () { + var rel = path.join.apply(null, [].slice.call(arguments)) + + function find(start, rel) { + var file = path.join(start, rel) + try { + fs.statSync(file) + return file + } catch (err) { + if(path.dirname(start) !== start) // root + return find(path.dirname(start), rel) + } + } + return find(process.cwd(), rel) +} + diff --git a/node_modules/rc/node_modules/.bin/strip-json-comments b/node_modules/rc/node_modules/.bin/strip-json-comments new file mode 120000 index 0000000..0439d06 --- /dev/null +++ b/node_modules/rc/node_modules/.bin/strip-json-comments @@ -0,0 +1 @@ +../../../strip-json-comments/cli.js \ No newline at end of file diff --git a/node_modules/rc/node_modules/minimist/.travis.yml b/node_modules/rc/node_modules/minimist/.travis.yml new file mode 100644 index 0000000..74c57bf --- /dev/null +++ b/node_modules/rc/node_modules/minimist/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.12" + - "iojs" +before_install: + - npm install -g npm@~1.4.6 diff --git a/node_modules/rc/node_modules/minimist/LICENSE b/node_modules/rc/node_modules/minimist/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
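As a brief illustration of the environment-variable handling implemented in `rc/lib/utils.js` above (the `${appname}_` prefix and the `__` separator for nested keys), here is a minimal sketch; the app name `myapp` and the variable names are hypothetical, chosen only to show the mapping.

```javascript
// Hedged sketch: how rc maps prefixed environment variables onto nested config.
// "myapp" and the variable names below are made up for illustration.
process.env.myapp_port = '8080'
process.env.myapp_views__engine = 'pug'

var conf = require('rc')('myapp', {
  port: 2468,
  views: { engine: 'jade' }
})

console.log(conf.port)         // '8080' -- env values always arrive as strings
console.log(conf.views.engine) // 'pug'  -- double underscore builds nested keys
```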
diff --git a/node_modules/rc/node_modules/minimist/example/parse.js b/node_modules/rc/node_modules/minimist/example/parse.js new file mode 100644 index 0000000..abff3e8 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/example/parse.js @@ -0,0 +1,2 @@ +var argv = require('../')(process.argv.slice(2)); +console.dir(argv); diff --git a/node_modules/rc/node_modules/minimist/index.js b/node_modules/rc/node_modules/minimist/index.js new file mode 100644 index 0000000..6a0559d --- /dev/null +++ b/node_modules/rc/node_modules/minimist/index.js @@ -0,0 +1,236 @@ +module.exports = function (args, opts) { + if (!opts) opts = {}; + + var flags = { bools : {}, strings : {}, unknownFn: null }; + + if (typeof opts['unknown'] === 'function') { + flags.unknownFn = opts['unknown']; + } + + if (typeof opts['boolean'] === 'boolean' && opts['boolean']) { + flags.allBools = true; + } else { + [].concat(opts['boolean']).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + flags.strings[aliases[key]] = true; + } + }); + + var defaults = opts['default'] || {}; + + var argv = { _ : [] }; + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--')+1); + args = args.slice(0, args.indexOf('--')); + } + + function argDefined(key, arg) { + return (flags.allBools && /^--[^=]+$/.test(arg)) || + flags.strings[key] || flags.bools[key] || aliases[key]; + } + + function setArg (key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) return; + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) : val + ; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + function setKey (obj, keys, value) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + if (o[key] === undefined) o[key] = {}; + o = o[key]; + }); + + var key = keys[keys.length - 1]; + if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') { + o[key] = value; + } + else if (Array.isArray(o[key])) { + o[key].push(value); + } + else { + o[key] = [ o[key], value ]; + } + } + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + + if (/^--.+=/.test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + var key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } + else if (/^--no-.+/.test(arg)) { + var key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } + else if (/^--.+/.test(arg)) { + var key = arg.match(/^--(.+)/)[1]; + var next = args[i + 1]; + if (next !== undefined && !/^-/.test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? 
!aliasIsBoolean(key) : true)) { + setArg(key, next, arg); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + else if (/^-[^-]+/.test(arg)) { + var letters = arg.slice(1,-1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + var next = arg.slice(j+2); + + if (next === '-') { + setArg(letters[j], next, arg) + continue; + } + + if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) { + setArg(letters[j], next.split('=')[1], arg); + broken = true; + break; + } + + if (/[A-Za-z]/.test(letters[j]) + && /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j+1] && letters[j+1].match(/\W/)) { + setArg(letters[j], arg.slice(j+2), arg); + broken = true; + break; + } + else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + var key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true)) { + setArg(key, args[i+1], arg); + i++; + } + else if (args[i+1] && /true|false/.test(args[i+1])) { + setArg(key, args[i+1] === 'true', arg); + i++; + } + else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } + else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push( + flags.strings['_'] || !isNumber(arg) ? arg : Number(arg) + ); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) { + setKey(argv, key.split('.'), defaults[key]); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[key]); + }); + } + }); + + if (opts['--']) { + argv['--'] = new Array(); + notFlags.forEach(function(key) { + argv['--'].push(key); + }); + } + else { + notFlags.forEach(function(key) { + argv._.push(key); + }); + } + + return argv; +}; + +function hasKey (obj, keys) { + var o = obj; + keys.slice(0,-1).forEach(function (key) { + o = (o[key] || {}); + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber (x) { + if (typeof x === 'number') return true; + if (/^0x[0-9a-f]+$/i.test(x)) return true; + return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + diff --git a/node_modules/rc/node_modules/minimist/package.json b/node_modules/rc/node_modules/minimist/package.json new file mode 100644 index 0000000..326480c --- /dev/null +++ b/node_modules/rc/node_modules/minimist/package.json @@ -0,0 +1,45 @@ +{ + "name": "minimist", + "version": "1.2.0", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "covert": "^1.0.0", + "tap": "~0.4.0", + "tape": "^3.5.0" + }, + "scripts": { + "test": "tap test/*.js", + "coverage": "covert test/*.js" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/minimist.git" + }, + "homepage": "https://github.com/substack/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git 
a/node_modules/rc/node_modules/minimist/readme.markdown b/node_modules/rc/node_modules/minimist/readme.markdown new file mode 100644 index 0000000..30a74cf --- /dev/null +++ b/node_modules/rc/node_modules/minimist/readme.markdown @@ -0,0 +1,91 @@ +# minimist + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist) + +[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist) + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.dir(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ _: [ 'foo', 'bar', 'baz' ], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' } +``` + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. + +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. if `true` will treat all double hyphenated arguments without equal signs +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. + +``` +> require('./')('one two three -- four five --six'.split(' '), { '--': true }) +{ _: [ 'one', 'two', 'three' ], + '--': [ 'four', 'five', '--six' ] } +``` + +Note that with `opts['--']` set, parsing for arguments still stops after the +`--`. 
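To tie the options above together, here is a small, hedged example (the flag names are invented for illustration) showing `string`, `boolean`, `alias`, and `default` interacting in one invocation.

``` js
// Illustrative only: the flag names are made up for this sketch.
var parseArgs = require('minimist')

var argv = parseArgs(['-p', '8080', '--host', 'localhost', '--debug', 'extra'], {
    string: ['host'],        // keep --host as a string, never a number
    boolean: ['debug'],      // --debug is a flag; it never consumes 'extra'
    alias: { p: 'port' },    // -p and --port populate both keys
    default: { retries: 3 }  // applied because no --retries was passed
});

console.dir(argv);
// { _: [ 'extra' ], debug: true, p: 8080, port: 8080,
//   host: 'localhost', retries: 3 }
```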
+ +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT diff --git a/node_modules/rc/node_modules/minimist/test/all_bool.js b/node_modules/rc/node_modules/minimist/test/all_bool.js new file mode 100644 index 0000000..ac83548 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/all_bool.js @@ -0,0 +1,32 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'] + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/bool.js b/node_modules/rc/node_modules/minimist/test/bool.js new file mode 100644 index 0000000..14b0717 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/bool.js @@ -0,0 +1,166 @@ +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false } + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], { + boolean: ['x','y','z'] + }); + + t.deepEqual(argv, { + x : true, + y : false, + z : true, + _ : [ 'one', 'two', 'three' ] + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + herp: { alias: 'h', boolean: true } + }; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' } + }); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var opts = { + alias: { 'h': 'herp' }, + boolean: 'herp' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = [ '-h', 'derp' ]; + var regular = [ '--herp', 'derp' ]; + var alt = [ '--harp', 'derp' ]; + var opts = { + alias: { 'h': ['herp', 'harp'] }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + herp: true, + h: true, + '_': [ 'derp' ] + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + 
t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = [ '-h', 'true' ]; + var regular = [ '--herp', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h' + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + '_': [ ] + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function(t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool' + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true + }, + boolean: ['boool'] + }); + + t.same(parsed.boool, false); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/dash.js b/node_modules/rc/node_modules/minimist/test/dash.js new file mode 100644 index 0000000..5a4fa5b --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/dash.js @@ -0,0 +1,31 @@ +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(5); + t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] }); + t.deepEqual(parse([ '-' ]), { _: [ '-' ] }); + t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] }); + t.deepEqual( + parse([ '-b', '-' ], { boolean: 'b' }), + { b: true, _: [ '-' ] } + ); + t.deepEqual( + parse([ '-s', '-' ], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(3); + t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); + t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] }); +}); + +test('move arguments after the -- into their own `--` array', function(t) { + t.plan(1); + t.deepEqual( + parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }), + { name: 'John', _: [ 'before' ], '--': [ 'after' ] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/default_bool.js b/node_modules/rc/node_modules/minimist/test/default_bool.js new file mode 100644 index 0000000..780a311 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/default_bool.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true } + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false } + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, null); + var argv = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null } + }); + t.equal(argv.maybe, true); + t.end(); + +}) diff --git a/node_modules/rc/node_modules/minimist/test/dotted.js 
b/node_modules/rc/node_modules/minimist/test/dotted.js new file mode 100644 index 0000000..d8b3e85 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/dotted.js @@ -0,0 +1,22 @@ +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}}); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', {default: {'a.b': 11}}); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/kv_short.js b/node_modules/rc/node_modules/minimist/test/kv_short.js new file mode 100644 index 0000000..f813b30 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/kv_short.js @@ -0,0 +1,16 @@ +var parse = require('../'); +var test = require('tape'); + +test('short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-b=123' ]); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v' , function (t) { + t.plan(1); + + var argv = parse([ '-a=whatever', '-b=robots' ]); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/long.js b/node_modules/rc/node_modules/minimist/test/long.js new file mode 100644 index 0000000..5d3a1e0 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/long.js @@ -0,0 +1,31 @@ +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse([ '--bool' ]), + { bool : true, _ : [] }, + 'long boolean' + ); + t.deepEqual( + parse([ '--pow', 'xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture sp' + ); + t.deepEqual( + parse([ '--pow=xixxle' ]), + { pow : 'xixxle', _ : [] }, + 'long capture eq' + ); + t.deepEqual( + parse([ '--host', 'localhost', '--port', '555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures sp' + ); + t.deepEqual( + parse([ '--host=localhost', '--port=555' ]), + { host : 'localhost', port : 555, _ : [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/num.js b/node_modules/rc/node_modules/minimist/test/num.js new file mode 100644 index 0000000..2cc77f4 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/num.js @@ -0,0 +1,36 @@ +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789' + ]); + t.deepEqual(argv, { + x : 1234, + y : 5.67, + z : 1e7, + w : '10f', + hex : 0xdeadbeef, + _ : [ 789 ] + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse([ '-x', 1234, 789 ]); + t.deepEqual(argv, { x : 1234, _ : [ 789 ] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/parse.js b/node_modules/rc/node_modules/minimist/test/parse.js new file mode 100644 index 0000000..7b4a2a1 --- /dev/null +++ 
b/node_modules/rc/node_modules/minimist/test/parse.js @@ -0,0 +1,197 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse([ '--no-moo' ]), + { moo : false, _ : [] }, + 'no' + ); + t.deepEqual( + parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]), + { v : ['a','b','c'], _ : [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek' + ]), + { + c : true, + a : true, + t : true, + s : 'woo', + h : 'awesome', + b : true, + bool : true, + key : 'value', + multi : [ 'quux', 'baz' ], + meep : false, + name : 'meowmers', + _ : [ 'bare', '--not-a-flag', 'eek' ] + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse([ '-t', 'moo' ], { boolean: 't' }); + t.deepEqual(argv, { t : true, _ : [ 'moo' ] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: [ 't', 'verbose' ], + default: { verbose: true } + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'] + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params' , function (t) { + var args = parse([ '-s', "X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse([ "--s=X\nX" ]) + t.deepEqual(args, { _ : [], s : "X\nX" }); + t.end(); +}); + +test('strings' , function (t) { + var s = parse([ '-s', '0001234' ], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse([ '-x', '56' ], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([ ' ', ' ' ], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function(t) { + var s = parse([ '-s' ], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse([ '--str' ], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse([ '-art' ], { + string: [ 'a', 't' ] + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + + +test('string and alias', function(t) { + var x = parse([ '--str', '000123' ], { + string: 's', + alias: { s: 'str' } + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse([ '-s', '000123' ], { + string: 'str', + alias: { str: 's' } + }); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse([ '-I/foo/bar/baz' ]), + { I : '/foo/bar/baz', _ : [] } + ); + t.same( + parse([ '-xyz/foo/bar/baz' ]), + { x : true, y : true, z : '/foo/bar/baz', _ : [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: 'zoom' } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); 
+}); + +test('multiAlias', function (t) { + var argv = parse([ '-f', '11', '--zoom', '55' ], { + alias: { z: [ 'zm', 'zoom' ] } + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop' + ]); + + t.same(argv.foo, { + bar : 3, + baz : 4, + quux : { + quibble : 5, + o_O : true + } + }); + t.same(argv.beep, { boop : true }); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/parse_modified.js b/node_modules/rc/node_modules/minimist/test/parse_modified.js new file mode 100644 index 0000000..ab620dc --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,9 @@ +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions' , function (t) { + t.plan(1); + + var argv = parse([ '-b', '123' ], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/rc/node_modules/minimist/test/short.js b/node_modules/rc/node_modules/minimist/test/short.js new file mode 100644 index 0000000..d513a1c --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/short.js @@ -0,0 +1,67 @@ +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] }); + t.deepEqual( + parse([ '-123', '456' ]), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse([ '-b' ]), + { b : true, _ : [] }, + 'short boolean' + ); + t.deepEqual( + parse([ 'foo', 'bar', 'baz' ]), + { _ : [ 'foo', 'bar', 'baz' ] }, + 'bare' + ); + t.deepEqual( + parse([ '-cats' ]), + { c : true, a : true, t : true, s : true, _ : [] }, + 'group' + ); + t.deepEqual( + parse([ '-cats', 'meow' ]), + { c : true, a : true, t : true, s : 'meow', _ : [] }, + 'short group next' + ); + t.deepEqual( + parse([ '-h', 'localhost' ]), + { h : 'localhost', _ : [] }, + 'short capture' + ); + t.deepEqual( + parse([ '-h', 'localhost', '-p', '555' ]), + { h : 'localhost', p : 555, _ : [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]), + { + f : true, p : 555, h : 'localhost', + _ : [ 'script.js' ] + } + ); + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/stop_early.js b/node_modules/rc/node_modules/minimist/test/stop_early.js new file mode 100644 index 0000000..bdf9fbc --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/stop_early.js @@ -0,0 +1,15 @@ +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'] + }); + + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/unknown.js b/node_modules/rc/node_modules/minimist/test/unknown.js new file mode 100644 index 0000000..462a36b --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/unknown.js @@ -0,0 +1,102 @@ +var parse = 
require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'true', '--derp', 'true' ]; + var regular = [ '--herp', 'true', '-d', 'true' ]; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [] + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello', '--derp', 'goodbye' ]; + var regular = [ '--herp', 'hello', '-d', 'moon' ]; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '-h', 'hello' ]; + var regular = [ '--herp', 'hello' ]; + var opts = { + default: { 'h': 'bar' }, + alias: { 'h': 'herp' }, + unknown: unknownFn + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = [ '--bad', '--', 'good', 'arg' ]; + var opts = { + '--': true, + unknown: unknownFn + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + '_': [] + }) + t.end(); +}); diff --git a/node_modules/rc/node_modules/minimist/test/whitespace.js b/node_modules/rc/node_modules/minimist/test/whitespace.js new file mode 100644 index 0000000..8a52a58 --- /dev/null +++ b/node_modules/rc/node_modules/minimist/test/whitespace.js @@ -0,0 +1,8 @@ +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace' , function (t) { + t.plan(1); + var x = parse([ '-x', '\t' ]).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/rc/package.json b/node_modules/rc/package.json new file mode 100644 index 0000000..38991a7 --- /dev/null +++ b/node_modules/rc/package.json @@ -0,0 +1,29 @@ +{ + "name": "rc", + "version": "1.1.6", + "description": "hardwired configuration loader", + "main": "index.js", + "browserify": "browser.js", + "scripts": { + "test": "set -e; node test/test.js; node test/ini.js; node test/nested-env-vars.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/dominictarr/rc.git" + }, + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "keywords": [ + "config", + "rc", + "unix", + "defaults" + ], + "bin": "./index.js", + "author": "Dominic Tarr (dominictarr.com)", + "dependencies": { + "deep-extend": "~0.4.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": 
"~1.0.4" + } +} diff --git a/node_modules/rc/test/ini.js b/node_modules/rc/test/ini.js new file mode 100644 index 0000000..e6857f8 --- /dev/null +++ b/node_modules/rc/test/ini.js @@ -0,0 +1,16 @@ +var cc =require('../lib/utils') +var INI = require('ini') +var assert = require('assert') + +function test(obj) { + + var _json, _ini + var json = cc.parse (_json = JSON.stringify(obj)) + var ini = cc.parse (_ini = INI.stringify(obj)) + console.log(_ini, _json) + assert.deepEqual(json, ini) +} + + +test({hello: true}) + diff --git a/node_modules/rc/test/nested-env-vars.js b/node_modules/rc/test/nested-env-vars.js new file mode 100644 index 0000000..f576fb1 --- /dev/null +++ b/node_modules/rc/test/nested-env-vars.js @@ -0,0 +1,40 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + + +// Basic usage +process.env[n+'_someOpt__a'] = 42 +process.env[n+'_someOpt__x__'] = 99 +process.env[n+'_someOpt__a__b'] = 186 +process.env[n+'_someOpt__a__b__c'] = 243 +process.env[n+'_someOpt__x__y'] = 1862 +process.env[n+'_someOpt__z'] = 186577 + +// Should ignore empty strings from orphaned '__' +process.env[n+'_someOpt__z__x__'] = 18629 +process.env[n+'_someOpt__w__w__'] = 18629 + +// Leading '__' should ignore everything up to 'z' +process.env[n+'___z__i__'] = 9999 + +var config = require('../')(n, { + option: true +}) + +console.log('\n\n------ nested-env-vars ------\n',config) + +assert.equal(config.option, true) +assert.equal(config.someOpt.a, 42) +assert.equal(config.someOpt.x, 99) +// Should not override `a` once it's been set +assert.equal(config.someOpt.a/*.b*/, 42) +// Should not override `x` once it's been set +assert.equal(config.someOpt.x/*.y*/, 99) +assert.equal(config.someOpt.z, 186577) +// Should not override `z` once it's been set +assert.equal(config.someOpt.z/*.x*/, 186577) +assert.equal(config.someOpt.w.w, 18629) +assert.equal(config.z.i, 9999) + + diff --git a/node_modules/rc/test/test.js b/node_modules/rc/test/test.js new file mode 100644 index 0000000..4f63351 --- /dev/null +++ b/node_modules/rc/test/test.js @@ -0,0 +1,59 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + +process.env[n+'_envOption'] = 42 + +var config = require('../')(n, { + option: true +}) + +console.log(config) + +assert.equal(config.option, true) +assert.equal(config.envOption, 42) + +var customArgv = require('../')(n, { + option: true +}, { // nopt-like argv + option: false, + envOption: 24, + argv: { + remain: [], + cooked: ['--no-option', '--envOption', '24'], + original: ['--no-option', '--envOption=24'] + } +}) + +console.log(customArgv) + +assert.equal(customArgv.option, false) +assert.equal(customArgv.envOption, 24) + +var fs = require('fs') +var path = require('path') +var jsonrc = path.resolve('.' 
+ n + 'rc'); + +fs.writeFileSync(jsonrc, [ + '{', + '// json overrides default', + '"option": false,', + '/* env overrides json */', + '"envOption": 24', + '}' +].join('\n')); + +var commentedJSON = require('../')(n, { + option: true +}) + +fs.unlinkSync(jsonrc); + +console.log(commentedJSON) + +assert.equal(commentedJSON.option, false) +assert.equal(commentedJSON.envOption, 42) + +assert.equal(commentedJSON.config, jsonrc) +assert.equal(commentedJSON.configs.length, 1) +assert.equal(commentedJSON.configs[0], jsonrc) diff --git a/node_modules/readable-stream/.npmignore b/node_modules/readable-stream/.npmignore new file mode 100644 index 0000000..38344f8 --- /dev/null +++ b/node_modules/readable-stream/.npmignore @@ -0,0 +1,5 @@ +build/ +test/ +examples/ +fs.js +zlib.js \ No newline at end of file diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..e46b823 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,15 @@ +# readable-stream + +***Node-core streams for userland*** + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png&months=6&height=3)](https://nodei.co/npm/readable-stream/) + +This package is a mirror of the Streams2 and Streams3 implementations in Node-core. + +If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core. + +**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12. + +**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. 
You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"` + diff --git a/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..ca807af --- /dev/null +++ b/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/node_modules/readable-stream/float.patch b/node_modules/readable-stream/float.patch new file mode 100644 index 0000000..b984607 --- /dev/null +++ b/node_modules/readable-stream/float.patch @@ -0,0 +1,923 @@ +diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js +index c5a741c..a2e0d8e 100644 +--- a/lib/_stream_duplex.js ++++ b/lib/_stream_duplex.js +@@ -26,8 +26,8 @@ + + module.exports = Duplex; + var util = require('util'); +-var Readable = require('_stream_readable'); +-var Writable = require('_stream_writable'); ++var Readable = require('./_stream_readable'); ++var Writable = require('./_stream_writable'); + + util.inherits(Duplex, Readable); + +diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js +index a5e9864..330c247 100644 +--- a/lib/_stream_passthrough.js ++++ b/lib/_stream_passthrough.js +@@ -25,7 +25,7 @@ + + module.exports = PassThrough; + +-var Transform = require('_stream_transform'); ++var Transform = require('./_stream_transform'); + var util = require('util'); + util.inherits(PassThrough, Transform); + +diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js +index 0c3fe3e..90a8298 100644 +--- a/lib/_stream_readable.js ++++ b/lib/_stream_readable.js +@@ -23,10 +23,34 @@ module.exports = Readable; + Readable.ReadableState = ReadableState; + + var EE = require('events').EventEmitter; ++if (!EE.listenerCount) EE.listenerCount = function(emitter, type) { ++ return emitter.listeners(type).length; ++}; ++ ++if (!global.setImmediate) global.setImmediate = function setImmediate(fn) { ++ return setTimeout(fn, 0); ++}; ++if (!global.clearImmediate) global.clearImmediate = function clearImmediate(i) { ++ return clearTimeout(i); ++}; ++ + var Stream = require('stream'); + var util = require('util'); ++if (!util.isUndefined) { ++ var utilIs = require('core-util-is'); ++ for (var f in utilIs) { ++ util[f] = utilIs[f]; ++ } ++} + var StringDecoder; +-var debug = util.debuglog('stream'); ++var debug; ++if (util.debuglog) ++ debug = util.debuglog('stream'); ++else try { ++ debug = require('debuglog')('stream'); ++} catch (er) { ++ debug = function() {}; ++} + + util.inherits(Readable, Stream); + +@@ -380,7 +404,7 @@ function chunkInvalid(state, chunk) { + + + function onEofChunk(stream, state) { +- if (state.decoder && !state.ended) { ++ if (state.decoder && !state.ended && state.decoder.end) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); +diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js +index b1f9fcc..b0caf57 100644 +--- a/lib/_stream_transform.js ++++ b/lib/_stream_transform.js +@@ -64,8 +64,14 @@ + + module.exports = Transform; + +-var Duplex = require('_stream_duplex'); ++var Duplex = require('./_stream_duplex'); + var util = require('util'); ++if (!util.isUndefined) { ++ var utilIs = require('core-util-is'); ++ for (var f in utilIs) { ++ util[f] = utilIs[f]; ++ } ++} + util.inherits(Transform, Duplex); + + +diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js +index ba2e920..f49288b 100644 +--- a/lib/_stream_writable.js ++++ b/lib/_stream_writable.js +@@ -27,6 +27,12 @@ module.exports = Writable; + 
Writable.WritableState = WritableState; + + var util = require('util'); ++if (!util.isUndefined) { ++ var utilIs = require('core-util-is'); ++ for (var f in utilIs) { ++ util[f] = utilIs[f]; ++ } ++} + var Stream = require('stream'); + + util.inherits(Writable, Stream); +@@ -119,7 +125,7 @@ function WritableState(options, stream) { + function Writable(options) { + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. +- if (!(this instanceof Writable) && !(this instanceof Stream.Duplex)) ++ if (!(this instanceof Writable) && !(this instanceof require('./_stream_duplex'))) + return new Writable(options); + + this._writableState = new WritableState(options, this); +diff --git a/test/simple/test-stream-big-push.js b/test/simple/test-stream-big-push.js +index e3787e4..8cd2127 100644 +--- a/test/simple/test-stream-big-push.js ++++ b/test/simple/test-stream-big-push.js +@@ -21,7 +21,7 @@ + + var common = require('../common'); + var assert = require('assert'); +-var stream = require('stream'); ++var stream = require('../../'); + var str = 'asdfasdfasdfasdfasdf'; + + var r = new stream.Readable({ +diff --git a/test/simple/test-stream-end-paused.js b/test/simple/test-stream-end-paused.js +index bb73777..d40efc7 100644 +--- a/test/simple/test-stream-end-paused.js ++++ b/test/simple/test-stream-end-paused.js +@@ -25,7 +25,7 @@ var gotEnd = false; + + // Make sure we don't miss the end event for paused 0-length streams + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + var stream = new Readable(); + var calledRead = false; + stream._read = function() { +diff --git a/test/simple/test-stream-pipe-after-end.js b/test/simple/test-stream-pipe-after-end.js +index b46ee90..0be8366 100644 +--- a/test/simple/test-stream-pipe-after-end.js ++++ b/test/simple/test-stream-pipe-after-end.js +@@ -22,8 +22,8 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('_stream_readable'); +-var Writable = require('_stream_writable'); ++var Readable = require('../../lib/_stream_readable'); ++var Writable = require('../../lib/_stream_writable'); + var util = require('util'); + + util.inherits(TestReadable, Readable); +diff --git a/test/simple/test-stream-pipe-cleanup.js b/test/simple/test-stream-pipe-cleanup.js +deleted file mode 100644 +index f689358..0000000 +--- a/test/simple/test-stream-pipe-cleanup.js ++++ /dev/null +@@ -1,122 +0,0 @@ +-// Copyright Joyent, Inc. and other Node contributors. +-// +-// Permission is hereby granted, free of charge, to any person obtaining a +-// copy of this software and associated documentation files (the +-// "Software"), to deal in the Software without restriction, including +-// without limitation the rights to use, copy, modify, merge, publish, +-// distribute, sublicense, and/or sell copies of the Software, and to permit +-// persons to whom the Software is furnished to do so, subject to the +-// following conditions: +-// +-// The above copyright notice and this permission notice shall be included +-// in all copies or substantial portions of the Software. +-// +-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +-// USE OR OTHER DEALINGS IN THE SOFTWARE. +- +-// This test asserts that Stream.prototype.pipe does not leave listeners +-// hanging on the source or dest. +- +-var common = require('../common'); +-var stream = require('stream'); +-var assert = require('assert'); +-var util = require('util'); +- +-function Writable() { +- this.writable = true; +- this.endCalls = 0; +- stream.Stream.call(this); +-} +-util.inherits(Writable, stream.Stream); +-Writable.prototype.end = function() { +- this.endCalls++; +-}; +- +-Writable.prototype.destroy = function() { +- this.endCalls++; +-}; +- +-function Readable() { +- this.readable = true; +- stream.Stream.call(this); +-} +-util.inherits(Readable, stream.Stream); +- +-function Duplex() { +- this.readable = true; +- Writable.call(this); +-} +-util.inherits(Duplex, Writable); +- +-var i = 0; +-var limit = 100; +- +-var w = new Writable(); +- +-var r; +- +-for (i = 0; i < limit; i++) { +- r = new Readable(); +- r.pipe(w); +- r.emit('end'); +-} +-assert.equal(0, r.listeners('end').length); +-assert.equal(limit, w.endCalls); +- +-w.endCalls = 0; +- +-for (i = 0; i < limit; i++) { +- r = new Readable(); +- r.pipe(w); +- r.emit('close'); +-} +-assert.equal(0, r.listeners('close').length); +-assert.equal(limit, w.endCalls); +- +-w.endCalls = 0; +- +-r = new Readable(); +- +-for (i = 0; i < limit; i++) { +- w = new Writable(); +- r.pipe(w); +- w.emit('close'); +-} +-assert.equal(0, w.listeners('close').length); +- +-r = new Readable(); +-w = new Writable(); +-var d = new Duplex(); +-r.pipe(d); // pipeline A +-d.pipe(w); // pipeline B +-assert.equal(r.listeners('end').length, 2); // A.onend, A.cleanup +-assert.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup +-assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup +-assert.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup +-assert.equal(w.listeners('end').length, 0); +-assert.equal(w.listeners('close').length, 1); // B.cleanup +- +-r.emit('end'); +-assert.equal(d.endCalls, 1); +-assert.equal(w.endCalls, 0); +-assert.equal(r.listeners('end').length, 0); +-assert.equal(r.listeners('close').length, 0); +-assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup +-assert.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup +-assert.equal(w.listeners('end').length, 0); +-assert.equal(w.listeners('close').length, 1); // B.cleanup +- +-d.emit('end'); +-assert.equal(d.endCalls, 1); +-assert.equal(w.endCalls, 1); +-assert.equal(r.listeners('end').length, 0); +-assert.equal(r.listeners('close').length, 0); +-assert.equal(d.listeners('end').length, 0); +-assert.equal(d.listeners('close').length, 0); +-assert.equal(w.listeners('end').length, 0); +-assert.equal(w.listeners('close').length, 0); +diff --git a/test/simple/test-stream-pipe-error-handling.js b/test/simple/test-stream-pipe-error-handling.js +index c5d724b..c7d6b7d 100644 +--- a/test/simple/test-stream-pipe-error-handling.js ++++ b/test/simple/test-stream-pipe-error-handling.js +@@ -21,7 +21,7 @@ + + var common = require('../common'); + var assert = require('assert'); +-var Stream = require('stream').Stream; ++var Stream = require('../../').Stream; + + (function testErrorListenerCatches() { + var source = new Stream(); +diff --git a/test/simple/test-stream-pipe-event.js 
b/test/simple/test-stream-pipe-event.js +index cb9d5fe..56f8d61 100644 +--- a/test/simple/test-stream-pipe-event.js ++++ b/test/simple/test-stream-pipe-event.js +@@ -20,7 +20,7 @@ + // USE OR OTHER DEALINGS IN THE SOFTWARE. + + var common = require('../common'); +-var stream = require('stream'); ++var stream = require('../../'); + var assert = require('assert'); + var util = require('util'); + +diff --git a/test/simple/test-stream-push-order.js b/test/simple/test-stream-push-order.js +index f2e6ec2..a5c9bf9 100644 +--- a/test/simple/test-stream-push-order.js ++++ b/test/simple/test-stream-push-order.js +@@ -20,7 +20,7 @@ + // USE OR OTHER DEALINGS IN THE SOFTWARE. + + var common = require('../common.js'); +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + var assert = require('assert'); + + var s = new Readable({ +diff --git a/test/simple/test-stream-push-strings.js b/test/simple/test-stream-push-strings.js +index 06f43dc..1701a9a 100644 +--- a/test/simple/test-stream-push-strings.js ++++ b/test/simple/test-stream-push-strings.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + var util = require('util'); + + util.inherits(MyStream, Readable); +diff --git a/test/simple/test-stream-readable-event.js b/test/simple/test-stream-readable-event.js +index ba6a577..a8e6f7b 100644 +--- a/test/simple/test-stream-readable-event.js ++++ b/test/simple/test-stream-readable-event.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + + (function first() { + // First test, not reading when the readable is added. +diff --git a/test/simple/test-stream-readable-flow-recursion.js b/test/simple/test-stream-readable-flow-recursion.js +index 2891ad6..11689ba 100644 +--- a/test/simple/test-stream-readable-flow-recursion.js ++++ b/test/simple/test-stream-readable-flow-recursion.js +@@ -27,7 +27,7 @@ var assert = require('assert'); + // more data continuously, but without triggering a nextTick + // warning or RangeError. + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + + // throw an error if we trigger a nextTick warning. + process.throwDeprecation = true; +diff --git a/test/simple/test-stream-unshift-empty-chunk.js b/test/simple/test-stream-unshift-empty-chunk.js +index 0c96476..7827538 100644 +--- a/test/simple/test-stream-unshift-empty-chunk.js ++++ b/test/simple/test-stream-unshift-empty-chunk.js +@@ -24,7 +24,7 @@ var assert = require('assert'); + + // This test verifies that stream.unshift(Buffer(0)) or + // stream.unshift('') does not set state.reading=false. +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + + var r = new Readable(); + var nChunks = 10; +diff --git a/test/simple/test-stream-unshift-read-race.js b/test/simple/test-stream-unshift-read-race.js +index 83fd9fa..17c18aa 100644 +--- a/test/simple/test-stream-unshift-read-race.js ++++ b/test/simple/test-stream-unshift-read-race.js +@@ -29,7 +29,7 @@ var assert = require('assert'); + // 3. push() after the EOF signaling null is an error. + // 4. _read() is not called after pushing the EOF null chunk. 
+ +-var stream = require('stream'); ++var stream = require('../../'); + var hwm = 10; + var r = stream.Readable({ highWaterMark: hwm }); + var chunks = 10; +@@ -51,7 +51,14 @@ r._read = function(n) { + + function push(fast) { + assert(!pushedNull, 'push() after null push'); +- var c = pos >= data.length ? null : data.slice(pos, pos + n); ++ var c; ++ if (pos >= data.length) ++ c = null; ++ else { ++ if (n + pos > data.length) ++ n = data.length - pos; ++ c = data.slice(pos, pos + n); ++ } + pushedNull = c === null; + if (fast) { + pos += n; +diff --git a/test/simple/test-stream-writev.js b/test/simple/test-stream-writev.js +index 5b49e6e..b5321f3 100644 +--- a/test/simple/test-stream-writev.js ++++ b/test/simple/test-stream-writev.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var stream = require('stream'); ++var stream = require('../../'); + + var queue = []; + for (var decode = 0; decode < 2; decode++) { +diff --git a/test/simple/test-stream2-basic.js b/test/simple/test-stream2-basic.js +index 3814bf0..248c1be 100644 +--- a/test/simple/test-stream2-basic.js ++++ b/test/simple/test-stream2-basic.js +@@ -21,7 +21,7 @@ + + + var common = require('../common.js'); +-var R = require('_stream_readable'); ++var R = require('../../lib/_stream_readable'); + var assert = require('assert'); + + var util = require('util'); +diff --git a/test/simple/test-stream2-compatibility.js b/test/simple/test-stream2-compatibility.js +index 6cdd4e9..f0fa84b 100644 +--- a/test/simple/test-stream2-compatibility.js ++++ b/test/simple/test-stream2-compatibility.js +@@ -21,7 +21,7 @@ + + + var common = require('../common.js'); +-var R = require('_stream_readable'); ++var R = require('../../lib/_stream_readable'); + var assert = require('assert'); + + var util = require('util'); +diff --git a/test/simple/test-stream2-finish-pipe.js b/test/simple/test-stream2-finish-pipe.js +index 39b274f..006a19b 100644 +--- a/test/simple/test-stream2-finish-pipe.js ++++ b/test/simple/test-stream2-finish-pipe.js +@@ -20,7 +20,7 @@ + // USE OR OTHER DEALINGS IN THE SOFTWARE. + + var common = require('../common.js'); +-var stream = require('stream'); ++var stream = require('../../'); + var Buffer = require('buffer').Buffer; + + var r = new stream.Readable(); +diff --git a/test/simple/test-stream2-fs.js b/test/simple/test-stream2-fs.js +deleted file mode 100644 +index e162406..0000000 +--- a/test/simple/test-stream2-fs.js ++++ /dev/null +@@ -1,72 +0,0 @@ +-// Copyright Joyent, Inc. and other Node contributors. +-// +-// Permission is hereby granted, free of charge, to any person obtaining a +-// copy of this software and associated documentation files (the +-// "Software"), to deal in the Software without restriction, including +-// without limitation the rights to use, copy, modify, merge, publish, +-// distribute, sublicense, and/or sell copies of the Software, and to permit +-// persons to whom the Software is furnished to do so, subject to the +-// following conditions: +-// +-// The above copyright notice and this permission notice shall be included +-// in all copies or substantial portions of the Software. +-// +-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +-// USE OR OTHER DEALINGS IN THE SOFTWARE. +- +- +-var common = require('../common.js'); +-var R = require('_stream_readable'); +-var assert = require('assert'); +- +-var fs = require('fs'); +-var FSReadable = fs.ReadStream; +- +-var path = require('path'); +-var file = path.resolve(common.fixturesDir, 'x1024.txt'); +- +-var size = fs.statSync(file).size; +- +-var expectLengths = [1024]; +- +-var util = require('util'); +-var Stream = require('stream'); +- +-util.inherits(TestWriter, Stream); +- +-function TestWriter() { +- Stream.apply(this); +- this.buffer = []; +- this.length = 0; +-} +- +-TestWriter.prototype.write = function(c) { +- this.buffer.push(c.toString()); +- this.length += c.length; +- return true; +-}; +- +-TestWriter.prototype.end = function(c) { +- if (c) this.buffer.push(c.toString()); +- this.emit('results', this.buffer); +-} +- +-var r = new FSReadable(file); +-var w = new TestWriter(); +- +-w.on('results', function(res) { +- console.error(res, w.length); +- assert.equal(w.length, size); +- var l = 0; +- assert.deepEqual(res.map(function (c) { +- return c.length; +- }), expectLengths); +- console.log('ok'); +-}); +- +-r.pipe(w); +diff --git a/test/simple/test-stream2-httpclient-response-end.js b/test/simple/test-stream2-httpclient-response-end.js +deleted file mode 100644 +index 15cffc2..0000000 +--- a/test/simple/test-stream2-httpclient-response-end.js ++++ /dev/null +@@ -1,52 +0,0 @@ +-// Copyright Joyent, Inc. and other Node contributors. +-// +-// Permission is hereby granted, free of charge, to any person obtaining a +-// copy of this software and associated documentation files (the +-// "Software"), to deal in the Software without restriction, including +-// without limitation the rights to use, copy, modify, merge, publish, +-// distribute, sublicense, and/or sell copies of the Software, and to permit +-// persons to whom the Software is furnished to do so, subject to the +-// following conditions: +-// +-// The above copyright notice and this permission notice shall be included +-// in all copies or substantial portions of the Software. +-// +-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +-// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+- +-var common = require('../common.js'); +-var assert = require('assert'); +-var http = require('http'); +-var msg = 'Hello'; +-var readable_event = false; +-var end_event = false; +-var server = http.createServer(function(req, res) { +- res.writeHead(200, {'Content-Type': 'text/plain'}); +- res.end(msg); +-}).listen(common.PORT, function() { +- http.get({port: common.PORT}, function(res) { +- var data = ''; +- res.on('readable', function() { +- console.log('readable event'); +- readable_event = true; +- data += res.read(); +- }); +- res.on('end', function() { +- console.log('end event'); +- end_event = true; +- assert.strictEqual(msg, data); +- server.close(); +- }); +- }); +-}); +- +-process.on('exit', function() { +- assert(readable_event); +- assert(end_event); +-}); +- +diff --git a/test/simple/test-stream2-large-read-stall.js b/test/simple/test-stream2-large-read-stall.js +index 2fbfbca..667985b 100644 +--- a/test/simple/test-stream2-large-read-stall.js ++++ b/test/simple/test-stream2-large-read-stall.js +@@ -30,7 +30,7 @@ var PUSHSIZE = 20; + var PUSHCOUNT = 1000; + var HWM = 50; + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + var r = new Readable({ + highWaterMark: HWM + }); +@@ -39,23 +39,23 @@ var rs = r._readableState; + r._read = push; + + r.on('readable', function() { +- console.error('>> readable'); ++ //console.error('>> readable'); + do { +- console.error(' > read(%d)', READSIZE); ++ //console.error(' > read(%d)', READSIZE); + var ret = r.read(READSIZE); +- console.error(' < %j (%d remain)', ret && ret.length, rs.length); ++ //console.error(' < %j (%d remain)', ret && ret.length, rs.length); + } while (ret && ret.length === READSIZE); + +- console.error('<< after read()', +- ret && ret.length, +- rs.needReadable, +- rs.length); ++ //console.error('<< after read()', ++ // ret && ret.length, ++ // rs.needReadable, ++ // rs.length); + }); + + var endEmitted = false; + r.on('end', function() { + endEmitted = true; +- console.error('end'); ++ //console.error('end'); + }); + + var pushes = 0; +@@ -64,11 +64,11 @@ function push() { + return; + + if (pushes++ === PUSHCOUNT) { +- console.error(' push(EOF)'); ++ //console.error(' push(EOF)'); + return r.push(null); + } + +- console.error(' push #%d', pushes); ++ //console.error(' push #%d', pushes); + if (r.push(new Buffer(PUSHSIZE))) + setTimeout(push); + } +diff --git a/test/simple/test-stream2-objects.js b/test/simple/test-stream2-objects.js +index 3e6931d..ff47d89 100644 +--- a/test/simple/test-stream2-objects.js ++++ b/test/simple/test-stream2-objects.js +@@ -21,8 +21,8 @@ + + + var common = require('../common.js'); +-var Readable = require('_stream_readable'); +-var Writable = require('_stream_writable'); ++var Readable = require('../../lib/_stream_readable'); ++var Writable = require('../../lib/_stream_writable'); + var assert = require('assert'); + + // tiny node-tap lookalike. 
+diff --git a/test/simple/test-stream2-pipe-error-handling.js b/test/simple/test-stream2-pipe-error-handling.js +index cf7531c..e3f3e4e 100644 +--- a/test/simple/test-stream2-pipe-error-handling.js ++++ b/test/simple/test-stream2-pipe-error-handling.js +@@ -21,7 +21,7 @@ + + var common = require('../common'); + var assert = require('assert'); +-var stream = require('stream'); ++var stream = require('../../'); + + (function testErrorListenerCatches() { + var count = 1000; +diff --git a/test/simple/test-stream2-pipe-error-once-listener.js b/test/simple/test-stream2-pipe-error-once-listener.js +index 5e8e3cb..53b2616 100755 +--- a/test/simple/test-stream2-pipe-error-once-listener.js ++++ b/test/simple/test-stream2-pipe-error-once-listener.js +@@ -24,7 +24,7 @@ var common = require('../common.js'); + var assert = require('assert'); + + var util = require('util'); +-var stream = require('stream'); ++var stream = require('../../'); + + + var Read = function() { +diff --git a/test/simple/test-stream2-push.js b/test/simple/test-stream2-push.js +index b63edc3..eb2b0e9 100644 +--- a/test/simple/test-stream2-push.js ++++ b/test/simple/test-stream2-push.js +@@ -20,7 +20,7 @@ + // USE OR OTHER DEALINGS IN THE SOFTWARE. + + var common = require('../common.js'); +-var stream = require('stream'); ++var stream = require('../../'); + var Readable = stream.Readable; + var Writable = stream.Writable; + var assert = require('assert'); +diff --git a/test/simple/test-stream2-read-sync-stack.js b/test/simple/test-stream2-read-sync-stack.js +index e8a7305..9740a47 100644 +--- a/test/simple/test-stream2-read-sync-stack.js ++++ b/test/simple/test-stream2-read-sync-stack.js +@@ -21,7 +21,7 @@ + + var common = require('../common'); + var assert = require('assert'); +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + var r = new Readable(); + var N = 256 * 1024; + +diff --git a/test/simple/test-stream2-readable-empty-buffer-no-eof.js b/test/simple/test-stream2-readable-empty-buffer-no-eof.js +index cd30178..4b1659d 100644 +--- a/test/simple/test-stream2-readable-empty-buffer-no-eof.js ++++ b/test/simple/test-stream2-readable-empty-buffer-no-eof.js +@@ -22,10 +22,9 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('stream').Readable; ++var Readable = require('../../').Readable; + + test1(); +-test2(); + + function test1() { + var r = new Readable(); +@@ -88,31 +87,3 @@ function test1() { + console.log('ok'); + }); + } +- +-function test2() { +- var r = new Readable({ encoding: 'base64' }); +- var reads = 5; +- r._read = function(n) { +- if (!reads--) +- return r.push(null); // EOF +- else +- return r.push(new Buffer('x')); +- }; +- +- var results = []; +- function flow() { +- var chunk; +- while (null !== (chunk = r.read())) +- results.push(chunk + ''); +- } +- r.on('readable', flow); +- r.on('end', function() { +- results.push('EOF'); +- }); +- flow(); +- +- process.on('exit', function() { +- assert.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]); +- console.log('ok'); +- }); +-} +diff --git a/test/simple/test-stream2-readable-from-list.js b/test/simple/test-stream2-readable-from-list.js +index 7c96ffe..04a96f5 100644 +--- a/test/simple/test-stream2-readable-from-list.js ++++ b/test/simple/test-stream2-readable-from-list.js +@@ -21,7 +21,7 @@ + + var assert = require('assert'); + var common = require('../common.js'); +-var fromList = require('_stream_readable')._fromList; ++var fromList = 
require('../../lib/_stream_readable')._fromList; + + // tiny node-tap lookalike. + var tests = []; +diff --git a/test/simple/test-stream2-readable-legacy-drain.js b/test/simple/test-stream2-readable-legacy-drain.js +index 675da8e..51fd3d5 100644 +--- a/test/simple/test-stream2-readable-legacy-drain.js ++++ b/test/simple/test-stream2-readable-legacy-drain.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Stream = require('stream'); ++var Stream = require('../../'); + var Readable = Stream.Readable; + + var r = new Readable(); +diff --git a/test/simple/test-stream2-readable-non-empty-end.js b/test/simple/test-stream2-readable-non-empty-end.js +index 7314ae7..c971898 100644 +--- a/test/simple/test-stream2-readable-non-empty-end.js ++++ b/test/simple/test-stream2-readable-non-empty-end.js +@@ -21,7 +21,7 @@ + + var assert = require('assert'); + var common = require('../common.js'); +-var Readable = require('_stream_readable'); ++var Readable = require('../../lib/_stream_readable'); + + var len = 0; + var chunks = new Array(10); +diff --git a/test/simple/test-stream2-readable-wrap-empty.js b/test/simple/test-stream2-readable-wrap-empty.js +index 2e5cf25..fd8a3dc 100644 +--- a/test/simple/test-stream2-readable-wrap-empty.js ++++ b/test/simple/test-stream2-readable-wrap-empty.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('_stream_readable'); ++var Readable = require('../../lib/_stream_readable'); + var EE = require('events').EventEmitter; + + var oldStream = new EE(); +diff --git a/test/simple/test-stream2-readable-wrap.js b/test/simple/test-stream2-readable-wrap.js +index 90eea01..6b177f7 100644 +--- a/test/simple/test-stream2-readable-wrap.js ++++ b/test/simple/test-stream2-readable-wrap.js +@@ -22,8 +22,8 @@ + var common = require('../common'); + var assert = require('assert'); + +-var Readable = require('_stream_readable'); +-var Writable = require('_stream_writable'); ++var Readable = require('../../lib/_stream_readable'); ++var Writable = require('../../lib/_stream_writable'); + var EE = require('events').EventEmitter; + + var testRuns = 0, completedRuns = 0; +diff --git a/test/simple/test-stream2-set-encoding.js b/test/simple/test-stream2-set-encoding.js +index 5d2c32a..685531b 100644 +--- a/test/simple/test-stream2-set-encoding.js ++++ b/test/simple/test-stream2-set-encoding.js +@@ -22,7 +22,7 @@ + + var common = require('../common.js'); + var assert = require('assert'); +-var R = require('_stream_readable'); ++var R = require('../../lib/_stream_readable'); + var util = require('util'); + + // tiny node-tap lookalike. +diff --git a/test/simple/test-stream2-transform.js b/test/simple/test-stream2-transform.js +index 9c9ddd8..a0cacc6 100644 +--- a/test/simple/test-stream2-transform.js ++++ b/test/simple/test-stream2-transform.js +@@ -21,8 +21,8 @@ + + var assert = require('assert'); + var common = require('../common.js'); +-var PassThrough = require('_stream_passthrough'); +-var Transform = require('_stream_transform'); ++var PassThrough = require('../../').PassThrough; ++var Transform = require('../../').Transform; + + // tiny node-tap lookalike. 
+ var tests = []; +diff --git a/test/simple/test-stream2-unpipe-drain.js b/test/simple/test-stream2-unpipe-drain.js +index d66dc3c..365b327 100644 +--- a/test/simple/test-stream2-unpipe-drain.js ++++ b/test/simple/test-stream2-unpipe-drain.js +@@ -22,7 +22,7 @@ + + var common = require('../common.js'); + var assert = require('assert'); +-var stream = require('stream'); ++var stream = require('../../'); + var crypto = require('crypto'); + + var util = require('util'); +diff --git a/test/simple/test-stream2-unpipe-leak.js b/test/simple/test-stream2-unpipe-leak.js +index 99f8746..17c92ae 100644 +--- a/test/simple/test-stream2-unpipe-leak.js ++++ b/test/simple/test-stream2-unpipe-leak.js +@@ -22,7 +22,7 @@ + + var common = require('../common.js'); + var assert = require('assert'); +-var stream = require('stream'); ++var stream = require('../../'); + + var chunk = new Buffer('hallo'); + +diff --git a/test/simple/test-stream2-writable.js b/test/simple/test-stream2-writable.js +index 704100c..209c3a6 100644 +--- a/test/simple/test-stream2-writable.js ++++ b/test/simple/test-stream2-writable.js +@@ -20,8 +20,8 @@ + // USE OR OTHER DEALINGS IN THE SOFTWARE. + + var common = require('../common.js'); +-var W = require('_stream_writable'); +-var D = require('_stream_duplex'); ++var W = require('../../').Writable; ++var D = require('../../').Duplex; + var assert = require('assert'); + + var util = require('util'); +diff --git a/test/simple/test-stream3-pause-then-read.js b/test/simple/test-stream3-pause-then-read.js +index b91bde3..2f72c15 100644 +--- a/test/simple/test-stream3-pause-then-read.js ++++ b/test/simple/test-stream3-pause-then-read.js +@@ -22,7 +22,7 @@ + var common = require('../common'); + var assert = require('assert'); + +-var stream = require('stream'); ++var stream = require('../../'); + var Readable = stream.Readable; + var Writable = stream.Writable; + diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..b513d61 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,89 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +module.exports = Duplex; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +} +/**/ + + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +forEach(objectKeys(Writable.prototype), function(method) { + if (!Duplex.prototype[method]) + Duplex.prototype[method] = Writable.prototype[method]; +}); + +function Duplex(options) { + if (!(this instanceof Duplex)) + return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) + this.readable = false; + + if (options && options.writable === false) + this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) + this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) + return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(this.end.bind(this)); +} + +function forEach (xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..895ca50 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,46 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) + return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function(chunk, encoding, cb) { + cb(null, chunk); +}; diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..19ab358 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,951 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + +Readable.ReadableState = ReadableState; + +var EE = require('events').EventEmitter; + +/**/ +if (!EE.listenerCount) EE.listenerCount = function(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +var Stream = require('stream'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var StringDecoder; + + +/**/ +var debug = require('util'); +if (debug && debug.debuglog) { + debug = debug.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + + +util.inherits(Readable, Stream); + +function ReadableState(options, stream) { + var Duplex = require('./_stream_duplex'); + + options = options || {}; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = options.objectMode ? 16 : 16 * 1024; + this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~~this.highWaterMark; + + this.buffer = []; + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. 
+ this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) + this.objectMode = this.objectMode || !!options.readableObjectMode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) + StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + var Duplex = require('./_stream_duplex'); + + if (!(this instanceof Readable)) + return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function(chunk, encoding) { + var state = this._readableState; + + if (util.isString(chunk) && !state.objectMode) { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = new Buffer(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function(chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (util.isNullOrUndefined(chunk)) { + state.reading = false; + if (!state.ended) + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var e = new Error('stream.unshift() after end event'); + stream.emit('error', e); + } else { + if (state.decoder && !addToFront && !encoding) + chunk = state.decoder.write(chunk); + + if (!addToFront) + state.reading = false; + + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 
1 : chunk.length; + if (addToFront) + state.buffer.unshift(chunk); + else + state.buffer.push(chunk); + + if (state.needReadable) + emitReadable(stream); + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + + + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && + (state.needReadable || + state.length < state.highWaterMark || + state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function(enc) { + if (!StringDecoder) + StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 128MB +var MAX_HWM = 0x800000; +function roundUpToNextPowerOf2(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 + n--; + for (var p = 1; p < 32; p <<= 1) n |= n >> p; + n++; + } + return n; +} + +function howMuchToRead(n, state) { + if (state.length === 0 && state.ended) + return 0; + + if (state.objectMode) + return n === 0 ? 0 : 1; + + if (isNaN(n) || util.isNull(n)) { + // only flow one buffer at a time + if (state.flowing && state.buffer.length) + return state.buffer[0].length; + else + return state.length; + } + + if (n <= 0) + return 0; + + // If we're asking for more than the target buffer level, + // then raise the water mark. Bump up to the next highest + // power of 2, to prevent increasing it excessively in tiny + // amounts. + if (n > state.highWaterMark) + state.highWaterMark = roundUpToNextPowerOf2(n); + + // don't have that much. return null, unless we've ended. + if (n > state.length) { + if (!state.ended) { + state.needReadable = true; + return 0; + } else + return state.length; + } + + return n; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function(n) { + debug('read', n); + var state = this._readableState; + var nOrig = n; + + if (!util.isNumber(n) || n > 0) + state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && + state.needReadable && + (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) + endReadable(this); + else + emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) + endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. 
Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } + + if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) + state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + } + + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (doRead && !state.reading) + n = howMuchToRead(nOrig, state); + + var ret; + if (n > 0) + ret = fromList(n, state); + else + ret = null; + + if (util.isNull(ret)) { + state.needReadable = true; + n = 0; + } + + state.length -= n; + + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (state.length === 0 && !state.ended) + state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended && state.length === 0) + endReadable(this); + + if (!util.isNull(ret)) + this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!util.isBuffer(chunk) && + !util.isString(chunk) && + !util.isNullOrUndefined(chunk) && + !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + + +function onEofChunk(stream, state) { + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
+function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) + process.nextTick(function() { + emitReadable_(stream); + }); + else + emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(function() { + maybeReadMore_(stream, state); + }); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && + state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + else + len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function(n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function(dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && + dest !== process.stdout && + dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) + process.nextTick(endFn); + else + src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. 
+ if (state.awaitDrain && + (!dest._writableState || dest._writableState.needDrain)) + ondrain(); + } + + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + if (false === ret) { + debug('false write response, pause', + src._readableState.awaitDrain); + src._readableState.awaitDrain++; + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EE.listenerCount(dest, 'error') === 0) + dest.emit('error', er); + } + // This is a brutally ugly hack to make sure that our error handler + // is attached before any userland ones. NEVER DO THIS. + if (!dest._events || !dest._events.error) + dest.on('error', onerror); + else if (isArray(dest._events.error)) + dest._events.error.unshift(onerror); + else + dest._events.error = [onerror, dest._events.error]; + + + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) + state.awaitDrain--; + if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + + +Readable.prototype.unpipe = function(dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) + return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) + return this; + + if (!dest) + dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) + dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) + dests[i].emit('unpipe', this); + return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) + return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) + state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function(ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + // If listening to data, and it has not explicitly been paused, + // then call resume to start the flow of data on the next tick. 
+ if (ev === 'data' && false !== this._readableState.flowing) { + this.resume(); + } + + if (ev === 'readable' && this.readable) { + var state = this._readableState; + if (!state.readableListening) { + state.readableListening = true; + state.emittedReadable = false; + state.needReadable = true; + if (!state.reading) { + var self = this; + process.nextTick(function() { + debug('readable nexttick read 0'); + self.read(0); + }); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function() { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + if (!state.reading) { + debug('resume read 0'); + this.read(0); + } + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(function() { + resume_(stream, state); + }); + } +} + +function resume_(stream, state) { + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) + stream.read(0); +} + +Readable.prototype.pause = function() { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + if (state.flowing) { + do { + var chunk = stream.read(); + } while (null !== chunk && state.flowing); + } +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function(stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function() { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) + self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function(chunk) { + debug('wrapped data'); + if (state.decoder) + chunk = state.decoder.write(chunk); + if (!chunk || !state.objectMode && !chunk.length) + return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (util.isFunction(stream[i]) && util.isUndefined(this[i])) { + this[i] = function(method) { return function() { + return stream[method].apply(stream, arguments); + }}(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function(ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function(n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + + + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. 
+function fromList(n, state) { + var list = state.buffer; + var length = state.length; + var stringMode = !!state.decoder; + var objectMode = !!state.objectMode; + var ret; + + // nothing in the list, definitely empty. + if (list.length === 0) + return null; + + if (length === 0) + ret = null; + else if (objectMode) + ret = list.shift(); + else if (!n || n >= length) { + // read it all, truncate the array. + if (stringMode) + ret = list.join(''); + else + ret = Buffer.concat(list, length); + list.length = 0; + } else { + // read just some of it. + if (n < list[0].length) { + // just take a part of the first list item. + // slice is the same for buffers and strings. + var buf = list[0]; + ret = buf.slice(0, n); + list[0] = buf.slice(n); + } else if (n === list[0].length) { + // first list is a perfect match + ret = list.shift(); + } else { + // complex case. + // we have enough to cover it, but it spans past the first buffer. + if (stringMode) + ret = ''; + else + ret = new Buffer(n); + + var c = 0; + for (var i = 0, l = list.length; i < l && c < n; i++) { + var buf = list[0]; + var cpy = Math.min(n - c, buf.length); + + if (stringMode) + ret += buf.slice(0, cpy); + else + buf.copy(ret, c, 0, cpy); + + if (cpy < buf.length) + list[0] = buf.slice(cpy); + else + list.shift(); + + c += cpy; + } + } + } + + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) + throw new Error('endReadable called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(function() { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } + }); + } +} + +function forEach (xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf (xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..905c5e4 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,209 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
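To make the contract described in the comment above concrete, here is a minimal consumer-side sketch (an editor-added illustration, not part of the vendored file; the `UpperCase` name and the stdin/stdout wiring are assumptions): each write() eventually reaches _transform(), push() feeds the readable side, and cb() releases the next buffered chunk.

```javascript
var Transform = require('readable-stream').Transform;
var util = require('util');

// Illustrative subclass: upper-cases everything written to it.
function UpperCase(options) {
  if (!(this instanceof UpperCase)) return new UpperCase(options);
  Transform.call(this, options);
}
util.inherits(UpperCase, Transform);

// _transform is called once per written chunk; push() hands transformed
// data to the readable side, and cb() signals that the chunk has been
// consumed so the next buffered write can be processed.
UpperCase.prototype._transform = function (chunk, encoding, cb) {
  this.push(chunk.toString('utf8').toUpperCase());
  cb();
};

// Back-pressure is driven by the reading side, as described above:
// stdout pulls, which drives _read(), which in turn drives _transform().
process.stdin.pipe(new UpperCase()).pipe(process.stdout);
```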
+ +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + + +function TransformState(options, stream) { + this.afterTransform = function(er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) + return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (!util.isNullOrUndefined(data)) + stream.push(data); + + if (cb) + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + + +function Transform(options) { + if (!(this instanceof Transform)) + return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(options, this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + this.once('prefinish', function() { + if (util.isFunction(this._flush)) + this._flush(function(er) { + done(stream, er); + }); + else + done(stream); + }); +} + +Transform.prototype.push = function(chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function(chunk, encoding, cb) { + throw new Error('not implemented'); +}; + +Transform.prototype._write = function(chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || + rs.needReadable || + rs.length < rs.highWaterMark) + this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function(n) { + var ts = this._transformState; + + if (!util.isNull(ts.writechunk) && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; + + +function done(stream, er) { + if (er) + return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) + throw new Error('calling transform done when ws.length != 0'); + + if (ts.transforming) + throw new Error('calling transform done when still transforming'); + + return stream.push(null); +} diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..db8539c --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,477 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, cb), and it'll handle all +// the drain event emission and buffering. + +module.exports = Writable; + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + +Writable.WritableState = WritableState; + + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Stream = require('stream'); + +util.inherits(Writable, Stream); + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; +} + +function WritableState(options, stream) { + var Duplex = require('./_stream_duplex'); + + options = options || {}; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = options.objectMode ? 16 : 16 * 1024; + this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) + this.objectMode = this.objectMode || !!options.writableObjectMode; + + // cast to ints. + this.highWaterMark = ~~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? 
+ // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function(er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.buffer = []; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; +} + +function Writable(options) { + var Duplex = require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) + return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function() { + this.emit('error', new Error('Cannot pipe. Not readable.')); +}; + + +function writeAfterEnd(stream, state, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + process.nextTick(function() { + cb(er); + }); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. 
+function validChunk(stream, state, chunk, cb) { + var valid = true; + if (!util.isBuffer(chunk) && + !util.isString(chunk) && + !util.isNullOrUndefined(chunk) && + !state.objectMode) { + var er = new TypeError('Invalid non-string/buffer chunk'); + stream.emit('error', er); + process.nextTick(function() { + cb(er); + }); + valid = false; + } + return valid; +} + +Writable.prototype.write = function(chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (util.isFunction(encoding)) { + cb = encoding; + encoding = null; + } + + if (util.isBuffer(chunk)) + encoding = 'buffer'; + else if (!encoding) + encoding = state.defaultEncoding; + + if (!util.isFunction(cb)) + cb = function() {}; + + if (state.ended) + writeAfterEnd(this, state, cb); + else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function() { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function() { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && + !state.corked && + !state.finished && + !state.bufferProcessing && + state.buffer.length) + clearBuffer(this, state); + } +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && + state.decodeStrings !== false && + util.isString(chunk)) { + chunk = new Buffer(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + if (util.isBuffer(chunk)) + encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) + state.needDrain = true; + + if (state.writing || state.corked) + state.buffer.push(new WriteReq(chunk, encoding, cb)); + else + doWrite(stream, state, false, len, chunk, encoding, cb); + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) + stream._writev(chunk, state.onwrite); + else + stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + if (sync) + process.nextTick(function() { + state.pendingcb--; + cb(er); + }); + else { + state.pendingcb--; + cb(er); + } + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) + onwriteError(stream, state, sync, er, cb); + else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(stream, state); + + if (!finished && + !state.corked && + !state.bufferProcessing && + state.buffer.length) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(function() { + afterWrite(stream, state, finished, cb); + }); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) + onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + + if (stream._writev && state.buffer.length > 1) { + // Fast case, write everything using _writev() + var cbs = []; + for (var c = 0; c < state.buffer.length; c++) + cbs.push(state.buffer[c].callback); + + // count the one we are adding, as well. + // TODO(isaacs) clean this up + state.pendingcb++; + doWrite(stream, state, true, state.length, state.buffer, '', function(err) { + for (var i = 0; i < cbs.length; i++) { + state.pendingcb--; + cbs[i](err); + } + }); + + // Clear buffer + state.buffer = []; + } else { + // Slow case, write chunks one-by-one + for (var c = 0; c < state.buffer.length; c++) { + var entry = state.buffer[c]; + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. 
+ if (state.writing) { + c++; + break; + } + } + + if (c < state.buffer.length) + state.buffer = state.buffer.slice(c); + else + state.buffer.length = 0; + } + + state.bufferProcessing = false; +} + +Writable.prototype._write = function(chunk, encoding, cb) { + cb(new Error('not implemented')); + +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function(chunk, encoding, cb) { + var state = this._writableState; + + if (util.isFunction(chunk)) { + cb = chunk; + chunk = null; + encoding = null; + } else if (util.isFunction(encoding)) { + cb = encoding; + encoding = null; + } + + if (!util.isNullOrUndefined(chunk)) + this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) + endWritable(this, state, cb); +}; + + +function needFinish(stream, state) { + return (state.ending && + state.length === 0 && + !state.finished && + !state.writing); +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(stream, state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else + prefinish(stream, state); + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) + process.nextTick(cb); + else + stream.once('finish', cb); + } + state.ended = true; +} diff --git a/node_modules/readable-stream/node_modules/isarray/README.md b/node_modules/readable-stream/node_modules/isarray/README.md new file mode 100644 index 0000000..052a62b --- /dev/null +++ b/node_modules/readable-stream/node_modules/isarray/README.md @@ -0,0 +1,54 @@ + +# isarray + +`Array#isArray` for older browsers. + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/readable-stream/node_modules/isarray/build/build.js b/node_modules/readable-stream/node_modules/isarray/build/build.js new file mode 100644 index 0000000..ec58596 --- /dev/null +++ b/node_modules/readable-stream/node_modules/isarray/build/build.js @@ -0,0 +1,209 @@ + +/** + * Require the given path. + * + * @param {String} path + * @return {Object} exports + * @api public + */ + +function require(path, parent, orig) { + var resolved = require.resolve(path); + + // lookup failed + if (null == resolved) { + orig = orig || path; + parent = parent || 'root'; + var err = new Error('Failed to require "' + orig + '" from "' + parent + '"'); + err.path = orig; + err.parent = parent; + err.require = true; + throw err; + } + + var module = require.modules[resolved]; + + // perform real require() + // by invoking the module's + // registered function + if (!module.exports) { + module.exports = {}; + module.client = module.component = true; + module.call(this, module.exports, require.relative(resolved), module); + } + + return module.exports; +} + +/** + * Registered modules. + */ + +require.modules = {}; + +/** + * Registered aliases. + */ + +require.aliases = {}; + +/** + * Resolve `path`. + * + * Lookup: + * + * - PATH/index.js + * - PATH.js + * - PATH + * + * @param {String} path + * @return {String} path or null + * @api private + */ + +require.resolve = function(path) { + if (path.charAt(0) === '/') path = path.slice(1); + var index = path + '/index.js'; + + var paths = [ + path, + path + '.js', + path + '.json', + path + '/index.js', + path + '/index.json' + ]; + + for (var i = 0; i < paths.length; i++) { + var path = paths[i]; + if (require.modules.hasOwnProperty(path)) return path; + } + + if (require.aliases.hasOwnProperty(index)) { + return require.aliases[index]; + } +}; + +/** + * Normalize `path` relative to the current path. + * + * @param {String} curr + * @param {String} path + * @return {String} + * @api private + */ + +require.normalize = function(curr, path) { + var segs = []; + + if ('.' != path.charAt(0)) return path; + + curr = curr.split('/'); + path = path.split('/'); + + for (var i = 0; i < path.length; ++i) { + if ('..' == path[i]) { + curr.pop(); + } else if ('.' != path[i] && '' != path[i]) { + segs.push(path[i]); + } + } + + return curr.concat(segs).join('/'); +}; + +/** + * Register module at `path` with callback `definition`. + * + * @param {String} path + * @param {Function} definition + * @api private + */ + +require.register = function(path, definition) { + require.modules[path] = definition; +}; + +/** + * Alias a module definition. + * + * @param {String} from + * @param {String} to + * @api private + */ + +require.alias = function(from, to) { + if (!require.modules.hasOwnProperty(from)) { + throw new Error('Failed to alias "' + from + '", it does not exist'); + } + require.aliases[to] = from; +}; + +/** + * Return a require function relative to the `parent` path. + * + * @param {String} parent + * @return {Function} + * @api private + */ + +require.relative = function(parent) { + var p = require.normalize(parent, '..'); + + /** + * lastIndexOf helper. + */ + + function lastIndexOf(arr, obj) { + var i = arr.length; + while (i--) { + if (arr[i] === obj) return i; + } + return -1; + } + + /** + * The relative require() itself. + */ + + function localRequire(path) { + var resolved = localRequire.resolve(path); + return require(resolved, parent, path); + } + + /** + * Resolve relative to the parent. 
+ */ + + localRequire.resolve = function(path) { + var c = path.charAt(0); + if ('/' == c) return path.slice(1); + if ('.' == c) return require.normalize(p, path); + + // resolve deps by returning + // the dep in the nearest "deps" + // directory + var segs = parent.split('/'); + var i = lastIndexOf(segs, 'deps') + 1; + if (!i) i = 0; + path = segs.slice(0, i + 1).join('/') + '/deps/' + path; + return path; + }; + + /** + * Check if module is defined at `path`. + */ + + localRequire.exists = function(path) { + return require.modules.hasOwnProperty(localRequire.resolve(path)); + }; + + return localRequire; +}; +require.register("isarray/index.js", function(exports, require, module){ +module.exports = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]'; +}; + +}); +require.alias("isarray/index.js", "isarray/index.js"); + diff --git a/node_modules/readable-stream/node_modules/isarray/component.json b/node_modules/readable-stream/node_modules/isarray/component.json new file mode 100644 index 0000000..9e31b68 --- /dev/null +++ b/node_modules/readable-stream/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/node_modules/isarray/index.js b/node_modules/readable-stream/node_modules/isarray/index.js new file mode 100644 index 0000000..5f5ad45 --- /dev/null +++ b/node_modules/readable-stream/node_modules/isarray/index.js @@ -0,0 +1,3 @@ +module.exports = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/readable-stream/node_modules/isarray/package.json b/node_modules/readable-stream/node_modules/isarray/package.json new file mode 100644 index 0000000..5a1e9c1 --- /dev/null +++ b/node_modules/readable-stream/node_modules/isarray/package.json @@ -0,0 +1,25 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : { + "test" : "tap test/*.js" + }, + "dependencies" : {}, + "devDependencies" : { + "tap" : "*" + }, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..0bb83ed --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,32 @@ +{ + "name": "readable-stream", + "version": "1.1.14", + "description": "Streams3, a user-land copy of the stream library from Node.js v0.11.x", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "isarray": "0.0.1", + "string_decoder": "~0.10.x", + "inherits": "~2.0.1" + }, + "devDependencies": { + "tap": "~0.2.6" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "repository": { + "type": "git", + "url": 
"git://github.com/isaacs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "MIT" +} diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..27e8d8a --- /dev/null +++ b/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..2a8b5c6 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,10 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = require('stream'); +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +if (!process.browser && process.env.READABLE_STREAM === 'disable') { + module.exports = require('stream'); +} diff --git a/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..5d482f0 --- /dev/null +++ b/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..e1e9efd --- /dev/null +++ b/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/readdirp/.npmignore b/node_modules/readdirp/.npmignore new file mode 100644 index 0000000..7dccd97 --- /dev/null +++ b/node_modules/readdirp/.npmignore @@ -0,0 +1,15 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +node_modules +npm-debug.log \ No newline at end of file diff --git a/node_modules/readdirp/.travis.yml b/node_modules/readdirp/.travis.yml new file mode 100644 index 0000000..0cdb6c3 --- /dev/null +++ b/node_modules/readdirp/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - "0.10" + - "0.12" + - "4.4" + - "6.2" diff --git a/node_modules/readdirp/LICENSE b/node_modules/readdirp/LICENSE new file mode 100644 index 0000000..8a63b80 --- /dev/null +++ b/node_modules/readdirp/LICENSE @@ -0,0 +1,20 @@ +This software is released under the MIT license: + +Copyright (c) 2012-2015 Thorsten Lorenz + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/readdirp/README.md b/node_modules/readdirp/README.md new file mode 100644 index 0000000..b0c1626 --- /dev/null +++ b/node_modules/readdirp/README.md @@ -0,0 +1,233 @@ +# readdirp [![Build Status](https://secure.travis-ci.org/thlorenz/readdirp.png)](http://travis-ci.org/thlorenz/readdirp) + +[![NPM](https://nodei.co/npm/readdirp.png?downloads=true&stars=true)](https://nodei.co/npm/readdirp/) + +Recursive version of [fs.readdir](http://nodejs.org/docs/latest/api/fs.html#fs_fs_readdir_path_callback). Exposes a **stream api**. + +```javascript +var readdirp = require('readdirp') + , path = require('path') + , es = require('event-stream'); + +// print out all JavaScript files along with their size + +var stream = readdirp({ root: path.join(__dirname), fileFilter: '*.js' }); +stream + .on('warn', function (err) { + console.error('non-fatal error', err); + // optionally call stream.destroy() here in order to abort and cause 'close' to be emitted + }) + .on('error', function (err) { console.error('fatal error', err); }) + .pipe(es.mapSync(function (entry) { + return { path: entry.path, size: entry.stat.size }; + })) + .pipe(es.stringify()) + .pipe(process.stdout); +``` + +Meant to be one of the recursive versions of [fs](http://nodejs.org/docs/latest/api/fs.html) functions, e.g., like [mkdirp](https://github.com/substack/node-mkdirp). + +**Table of Contents** *generated with [DocToc](http://doctoc.herokuapp.com/)* + +- [Installation](#installation) +- [API](#api) + - [entry stream](#entry-stream) + - [options](#options) + - [entry info](#entry-info) + - [Filters](#filters) + - [Callback API](#callback-api) + - [allProcessed ](#allprocessed) + - [fileProcessed](#fileprocessed) +- [More Examples](#more-examples) + - [stream api](#stream-api) + - [stream api pipe](#stream-api-pipe) + - [grep](#grep) + - [using callback api](#using-callback-api) + - [tests](#tests) + + +# Installation + + npm install readdirp + +# API + +***var entryStream = readdirp (options)*** + +Reads given root recursively and returns a `stream` of [entry info](#entry-info)s. 
+ +## entry stream + +Behaves as follows: + +- `emit('data')` passes an [entry info](#entry-info) whenever one is found +- `emit('warn')` passes a non-fatal `Error` that prevents a file/directory from being processed (i.e., if it is + inaccessible to the user) +- `emit('error')` passes a fatal `Error` which also ends the stream (i.e., when illegal options where passed) +- `emit('end')` called when all entries were found and no more will be emitted (i.e., we are done) +- `emit('close')` called when the stream is destroyed via `stream.destroy()` (which could be useful if you want to + manually abort even on a non fatal error) - at that point the stream is no longer `readable` and no more entries, + warning or errors are emitted +- to learn more about streams, consult the very detailed + [nodejs streams documentation](http://nodejs.org/api/stream.html) or the + [stream-handbook](https://github.com/substack/stream-handbook) + + +## options + +- **root**: path in which to start reading and recursing into subdirectories + +- **fileFilter**: filter to include/exclude files found (see [Filters](#filters) for more) + +- **directoryFilter**: filter to include/exclude directories found and to recurse into (see [Filters](#filters) for more) + +- **depth**: depth at which to stop recursing even if more subdirectories are found + +- **entryType**: determines if data events on the stream should be emitted for `'files'`, `'directories'`, `'both'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. Defaults to `'files'`. + +- **lstat**: if `true`, readdirp uses `fs.lstat` instead of `fs.stat` in order to stat files and includes symlink entries in the stream along with files. + +## entry info + +Has the following properties: + +- **parentDir** : directory in which entry was found (relative to given root) +- **fullParentDir** : full path to parent directory +- **name** : name of the file/directory +- **path** : path to the file/directory (relative to given root) +- **fullPath** : full path to the file/directory found +- **stat** : built in [stat object](http://nodejs.org/docs/v0.4.9/api/fs.html#fs.Stats) +- **Example**: (assuming root was `/User/dev/readdirp`) + + parentDir : 'test/bed/root_dir1', + fullParentDir : '/User/dev/readdirp/test/bed/root_dir1', + name : 'root_dir1_subdir1', + path : 'test/bed/root_dir1/root_dir1_subdir1', + fullPath : '/User/dev/readdirp/test/bed/root_dir1/root_dir1_subdir1', + stat : [ ... ] + +## Filters + +There are three different ways to specify filters for files and directories respectively. + +- **function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry + +- **glob string**: a string (e.g., `*.js`) which is matched using [minimatch](https://github.com/isaacs/minimatch), so go there for more + information. + + Globstars (`**`) are not supported since specifiying a recursive pattern for an already recursive function doesn't make sense. + + Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files. + +- **array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns otherwise an error is thrown. + + `[ '*.json', '*.js' ]` includes all JavaScript and Json files. + + + `[ '!.git', '!node_modules' ]` includes all directories except the '.git' and 'node_modules'. + +Directories that do not pass a filter will not be recursed into. 
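The examples later in this README exercise the filter and `depth` options but not `entryType` or `lstat`; the following short sketch (an illustration added here, not part of the vendored README) combines them to list only directories, at most one level deep, stat'ed with `fs.lstat` so symlinked directories report as symlinks:

```javascript
var readdirp = require('readdirp');
var path = require('path');

// Directory entries only (entryType), at most one level below the root
// (depth), stat'ed with fs.lstat (lstat), skipping .git and node_modules.
readdirp({
    root: path.join(__dirname)
  , entryType: 'directories'
  , depth: 1
  , lstat: true
  , directoryFilter: [ '!.git', '!node_modules' ]
})
  .on('warn', function (err) { console.error('non-fatal error', err); })
  .on('error', function (err) { console.error('fatal error', err); })
  .on('data', function (entry) {
    console.log(entry.path, entry.stat.isSymbolicLink());
  });
```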
+ +## Callback API + +Although the stream api is recommended, readdirp also exposes a callback based api. + +***readdirp (options, callback1 [, callback2])*** + +If callback2 is given, callback1 functions as the **fileProcessed** callback, and callback2 as the **allProcessed** callback. + +If only callback1 is given, it functions as the **allProcessed** callback. + +### allProcessed + +- function with err and res parameters, e.g., `function (err, res) { ... }` +- **err**: array of errors that occurred during the operation, **res may still be present, even if errors occurred** +- **res**: collection of file/directory [entry infos](#entry-info) + +### fileProcessed + +- function with [entry info](#entry-info) parameter e.g., `function (entryInfo) { ... }` + + +# More Examples + +`on('error', ..)`, `on('warn', ..)` and `on('end', ..)` handling omitted for brevity + +```javascript +var readdirp = require('readdirp'); + +// Glob file filter +readdirp({ root: './test/bed', fileFilter: '*.js' }) + .on('data', function (entry) { + // do something with each JavaScript file entry + }); + +// Combined glob file filters +readdirp({ root: './test/bed', fileFilter: [ '*.js', '*.json' ] }) + .on('data', function (entry) { + // do something with each JavaScript and Json file entry + }); + +// Combined negated directory filters +readdirp({ root: './test/bed', directoryFilter: [ '!.git', '!*modules' ] }) + .on('data', function (entry) { + // do something with each file entry found outside '.git' or any modules directory + }); + +// Function directory filter +readdirp({ root: './test/bed', directoryFilter: function (di) { return di.name.length === 9; } }) + .on('data', function (entry) { + // do something with each file entry found inside directories whose name has length 9 + }); + +// Limiting depth +readdirp({ root: './test/bed', depth: 1 }) + .on('data', function (entry) { + // do something with each file entry found up to 1 subdirectory deep + }); + +// callback api +readdirp( + { root: '.' } + , function(fileInfo) { + // do something with file entry here + } + , function (err, res) { + // all done, move on or do final step for all file entries here + } +); +``` + +Try more examples by following [instructions](https://github.com/thlorenz/readdirp/blob/master/examples/Readme.md) +on how to get going. + +## stream api + +[stream-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api.js) + +Demonstrates error and data handling by listening to events emitted from the readdirp stream. + +## stream api pipe + +[stream-api-pipe.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api-pipe.js) + +Demonstrates error handling by listening to events emitted from the readdirp stream and how to pipe the data stream into +another destination stream. + +## grep + +[grep.js](https://github.com/thlorenz/readdirp/blob/master/examples/grep.js) + +Very naive implementation of grep, for demonstration purposes only. + +## using callback api + +[callback-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/callback-api.js) + +Shows how to pass callbacks in order to handle errors and/or data. + +## tests + +The [readdirp tests](https://github.com/thlorenz/readdirp/blob/master/test/readdirp.js) also will give you a good idea on +how things work. 
+ diff --git a/node_modules/readdirp/examples/Readme.md b/node_modules/readdirp/examples/Readme.md new file mode 100644 index 0000000..55fc461 --- /dev/null +++ b/node_modules/readdirp/examples/Readme.md @@ -0,0 +1,37 @@ +# readdirp examples + +## How to run the examples + +Assuming you installed readdirp (`npm install readdirp`), you can do the following: + +1. `npm explore readdirp` +2. `cd examples` +3. `npm install` + +At that point you can run the examples with node, i.e., `node grep`. + +## stream api + +[stream-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api.js) + +Demonstrates error and data handling by listening to events emitted from the readdirp stream. + +## stream api pipe + +[stream-api-pipe.js](https://github.com/thlorenz/readdirp/blob/master/examples/stream-api-pipe.js) + +Demonstrates error handling by listening to events emitted from the readdirp stream and how to pipe the data stream into +another destination stream. + +## grep + +[grep.js](https://github.com/thlorenz/readdirp/blob/master/examples/grep.js) + +Very naive implementation of grep, for demonstration purposes only. + +## using callback api + +[callback-api.js](https://github.com/thlorenz/readdirp/blob/master/examples/callback-api.js) + +Shows how to pass callbacks in order to handle errors and/or data. + diff --git a/node_modules/readdirp/examples/callback-api.js b/node_modules/readdirp/examples/callback-api.js new file mode 100644 index 0000000..39bd2d7 --- /dev/null +++ b/node_modules/readdirp/examples/callback-api.js @@ -0,0 +1,10 @@ +var readdirp = require('..'); + +readdirp({ root: '.', fileFilter: '*.js' }, function (errors, res) { + if (errors) { + errors.forEach(function (err) { + console.error('Error: ', err); + }); + } + console.log('all javascript files', res); +}); diff --git a/node_modules/readdirp/examples/grep.js b/node_modules/readdirp/examples/grep.js new file mode 100644 index 0000000..01d5f29 --- /dev/null +++ b/node_modules/readdirp/examples/grep.js @@ -0,0 +1,71 @@ +'use strict'; +var readdirp = require('..') + , util = require('util') + , fs = require('fs') + , path = require('path') + , es = require('event-stream') + ; + +function findLinesMatching (searchTerm) { + + return es.through(function (entry) { + var lineno = 0 + , matchingLines = [] + , fileStream = this; + + function filter () { + return es.mapSync(function (line) { + lineno++; + return ~line.indexOf(searchTerm) ? 
lineno + ': ' + line : undefined; + }); + } + + function aggregate () { + return es.through( + function write (data) { + matchingLines.push(data); + } + , function end () { + + // drop files that had no matches + if (matchingLines.length) { + var result = { file: entry, lines: matchingLines }; + + // pass result on to file stream + fileStream.emit('data', result); + } + this.emit('end'); + } + ); + } + + fs.createReadStream(entry.fullPath, { encoding: 'utf-8' }) + + // handle file contents line by line + .pipe(es.split('\n')) + + // keep only the lines that matched the term + .pipe(filter()) + + // aggregate all matching lines and delegate control back to the file stream + .pipe(aggregate()) + ; + }); +} + +console.log('grepping for "arguments"'); + +// create a stream of all javascript files found in this and all sub directories +readdirp({ root: path.join(__dirname), fileFilter: '*.js' }) + + // find all lines matching the term for each file (if none found, that file is ignored) + .pipe(findLinesMatching('arguments')) + + // format the results and output + .pipe( + es.mapSync(function (res) { + return '\n\n' + res.file.path + '\n\t' + res.lines.join('\n\t'); + }) + ) + .pipe(process.stdout) + ; diff --git a/node_modules/readdirp/examples/package.json b/node_modules/readdirp/examples/package.json new file mode 100644 index 0000000..2d9b341 --- /dev/null +++ b/node_modules/readdirp/examples/package.json @@ -0,0 +1,9 @@ +{ + "name": "readdirp-examples", + "version": "0.0.0", + "description": "Examples for readdirp.", + "dependencies": { + "tap-stream": "~0.1.0", + "event-stream": "~3.0.7" + } +} diff --git a/node_modules/readdirp/examples/stream-api-pipe.js b/node_modules/readdirp/examples/stream-api-pipe.js new file mode 100644 index 0000000..4003be8 --- /dev/null +++ b/node_modules/readdirp/examples/stream-api-pipe.js @@ -0,0 +1,19 @@ +var readdirp = require('..') + , path = require('path') + , through = require('through2') + +// print out all JavaScript files along with their size +readdirp({ root: path.join(__dirname), fileFilter: '*.js' }) + .on('warn', function (err) { console.error('non-fatal error', err); }) + .on('error', function (err) { console.error('fatal error', err); }) + .pipe(through.obj(function (entry, _, cb) { + this.push({ path: entry.path, size: entry.stat.size }); + cb(); + })) + .pipe(through.obj( + function (res, _, cb) { + this.push(JSON.stringify(res) + '\n'); + cb(); + }) + ) + .pipe(process.stdout); diff --git a/node_modules/readdirp/examples/stream-api.js b/node_modules/readdirp/examples/stream-api.js new file mode 100644 index 0000000..0f7b327 --- /dev/null +++ b/node_modules/readdirp/examples/stream-api.js @@ -0,0 +1,15 @@ +var readdirp = require('..') + , path = require('path'); + +readdirp({ root: path.join(__dirname), fileFilter: '*.js' }) + .on('warn', function (err) { + console.error('something went wrong when processing an entry', err); + }) + .on('error', function (err) { + console.error('something went fatally wrong and the stream was aborted', err); + }) + .on('data', function (entry) { + console.log('%s is ready for processing', entry.path); + // process entry here + }); + diff --git a/node_modules/readdirp/node_modules/readable-stream/.npmignore b/node_modules/readdirp/node_modules/readable-stream/.npmignore new file mode 100644 index 0000000..265ff73 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/.npmignore @@ -0,0 +1,8 @@ +build/ +test/ +examples/ +fs.js +zlib.js +.zuul.yml +.nyc_output +coverage diff --git 
a/node_modules/readdirp/node_modules/readable-stream/.travis.yml b/node_modules/readdirp/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..84504c9 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/.travis.yml @@ -0,0 +1,49 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - npm install -g npm +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: TASK=test + - node_js: '0.10' + env: TASK=test + - node_js: '0.11' + env: TASK=test + - node_js: '0.12' + env: TASK=test + - node_js: 1 + env: TASK=test + - node_js: 2 + env: TASK=test + - node_js: 3 + env: TASK=test + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 5 + env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest +script: "npm run $TASK" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/readdirp/node_modules/readable-stream/LICENSE b/node_modules/readdirp/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
diff --git a/node_modules/readdirp/node_modules/readable-stream/README.md b/node_modules/readdirp/node_modules/readable-stream/README.md new file mode 100644 index 0000000..9fb4fea --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/README.md @@ -0,0 +1,36 @@ +# readable-stream + +***Node-core v6.3.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core, including [documentation](doc/stream.md). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams WG Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/node_modules/readdirp/node_modules/readable-stream/doc/stream.md b/node_modules/readdirp/node_modules/readable-stream/doc/stream.md new file mode 100644 index 0000000..fc269c8 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/doc/stream.md @@ -0,0 +1,2015 @@ +# Stream + + Stability: 2 - Stable + +A stream is an abstract interface for working with streaming data in Node.js. +The `stream` module provides a base API that makes it easy to build objects +that implement the stream interface. + +There are many stream objects provided by Node.js. For instance, a +[request to an HTTP server][http-incoming-message] and [`process.stdout`][] +are both stream instances. + +Streams can be readable, writable, or both. All streams are instances of +[`EventEmitter`][]. + +The `stream` module can be accessed using: + +```js +const stream = require('stream'); +``` + +While it is important for all Node.js users to understand how streams works, +the `stream` module itself is most useful for developer's that are creating new +types of stream instances. Developer's who are primarily *consuming* stream +objects will rarely (if ever) have need to use the `stream` module directly. + +## Organization of this document + +This document is divided into two primary sections and third section for +additional notes. 
The first section explains the elements of the stream API that +are required to *use* streams within an application. The second section explains +the elements of the API that are required to *implement* new types of streams. + +## Types of Streams + +There are four fundamental stream types within Node.js: + +* [Readable][] - streams from which data can be read (for example + [`fs.createReadStream()`][]). +* [Writable][] - streams to which data can be written (for example + [`fs.createWriteStream()`][]). +* [Duplex][] - streams that are both Readable and Writable (for example + [`net.Socket`][]). +* [Transform][] - Duplex streams that can modify or transform the data as it + is written and read (for example [`zlib.createDeflate()`][]). + +### Object Mode + +All streams created by Node.js APIs operate exclusively on strings and `Buffer` +objects. It is possible, however, for stream implementations to work with other +types of JavaScript values (with the exception of `null` which serves a special +purpose within streams). Such streams are considered to operate in "object +mode". + +Stream instances are switched into object mode using the `objectMode` option +when the stream is created. Attempting to switch an existing stream into +object mode is not safe. + +### Buffering + + + +Both [Writable][] and [Readable][] streams will store data in an internal +buffer that can be retrieved using `writable._writableState.getBuffer()` or +`readable._readableState.buffer`, respectively. + +The amount of data potentially buffered depends on the `highWaterMark` option +passed into the streams constructor. For normal streams, the `highWaterMark` +option specifies a total number of bytes. For streams operating in object mode, +the `highWaterMark` specifies a total number of objects. + +Data is buffered in Readable streams when the implementation calls +[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not +call [`stream.read()`][stream-read], the data will sit in the internal +queue until it is consumed. + +Once the total size of the internal read buffer reaches the threshold specified +by `highWaterMark`, the stream will temporarily stop reading data from the +underlying resource until the data currently buffered can be consumed (that is, +the stream will stop calling the internal `readable._read()` method that is +used to fill the read buffer). + +Data is buffered in Writable streams when the +[`writable.write(chunk)`][stream-write] method is called repeatedly. While the +total size of the internal write buffer is below the threshold set by +`highWaterMark`, calls to `writable.write()` will return `true`. Once the +the size of the internal buffer reaches or exceeds the `highWaterMark`, `false` +will be returned. + +A key goal of the `stream` API, and in particular the [`stream.pipe()`] method, +is to limit the buffering of data to acceptable levels such that sources and +destinations of differing speeds will not overwhelm the available memory. + +Because [Duplex][] and [Transform][] streams are both Readable and Writable, +each maintain *two* separate internal buffers used for reading and writing, +allowing each side to operate independently of the other while maintaining an +appropriate and efficient flow of data. For example, [`net.Socket`][] instances +are [Duplex][] streams whose Readable side allows consumption of data received +*from* the socket and whose Writable side allows writing data *to* the socket. 
+Because data may be written to the socket at a faster or slower rate than data +is received, it is important each side operate (and buffer) independently of +the other. + +## API for Stream Consumers + + + +Almost all Node.js applications, no matter how simple, use streams in some +manner. The following is an example of using streams in a Node.js application +that implements an HTTP server: + +```js +const http = require('http'); + +const server = http.createServer( (req, res) => { + // req is an http.IncomingMessage, which is a Readable Stream + // res is an http.ServerResponse, which is a Writable Stream + + let body = ''; + // Get the data as utf8 strings. + // If an encoding is not set, Buffer objects will be received. + req.setEncoding('utf8'); + + // Readable streams emit 'data' events once a listener is added + req.on('data', (chunk) => { + body += chunk; + }); + + // the end event indicates that the entire body has been received + req.on('end', () => { + try { + const data = JSON.parse(body); + } catch (er) { + // uh oh! bad json! + res.statusCode = 400; + return res.end(`error: ${er.message}`); + } + + // write back something interesting to the user: + res.write(typeof data); + res.end(); + }); +}); + +server.listen(1337); + +// $ curl localhost:1337 -d '{}' +// object +// $ curl localhost:1337 -d '"foo"' +// string +// $ curl localhost:1337 -d 'not json' +// error: Unexpected token o +``` + +[Writable][] streams (such as `res` in the example) expose methods such as +`write()` and `end()` that are used to write data onto the stream. + +[Readable][] streams use the [`EventEmitter`][] API for notifying application +code when data is available to be read off the stream. That available data can +be read from the stream in multiple ways. + +Both [Writable][] and [Readable][] streams use the [`EventEmitter`][] API in +various ways to communicate the current state of the stream. + +[Duplex][] and [Transform][] streams are both [Writable][] and [Readable][]. + +Applications that are either writing data to or consuming data from a stream +are not required to implement the stream interfaces directly and will generally +have no reason to call `require('stream')`. + +Developers wishing to implement new types of streams should refer to the +section [API for Stream Implementers][]. + +### Writable Streams + +Writable streams are an abstraction for a *destination* to which data is +written. + +Examples of [Writable][] streams include: + +* [HTTP requests, on the client][] +* [HTTP responses, on the server][] +* [fs write streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdin][] +* [`process.stdout`][], [`process.stderr`][] + +*Note*: Some of these examples are actually [Duplex][] streams that implement +the [Writable][] interface. + +All [Writable][] streams implement the interface defined by the +`stream.Writable` class. + +While specific instances of [Writable][] streams may differ in various ways, +all Writable streams follow the same fundamental usage pattern as illustrated +in the example below: + +```js +const myStream = getWritableStreamSomehow(); +myStream.write('some data'); +myStream.write('some more data'); +myStream.end('done writing data'); +``` + +#### Class: stream.Writable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. 
The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all Writable streams will emit the `'close'` event. + +##### Event: 'drain' + + +If a call to [`stream.write(chunk)`][stream-write] returns `false`, the +`'drain'` event will be emitted when it is appropriate to resume writing data +to the stream. + +```js +// Write the data to the supplied writable stream one million times. +// Be attentive to back-pressure. +function writeOneMillionTimes(writer, data, encoding, callback) { + let i = 1000000; + write(); + function write() { + var ok = true; + do { + i--; + if (i === 0) { + // last time! + writer.write(data, encoding, callback); + } else { + // see if we should continue, or wait + // don't pass the callback, because we're not done yet. + ok = writer.write(data, encoding); + } + } while (i > 0 && ok); + if (i > 0) { + // had to stop early! + // write some more once it drains + writer.once('drain', write); + } + } +} +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event is emitted if an error occurred while writing or piping +data. The listener callback is passed a single `Error` argument when called. + +*Note*: The stream is not closed when the `'error'` event is emitted. + +##### Event: 'finish' + + +The `'finish'` event is emitted after the [`stream.end()`][stream-end] method +has been called, and all data has been flushed to the underlying system. + +```js +const writer = getWritableStreamSomehow(); +for (var i = 0; i < 100; i ++) { + writer.write('hello, #${i}!\n'); +} +writer.end('This is the end\n'); +writer.on('finish', () => { + console.error('All writes are now complete.'); +}); +``` + +##### Event: 'pipe' + + +* `src` {stream.Readable} source stream that is piping to this writable + +The `'pipe'` event is emitted when the [`stream.pipe()`][] method is called on +a readable stream, adding this writable to its set of destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('pipe', (src) => { + console.error('something is piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +``` + +##### Event: 'unpipe' + + +* `src` {[Readable][] Stream} The source stream that + [unpiped][`stream.unpipe()`] this writable + +The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called +on a [Readable][] stream, removing this [Writable][] from its set of +destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('unpipe', (src) => { + console.error('Something has stopped piping into the writer.'); + assert.equal(src, reader); +}); +reader.pipe(writer); +reader.unpipe(writer); +``` + +##### writable.cork() + + +The `writable.cork()` method forces all written data to be buffered in memory. +The buffered data will be flushed when either the [`stream.uncork()`][] or +[`stream.end()`][stream-end] methods are called. + +The primary intent of `writable.cork()` is to avoid a situation where writing +many small chunks of data to a stream do not cause an backup in the internal +buffer that would have an adverse impact on performance. In such situations, +implementations that implement the `writable._writev()` method can perform +buffered writes in a more optimized manner. + +##### writable.end([chunk][, encoding][, callback]) + + +* `chunk` {String|Buffer|any} Optional data to write. For streams not operating + in object mode, `chunk` must be a string or a `Buffer`. 
For object mode + streams, `chunk` may be any JavaScript value other than `null`. +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Optional callback for when the stream is finished + +Calling the `writable.end()` method signals that no more data will be written +to the [Writable][]. The optional `chunk` and `encoding` arguments allow one +final additional chunk of data to be written immediately before closing the +stream. If provided, the optional `callback` function is attached as a listener +for the [`'finish'`][] event. + +Calling the [`stream.write()`][stream-write] method after calling +[`stream.end()`][stream-end] will raise an error. + +```js +// write 'hello, ' and then end with 'world!' +const file = fs.createWriteStream('example.txt'); +file.write('hello, '); +file.end('world!'); +// writing more now is not allowed! +``` + +##### writable.setDefaultEncoding(encoding) + + +* `encoding` {String} The new default encoding +* Return: `this` + +The `writable.setDefaultEncoding()` method sets the default `encoding` for a +[Writable][] stream. + +##### writable.uncork() + + +The `writable.uncork()` method flushes all data buffered since +[`stream.cork()`][] was called. + +When using `writable.cork()` and `writable.uncork()` to manage the buffering +of writes to a stream, it is recommended that calls to `writable.uncork()` be +deferred using `process.nextTick()`. Doing so allows batching of all +`writable.write()` calls that occur within a given Node.js event loop phase. + +```js +stream.cork(); +stream.write('some '); +stream.write('data '); +process.nextTick(() => stream.uncork()); +``` + +If the `writable.cork()` method is called multiple times on a stream, the same +number of calls to `writable.uncork()` must be called to flush the buffered +data. + +``` +stream.cork(); +stream.write('some '); +stream.cork(); +stream.write('data '); +process.nextTick(() => { + stream.uncork(); + // The data will not be flushed until uncork() is called a second time. + stream.uncork(); +}); +``` + +##### writable.write(chunk[, encoding][, callback]) + + +* `chunk` {String|Buffer} The data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Callback for when this chunk of data is flushed +* Returns: {Boolean} `false` if the stream wishes for the calling code to + wait for the `'drain'` event to be emitted before continuing to write + additional data; otherwise `true`. + +The `writable.write()` method writes some data to the stream, and calls the +supplied `callback` once the data has been fully handled. If an error +occurs, the `callback` *may or may not* be called with the error as its +first argument. To reliably detect write errors, add a listener for the +`'error'` event. + +The return value indicates whether the written `chunk` was buffered internally +and the buffer has exceeded the `highWaterMark` configured when the stream was +created. If `false` is returned, further attempts to write data to the stream +should be paused until the `'drain'` event is emitted. + +A Writable stream in object mode will always ignore the `encoding` argument. + +### Readable Streams + +Readable streams are an abstraction for a *source* from which data is +consumed. 
+ +Examples of Readable streams include: + +* [HTTP responses, on the client][http-incoming-message] +* [HTTP requests, on the server][http-incoming-message] +* [fs read streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdout and stderr][] +* [`process.stdin`][] + +All [Readable][] streams implement the interface defined by the +`stream.Readable` class. + +#### Two Modes + +Readable streams effectively operate in one of two modes: flowing and paused. + +When in flowing mode, data is read from the underlying system automatically +and provided to an application as quickly as possible using events via the +[`EventEmitter`][] interface. + +In paused mode, the [`stream.read()`][stream-read] method must be called +explicitly to read chunks of data from the stream. + +All [Readable][] streams begin in paused mode but can be switched to flowing +mode in one of the following ways: + +* Adding a [`'data'`][] event handler. +* Calling the [`stream.resume()`][stream-resume] method. +* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. + +The Readable can switch back to paused mode using one of the following: + +* If there are no pipe destinations, by calling the + [`stream.pause()`][stream-pause] method. +* If there are pipe destinations, by removing any [`'data'`][] event + handlers, and removing all pipe destinations by calling the + [`stream.unpipe()`][] method. + +The important concept to remember is that a Readable will not generate data +until a mechanism for either consuming or ignoring that data is provided. If +the consuming mechanism is disabled or taken away, the Readable will *attempt* +to stop generating the data. + +*Note*: For backwards compatibility reasons, removing [`'data'`][] event +handlers will **not** automatically pause the stream. Also, if there are piped +destinations, then calling [`stream.pause()`][stream-pause] will not guarantee +that the stream will *remain* paused once those destinations drain and ask for +more data. + +*Note*: If a [Readable][] is switched into flowing mode and there are no +consumers available handle the data, that data will be lost. This can occur, +for instance, when the `readable.resume()` method is called without a listener +attached to the `'data'` event, or when a `'data'` event handler is removed +from the stream. + +#### Three States + +The "two modes" of operation for a Readable stream are a simplified abstraction +for the more complicated internal state management that is happening within the +Readable stream implementation. + +Specifically, at any given point in time, every Readable is in one of three +possible states: + +* `readable._readableState.flowing = null` +* `readable._readableState.flowing = false` +* `readable._readableState.flowing = true` + +When `readable._readableState.flowing` is `null`, no mechanism for consuming the +streams data is provided so the stream will not generate its data. + +Attaching a listener for the `'data'` event, calling the `readable.pipe()` +method, or calling the `readable.resume()` method will switch +`readable._readableState.flowing` to `true`, causing the Readable to begin +actively emitting events as data is generated. + +Calling `readable.pause()`, `readable.unpipe()`, or receiving "back pressure" +will cause the `readable._readableState.flowing` to be set as `false`, +temporarily halting the flowing of events but *not* halting the generation of +data. 
+ +While `readable._readableState.flowing` is `false`, data may be accumulating +within the streams internal buffer. + +#### Choose One + +The Readable stream API evolved across multiple Node.js versions and provides +multiple methods of consuming stream data. In general, developers should choose +*one* of the methods of consuming data and *should never* use multiple methods +to consume data from a single stream. + +Use of the `readable.pipe()` method is recommended for most users as it has been +implemented to provide the easiest way of consuming stream data. Developers that +require more fine-grained control over the transfer and generation of data can +use the [`EventEmitter`][] and `readable.pause()`/`readable.resume()` APIs. + +#### Class: stream.Readable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all [Readable][] streams will emit the `'close'` event. + +##### Event: 'data' + + +* `chunk` {Buffer|String|any} The chunk of data. For streams that are not + operating in object mode, the chunk will be either a string or `Buffer`. + For streams that are in object mode, the chunk can be any JavaScript value + other than `null`. + +The `'data'` event is emitted whenever the stream is relinquishing ownership of +a chunk of data to a consumer. This may occur whenever the stream is switched +in flowing mode by calling `readable.pipe()`, `readable.resume()`, or by +attaching a listener callback to the `'data'` event. The `'data'` event will +also be emitted whenever the `readable.read()` method is called and a chunk of +data is available to be returned. + +Attaching a `'data'` event listener to a stream that has not been explicitly +paused will switch the stream into flowing mode. Data will then be passed as +soon as it is available. + +The listener callback will be passed the chunk of data as a string if a default +encoding has been specified for the stream using the +`readable.setEncoding()` method; otherwise the data will be passed as a +`Buffer`. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +``` + +##### Event: 'end' + + +The `'end'` event is emitted when there is no more data to be consumed from +the stream. + +*Note*: The `'end'` event **will not be emitted** unless the data is +completely consumed. This can be accomplished by switching the stream into +flowing mode, or by calling [`stream.read()`][stream-read] repeatedly until +all data has been consumed. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +readable.on('end', () => { + console.log('There will be no more data.'); +}); +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event may be emitted by a Readable implementation at any time. +Typically, this may occur if the underlying stream in unable to generate data +due to an underlying internal failure, or when a stream implementation attempts +to push an invalid chunk of data. + +The listener callback will be passed a single `Error` object. + +##### Event: 'readable' + + +The `'readable'` event is emitted when there is data available to be read from +the stream. 
In some cases, attaching a listener for the `'readable'` event will +cause some amount of data to be read into an internal buffer. + +```javascript +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + // there is some data to read now +}); +``` +The `'readable'` event will also be emitted once the end of the stream data +has been reached but before the `'end'` event is emitted. + +Effectively, the `'readable'` event indicates that the stream has new +information: either new data is available or the end of the stream has been +reached. In the former case, [`stream.read()`][stream-read] will return the +available data. In the latter case, [`stream.read()`][stream-read] will return +`null`. For instance, in the following example, `foo.txt` is an empty file: + +```js +const fs = require('fs'); +const rr = fs.createReadStream('foo.txt'); +rr.on('readable', () => { + console.log('readable:', rr.read()); +}); +rr.on('end', () => { + console.log('end'); +}); +``` + +The output of running this script is: + +``` +$ node test.js +readable: null +end +``` + +*Note*: In general, the `readable.pipe()` and `'data'` event mechanisms are +preferred over the use of the `'readable'` event. + +##### readable.isPaused() + + +* Return: {Boolean} + +The `readable.isPaused()` method returns the current operating state of the +Readable. This is used primarily by the mechanism that underlies the +`readable.pipe()` method. In most typical cases, there will be no reason to +use this method directly. + +```js +const readable = new stream.Readable + +readable.isPaused() // === false +readable.pause() +readable.isPaused() // === true +readable.resume() +readable.isPaused() // === false +``` + +##### readable.pause() + + +* Return: `this` + +The `readable.pause()` method will cause a stream in flowing mode to stop +emitting [`'data'`][] events, switching out of flowing mode. Any data that +becomes available will remain in the internal buffer. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); + readable.pause(); + console.log('There will be no additional data for 1 second.'); + setTimeout(() => { + console.log('Now data will start flowing again.'); + readable.resume(); + }, 1000); +}); +``` + +##### readable.pipe(destination[, options]) + + +* `destination` {stream.Writable} The destination for writing data +* `options` {Object} Pipe options + * `end` {Boolean} End the writer when the reader ends. Defaults to `true`. + +The `readable.pipe()` method attaches a [Writable][] stream to the `readable`, +causing it to switch automatically into flowing mode and push all of its data +to the attached [Writable][]. The flow of data will be automatically managed so +that the destination Writable stream is not overwhelmed by a faster Readable +stream. + +The following example pipes all of the data from the `readable` into a file +named `file.txt`: + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt' +readable.pipe(writable); +``` +It is possible to attach multiple Writable streams to a single Readable stream. 
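+
+For instance, a minimal sketch (reusing the hypothetical `getReadableStreamSomehow()`
+helper from the examples above) that fans the same data out to two files:
+
+```js
+const fs = require('fs');
+
+const readable = getReadableStreamSomehow();
+// Both destinations receive the same chunks as the data flows.
+readable.pipe(fs.createWriteStream('copy-one.txt'));
+readable.pipe(fs.createWriteStream('copy-two.txt'));
+```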
+ +The `readable.pipe()` method returns a reference to the *destination* stream +making it possible to set up chains of piped streams: + +```js +const r = fs.createReadStream('file.txt'); +const z = zlib.createGzip(); +const w = fs.createWriteStream('file.txt.gz'); +r.pipe(z).pipe(w); +``` + +By default, [`stream.end()`][stream-end] is called on the destination Writable +stream when the source Readable stream emits [`'end'`][], so that the +destination is no longer writable. To disable this default behavior, the `end` +option can be passed as `false`, causing the destination stream to remain open, +as illustrated in the following example: + +```js +reader.pipe(writer, { end: false }); +reader.on('end', () => { + writer.end('Goodbye\n'); +}); +``` + +One important caveat is that if the Readable stream emits an error during +processing, the Writable destination *is not closed* automatically. If an +error occurs, it will be necessary to *manually* close each stream in order +to prevent memory leaks. + +*Note*: The [`process.stderr`][] and [`process.stdout`][] Writable streams are +never closed until the Node.js process exits, regardless of the specified +options. + +##### readable.read([size]) + + +* `size` {Number} Optional argument to specify how much data to read. +* Return {String|Buffer|Null} + +The `readable.read()` method pulls some data out of the internal buffer and +returns it. If no data available to be read, `null` is returned. By default, +the data will be returned as a `Buffer` object unless an encoding has been +specified using the `readable.setEncoding()` method or the stream is operating +in object mode. + +The optional `size` argument specifies a specific number of bytes to read. If +`size` bytes are not available to be read, `null` will be returned *unless* +the stream has ended, in which case all of the data remaining in the internal +buffer will be returned (*even if it exceeds `size` bytes*). + +If the `size` argument is not specified, all of the data contained in the +internal buffer will be returned. + +The `readable.read()` method should only be called on Readable streams operating +in paused mode. In flowing mode, `readable.read()` is called automatically until +the internal buffer is fully drained. + +```js +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + var chunk; + while (null !== (chunk = readable.read())) { + console.log(`Received ${chunk.length} bytes of data.`); + } +}); +``` + +In general, it is recommended that developers avoid the use of the `'readable'` +event and the `readable.read()` method in favor of using either +`readable.pipe()` or the `'data'` event. + +A Readable stream in object mode will always return a single item from +a call to [`readable.read(size)`][stream-read], regardless of the value of the +`size` argument. + +*Note:* If the `readable.read()` method returns a chunk of data, a `'data'` +event will also be emitted. + +*Note*: Calling [`stream.read([size])`][stream-read] after the [`'end'`][] +event has been emitted will return `null`. No runtime error will be raised. + +##### readable.resume() + + +* Return: `this` + +The `readable.resume()` method causes an explicitly paused Readable stream to +resume emitting [`'data'`][] events, switching the stream into flowing mode. 
+ +The `readable.resume()` method can be used to fully consume the data from a +stream without actually processing any of that data as illustrated in the +following example: + +```js +getReadableStreamSomehow() + .resume() + .on('end', () => { + console.log('Reached the end, but did not read anything.'); + }); +``` + +##### readable.setEncoding(encoding) + + +* `encoding` {String} The encoding to use. +* Return: `this` + +The `readable.setEncoding()` method sets the default character encoding for +data read from the Readable stream. + +Setting an encoding causes the stream data +to be returned as string of the specified encoding rather than as `Buffer` +objects. For instance, calling `readable.setEncoding('utf8')` will cause the +output data will be interpreted as UTF-8 data, and passed as strings. Calling +`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal +string format. + +The Readable stream will properly handle multi-byte characters delivered through +the stream that would otherwise become improperly decoded if simply pulled from +the stream as `Buffer` objects. + +Encoding can be disabled by calling `readable.setEncoding(null)`. This approach +is useful when working with binary data or with large multi-byte strings spread +out over multiple chunks. + +```js +const readable = getReadableStreamSomehow(); +readable.setEncoding('utf8'); +readable.on('data', (chunk) => { + assert.equal(typeof chunk, 'string'); + console.log('got %d characters of string data', chunk.length); +}); +``` + +##### readable.unpipe([destination]) + + +* `destination` {stream.Writable} Optional specific stream to unpipe + +The `readable.unpipe()` method detaches a Writable stream previously attached +using the [`stream.pipe()`][] method. + +If the `destination` is not specified, then *all* pipes are detached. + +If the `destination` is specified, but no pipe is set up for it, then +the method does nothing. + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt', +// but only for the first second +readable.pipe(writable); +setTimeout(() => { + console.log('Stop writing to file.txt'); + readable.unpipe(writable); + console.log('Manually close the file stream'); + writable.end(); +}, 1000); +``` + +##### readable.unshift(chunk) + + +* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue + +The `readable.unshift()` method pushes a chunk of data back into the internal +buffer. This is useful in certain situations where a stream is being consumed by +code that needs to "un-consume" some amount of data that it has optimistically +pulled out of the source, so that the data can be passed on to some other party. + +*Note*: The `stream.unshift(chunk)` method cannot be called after the +[`'end'`][] event has been emitted or a runtime error will be thrown. + +Developers using `stream.unshift()` often should consider switching to +use of a [Transform][] stream instead. See the [API for Stream Implementers][] +section for more information. 
+ +```js +// Pull off a header delimited by \n\n +// use unshift() if we get too much +// Call the callback with (error, header, stream) +const StringDecoder = require('string_decoder').StringDecoder; +function parseHeader(stream, callback) { + stream.on('error', callback); + stream.on('readable', onReadable); + const decoder = new StringDecoder('utf8'); + var header = ''; + function onReadable() { + var chunk; + while (null !== (chunk = stream.read())) { + var str = decoder.write(chunk); + if (str.match(/\n\n/)) { + // found the header boundary + var split = str.split(/\n\n/); + header += split.shift(); + const remaining = split.join('\n\n'); + const buf = Buffer.from(remaining, 'utf8'); + if (buf.length) + stream.unshift(buf); + stream.removeListener('error', callback); + stream.removeListener('readable', onReadable); + // now the body of the message can be read from the stream. + callback(null, header, stream); + } else { + // still reading the header. + header += str; + } + } + } +} +``` + +*Note*: Unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` +will not end the reading process by resetting the internal reading state of the +stream. This can cause unexpected results if `readable.unshift()` is called +during a read (i.e. from within a [`stream._read()`][stream-_read] +implementation on a custom stream). Following the call to `readable.unshift()` +with an immediate [`stream.push('')`][stream-push] will reset the reading state +appropriately, however it is best to simply avoid calling `readable.unshift()` +while in the process of performing a read. + +##### readable.wrap(stream) + + +* `stream` {Stream} An "old style" readable stream + +Versions of Node.js prior to v0.10 had streams that did not implement the +entire `stream` module API as it is currently defined. (See [Compatibility][] +for more information.) + +When using an older Node.js library that emits [`'data'`][] events and has a +[`stream.pause()`][stream-pause] method that is advisory only, the +`readable.wrap()` method can be used to create a [Readable][] stream that uses +the old stream as its data source. + +It will rarely be necessary to use `readable.wrap()` but the method has been +provided as a convenience for interacting with older Node.js applications and +libraries. + +For example: + +```js +const OldReader = require('./old-api-module.js').OldReader; +const Readable = require('stream').Readable; +const oreader = new OldReader; +const myReader = new Readable().wrap(oreader); + +myReader.on('readable', () => { + myReader.read(); // etc. +}); +``` + +### Duplex and Transform Streams + +#### Class: stream.Duplex + + + + +Duplex streams are streams that implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Duplex streams include: + +* [TCP sockets][] +* [zlib streams][zlib] +* [crypto streams][crypto] + +#### Class: stream.Transform + + + + +Transform streams are [Duplex][] streams where the output is in some way +related to the input. Like all [Duplex][] streams, Transform streams +implement both the [Readable][] and [Writable][] interfaces. + +Examples of Transform streams include: + +* [zlib streams][zlib] +* [crypto streams][crypto] + + +## API for Stream Implementers + + + +The `stream` module API has been designed to make it possible to easily +implement streams using JavaScript's prototypical inheritance model. 
+
+First, a stream developer would declare a new JavaScript class that extends one
+of the four basic stream classes (`stream.Writable`, `stream.Readable`,
+`stream.Duplex`, or `stream.Transform`), making sure to call the appropriate
+parent class constructor:
+
+```js
+const Writable = require('stream').Writable;
+
+class MyWritable extends Writable {
+  constructor(options) {
+    super(options);
+  }
+}
+```
+
+The new stream class must then implement one or more specific methods, depending
+on the type of stream being created, as detailed in the chart below:
+
+| Use-case | Class | Method(s) to implement |
+| -------- | ----- | ---------------------- |
+| Reading only | [Readable](#stream_class_stream_readable) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform) | [_transform][stream-_transform], [_flush][stream-_flush] |
+ +*Note*: The implementation code for a stream should *never* call the "public" +methods of a stream that are intended for use by consumers (as described in +the [API for Stream Consumers][] section). Doing so may lead to adverse +side effects in application code consuming the stream. + +### Simplified Construction + +For many simple cases, it is possible to construct a stream without relying on +inheritance. This can be accomplished by directly creating instances of the +`stream.Writable`, `stream.Readable`, `stream.Duplex` or `stream.Transform` +objects and passing appropriate methods as constructor options. + +For example: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + } +}); +``` + +### Implementing a Writable Stream + +The `stream.Writable` class is extended to implement a [Writable][] stream. + +Custom Writable streams *must* call the `new stream.Writable([options])` +constructor and implement the `writable._write()` method. The +`writable._writev()` method *may* also be implemented. + +#### Constructor: new stream.Writable([options]) + +* `options` {Object} + * `highWaterMark` {Number} Buffer level when + [`stream.write()`][stream-write] starts returning `false`. Defaults to + `16384` (16kb), or `16` for `objectMode` streams. + * `decodeStrings` {Boolean} Whether or not to decode strings into + Buffers before passing them to [`stream._write()`][stream-_write]. + Defaults to `true` + * `objectMode` {Boolean} Whether or not the + [`stream.write(anyObj)`][stream-write] is a valid operation. When set, + it becomes possible to write JavaScript values other than string or + `Buffer` if supported by the stream implementation. Defaults to `false` + * `write` {Function} Implementation for the + [`stream._write()`][stream-_write] method. + * `writev` {Function} Implementation for the + [`stream._writev()`][stream-_writev] method. + +For example: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + // Calls the stream.Writable() constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Writable = require('stream').Writable; +const util = require('util'); + +function MyWritable(options) { + if (!(this instanceof MyWritable)) + return new MyWritable(options); + Writable.call(this, options); +} +util.inherits(MyWritable, Writable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + }, + writev(chunks, callback) { + // ... + } +}); +``` + +#### writable.\_write(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be written. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then `encoding` is the + character encoding of that string. If chunk is a `Buffer`, or if the + stream is operating in object mode, `encoding` may be ignored. +* `callback` {Function} Call this function (optionally with an error + argument) when processing is complete for the supplied chunk. + +All Writable stream implementations must provide a +[`writable._write()`][stream-_write] method to send data to the underlying +resource. + +*Note*: [Transform][] streams provide their own implementation of the +[`writable._write()`][stream-_write]. 
+ +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Writable +class methods only. + +The `callback` method must be called to signal either that the write completed +successfully or failed with an error. The first argument passed to the +`callback` must be the `Error` object if the call failed or `null` if the +write succeeded. + +It is important to note that all calls to `writable.write()` that occur between +the time `writable._write()` is called and the `callback` is called will cause +the written data to be buffered. Once the `callback` is invoked, the stream will +emit a `'drain'` event. If a stream implementation is capable of processing +multiple chunks of data at once, the `writable._writev()` method should be +implemented. + +If the `decodeStrings` property is set in the constructor options, then +`chunk` may be a string rather than a Buffer, and `encoding` will +indicate the character encoding of the string. This is to support +implementations that have an optimized handling for certain string +data encodings. If the `decodeStrings` property is explicitly set to `false`, +the `encoding` argument can be safely ignored, and `chunk` will always be a +`Buffer`. + +The `writable._write()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### writable.\_writev(chunks, callback) + +* `chunks` {Array} The chunks to be written. Each chunk has following + format: `{ chunk: ..., encoding: ... }`. +* `callback` {Function} A callback function (optionally with an error + argument) to be invoked when processing is complete for the supplied chunks. + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Writable +class methods only. + +The `writable._writev()` method may be implemented in addition to +`writable._write()` in stream implementations that are capable of processing +multiple chunks of data at once. If implemented, the method will be called with +all chunks of data currently buffered in the write queue. + +The `writable._writev()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### Errors While Writing + +It is recommended that errors occurring during the processing of the +`writable._write()` and `writable._writev()` methods are reported by invoking +the callback and passing the error as the first argument. This will cause an +`'error'` event to be emitted by the Writable. Throwing an Error from within +`writable._write()` can result in expected and inconsistent behavior depending +on how the stream is being used. Using the callback ensures consistent and +predictable handling of errors. + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + if (chunk.toString().indexOf('a') >= 0) { + callback(new Error('chunk is invalid')); + } else { + callback(); + } + } +}); +``` + +#### An Example Writable Stream + +The following illustrates a rather simplistic (and somewhat pointless) custom +Writable stream implementation. 
While this specific Writable stream instance +is not of any real particular usefulness, the example illustrates each of the +required elements of a custom [Writable][] stream instance: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + super(options); + } + + _write(chunk, encoding, callback) { + if (chunk.toString().indexOf('a') >= 0) { + callback(new Error('chunk is invalid')); + } else { + callback(); + } + } +} +``` + +### Implementing a Readable Stream + +The `stream.Readable` class is extended to implement a [Readable][] stream. + +Custom Readable streams *must* call the `new stream.Readable([options])` +constructor and implement the `readable._read()` method. + +#### new stream.Readable([options]) + +* `options` {Object} + * `highWaterMark` {Number} The maximum number of bytes to store in + the internal buffer before ceasing to read from the underlying + resource. Defaults to `16384` (16kb), or `16` for `objectMode` streams + * `encoding` {String} If specified, then buffers will be decoded to + strings using the specified encoding. Defaults to `null` + * `objectMode` {Boolean} Whether this stream should behave + as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns + a single value instead of a Buffer of size n. Defaults to `false` + * `read` {Function} Implementation for the [`stream._read()`][stream-_read] + method. + +For example: + +```js +const Readable = require('stream').Readable; + +class MyReadable extends Readable { + constructor(options) { + // Calls the stream.Readable(options) constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Readable = require('stream').Readable; +const util = require('util'); + +function MyReadable(options) { + if (!(this instanceof MyReadable)) + return new MyReadable(options); + Readable.call(this, options); +} +util.inherits(MyReadable, Readable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Readable = require('stream').Readable; + +const myReadable = new Readable({ + read(size) { + // ... + } +}); +``` + +#### readable.\_read(size) + +* `size` {Number} Number of bytes to read asynchronously + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called only by the internal Readable +class methods only. + +All Readable stream implementations must provide an implementation of the +`readable._read()` method to fetch data from the underlying resource. + +When `readable._read()` is called, if data is available from the resource, the +implementation should begin pushing that data into the read queue using the +[`this.push(dataChunk)`][stream-push] method. `_read()` should continue reading +from the resource and pushing data until `readable.push()` returns `false`. Only +when `_read()` is called again after it has stopped should it resume pushing +additional data onto the queue. + +*Note*: Once the `readable._read()` method has been called, it will not be +called again until the [`readable.push()`][stream-push] method is called. + +The `size` argument is advisory. For implementations where a "read" is a +single operation that returns data can use the `size` argument to determine how +much data to fetch. Other implementations may ignore this argument and simply +provide data whenever it becomes available. There is no need to "wait" until +`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. 
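+
+As a rough sketch (the `ArraySource` class and its in-memory backing array are
+illustrative only, not part of the API), an implementation might ignore `size`
+entirely and push whatever it has on hand:
+
+```js
+const Readable = require('stream').Readable;
+
+class ArraySource extends Readable {
+  constructor(items, options) {
+    super(options);
+    this._items = items.slice();
+  }
+
+  // `size` is only advisory; push one available chunk per call and
+  // signal EOF with null once the array is exhausted.
+  _read(size) {
+    if (this._items.length === 0) {
+      this.push(null);
+      return;
+    }
+    this.push(Buffer.from(String(this._items.shift()) + '\n'));
+  }
+}
+```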
+
+The `readable._read()` method is prefixed with an underscore because it is
+internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### readable.push(chunk[, encoding])
+
+* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
+  Buffer encoding, such as `'utf8'` or `'ascii'`
+* Returns {Boolean} `true` if additional chunks of data may continue to be
+  pushed; `false` otherwise.
+
+When `chunk` is a `Buffer` or `string`, the `chunk` of data will be added to the
+internal queue for users of the stream to consume. Passing `chunk` as `null`
+signals the end of the stream (EOF), after which no more data can be written.
+
+When the Readable is operating in paused mode, the data added with
+`readable.push()` can be read out by calling the
+[`readable.read()`][stream-read] method when the [`'readable'`][] event is
+emitted.
+
+When the Readable is operating in flowing mode, the data added with
+`readable.push()` will be delivered by emitting a `'data'` event.
+
+The `readable.push()` method is designed to be as flexible as possible. For
+example, when wrapping a lower-level source that provides some form of
+pause/resume mechanism, and a data callback, the low-level source can be wrapped
+by the custom Readable instance as illustrated in the following example:
+
+```js
+// source is an object with readStop() and readStart() methods,
+// and an `ondata` member that gets called when it has data, and
+// an `onend` member that gets called when the data is over.
+
+class SourceWrapper extends Readable {
+  constructor(options) {
+    super(options);
+
+    this._source = getLowlevelSourceObject();
+
+    // Every time there's data, push it into the internal buffer.
+    this._source.ondata = (chunk) => {
+      // if push() returns false, then stop reading from source
+      if (!this.push(chunk))
+        this._source.readStop();
+    };
+
+    // When the source ends, push the EOF-signaling `null` chunk
+    this._source.onend = () => {
+      this.push(null);
+    };
+  }
+  // _read will be called when the stream wants to pull more data in
+  // the advisory size argument is ignored in this case.
+  _read(size) {
+    this._source.readStart();
+  }
+}
+```
+
+*Note*: The `readable.push()` method is intended to be called only by Readable
+implementers, and only from within the `readable._read()` method.
+
+#### Errors While Reading
+
+It is recommended that errors occurring during the processing of the
+`readable._read()` method are emitted using the `'error'` event rather than
+being thrown. Throwing an Error from within `readable._read()` can result in
+unexpected and inconsistent behavior depending on whether the stream is
+operating in flowing or paused mode. Using the `'error'` event ensures
+consistent and predictable handling of errors.
+
+```js
+const Readable = require('stream').Readable;
+
+const myReadable = new Readable({
+  read(size) {
+    if (checkSomeErrorCondition()) {
+      process.nextTick(() => this.emit('error', new Error('some error')));
+      return;
+    }
+    // do some work
+  }
+});
+```
+
+#### An Example Counting Stream
+
+The following is a basic example of a Readable stream that emits the numerals
+from 1 to 1,000,000 in ascending order, and then ends.
+
+```js
+const Readable = require('stream').Readable;
+
+class Counter extends Readable {
+  constructor(opt) {
+    super(opt);
+    this._max = 1000000;
+    this._index = 1;
+  }
+
+  _read() {
+    var i = this._index++;
+    if (i > this._max)
+      this.push(null);
+    else {
+      var str = '' + i;
+      var buf = Buffer.from(str, 'ascii');
+      this.push(buf);
+    }
+  }
+}
+```
+
+### Implementing a Duplex Stream
+
+A [Duplex][] stream is one that implements both [Readable][] and [Writable][],
+such as a TCP socket connection.
+
+Because JavaScript does not have support for multiple inheritance, the
+`stream.Duplex` class is extended to implement a [Duplex][] stream (as opposed
+to extending the `stream.Readable` *and* `stream.Writable` classes).
+
+*Note*: The `stream.Duplex` class prototypically inherits from `stream.Readable`
+and parasitically from `stream.Writable`.
+
+Custom Duplex streams *must* call the `new stream.Duplex([options])`
+constructor and implement *both* the `readable._read()` and
+`writable._write()` methods.
+
+#### new stream.Duplex(options)
+
+* `options` {Object} Passed to both Writable and Readable
+  constructors. Also has the following fields:
+  * `allowHalfOpen` {Boolean} Defaults to `true`. If set to `false`, then
+    the stream will automatically end the readable side when the
+    writable side ends and vice versa.
+  * `readableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode`
+    for readable side of the stream. Has no effect if `objectMode`
+    is `true`.
+  * `writableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode`
+    for writable side of the stream. Has no effect if `objectMode`
+    is `true`.
+
+For example:
+
+```js
+const Duplex = require('stream').Duplex;
+
+class MyDuplex extends Duplex {
+  constructor(options) {
+    super(options);
+  }
+}
+```
+
+Or, when using pre-ES6 style constructors:
+
+```js
+const Duplex = require('stream').Duplex;
+const util = require('util');
+
+function MyDuplex(options) {
+  if (!(this instanceof MyDuplex))
+    return new MyDuplex(options);
+  Duplex.call(this, options);
+}
+util.inherits(MyDuplex, Duplex);
+```
+
+Or, using the Simplified Constructor approach:
+
+```js
+const Duplex = require('stream').Duplex;
+
+const myDuplex = new Duplex({
+  read(size) {
+    // ...
+  },
+  write(chunk, encoding, callback) {
+    // ...
+  }
+});
+```
+
+#### An Example Duplex Stream
+
+The following illustrates a simple example of a Duplex stream that wraps a
+hypothetical lower-level source object to which data can be written, and
+from which data can be read, albeit using an API that is not compatible with
+Node.js streams.
+
+```js
+const Duplex = require('stream').Duplex;
+const kSource = Symbol('source');
+
+class MyDuplex extends Duplex {
+  constructor(source, options) {
+    super(options);
+    this[kSource] = source;
+  }
+
+  _write(chunk, encoding, callback) {
+    // The underlying source only deals with strings
+    if (Buffer.isBuffer(chunk))
+      chunk = chunk.toString(encoding);
+    this[kSource].writeSomeData(chunk, encoding);
+    callback();
+  }
+
+  _read(size) {
+    this[kSource].fetchSomeData(size, (data, encoding) => {
+      this.push(Buffer.from(data, encoding));
+    });
+  }
+}
+```
+
+The most important aspect of a Duplex stream is that the Readable and Writable
+sides operate independently of one another despite co-existing within a single
+object instance.
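+
+As a minimal sketch of that independence, the following Duplex (built with the
+Simplified Constructor approach shown earlier) produces its own data on the
+Readable side regardless of what is written to the Writable side. The chunks it
+emits are arbitrary and chosen purely for illustration:
+
+```js
+const Duplex = require('stream').Duplex;
+
+// Data the Readable side will produce, unrelated to anything written.
+const chunksToRead = ['a', 'b', 'c'];
+
+const duplex = new Duplex({
+  read(size) {
+    // Push the next chunk, or `null` to signal EOF when done.
+    const next = chunksToRead.shift();
+    this.push(next === undefined ? null : next);
+  },
+  write(chunk, encoding, callback) {
+    // The Writable side simply logs what it receives.
+    console.log(`written: ${chunk.toString()}`);
+    callback();
+  }
+});
+
+// The Writable side accepts writes...
+duplex.write('hello');
+duplex.end();
+
+// ...while the Readable side is consumed independently.
+duplex.on('data', (chunk) => console.log(`read: ${chunk.toString()}`));
+```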
+
+#### Object Mode Duplex Streams
+
+For Duplex streams, `objectMode` can be set exclusively for either the Readable
+or Writable side using the `readableObjectMode` and `writableObjectMode` options
+respectively.
+
+In the following example, for instance, a new Transform stream (which is a
+type of [Duplex][] stream) is created that has an object mode Writable side
+that accepts JavaScript numbers, which are converted to hexadecimal strings on
+the Readable side.
+
+```js
+const Transform = require('stream').Transform;
+
+// All Transform streams are also Duplex Streams
+const myTransform = new Transform({
+  writableObjectMode: true,
+
+  transform(chunk, encoding, callback) {
+    // Coerce the chunk to a number if necessary
+    chunk |= 0;
+
+    // Transform the chunk into something else.
+    const data = chunk.toString(16);
+
+    // Push the data onto the readable queue.
+    callback(null, '0'.repeat(data.length % 2) + data);
+  }
+});
+
+myTransform.setEncoding('ascii');
+myTransform.on('data', (chunk) => console.log(chunk));
+
+myTransform.write(1);
+  // Prints: 01
+myTransform.write(10);
+  // Prints: 0a
+myTransform.write(100);
+  // Prints: 64
+```
+
+### Implementing a Transform Stream
+
+A [Transform][] stream is a [Duplex][] stream where the output is computed
+in some way from the input. Examples include [zlib][] streams or [crypto][]
+streams that compress, encrypt, or decrypt data.
+
+*Note*: There is no requirement that the output be the same size as the input,
+the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output which is
+provided when the input is ended. A `zlib` stream will produce output
+that is either much smaller or much larger than its input.
+
+The `stream.Transform` class is extended to implement a [Transform][] stream.
+
+The `stream.Transform` class prototypically inherits from `stream.Duplex` and
+implements its own versions of the `writable._write()` and `readable._read()`
+methods. Custom Transform implementations *must* implement the
+[`transform._transform()`][stream-_transform] method and *may* also implement
+the [`transform._flush()`][stream-_flush] method.
+
+*Note*: Care must be taken when using Transform streams in that data written
+to the stream can cause the Writable side of the stream to become paused if
+the output on the Readable side is not consumed.
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+  constructors. Also has the following fields:
+  * `transform` {Function} Implementation for the
+    [`stream._transform()`][stream-_transform] method.
+  * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
+    method.
+
+For example:
+
+```js
+const Transform = require('stream').Transform;
+
+class MyTransform extends Transform {
+  constructor(options) {
+    super(options);
+  }
+}
+```
+
+Or, when using pre-ES6 style constructors:
+
+```js
+const Transform = require('stream').Transform;
+const util = require('util');
+
+function MyTransform(options) {
+  if (!(this instanceof MyTransform))
+    return new MyTransform(options);
+  Transform.call(this, options);
+}
+util.inherits(MyTransform, Transform);
+```
+
+Or, using the Simplified Constructor approach:
+
+```js
+const Transform = require('stream').Transform;
+
+const myTransform = new Transform({
+  transform(chunk, encoding, callback) {
+    // ...
+  }
+});
+```
+
+#### Events: 'finish' and 'end'
+
+The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable`
+and `stream.Readable` classes, respectively. The `'finish'` event is emitted
+after [`stream.end()`][stream-end] is called and all chunks have been processed
+by [`stream._transform()`][stream-_transform]. The `'end'` event is emitted
+after all data has been output, which occurs after the callback in
+[`transform._flush()`][stream-_flush] has been called.
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} A callback function (optionally with an error
+  argument) to be called when remaining data has been flushed.
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called only by the internal class
+methods.
+
+In some cases, a transform operation may need to emit an additional bit of
+data at the end of the stream. For example, a `zlib` compression stream will
+store an amount of internal state used to optimally compress the output. When
+the stream ends, however, that additional data needs to be flushed so that the
+compressed data will be complete.
+
+Custom [Transform][] implementations *may* implement the `transform._flush()`
+method. This will be called when there is no more written data to be consumed,
+but before the [`'end'`][] event is emitted signaling the end of the
+[Readable][] stream.
+
+Within the `transform._flush()` implementation, the `readable.push()` method
+may be called zero or more times, as appropriate. The `callback` function must
+be called when the flush operation is complete.
+
+The `transform._flush()` method is prefixed with an underscore because it is
+internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
+  be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If the chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} A callback function (optionally with an error
+  argument and data) to be called after the supplied `chunk` has been
+  processed.
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called only by the internal class
+methods.
+
+All Transform stream implementations must provide a `_transform()`
+method to accept input and produce output. The `transform._transform()`
+implementation handles the bytes being written, computes an output, then passes
+that output off to the readable portion using the `readable.push()` method.
+
+The `transform.push()` method may be called zero or more times to generate
+output from a single input chunk, depending on how much is to be output
+as a result of the chunk.
+
+It is possible that no output is generated from any given chunk of input data.
+
+The `callback` function must be called only when the current chunk is completely
+consumed. The first argument passed to the `callback` must be an `Error` object
+if an error occurred while processing the input or `null` otherwise. If a second
+argument is passed to the `callback`, it will be forwarded on to the
+`readable.push()` method.
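+
+As a sketch of how `_transform()` and `_flush()` fit together, the following
+Transform upper-cases incoming text and uses `_flush()` to emit a trailing
+marker once all written data has been consumed. The marker string is arbitrary
+and purely illustrative:
+
+```js
+const Transform = require('stream').Transform;
+
+class UpperCaser extends Transform {
+  _transform(chunk, encoding, callback) {
+    // Forward the transformed data via the callback's second argument.
+    callback(null, chunk.toString().toUpperCase());
+  }
+
+  _flush(callback) {
+    // Called after all written data has been consumed, before 'end' is emitted.
+    this.push('-- done --\n');
+    callback();
+  }
+}
+
+const upperCaser = new UpperCaser();
+upperCaser.on('data', (chunk) => process.stdout.write(chunk.toString()));
+upperCaser.write('hello ');
+upperCaser.end('world\n');
+```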
+
+In other words, passing output data as the second argument to the callback is
+equivalent to pushing that data explicitly and then invoking the callback; the
+following two implementations are equivalent:
+
+```js
+transform.prototype._transform = function (data, encoding, callback) {
+  this.push(data);
+  callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+  callback(null, data);
+};
+```
+
+The `transform._transform()` method is prefixed with an underscore because it
+is internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### Class: stream.PassThrough
+
+The `stream.PassThrough` class is a trivial implementation of a [Transform][]
+stream that simply passes the input bytes across to the output. Its purpose is
+primarily for examples and testing, but there are some use cases where
+`stream.PassThrough` is useful as a building block for novel sorts of streams.
+
+## Additional Notes
+
+### Compatibility with Older Node.js Versions
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for calls to the [`stream.read()`][stream-read] method,
+  [`'data'`][] events would begin emitting immediately. Applications that
+  needed to perform some amount of work to decide how to handle data
+  were required to store read data into buffers so the data would not be lost.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+  guaranteed. This meant that it was still necessary to be prepared to receive
+  [`'data'`][] events *even when the stream was in a paused state*.
+
+In Node.js v0.10, the [Readable][] class was added. For backwards compatibility
+with older Node.js programs, Readable streams switch into "flowing mode" when a
+[`'data'`][] event handler is added, or when the
+[`stream.resume()`][stream-resume] method is called. The effect is that, even
+when not using the new [`stream.read()`][stream-read] method and
+[`'readable'`][] event, it is no longer necessary to worry about losing
+[`'data'`][] chunks.
+
+While most applications will continue to function normally, this introduces an
+edge case in the following conditions:
+
+* No [`'data'`][] event listener is added.
+* The [`stream.resume()`][stream-resume] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```js
+// WARNING! BROKEN!
+net.createServer((socket) => {
+
+  // we add an 'end' listener, but never consume the data
+  socket.on('end', () => {
+    // This will never be reached.
+    socket.end('The message was received but was not processed.\n');
+  });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would simply
+be discarded. However, in Node.js v0.10 and beyond, the socket remains
+paused forever.
+
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to begin the flow of data:
+
+```js
+// Workaround
+net.createServer((socket) => {
+
+  socket.on('end', () => {
+    socket.end('The message was received but was not processed.\n');
+  });
+
+  // start the flow of data, discarding it.
+  socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+[`readable.wrap()`][`stream.wrap()`] method.
+
+### `readable.read(0)`
+
+There are some cases where it is necessary to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any data.
In such cases, it is possible to call `readable.read(0)`, which will +always return `null`. + +If the internal read buffer is below the `highWaterMark`, and the +stream is not currently reading, then calling `stream.read(0)` will trigger +a low-level [`stream._read()`][stream-_read] call. + +While most applications will almost never need to do this, there are +situations within Node.js where this is done, particularly in the +Readable stream class internals. + +### `readable.push('')` + +Use of `readable.push('')` is not recommended. + +Pushing a zero-byte string or `Buffer` to a stream that is not in object mode +has an interesting side effect. Because it *is* a call to +[`readable.push()`][stream-push], the call will end the reading process. +However, because the argument is an empty string, no data is added to the +readable buffer so there is nothing for a user to consume. + +[`'data'`]: #stream_event_data +[`'drain'`]: #stream_event_drain +[`'end'`]: #stream_event_end +[`'finish'`]: #stream_event_finish +[`'readable'`]: #stream_event_readable +[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.3.1/api/buffer.html#buffer_buf_tostring_encoding_start_end +[`EventEmitter`]: https://nodejs.org/docs/v6.3.1/api/events.html#events_class_eventemitter +[`process.stderr`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stderr +[`process.stdin`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdin +[`process.stdout`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdout +[`stream.cork()`]: #stream_writable_cork +[`stream.pipe()`]: #stream_readable_pipe_destination_options +[`stream.uncork()`]: #stream_writable_uncork +[`stream.unpipe()`]: #stream_readable_unpipe_destination +[`stream.wrap()`]: #stream_readable_wrap_stream +[`tls.CryptoStream`]: https://nodejs.org/docs/v6.3.1/api/tls.html#tls_class_cryptostream +[API for Stream Consumers]: #stream_api_for_stream_consumers +[API for Stream Implementers]: #stream_api_for_stream_implementers +[child process stdin]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdin +[child process stdout and stderr]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdout +[Compatibility]: #stream_compatibility_with_older_node_js_versions +[crypto]: crypto.html +[Duplex]: #stream_class_stream_duplex +[fs read streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_readstream +[fs write streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_writestream +[`fs.createReadStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createreadstream_path_options +[`fs.createWriteStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createwritestream_path_options +[`net.Socket`]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[`zlib.createDeflate()`]: https://nodejs.org/docs/v6.3.1/api/zlib.html#zlib_zlib_createdeflate_options +[HTTP requests, on the client]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_clientrequest +[HTTP responses, on the server]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_serverresponse +[http-incoming-message]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_incomingmessage +[Object mode]: #stream_object_mode +[Readable]: #stream_class_stream_readable +[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 +[stream-_flush]: #stream_transform_flush_callback +[stream-_read]: #stream_readable_read_size_1 +[stream-_transform]: 
#stream_transform_transform_chunk_encoding_callback +[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 +[stream-_writev]: #stream_writable_writev_chunks_callback +[stream-end]: #stream_writable_end_chunk_encoding_callback +[stream-pause]: #stream_readable_pause +[stream-push]: #stream_readable_push_chunk_encoding +[stream-read]: #stream_readable_read_size +[stream-resume]: #stream_readable_resume +[stream-write]: #stream_writable_write_chunk_encoding_callback +[TCP sockets]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[Transform]: #stream_class_stream_transform +[Writable]: #stream_class_stream_writable +[zlib]: zlib.html diff --git a/node_modules/readdirp/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/readdirp/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/readdirp/node_modules/readable-stream/duplex.js b/node_modules/readdirp/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..ca807af --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..736693b --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,75 @@ +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +var keys = objectKeys(Writable.prototype); +for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + processNextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..d06f71f --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,26 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..208cc65 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,937 @@ +'use strict'; + +module.exports = Readable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var isArray = require('isarray'); +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var StringDecoder; + +util.inherits(Readable, Stream); + +function prependListener(emitter, event, fn) { + if (typeof emitter.prependListener === 'function') { + return emitter.prependListener(event, fn); + } else { + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + } +} + +var Duplex; +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. 
+ this.highWaterMark = ~ ~this.highWaterMark; + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +var Duplex; +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = bufferShim.from(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var _e = new Error('stream.unshift() after end event'); + stream.emit('error', _e); + } else { + var skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. 
+ if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + processNextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+Readable.prototype._read = function (n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + }return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + processNextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + processNextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function (ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. 
+// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = bufferShim.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + processNextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..dbc996e --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,180 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) 
+// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function (er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. 
+ this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function () { + if (typeof this._flush === 'function') this._flush(function (er) { + done(stream, er); + });else done(stream); + }); +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('Not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) throw new Error('Calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..ed5efcb --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,526 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +util.inherits(Writable, Stream); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +var Duplex; +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. 
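The highWaterMark chosen above (16 objects in object mode, 16 KB otherwise) is the point at which write() starts returning false and needDrain is set. A minimal sketch of honoring that signal through the standard 'drain' event; the file path and the writeMany helper are purely illustrative:

var fs = require('fs');

// Any Writable works here; a file stream is used only for illustration.
var slowDest = fs.createWriteStream('/tmp/out.log');

function writeMany(lines, done) {
  var i = 0;
  (function next() {
    while (i < lines.length) {
      var ok = slowDest.write(lines[i++] + '\n');
      // Once the buffered length passes highWaterMark, write() returns false:
      // stop writing and resume when the buffer drains.
      if (!ok) return slowDest.once('drain', next);
    }
    done();
  })();
}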
+ this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') + }); + } catch (_) {} +})(); + +var Duplex; +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + processNextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + // Always throw error if a null is written + // if we are not in object mode then throw + // if it is not a buffer, string, or undefined. 
+ if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + processNextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = bufferShim.from(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
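cork() and uncork(), defined above, only bump and drop a counter; while corked, writeOrBuffer() queues every chunk, and uncork() lets clearBuffer() flush the whole queue (through _writev() when the stream defines one). A small illustrative sketch using process.stdout, which is an ordinary Writable:

var out = process.stdout;
out.cork();
out.write('first ');
out.write('second ');
out.write('third\n');
process.nextTick(function () {
  out.uncork(); // counter hits zero; the buffered requests are flushed together
});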
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) processNextTick(cb, er);else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. 
+ // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) processNextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function (err) { + var entry = _this.entry; + _this.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/readdirp/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..e4bfcf0 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,64 @@ +'use strict'; + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +module.exports = BufferList; + +function BufferList() { + this.head = null; + this.tail = null; + this.length = 0; +} + +BufferList.prototype.push = function (v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; +}; + +BufferList.prototype.unshift = function (v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; +}; + +BufferList.prototype.shift = function () { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; +}; + 
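BufferList above keeps pending chunks in a singly linked list, so push and shift stay O(1) no matter how much data is queued (clear, join and concat follow below). A small sketch of that behaviour; the require path points at the module's internal location and is shown only for illustration:

var BufferList = require('readable-stream/lib/internal/streams/BufferList');

var list = new BufferList();
list.push(Buffer.from('abc'));
list.push(Buffer.from('def'));
console.log(list.length);            // 2 -- counts chunks, not bytes
console.log(String(list.shift()));   // 'abc'
console.log(String(list.shift()));   // 'def'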
+BufferList.prototype.clear = function () { + this.head = this.tail = null; + this.length = 0; +}; + +BufferList.prototype.join = function (s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; +}; + +BufferList.prototype.concat = function (n) { + if (this.length === 0) return bufferShim.alloc(0); + if (this.length === 1) return this.head.data; + var ret = bufferShim.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + p.data.copy(ret, i); + i += p.data.length; + p = p.next; + } + return ret; +}; \ No newline at end of file diff --git a/node_modules/readdirp/node_modules/readable-stream/package.json b/node_modules/readdirp/node_modules/readable-stream/package.json new file mode 100644 index 0000000..4293090 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/package.json @@ -0,0 +1,49 @@ +{ + "name": "readable-stream", + "version": "2.1.5", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "buffer-shims": "^1.0.0", + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "~1.4.0", + "babel-polyfill": "^6.9.1", + "nyc": "^6.4.0", + "tap": "~0.7.1", + "tape": "~4.5.1", + "zuul": "~3.10.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js", + "browser": "npm run write-zuul && zuul --browser-retries 2 -- test/browser.js", + "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml", + "local": "zuul --local 3000 --no-coverage -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readdirp/node_modules/readable-stream/passthrough.js b/node_modules/readdirp/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..27e8d8a --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/readdirp/node_modules/readable-stream/readable.js b/node_modules/readdirp/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..be2688a --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = (function (){ + try { + return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify + } catch(_){} +}()); +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = Stream || exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); + +if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; +} diff --git a/node_modules/readdirp/node_modules/readable-stream/transform.js b/node_modules/readdirp/node_modules/readable-stream/transform.js new file 
mode 100644 index 0000000..5d482f0 --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/readdirp/node_modules/readable-stream/writable.js b/node_modules/readdirp/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..e1e9efd --- /dev/null +++ b/node_modules/readdirp/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/readdirp/package.json b/node_modules/readdirp/package.json new file mode 100644 index 0000000..ca02cde --- /dev/null +++ b/node_modules/readdirp/package.json @@ -0,0 +1,47 @@ +{ + "author": "Thorsten Lorenz (thlorenz.com)", + "name": "readdirp", + "description": "Recursive version of fs.readdir with streaming api.", + "version": "2.1.0", + "homepage": "https://github.com/thlorenz/readdirp", + "repository": { + "type": "git", + "url": "git://github.com/thlorenz/readdirp.git" + }, + "engines": { + "node": ">=0.6" + }, + "keywords": [ + "recursive", + "fs", + "stream", + "streams", + "readdir", + "filesystem", + "find", + "filter" + ], + "main": "readdirp.js", + "scripts": { + "test-main": "(cd test && set -e; for t in ./*.js; do node $t; done)", + "test-0.10": "nave use 0.10 npm run test-main", + "test-0.12": "nave use 0.12 npm run test-main", + "test-4": "nave use 4.4 npm run test-main", + "test-6": "nave use 6.2 npm run test-main", + "test-all": "npm run test-main && npm run test-0.10 && npm run test-0.12 && npm run test-4 && npm run test-6", + "test": "if [ -e $TRAVIS ]; then npm run test-all; else npm run test-main; fi" + }, + "dependencies": { + "graceful-fs": "^4.1.2", + "minimatch": "^3.0.2", + "readable-stream": "^2.0.2", + "set-immediate-shim": "^1.0.1" + }, + "devDependencies": { + "nave": "^0.5.1", + "proxyquire": "^1.7.9", + "tap": "1.3.2", + "through2": "^2.0.0" + }, + "license": "MIT" +} diff --git a/node_modules/readdirp/readdirp.js b/node_modules/readdirp/readdirp.js new file mode 100644 index 0000000..b6b21e4 --- /dev/null +++ b/node_modules/readdirp/readdirp.js @@ -0,0 +1,300 @@ +'use strict'; + +var fs = require('graceful-fs') + , path = require('path') + , minimatch = require('minimatch') + , toString = Object.prototype.toString + , si = require('set-immediate-shim') + ; + + +// Standard helpers +function isFunction (obj) { + return toString.call(obj) === '[object Function]'; +} + +function isString (obj) { + return toString.call(obj) === '[object String]'; +} + +function isRegExp (obj) { + return toString.call(obj) === '[object RegExp]'; +} + +function isUndefined (obj) { + return obj === void 0; +} + +/** + * Main function which ends up calling readdirRec and reads all files and directories in given root recursively. + * @param { Object } opts Options to specify root (start directory), filters and recursion depth + * @param { function } callback1 When callback2 is given calls back for each processed file - function (fileInfo) { ... }, + * when callback2 is not given, it behaves like explained in callback2 + * @param { function } callback2 Calls back once all files have been processed with an array of errors and file infos + * function (err, fileInfos) { ... 
} + */ +function readdir(opts, callback1, callback2) { + var stream + , handleError + , handleFatalError + , pending = 0 + , errors = [] + , readdirResult = { + directories: [] + , files: [] + } + , fileProcessed + , allProcessed + , realRoot + , aborted = false + , paused = false + ; + + // If no callbacks were given we will use a streaming interface + if (isUndefined(callback1)) { + var api = require('./stream-api')(); + stream = api.stream; + callback1 = api.processEntry; + callback2 = api.done; + handleError = api.handleError; + handleFatalError = api.handleFatalError; + + stream.on('close', function () { aborted = true; }); + stream.on('pause', function () { paused = true; }); + stream.on('resume', function () { paused = false; }); + } else { + handleError = function (err) { errors.push(err); }; + handleFatalError = function (err) { + handleError(err); + allProcessed(errors, null); + }; + } + + if (isUndefined(opts)){ + handleFatalError(new Error ( + 'Need to pass at least one argument: opts! \n' + + 'https://github.com/thlorenz/readdirp#options' + ) + ); + return stream; + } + + opts.root = opts.root || '.'; + opts.fileFilter = opts.fileFilter || function() { return true; }; + opts.directoryFilter = opts.directoryFilter || function() { return true; }; + opts.depth = typeof opts.depth === 'undefined' ? 999999999 : opts.depth; + opts.entryType = opts.entryType || 'files'; + + var statfn = opts.lstat === true ? fs.lstat.bind(fs) : fs.stat.bind(fs); + + if (isUndefined(callback2)) { + fileProcessed = function() { }; + allProcessed = callback1; + } else { + fileProcessed = callback1; + allProcessed = callback2; + } + + function normalizeFilter (filter) { + + if (isUndefined(filter)) return undefined; + + function isNegated (filters) { + + function negated(f) { + return f.indexOf('!') === 0; + } + + var some = filters.some(negated); + if (!some) { + return false; + } else { + if (filters.every(negated)) { + return true; + } else { + // if we detect illegal filters, bail out immediately + throw new Error( + 'Cannot mix negated with non negated glob filters: ' + filters + '\n' + + 'https://github.com/thlorenz/readdirp#filters' + ); + } + } + } + + // Turn all filters into a function + if (isFunction(filter)) { + + return filter; + + } else if (isString(filter)) { + + return function (entryInfo) { + return minimatch(entryInfo.name, filter.trim()); + }; + + } else if (filter && Array.isArray(filter)) { + + if (filter) filter = filter.map(function (f) { + return f.trim(); + }); + + return isNegated(filter) ? 
+ // use AND to concat multiple negated filters + function (entryInfo) { + return filter.every(function (f) { + return minimatch(entryInfo.name, f); + }); + } + : + // use OR to concat multiple inclusive filters + function (entryInfo) { + return filter.some(function (f) { + return minimatch(entryInfo.name, f); + }); + }; + } + } + + function processDir(currentDir, entries, callProcessed) { + if (aborted) return; + var total = entries.length + , processed = 0 + , entryInfos = [] + ; + + fs.realpath(currentDir, function(err, realCurrentDir) { + if (aborted) return; + if (err) { + handleError(err); + callProcessed(entryInfos); + return; + } + + var relDir = path.relative(realRoot, realCurrentDir); + + if (entries.length === 0) { + callProcessed([]); + } else { + entries.forEach(function (entry) { + + var fullPath = path.join(realCurrentDir, entry) + , relPath = path.join(relDir, entry); + + statfn(fullPath, function (err, stat) { + if (err) { + handleError(err); + } else { + entryInfos.push({ + name : entry + , path : relPath // relative to root + , fullPath : fullPath + + , parentDir : relDir // relative to root + , fullParentDir : realCurrentDir + + , stat : stat + }); + } + processed++; + if (processed === total) callProcessed(entryInfos); + }); + }); + } + }); + } + + function readdirRec(currentDir, depth, callCurrentDirProcessed) { + var args = arguments; + if (aborted) return; + if (paused) { + si(function () { + readdirRec.apply(null, args); + }) + return; + } + + fs.readdir(currentDir, function (err, entries) { + if (err) { + handleError(err); + callCurrentDirProcessed(); + return; + } + + processDir(currentDir, entries, function(entryInfos) { + + var subdirs = entryInfos + .filter(function (ei) { return ei.stat.isDirectory() && opts.directoryFilter(ei); }); + + subdirs.forEach(function (di) { + if(opts.entryType === 'directories' || opts.entryType === 'both' || opts.entryType === 'all') { + fileProcessed(di); + } + readdirResult.directories.push(di); + }); + + entryInfos + .filter(function(ei) { + var isCorrectType = opts.entryType === 'all' ? 
+ !ei.stat.isDirectory() : ei.stat.isFile() || ei.stat.isSymbolicLink(); + return isCorrectType && opts.fileFilter(ei); + }) + .forEach(function (fi) { + if(opts.entryType === 'files' || opts.entryType === 'both' || opts.entryType === 'all') { + fileProcessed(fi); + } + readdirResult.files.push(fi); + }); + + var pendingSubdirs = subdirs.length; + + // Be done if no more subfolders exist or we reached the maximum desired depth + if(pendingSubdirs === 0 || depth === opts.depth) { + callCurrentDirProcessed(); + } else { + // recurse into subdirs, keeping track of which ones are done + // and call back once all are processed + subdirs.forEach(function (subdir) { + readdirRec(subdir.fullPath, depth + 1, function () { + pendingSubdirs = pendingSubdirs - 1; + if(pendingSubdirs === 0) { + callCurrentDirProcessed(); + } + }); + }); + } + }); + }); + } + + // Validate and normalize filters + try { + opts.fileFilter = normalizeFilter(opts.fileFilter); + opts.directoryFilter = normalizeFilter(opts.directoryFilter); + } catch (err) { + // if we detect illegal filters, bail out immediately + handleFatalError(err); + return stream; + } + + // If filters were valid get on with the show + fs.realpath(opts.root, function(err, res) { + if (err) { + handleFatalError(err); + return stream; + } + + realRoot = res; + readdirRec(opts.root, 0, function () { + // All errors are collected into the errors array + if (errors.length > 0) { + allProcessed(errors, readdirResult); + } else { + allProcessed(null, readdirResult); + } + }); + }); + + return stream; +} + +module.exports = readdir; diff --git a/node_modules/readdirp/stream-api.js b/node_modules/readdirp/stream-api.js new file mode 100644 index 0000000..91c6808 --- /dev/null +++ b/node_modules/readdirp/stream-api.js @@ -0,0 +1,99 @@ +'use strict'; + +var si = require('set-immediate-shim'); +var stream = require('readable-stream'); +var util = require('util'); + +var Readable = stream.Readable; + +module.exports = ReaddirpReadable; + +util.inherits(ReaddirpReadable, Readable); + +function ReaddirpReadable (opts) { + if (!(this instanceof ReaddirpReadable)) return new ReaddirpReadable(opts); + + opts = opts || {}; + + opts.objectMode = true; + Readable.call(this, opts); + + // backpressure not implemented at this point + this.highWaterMark = Infinity; + + this._destroyed = false; + this._paused = false; + this._warnings = []; + this._errors = []; + + this._pauseResumeErrors(); +} + +var proto = ReaddirpReadable.prototype; + +proto._pauseResumeErrors = function () { + var self = this; + self.on('pause', function () { self._paused = true }); + self.on('resume', function () { + if (self._destroyed) return; + self._paused = false; + + self._warnings.forEach(function (err) { self.emit('warn', err) }); + self._warnings.length = 0; + + self._errors.forEach(function (err) { self.emit('error', err) }); + self._errors.length = 0; + }) +} + +// called for each entry +proto._processEntry = function (entry) { + if (this._destroyed) return; + this.push(entry); +} + +proto._read = function () { } + +proto.destroy = function () { + // when stream is destroyed it will emit nothing further, not even errors or warnings + this.push(null); + this.readable = false; + this._destroyed = true; + this.emit('close'); +} + +proto._done = function () { + this.push(null); +} + +// we emit errors and warnings async since we may handle errors like invalid args +// within the initial event loop before any event listeners subscribed +proto._handleError = function (err) { + var self = this; + 
si(function () { + if (self._paused) return self._warnings.push(err); + if (!self._destroyed) self.emit('warn', err); + }); +} + +proto._handleFatalError = function (err) { + var self = this; + si(function () { + if (self._paused) return self._errors.push(err); + if (!self._destroyed) self.emit('error', err); + }); +} + +function createStreamAPI () { + var stream = new ReaddirpReadable(); + + return { + stream : stream + , processEntry : stream._processEntry.bind(stream) + , done : stream._done.bind(stream) + , handleError : stream._handleError.bind(stream) + , handleFatalError : stream._handleFatalError.bind(stream) + }; +} + +module.exports = createStreamAPI; diff --git a/node_modules/readdirp/test/bed/root_dir1/root_dir1_file1.ext1 b/node_modules/readdirp/test/bed/root_dir1/root_dir1_file1.ext1 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_dir1/root_dir1_file2.ext2 b/node_modules/readdirp/test/bed/root_dir1/root_dir1_file2.ext2 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_dir1/root_dir1_file3.ext3 b/node_modules/readdirp/test/bed/root_dir1/root_dir1_file3.ext3 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_dir1/root_dir1_subdir1/root1_dir1_subdir1_file1.ext1 b/node_modules/readdirp/test/bed/root_dir1/root_dir1_subdir1/root1_dir1_subdir1_file1.ext1 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_dir2/root_dir2_file1.ext1 b/node_modules/readdirp/test/bed/root_dir2/root_dir2_file1.ext1 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_dir2/root_dir2_file2.ext2 b/node_modules/readdirp/test/bed/root_dir2/root_dir2_file2.ext2 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_file1.ext1 b/node_modules/readdirp/test/bed/root_file1.ext1 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_file2.ext2 b/node_modules/readdirp/test/bed/root_file2.ext2 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/bed/root_file3.ext3 b/node_modules/readdirp/test/bed/root_file3.ext3 new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/readdirp/test/readdirp-stream.js b/node_modules/readdirp/test/readdirp-stream.js new file mode 100644 index 0000000..c43d132 --- /dev/null +++ b/node_modules/readdirp/test/readdirp-stream.js @@ -0,0 +1,338 @@ +/*jshint asi:true */ + +var debug //= true; +var test = debug ? function () {} : require('tap').test +var test_ = !debug ? 
function () {} : require('tap').test + , path = require('path') + , fs = require('fs') + , util = require('util') + , TransformStream = require('readable-stream').Transform + , through = require('through2') + , proxyquire = require('proxyquire') + , streamapi = require('../stream-api') + , readdirp = require('..') + , root = path.join(__dirname, 'bed') + , totalDirs = 6 + , totalFiles = 12 + , ext1Files = 4 + , ext2Files = 3 + , ext3Files = 2 + ; + +// see test/readdirp.js for test bed layout + +function opts (extend) { + var o = { root: root }; + + if (extend) { + for (var prop in extend) { + o[prop] = extend[prop]; + } + } + return o; +} + +function capture () { + var result = { entries: [], errors: [], ended: false } + , dst = new TransformStream({ objectMode: true }); + + dst._transform = function (entry, _, cb) { + result.entries.push(entry); + cb(); + } + + dst._flush = function (cb) { + result.ended = true; + this.push(result); + cb(); + } + + return dst; +} + +test('\nintegrated', function (t) { + t.test('\n# reading root without filter', function (t) { + t.plan(2); + + readdirp(opts()) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, totalFiles, 'emits all files'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )); + }) + + t.test('\n# normal: ["*.ext1", "*.ext3"]', function (t) { + t.plan(2); + + readdirp(opts( { fileFilter: [ '*.ext1', '*.ext3' ] } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, ext1Files + ext3Files, 'all ext1 and ext3 files'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# files only', function (t) { + t.plan(2); + + readdirp(opts( { entryType: 'files' } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, totalFiles, 'returned files'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# directories only', function (t) { + t.plan(2); + + readdirp(opts( { entryType: 'directories' } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, totalDirs, 'returned directories'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# both directories + files', function (t) { + t.plan(2); + + readdirp(opts( { entryType: 'both' } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, totalDirs + totalFiles, 'returned everything'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# directory filter with directories only', function (t) { + t.plan(2); + + readdirp(opts( { entryType: 'directories', directoryFilter: [ 'root_dir1', '*dir1_subdir1' ] } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, 2, 'two directories'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# directory and file filters with both 
entries', function (t) { + t.plan(2); + + readdirp(opts( { entryType: 'both', directoryFilter: [ 'root_dir1', '*dir1_subdir1' ], fileFilter: [ '!*.ext1' ] } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, 6, '2 directories and 4 files'); + t.ok(result.ended, 'ends stream'); + t.end(); + cb(); + } + )) + }) + + t.test('\n# negated: ["!*.ext1", "!*.ext3"]', function (t) { + t.plan(2); + + readdirp(opts( { fileFilter: [ '!*.ext1', '!*.ext3' ] } )) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .pipe(capture()) + .pipe(through.obj( + function (result, _ , cb) { + t.equals(result.entries.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files'); + t.ok(result.ended, 'ends stream'); + t.end(); + } + )) + }) + + t.test('\n# no options given', function (t) { + t.plan(1); + readdirp() + .on('error', function (err) { + t.similar(err.toString() , /Need to pass at least one argument/ , 'emits meaningful error'); + t.end(); + }) + }) + + t.test('\n# mixed: ["*.ext1", "!*.ext3"]', function (t) { + t.plan(1); + + readdirp(opts( { fileFilter: [ '*.ext1', '!*.ext3' ] } )) + .on('error', function (err) { + t.similar(err.toString() , /Cannot mix negated with non negated glob filters/ , 'emits meaningful error'); + t.end(); + }) + }) +}) + + +test('\napi separately', function (t) { + + t.test('\n# handleError', function (t) { + t.plan(1); + + var api = streamapi() + , warning = new Error('some file caused problems'); + + api.stream + .on('warn', function (err) { + t.equals(err, warning, 'warns with the handled error'); + }) + api.handleError(warning); + }) + + t.test('\n# when stream is paused and then resumed', function (t) { + t.plan(6); + var api = streamapi() + , resumed = false + , fatalError = new Error('fatal!') + , nonfatalError = new Error('nonfatal!') + , processedData = 'some data' + ; + + api.stream + .on('warn', function (err) { + t.equals(err, nonfatalError, 'emits the buffered warning'); + t.ok(resumed, 'emits warning only after it was resumed'); + }) + .on('error', function (err) { + t.equals(err, fatalError, 'emits the buffered fatal error'); + t.ok(resumed, 'emits errors only after it was resumed'); + }) + .on('data', function (data) { + t.equals(data, processedData, 'emits the buffered data'); + t.ok(resumed, 'emits data only after it was resumed'); + }) + .pause() + + api.processEntry(processedData); + api.handleError(nonfatalError); + api.handleFatalError(fatalError); + + setTimeout(function () { + resumed = true; + api.stream.resume(); + }, 1) + }) + + t.test('\n# when a stream is paused it stops walking the fs', function (t) { + var resumed = false, + mockedAPI = streamapi(); + + mockedAPI.processEntry = function (entry) { + if (!resumed) t.notOk(true, 'should not emit while paused') + t.ok(entry, 'emitted while resumed') + }.bind(mockedAPI.stream) + + function wrapper () { + return mockedAPI + } + + var readdirp = proxyquire('../readdirp', {'./stream-api': wrapper}) + , stream = readdirp(opts()) + .on('error', function (err) { + t.fail('should not throw error', err); + }) + .on('end', function () { + t.end() + }) + .pause(); + + setTimeout(function () { + resumed = true; + stream.resume(); + }, 5) + }) + + t.test('\n# when a stream is destroyed, it emits "closed", but no longer emits "data", "warn" and "error"', function (t) { + var api = streamapi() + , fatalError = new Error('fatal!') + , 
nonfatalError = new Error('nonfatal!') + , processedData = 'some data' + , plan = 0; + + t.plan(6) + var stream = api.stream + .on('warn', function (err) { + t.ok(!stream._destroyed, 'emits warning until destroyed'); + }) + .on('error', function (err) { + t.ok(!stream._destroyed, 'emits errors until destroyed'); + }) + .on('data', function (data) { + t.ok(!stream._destroyed, 'emits data until destroyed'); + }) + .on('close', function () { + t.ok(stream._destroyed, 'emits close when stream is destroyed'); + }) + + + api.processEntry(processedData); + api.handleError(nonfatalError); + api.handleFatalError(fatalError); + + setTimeout(function () { + stream.destroy() + + t.notOk(stream.readable, 'stream is no longer readable after it is destroyed') + + api.processEntry(processedData); + api.handleError(nonfatalError); + api.handleFatalError(fatalError); + + process.nextTick(function () { + t.pass('emits no more data, warn or error events after it was destroyed') + t.end(); + }) + }, 10) + }) +}) diff --git a/node_modules/readdirp/test/readdirp.js b/node_modules/readdirp/test/readdirp.js new file mode 100644 index 0000000..812afbb --- /dev/null +++ b/node_modules/readdirp/test/readdirp.js @@ -0,0 +1,289 @@ +/*jshint asi:true */ + +var test = require('tap').test + , path = require('path') + , fs = require('fs') + , util = require('util') + , net = require('net') + , readdirp = require('../readdirp.js') + , root = path.join(__dirname, '../test/bed') + , totalDirs = 6 + , totalFiles = 12 + , ext1Files = 4 + , ext2Files = 3 + , ext3Files = 2 + , rootDir2Files = 2 + , nameHasLength9Dirs = 2 + , depth1Files = 8 + , depth0Files = 3 + ; + +/* +Structure of test bed: + . + ├── root_dir1 + │   ├── root_dir1_file1.ext1 + │   ├── root_dir1_file2.ext2 + │   ├── root_dir1_file3.ext3 + │   ├── root_dir1_subdir1 + │   │   └── root1_dir1_subdir1_file1.ext1 + │   └── root_dir1_subdir2 + │   └── .gitignore + ├── root_dir2 + │   ├── root_dir2_file1.ext1 + │   ├── root_dir2_file2.ext2 + │   ├── root_dir2_subdir1 + │   │   └── .gitignore + │   └── root_dir2_subdir2 + │   └── .gitignore + ├── root_file1.ext1 + ├── root_file2.ext2 + └── root_file3.ext3 + + 6 directories, 13 files +*/ + +// console.log('\033[2J'); // clear console + +function opts (extend) { + var o = { root: root }; + + if (extend) { + for (var prop in extend) { + o[prop] = extend[prop]; + } + } + return o; +} + +test('\nreading root without filter', function (t) { + t.plan(2); + readdirp(opts(), function (err, res) { + t.equals(res.directories.length, totalDirs, 'all directories'); + t.equals(res.files.length, totalFiles, 'all files'); + t.end(); + }) +}) + +test('\nreading root without filter using lstat', function (t) { + t.plan(2); + readdirp(opts({ lstat: true }), function (err, res) { + t.equals(res.directories.length, totalDirs, 'all directories'); + t.equals(res.files.length, totalFiles, 'all files'); + t.end(); + }) +}) + +test('\nreading root with symlinks using lstat', function (t) { + t.plan(2); + fs.symlinkSync(path.join(root, 'root_dir1'), path.join(root, 'dirlink')); + fs.symlinkSync(path.join(root, 'root_file1.ext1'), path.join(root, 'link.ext1')); + readdirp(opts({ lstat: true }), function (err, res) { + t.equals(res.directories.length, totalDirs, 'all directories'); + t.equals(res.files.length, totalFiles + 2, 'all files + symlinks'); + fs.unlinkSync(path.join(root, 'dirlink')); + fs.unlinkSync(path.join(root, 'link.ext1')); + t.end(); + }) +}) + +test('\nreading non-standard fds', function (t) { + t.plan(2); + var server = 
net.createServer().listen(path.join(root, 'test.sock'), function(){ + readdirp(opts({ entryType: 'all' }), function (err, res) { + t.equals(res.files.length, totalFiles + 1, 'all files + socket'); + readdirp(opts({ entryType: 'both' }), function (err, res) { + t.equals(res.files.length, totalFiles, 'all regular files only'); + server.close(); + t.end(); + }) + }) + }); +}) + +test('\nreading root using glob filter', function (t) { + // normal + t.test('\n# "*.ext1"', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: '*.ext1' } ), function (err, res) { + t.equals(res.files.length, ext1Files, 'all ext1 files'); + t.end(); + }) + }) + t.test('\n# ["*.ext1", "*.ext3"]', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: [ '*.ext1', '*.ext3' ] } ), function (err, res) { + t.equals(res.files.length, ext1Files + ext3Files, 'all ext1 and ext3 files'); + t.end(); + }) + }) + t.test('\n# "root_dir1"', function (t) { + t.plan(1); + readdirp(opts( { directoryFilter: 'root_dir1' }), function (err, res) { + t.equals(res.directories.length, 1, 'one directory'); + t.end(); + }) + }) + t.test('\n# ["root_dir1", "*dir1_subdir1"]', function (t) { + t.plan(1); + readdirp(opts( { directoryFilter: [ 'root_dir1', '*dir1_subdir1' ]}), function (err, res) { + t.equals(res.directories.length, 2, 'two directories'); + t.end(); + }) + }) + + t.test('\n# negated: "!*.ext1"', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: '!*.ext1' } ), function (err, res) { + t.equals(res.files.length, totalFiles - ext1Files, 'all but ext1 files'); + t.end(); + }) + }) + t.test('\n# negated: ["!*.ext1", "!*.ext3"]', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: [ '!*.ext1', '!*.ext3' ] } ), function (err, res) { + t.equals(res.files.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files'); + t.end(); + }) + }) + + t.test('\n# mixed: ["*.ext1", "!*.ext3"]', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: [ '*.ext1', '!*.ext3' ] } ), function (err, res) { + t.similar(err[0].toString(), /Cannot mix negated with non negated glob filters/, 'returns meaningfull error'); + t.end(); + }) + }) + + t.test('\n# leading and trailing spaces: [" *.ext1", "*.ext3 "]', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: [ ' *.ext1', '*.ext3 ' ] } ), function (err, res) { + t.equals(res.files.length, ext1Files + ext3Files, 'all ext1 and ext3 files'); + t.end(); + }) + }) + t.test('\n# leading and trailing spaces: [" !*.ext1", " !*.ext3 "]', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: [ ' !*.ext1', ' !*.ext3' ] } ), function (err, res) { + t.equals(res.files.length, totalFiles - ext1Files - ext3Files, 'all but ext1 and ext3 files'); + t.end(); + }) + }) + + t.test('\n# ** glob pattern', function (t) { + t.plan(1); + readdirp(opts( { fileFilter: '**/*.ext1' } ), function (err, res) { + t.equals(res.files.length, ext1Files, 'ignores ** in **/*.ext1 -> only *.ext1 files'); + t.end(); + }) + }) +}) + +test('\n\nreading root using function filter', function (t) { + t.test('\n# file filter -> "contains root_dir2"', function (t) { + t.plan(1); + readdirp( + opts( { fileFilter: function (fi) { return fi.name.indexOf('root_dir2') >= 0; } }) + , function (err, res) { + t.equals(res.files.length, rootDir2Files, 'all rootDir2Files'); + t.end(); + } + ) + }) + + t.test('\n# directory filter -> "name has length 9"', function (t) { + t.plan(1); + readdirp( + opts( { directoryFilter: function (di) { return di.name.length === 9; } }) + , function (err, res) { + 
t.equals(res.directories.length, nameHasLength9Dirs, 'all all dirs with name length 9'); + t.end(); + } + ) + }) +}) + +test('\nreading root specifying maximum depth', function (t) { + t.test('\n# depth 1', function (t) { + t.plan(1); + readdirp(opts( { depth: 1 } ), function (err, res) { + t.equals(res.files.length, depth1Files, 'does not return files at depth 2'); + }) + }) +}) + +test('\nreading root with no recursion', function (t) { + t.test('\n# depth 0', function (t) { + t.plan(1); + readdirp(opts( { depth: 0 } ), function (err, res) { + t.equals(res.files.length, depth0Files, 'does not return files at depth 0'); + }) + }) +}) + +test('\nprogress callbacks', function (t) { + t.plan(2); + + var pluckName = function(fi) { return fi.name; } + , processedFiles = []; + + readdirp( + opts() + , function(fi) { + processedFiles.push(fi); + } + , function (err, res) { + t.equals(processedFiles.length, res.files.length, 'calls back for each file processed'); + t.deepEquals(processedFiles.map(pluckName).sort(),res.files.map(pluckName).sort(), 'same file names'); + t.end(); + } + ) +}) + +test('resolving of name, full and relative paths', function (t) { + var expected = { + name : 'root_dir1_file1.ext1' + , parentDirName : 'root_dir1' + , path : 'root_dir1/root_dir1_file1.ext1' + , fullPath : 'test/bed/root_dir1/root_dir1_file1.ext1' + } + , opts = [ + { root: './bed' , prefix: '' } + , { root: './bed/' , prefix: '' } + , { root: 'bed' , prefix: '' } + , { root: 'bed/' , prefix: '' } + , { root: '../test/bed/' , prefix: '' } + , { root: '.' , prefix: 'bed' } + ] + t.plan(opts.length); + + opts.forEach(function (op) { + op.fileFilter = 'root_dir1_file1.ext1'; + + t.test('\n' + util.inspect(op), function (t) { + t.plan(4); + + readdirp (op, function(err, res) { + t.equals(res.files[0].name, expected.name, 'correct name'); + t.equals(res.files[0].path, path.join(op.prefix, expected.path), 'correct path'); + }) + + fs.realpath(op.root, function(err, fullRoot) { + readdirp (op, function(err, res) { + t.equals( + res.files[0].fullParentDir + , path.join(fullRoot, op.prefix, expected.parentDirName) + , 'correct parentDir' + ); + t.equals( + res.files[0].fullPath + , path.join(fullRoot, op.prefix, expected.parentDirName, expected.name) + , 'correct fullPath' + ); + }) + }) + }) + }) +}) + + diff --git a/node_modules/regenerate/LICENSE-MIT.txt b/node_modules/regenerate/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/regenerate/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/regenerate/package.json b/node_modules/regenerate/package.json new file mode 100644 index 0000000..52c6c28 --- /dev/null +++ b/node_modules/regenerate/package.json @@ -0,0 +1,37 @@ +{ + "name": "regenerate", + "version": "1.3.1", + "description": "Generate JavaScript-compatible regular expressions based on a given set of Unicode symbols or code points.", + "homepage": "https://mths.be/regenerate", + "main": "regenerate.js", + "keywords": [ + "regex", + "regexp", + "javascript", + "unicode", + "generator", + "tool" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/regenerate.git" + }, + "bugs": "https://github.com/mathiasbynens/regenerate/issues", + "scripts": { + "test": "node tests/tests.js" + }, + "devDependencies": { + "coveralls": "^2.11.2", + "grunt": "^0.4.5", + "grunt-shell": "^1.1.1", + "istanbul": "^0.3.2", + "qunit-extras": "^1.1.0", + "qunitjs": "~1.11.0", + "requirejs": "^2.1.15" + } +} diff --git a/node_modules/regenerate/regenerate.js b/node_modules/regenerate/regenerate.js new file mode 100644 index 0000000..9b0b5be --- /dev/null +++ b/node_modules/regenerate/regenerate.js @@ -0,0 +1,1207 @@ +/*! https://mths.be/regenerate v1.3.1 by @mathias | MIT license */ +;(function(root) { + + // Detect free variables `exports`. + var freeExports = typeof exports == 'object' && exports; + + // Detect free variable `module`. + var freeModule = typeof module == 'object' && module && + module.exports == freeExports && module; + + // Detect free variable `global`, from Node.js/io.js or Browserified code, + // and use it as `root`. + var freeGlobal = typeof global == 'object' && global; + if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) { + root = freeGlobal; + } + + /*--------------------------------------------------------------------------*/ + + var ERRORS = { + 'rangeOrder': 'A range\u2019s `stop` value must be greater than or equal ' + + 'to the `start` value.', + 'codePointRange': 'Invalid code point value. Code points range from ' + + 'U+000000 to U+10FFFF.' + }; + + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-pairs + var HIGH_SURROGATE_MIN = 0xD800; + var HIGH_SURROGATE_MAX = 0xDBFF; + var LOW_SURROGATE_MIN = 0xDC00; + var LOW_SURROGATE_MAX = 0xDFFF; + + // In Regenerate output, `\0` is never preceded by `\` because we sort by + // code point value, so let’s keep this regular expression simple. 
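For orientation, a minimal sketch of the public API that the helpers below implement; the code points chosen are arbitrary examples:

var regenerate = require('regenerate');

// Build a set of code points, then emit a regex character class for it.
var set = regenerate(0x2603)       // U+2603 SNOWMAN
  .addRange(0x41, 0x5A)            // A-Z
  .add('a', 'b', 'c');
console.log(set.toString());       // e.g. '[A-Za-c\u2603]'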
+ var regexNull = /\\x00([^0123456789]|$)/g; + + var object = {}; + var hasOwnProperty = object.hasOwnProperty; + var extend = function(destination, source) { + var key; + for (key in source) { + if (hasOwnProperty.call(source, key)) { + destination[key] = source[key]; + } + } + return destination; + }; + + var forEach = function(array, callback) { + var index = -1; + var length = array.length; + while (++index < length) { + callback(array[index], index); + } + }; + + var toString = object.toString; + var isArray = function(value) { + return toString.call(value) == '[object Array]'; + }; + var isNumber = function(value) { + return typeof value == 'number' || + toString.call(value) == '[object Number]'; + }; + + // This assumes that `number` is a positive integer that `toString()`s nicely + // (which is the case for all code point values). + var zeroes = '0000'; + var pad = function(number, totalCharacters) { + var string = String(number); + return string.length < totalCharacters + ? (zeroes + string).slice(-totalCharacters) + : string; + }; + + var hex = function(number) { + return Number(number).toString(16).toUpperCase(); + }; + + var slice = [].slice; + + /*--------------------------------------------------------------------------*/ + + var dataFromCodePoints = function(codePoints) { + var index = -1; + var length = codePoints.length; + var max = length - 1; + var result = []; + var isStart = true; + var tmp; + var previous = 0; + while (++index < length) { + tmp = codePoints[index]; + if (isStart) { + result.push(tmp); + previous = tmp; + isStart = false; + } else { + if (tmp == previous + 1) { + if (index != max) { + previous = tmp; + continue; + } else { + isStart = true; + result.push(tmp + 1); + } + } else { + // End the previous range and start a new one. + result.push(previous + 1, tmp); + previous = tmp; + } + } + } + if (!isStart) { + result.push(tmp + 1); + } + return result; + }; + + var dataRemove = function(data, codePoint) { + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + var length = data.length; + while (index < length) { + start = data[index]; + end = data[index + 1]; + if (codePoint >= start && codePoint < end) { + // Modify this pair. + if (codePoint == start) { + if (end == start + 1) { + // Just remove `start` and `end`. + data.splice(index, 2); + return data; + } else { + // Just replace `start` with a new value. + data[index] = codePoint + 1; + return data; + } + } else if (codePoint == end - 1) { + // Just replace `end` with a new value. + data[index + 1] = codePoint; + return data; + } else { + // Replace `[start, end]` with `[startA, endA, startB, endB]`. + data.splice(index, 2, start, codePoint, codePoint + 1, end); + return data; + } + } + index += 2; + } + return data; + }; + + var dataRemoveRange = function(data, rangeStart, rangeEnd) { + if (rangeEnd < rangeStart) { + throw Error(ERRORS.rangeOrder); + } + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + while (index < data.length) { + start = data[index]; + end = data[index + 1] - 1; // Note: the `- 1` makes `end` inclusive. + + // Exit as soon as no more matching pairs can be found. + if (start > rangeEnd) { + return data; + } + + // Check if this range pair is equal to, or forms a subset of, the range + // to be removed. + // E.g. we have `[0, 11, 40, 51]` and want to remove 0-10 → `[40, 51]`. + // E.g. we have `[40, 51]` and want to remove 0-100 → `[]`. 
+ if (rangeStart <= start && rangeEnd >= end) { + // Remove this pair. + data.splice(index, 2); + continue; + } + + // Check if both `rangeStart` and `rangeEnd` are within the bounds of + // this pair. + // E.g. we have `[0, 11]` and want to remove 4-6 → `[0, 4, 7, 11]`. + if (rangeStart >= start && rangeEnd < end) { + if (rangeStart == start) { + // Replace `[start, end]` with `[startB, endB]`. + data[index] = rangeEnd + 1; + data[index + 1] = end + 1; + return data; + } + // Replace `[start, end]` with `[startA, endA, startB, endB]`. + data.splice(index, 2, start, rangeStart, rangeEnd + 1, end + 1); + return data; + } + + // Check if only `rangeStart` is within the bounds of this pair. + // E.g. we have `[0, 11]` and want to remove 4-20 → `[0, 4]`. + if (rangeStart >= start && rangeStart <= end) { + // Replace `end` with `rangeStart`. + data[index + 1] = rangeStart; + // Note: we cannot `return` just yet, in case any following pairs still + // contain matching code points. + // E.g. we have `[0, 11, 14, 31]` and want to remove 4-20 + // → `[0, 4, 21, 31]`. + } + + // Check if only `rangeEnd` is within the bounds of this pair. + // E.g. we have `[14, 31]` and want to remove 4-20 → `[21, 31]`. + else if (rangeEnd >= start && rangeEnd <= end) { + // Just replace `start`. + data[index] = rangeEnd + 1; + return data; + } + + index += 2; + } + return data; + }; + + var dataAdd = function(data, codePoint) { + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + var lastIndex = null; + var length = data.length; + if (codePoint < 0x0 || codePoint > 0x10FFFF) { + throw RangeError(ERRORS.codePointRange); + } + while (index < length) { + start = data[index]; + end = data[index + 1]; + + // Check if the code point is already in the set. + if (codePoint >= start && codePoint < end) { + return data; + } + + if (codePoint == start - 1) { + // Just replace `start` with a new value. + data[index] = codePoint; + return data; + } + + // At this point, if `start` is `greater` than `codePoint`, insert a new + // `[start, end]` pair before the current pair, or after the current pair + // if there is a known `lastIndex`. + if (start > codePoint) { + data.splice( + lastIndex != null ? lastIndex + 2 : 0, + 0, + codePoint, + codePoint + 1 + ); + return data; + } + + if (codePoint == end) { + // Check if adding this code point causes two separate ranges to become + // a single range, e.g. `dataAdd([0, 4, 5, 10], 4)` → `[0, 10]`. + if (codePoint + 1 == data[index + 2]) { + data.splice(index, 4, start, data[index + 3]); + return data; + } + // Else, just replace `end` with a new value. + data[index + 1] = codePoint + 1; + return data; + } + lastIndex = index; + index += 2; + } + // The loop has finished; add the new pair to the end of the data set. + data.push(codePoint, codePoint + 1); + return data; + }; + + var dataAddData = function(dataA, dataB) { + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + var data = dataA.slice(); + var length = dataB.length; + while (index < length) { + start = dataB[index]; + end = dataB[index + 1] - 1; + if (start == end) { + data = dataAdd(data, start); + } else { + data = dataAddRange(data, start, end); + } + index += 2; + } + return data; + }; + + var dataRemoveData = function(dataA, dataB) { + // Iterate over the data per `(start, end)` pair. 
+ var index = 0; + var start; + var end; + var data = dataA.slice(); + var length = dataB.length; + while (index < length) { + start = dataB[index]; + end = dataB[index + 1] - 1; + if (start == end) { + data = dataRemove(data, start); + } else { + data = dataRemoveRange(data, start, end); + } + index += 2; + } + return data; + }; + + var dataAddRange = function(data, rangeStart, rangeEnd) { + if (rangeEnd < rangeStart) { + throw Error(ERRORS.rangeOrder); + } + if ( + rangeStart < 0x0 || rangeStart > 0x10FFFF || + rangeEnd < 0x0 || rangeEnd > 0x10FFFF + ) { + throw RangeError(ERRORS.codePointRange); + } + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + var added = false; + var length = data.length; + while (index < length) { + start = data[index]; + end = data[index + 1]; + + if (added) { + // The range has already been added to the set; at this point, we just + // need to get rid of the following ranges in case they overlap. + + // Check if this range can be combined with the previous range. + if (start == rangeEnd + 1) { + data.splice(index - 1, 2); + return data; + } + + // Exit as soon as no more possibly overlapping pairs can be found. + if (start > rangeEnd) { + return data; + } + + // E.g. `[0, 11, 12, 16]` and we’ve added 5-15, so we now have + // `[0, 16, 12, 16]`. Remove the `12,16` part, as it lies within the + // `0,16` range that was previously added. + if (start >= rangeStart && start <= rangeEnd) { + // `start` lies within the range that was previously added. + + if (end > rangeStart && end - 1 <= rangeEnd) { + // `end` lies within the range that was previously added as well, + // so remove this pair. + data.splice(index, 2); + index -= 2; + // Note: we cannot `return` just yet, as there may still be other + // overlapping pairs. + } else { + // `start` lies within the range that was previously added, but + // `end` doesn’t. E.g. `[0, 11, 12, 31]` and we’ve added 5-15, so + // now we have `[0, 16, 12, 31]`. This must be written as `[0, 31]`. + // Remove the previously added `end` and the current `start`. + data.splice(index - 1, 2); + index -= 2; + } + + // Note: we cannot return yet. + } + + } + + else if (start == rangeEnd + 1) { + data[index] = rangeStart; + return data; + } + + // Check if a new pair must be inserted *before* the current one. + else if (start > rangeEnd) { + data.splice(index, 0, rangeStart, rangeEnd + 1); + return data; + } + + else if (rangeStart >= start && rangeStart < end && rangeEnd + 1 <= end) { + // The new range lies entirely within an existing range pair. No action + // needed. + return data; + } + + else if ( + // E.g. `[0, 11]` and you add 5-15 → `[0, 16]`. + (rangeStart >= start && rangeStart < end) || + // E.g. `[0, 3]` and you add 3-6 → `[0, 7]`. + end == rangeStart + ) { + // Replace `end` with the new value. + data[index + 1] = rangeEnd + 1; + // Make sure the next range pair doesn’t overlap, e.g. `[0, 11, 12, 14]` + // and you add 5-15 → `[0, 16]`, i.e. remove the `12,14` part. + added = true; + // Note: we cannot `return` just yet. + } + + else if (rangeStart <= start && rangeEnd + 1 >= end) { + // The new range is a superset of the old range. + data[index] = rangeStart; + data[index + 1] = rangeEnd + 1; + added = true; + } + + index += 2; + } + // The loop has finished without doing anything; add the new pair to the end + // of the data set. 
+ if (!added) { + data.push(rangeStart, rangeEnd + 1); + } + return data; + }; + + var dataContains = function(data, codePoint) { + var index = 0; + var length = data.length; + // Exit early if `codePoint` is not within `data`’s overall range. + var start = data[index]; + var end = data[length - 1]; + if (length >= 2) { + if (codePoint < start || codePoint > end) { + return false; + } + } + // Iterate over the data per `(start, end)` pair. + while (index < length) { + start = data[index]; + end = data[index + 1]; + if (codePoint >= start && codePoint < end) { + return true; + } + index += 2; + } + return false; + }; + + var dataIntersection = function(data, codePoints) { + var index = 0; + var length = codePoints.length; + var codePoint; + var result = []; + while (index < length) { + codePoint = codePoints[index]; + if (dataContains(data, codePoint)) { + result.push(codePoint); + } + ++index; + } + return dataFromCodePoints(result); + }; + + var dataIsEmpty = function(data) { + return !data.length; + }; + + var dataIsSingleton = function(data) { + // Check if the set only represents a single code point. + return data.length == 2 && data[0] + 1 == data[1]; + }; + + var dataToArray = function(data) { + // Iterate over the data per `(start, end)` pair. + var index = 0; + var start; + var end; + var result = []; + var length = data.length; + while (index < length) { + start = data[index]; + end = data[index + 1]; + while (start < end) { + result.push(start); + ++start; + } + index += 2; + } + return result; + }; + + /*--------------------------------------------------------------------------*/ + + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + var floor = Math.floor; + var highSurrogate = function(codePoint) { + return parseInt( + floor((codePoint - 0x10000) / 0x400) + HIGH_SURROGATE_MIN, + 10 + ); + }; + + var lowSurrogate = function(codePoint) { + return parseInt( + (codePoint - 0x10000) % 0x400 + LOW_SURROGATE_MIN, + 10 + ); + }; + + var stringFromCharCode = String.fromCharCode; + var codePointToString = function(codePoint) { + var string; + // https://mathiasbynens.be/notes/javascript-escapes#single + // Note: the `\b` escape sequence for U+0008 BACKSPACE in strings has a + // different meaning in regular expressions (word boundary), so it cannot + // be used here. + if (codePoint == 0x09) { + string = '\\t'; + } + // Note: IE < 9 treats `'\v'` as `'v'`, so avoid using it. + // else if (codePoint == 0x0B) { + // string = '\\v'; + // } + else if (codePoint == 0x0A) { + string = '\\n'; + } + else if (codePoint == 0x0C) { + string = '\\f'; + } + else if (codePoint == 0x0D) { + string = '\\r'; + } + else if (codePoint == 0x5C) { + string = '\\\\'; + } + else if ( + codePoint == 0x24 || + (codePoint >= 0x28 && codePoint <= 0x2B) || + codePoint == 0x2D || codePoint == 0x2E || codePoint == 0x3F || + (codePoint >= 0x5B && codePoint <= 0x5E) || + (codePoint >= 0x7B && codePoint <= 0x7D) + ) { + // The code point maps to an unsafe printable ASCII character; + // backslash-escape it. Here’s the list of those symbols: + // + // $()*+-.?[\]^{|} + // + // See #7 for more info. + string = '\\' + stringFromCharCode(codePoint); + } + else if (codePoint >= 0x20 && codePoint <= 0x7E) { + // The code point maps to one of these printable ASCII symbols + // (including the space character): + // + // !"#%&',/0123456789:;<=>@ABCDEFGHIJKLMNO + // PQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz~ + // + // These can safely be used directly. 
+ string = stringFromCharCode(codePoint); + } + else if (codePoint <= 0xFF) { + // https://mathiasbynens.be/notes/javascript-escapes#hexadecimal + string = '\\x' + pad(hex(codePoint), 2); + } + else { // `codePoint <= 0xFFFF` holds true. + // https://mathiasbynens.be/notes/javascript-escapes#unicode + string = '\\u' + pad(hex(codePoint), 4); + } + + // There’s no need to account for astral symbols / surrogate pairs here, + // since `codePointToString` is private and only used for BMP code points. + // But if that’s what you need, just add an `else` block with this code: + // + // string = '\\u' + pad(hex(highSurrogate(codePoint)), 4) + // + '\\u' + pad(hex(lowSurrogate(codePoint)), 4); + + return string; + }; + + var codePointToStringUnicode = function(codePoint) { + if (codePoint <= 0xFFFF) { + return codePointToString(codePoint); + } + return '\\u{' + codePoint.toString(16).toUpperCase() + '}'; + }; + + var symbolToCodePoint = function(symbol) { + var length = symbol.length; + var first = symbol.charCodeAt(0); + var second; + if ( + first >= HIGH_SURROGATE_MIN && first <= HIGH_SURROGATE_MAX && + length > 1 // There is a next code unit. + ) { + // `first` is a high surrogate, and there is a next character. Assume + // it’s a low surrogate (else it’s invalid usage of Regenerate anyway). + second = symbol.charCodeAt(1); + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + return (first - HIGH_SURROGATE_MIN) * 0x400 + + second - LOW_SURROGATE_MIN + 0x10000; + } + return first; + }; + + var createBMPCharacterClasses = function(data) { + // Iterate over the data per `(start, end)` pair. + var result = ''; + var index = 0; + var start; + var end; + var length = data.length; + if (dataIsSingleton(data)) { + return codePointToString(data[0]); + } + while (index < length) { + start = data[index]; + end = data[index + 1] - 1; // Note: the `- 1` makes `end` inclusive. + if (start == end) { + result += codePointToString(start); + } else if (start + 1 == end) { + result += codePointToString(start) + codePointToString(end); + } else { + result += codePointToString(start) + '-' + codePointToString(end); + } + index += 2; + } + return '[' + result + ']'; + }; + + var createUnicodeCharacterClasses = function(data) { + // Iterate over the data per `(start, end)` pair. + var result = ''; + var index = 0; + var start; + var end; + var length = data.length; + if (dataIsSingleton(data)) { + return codePointToStringUnicode(data[0]); + } + while (index < length) { + start = data[index]; + end = data[index + 1] - 1; // Note: the `- 1` makes `end` inclusive. + if (start == end) { + result += codePointToStringUnicode(start); + } else if (start + 1 == end) { + result += codePointToStringUnicode(start) + codePointToStringUnicode(end); + } else { + result += codePointToStringUnicode(start) + '-' + codePointToStringUnicode(end); + } + index += 2; + } + return '[' + result + ']'; + }; + + var splitAtBMP = function(data) { + // Iterate over the data per `(start, end)` pair. + var loneHighSurrogates = []; + var loneLowSurrogates = []; + var bmp = []; + var astral = []; + var index = 0; + var start; + var end; + var length = data.length; + while (index < length) { + start = data[index]; + end = data[index + 1] - 1; // Note: the `- 1` makes `end` inclusive. + + if (start < HIGH_SURROGATE_MIN) { + + // The range starts and ends before the high surrogate range. + // E.g. (0, 0x10). 
+ if (end < HIGH_SURROGATE_MIN) { + bmp.push(start, end + 1); + } + + // The range starts before the high surrogate range and ends within it. + // E.g. (0, 0xD855). + if (end >= HIGH_SURROGATE_MIN && end <= HIGH_SURROGATE_MAX) { + bmp.push(start, HIGH_SURROGATE_MIN); + loneHighSurrogates.push(HIGH_SURROGATE_MIN, end + 1); + } + + // The range starts before the high surrogate range and ends in the low + // surrogate range. E.g. (0, 0xDCFF). + if (end >= LOW_SURROGATE_MIN && end <= LOW_SURROGATE_MAX) { + bmp.push(start, HIGH_SURROGATE_MIN); + loneHighSurrogates.push(HIGH_SURROGATE_MIN, HIGH_SURROGATE_MAX + 1); + loneLowSurrogates.push(LOW_SURROGATE_MIN, end + 1); + } + + // The range starts before the high surrogate range and ends after the + // low surrogate range. E.g. (0, 0x10FFFF). + if (end > LOW_SURROGATE_MAX) { + bmp.push(start, HIGH_SURROGATE_MIN); + loneHighSurrogates.push(HIGH_SURROGATE_MIN, HIGH_SURROGATE_MAX + 1); + loneLowSurrogates.push(LOW_SURROGATE_MIN, LOW_SURROGATE_MAX + 1); + if (end <= 0xFFFF) { + bmp.push(LOW_SURROGATE_MAX + 1, end + 1); + } else { + bmp.push(LOW_SURROGATE_MAX + 1, 0xFFFF + 1); + astral.push(0xFFFF + 1, end + 1); + } + } + + } else if (start >= HIGH_SURROGATE_MIN && start <= HIGH_SURROGATE_MAX) { + + // The range starts and ends in the high surrogate range. + // E.g. (0xD855, 0xD866). + if (end >= HIGH_SURROGATE_MIN && end <= HIGH_SURROGATE_MAX) { + loneHighSurrogates.push(start, end + 1); + } + + // The range starts in the high surrogate range and ends in the low + // surrogate range. E.g. (0xD855, 0xDCFF). + if (end >= LOW_SURROGATE_MIN && end <= LOW_SURROGATE_MAX) { + loneHighSurrogates.push(start, HIGH_SURROGATE_MAX + 1); + loneLowSurrogates.push(LOW_SURROGATE_MIN, end + 1); + } + + // The range starts in the high surrogate range and ends after the low + // surrogate range. E.g. (0xD855, 0x10FFFF). + if (end > LOW_SURROGATE_MAX) { + loneHighSurrogates.push(start, HIGH_SURROGATE_MAX + 1); + loneLowSurrogates.push(LOW_SURROGATE_MIN, LOW_SURROGATE_MAX + 1); + if (end <= 0xFFFF) { + bmp.push(LOW_SURROGATE_MAX + 1, end + 1); + } else { + bmp.push(LOW_SURROGATE_MAX + 1, 0xFFFF + 1); + astral.push(0xFFFF + 1, end + 1); + } + } + + } else if (start >= LOW_SURROGATE_MIN && start <= LOW_SURROGATE_MAX) { + + // The range starts and ends in the low surrogate range. + // E.g. (0xDCFF, 0xDDFF). + if (end >= LOW_SURROGATE_MIN && end <= LOW_SURROGATE_MAX) { + loneLowSurrogates.push(start, end + 1); + } + + // The range starts in the low surrogate range and ends after the low + // surrogate range. E.g. (0xDCFF, 0x10FFFF). + if (end > LOW_SURROGATE_MAX) { + loneLowSurrogates.push(start, LOW_SURROGATE_MAX + 1); + if (end <= 0xFFFF) { + bmp.push(LOW_SURROGATE_MAX + 1, end + 1); + } else { + bmp.push(LOW_SURROGATE_MAX + 1, 0xFFFF + 1); + astral.push(0xFFFF + 1, end + 1); + } + } + + } else if (start > LOW_SURROGATE_MAX && start <= 0xFFFF) { + + // The range starts and ends after the low surrogate range. + // E.g. (0xFFAA, 0x10FFFF). + if (end <= 0xFFFF) { + bmp.push(start, end + 1); + } else { + bmp.push(start, 0xFFFF + 1); + astral.push(0xFFFF + 1, end + 1); + } + + } else { + + // The range starts and ends in the astral range. 
+ astral.push(start, end + 1); + + } + + index += 2; + } + return { + 'loneHighSurrogates': loneHighSurrogates, + 'loneLowSurrogates': loneLowSurrogates, + 'bmp': bmp, + 'astral': astral + }; + }; + + var optimizeSurrogateMappings = function(surrogateMappings) { + var result = []; + var tmpLow = []; + var addLow = false; + var mapping; + var nextMapping; + var highSurrogates; + var lowSurrogates; + var nextHighSurrogates; + var nextLowSurrogates; + var index = -1; + var length = surrogateMappings.length; + while (++index < length) { + mapping = surrogateMappings[index]; + nextMapping = surrogateMappings[index + 1]; + if (!nextMapping) { + result.push(mapping); + continue; + } + highSurrogates = mapping[0]; + lowSurrogates = mapping[1]; + nextHighSurrogates = nextMapping[0]; + nextLowSurrogates = nextMapping[1]; + + // Check for identical high surrogate ranges. + tmpLow = lowSurrogates; + while ( + nextHighSurrogates && + highSurrogates[0] == nextHighSurrogates[0] && + highSurrogates[1] == nextHighSurrogates[1] + ) { + // Merge with the next item. + if (dataIsSingleton(nextLowSurrogates)) { + tmpLow = dataAdd(tmpLow, nextLowSurrogates[0]); + } else { + tmpLow = dataAddRange( + tmpLow, + nextLowSurrogates[0], + nextLowSurrogates[1] - 1 + ); + } + ++index; + mapping = surrogateMappings[index]; + highSurrogates = mapping[0]; + lowSurrogates = mapping[1]; + nextMapping = surrogateMappings[index + 1]; + nextHighSurrogates = nextMapping && nextMapping[0]; + nextLowSurrogates = nextMapping && nextMapping[1]; + addLow = true; + } + result.push([ + highSurrogates, + addLow ? tmpLow : lowSurrogates + ]); + addLow = false; + } + return optimizeByLowSurrogates(result); + }; + + var optimizeByLowSurrogates = function(surrogateMappings) { + if (surrogateMappings.length == 1) { + return surrogateMappings; + } + var index = -1; + var innerIndex = -1; + while (++index < surrogateMappings.length) { + var mapping = surrogateMappings[index]; + var lowSurrogates = mapping[1]; + var lowSurrogateStart = lowSurrogates[0]; + var lowSurrogateEnd = lowSurrogates[1]; + innerIndex = index; // Note: the loop starts at the next index. + while (++innerIndex < surrogateMappings.length) { + var otherMapping = surrogateMappings[innerIndex]; + var otherLowSurrogates = otherMapping[1]; + var otherLowSurrogateStart = otherLowSurrogates[0]; + var otherLowSurrogateEnd = otherLowSurrogates[1]; + if ( + lowSurrogateStart == otherLowSurrogateStart && + lowSurrogateEnd == otherLowSurrogateEnd + ) { + // Add the code points in the other item to this one. + if (dataIsSingleton(otherMapping[0])) { + mapping[0] = dataAdd(mapping[0], otherMapping[0][0]); + } else { + mapping[0] = dataAddRange( + mapping[0], + otherMapping[0][0], + otherMapping[0][1] - 1 + ); + } + // Remove the other, now redundant, item. + surrogateMappings.splice(innerIndex, 1); + --innerIndex; + } + } + } + return surrogateMappings; + }; + + var surrogateSet = function(data) { + // Exit early if `data` is an empty set. + if (!data.length) { + return []; + } + + // Iterate over the data per `(start, end)` pair. 
+ var index = 0; + var start; + var end; + var startHigh; + var startLow; + var prevStartHigh = 0; + var prevEndHigh = 0; + var tmpLow = []; + var endHigh; + var endLow; + var surrogateMappings = []; + var length = data.length; + var dataHigh = []; + while (index < length) { + start = data[index]; + end = data[index + 1] - 1; + + startHigh = highSurrogate(start); + startLow = lowSurrogate(start); + endHigh = highSurrogate(end); + endLow = lowSurrogate(end); + + var startsWithLowestLowSurrogate = startLow == LOW_SURROGATE_MIN; + var endsWithHighestLowSurrogate = endLow == LOW_SURROGATE_MAX; + var complete = false; + + // Append the previous high-surrogate-to-low-surrogate mappings. + // Step 1: `(startHigh, startLow)` to `(startHigh, LOW_SURROGATE_MAX)`. + if ( + startHigh == endHigh || + startsWithLowestLowSurrogate && endsWithHighestLowSurrogate + ) { + surrogateMappings.push([ + [startHigh, endHigh + 1], + [startLow, endLow + 1] + ]); + complete = true; + } else { + surrogateMappings.push([ + [startHigh, startHigh + 1], + [startLow, LOW_SURROGATE_MAX + 1] + ]); + } + + // Step 2: `(startHigh + 1, LOW_SURROGATE_MIN)` to + // `(endHigh - 1, LOW_SURROGATE_MAX)`. + if (!complete && startHigh + 1 < endHigh) { + if (endsWithHighestLowSurrogate) { + // Combine step 2 and step 3. + surrogateMappings.push([ + [startHigh + 1, endHigh + 1], + [LOW_SURROGATE_MIN, endLow + 1] + ]); + complete = true; + } else { + surrogateMappings.push([ + [startHigh + 1, endHigh], + [LOW_SURROGATE_MIN, LOW_SURROGATE_MAX + 1] + ]); + } + } + + // Step 3. `(endHigh, LOW_SURROGATE_MIN)` to `(endHigh, endLow)`. + if (!complete) { + surrogateMappings.push([ + [endHigh, endHigh + 1], + [LOW_SURROGATE_MIN, endLow + 1] + ]); + } + + prevStartHigh = startHigh; + prevEndHigh = endHigh; + + index += 2; + } + + // The format of `surrogateMappings` is as follows: + // + // [ surrogateMapping1, surrogateMapping2 ] + // + // i.e.: + // + // [ + // [ highSurrogates1, lowSurrogates1 ], + // [ highSurrogates2, lowSurrogates2 ] + // ] + return optimizeSurrogateMappings(surrogateMappings); + }; + + var createSurrogateCharacterClasses = function(surrogateMappings) { + var result = []; + forEach(surrogateMappings, function(surrogateMapping) { + var highSurrogates = surrogateMapping[0]; + var lowSurrogates = surrogateMapping[1]; + result.push( + createBMPCharacterClasses(highSurrogates) + + createBMPCharacterClasses(lowSurrogates) + ); + }); + return result.join('|'); + }; + + var createCharacterClassesFromData = function(data, bmpOnly, hasUnicodeFlag) { + if (hasUnicodeFlag) { + return createUnicodeCharacterClasses(data); + } + var result = []; + + var parts = splitAtBMP(data); + var loneHighSurrogates = parts.loneHighSurrogates; + var loneLowSurrogates = parts.loneLowSurrogates; + var bmp = parts.bmp; + var astral = parts.astral; + var hasAstral = !dataIsEmpty(parts.astral); + var hasLoneHighSurrogates = !dataIsEmpty(loneHighSurrogates); + var hasLoneLowSurrogates = !dataIsEmpty(loneLowSurrogates); + + var surrogateMappings = surrogateSet(astral); + + if (bmpOnly) { + bmp = dataAddData(bmp, loneHighSurrogates); + hasLoneHighSurrogates = false; + bmp = dataAddData(bmp, loneLowSurrogates); + hasLoneLowSurrogates = false; + } + + if (!dataIsEmpty(bmp)) { + // The data set contains BMP code points that are not high surrogates + // needed for astral code points in the set. 
+ result.push(createBMPCharacterClasses(bmp)); + } + if (surrogateMappings.length) { + // The data set contains astral code points; append character classes + // based on their surrogate pairs. + result.push(createSurrogateCharacterClasses(surrogateMappings)); + } + // https://gist.github.com/mathiasbynens/bbe7f870208abcfec860 + if (hasLoneHighSurrogates) { + result.push( + createBMPCharacterClasses(loneHighSurrogates) + + // Make sure the high surrogates aren’t part of a surrogate pair. + '(?![\\uDC00-\\uDFFF])' + ); + } + if (hasLoneLowSurrogates) { + result.push( + // It is not possible to accurately assert the low surrogates aren’t + // part of a surrogate pair, since JavaScript regular expressions do + // not support lookbehind. + '(?:[^\\uD800-\\uDBFF]|^)' + + createBMPCharacterClasses(loneLowSurrogates) + ); + } + return result.join('|'); + }; + + /*--------------------------------------------------------------------------*/ + + // `regenerate` can be used as a constructor (and new methods can be added to + // its prototype) but also as a regular function, the latter of which is the + // documented and most common usage. For that reason, it’s not capitalized. + var regenerate = function(value) { + if (arguments.length > 1) { + value = slice.call(arguments); + } + if (this instanceof regenerate) { + this.data = []; + return value ? this.add(value) : this; + } + return (new regenerate).add(value); + }; + + regenerate.version = '1.3.1'; + + var proto = regenerate.prototype; + extend(proto, { + 'add': function(value) { + var $this = this; + if (value == null) { + return $this; + } + if (value instanceof regenerate) { + // Allow passing other Regenerate instances. + $this.data = dataAddData($this.data, value.data); + return $this; + } + if (arguments.length > 1) { + value = slice.call(arguments); + } + if (isArray(value)) { + forEach(value, function(item) { + $this.add(item); + }); + return $this; + } + $this.data = dataAdd( + $this.data, + isNumber(value) ? value : symbolToCodePoint(value) + ); + return $this; + }, + 'remove': function(value) { + var $this = this; + if (value == null) { + return $this; + } + if (value instanceof regenerate) { + // Allow passing other Regenerate instances. + $this.data = dataRemoveData($this.data, value.data); + return $this; + } + if (arguments.length > 1) { + value = slice.call(arguments); + } + if (isArray(value)) { + forEach(value, function(item) { + $this.remove(item); + }); + return $this; + } + $this.data = dataRemove( + $this.data, + isNumber(value) ? value : symbolToCodePoint(value) + ); + return $this; + }, + 'addRange': function(start, end) { + var $this = this; + $this.data = dataAddRange($this.data, + isNumber(start) ? start : symbolToCodePoint(start), + isNumber(end) ? end : symbolToCodePoint(end) + ); + return $this; + }, + 'removeRange': function(start, end) { + var $this = this; + var startCodePoint = isNumber(start) ? start : symbolToCodePoint(start); + var endCodePoint = isNumber(end) ? end : symbolToCodePoint(end); + $this.data = dataRemoveRange( + $this.data, + startCodePoint, + endCodePoint + ); + return $this; + }, + 'intersection': function(argument) { + var $this = this; + // Allow passing other Regenerate instances. + // TODO: Optimize this by writing and using `dataIntersectionData()`. + var array = argument instanceof regenerate ? 
+ dataToArray(argument.data) : + argument; + $this.data = dataIntersection($this.data, array); + return $this; + }, + 'contains': function(codePoint) { + return dataContains( + this.data, + isNumber(codePoint) ? codePoint : symbolToCodePoint(codePoint) + ); + }, + 'clone': function() { + var set = new regenerate; + set.data = this.data.slice(0); + return set; + }, + 'toString': function(options) { + var result = createCharacterClassesFromData( + this.data, + options ? options.bmpOnly : false, + options ? options.hasUnicodeFlag : false + ); + if (!result) { + // For an empty set, return something that can be inserted `/here/` to + // form a valid regular expression. Avoid `(?:)` since that matches the + // empty string. + return '[]'; + } + // Use `\0` instead of `\x00` where possible. + return result.replace(regexNull, '\\0$1'); + }, + 'toRegExp': function(flags) { + var pattern = this.toString( + flags && flags.indexOf('u') != -1 ? + { 'hasUnicodeFlag': true } : + null + ); + return RegExp(pattern, flags || ''); + }, + 'valueOf': function() { // Note: `valueOf` is aliased as `toArray`. + return dataToArray(this.data); + } + }); + + proto.toArray = proto.valueOf; + + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define(function() { + return regenerate; + }); + } else if (freeExports && !freeExports.nodeType) { + if (freeModule) { // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = regenerate; + } else { // in Narwhal or RingoJS v0.7.0- + freeExports.regenerate = regenerate; + } + } else { // in Rhino or a web browser + root.regenerate = regenerate; + } + +}(this)); diff --git a/node_modules/regenerator-runtime/package.json b/node_modules/regenerator-runtime/package.json new file mode 100644 index 0000000..062e891 --- /dev/null +++ b/node_modules/regenerator-runtime/package.json @@ -0,0 +1,18 @@ +{ + "name": "regenerator-runtime", + "author": "Ben Newman ", + "description": "Runtime for Regenerator-compiled generator and async functions.", + "version": "0.9.5", + "main": "runtime-module.js", + "keywords": [ + "regenerator", + "runtime", + "generator", + "async" + ], + "repository": { + "type": "git", + "url": "https://github.com/facebook/regenerator/tree/master/packages/regenerator-runtime" + }, + "license": "MIT" +} diff --git a/node_modules/regenerator-runtime/path.js b/node_modules/regenerator-runtime/path.js new file mode 100644 index 0000000..a450a75 --- /dev/null +++ b/node_modules/regenerator-runtime/path.js @@ -0,0 +1,4 @@ +exports.path = require("path").join( + __dirname, + "runtime.js" +); diff --git a/node_modules/regenerator-runtime/runtime-module.js b/node_modules/regenerator-runtime/runtime-module.js new file mode 100644 index 0000000..8e7e2e4 --- /dev/null +++ b/node_modules/regenerator-runtime/runtime-module.js @@ -0,0 +1,31 @@ +// This method of obtaining a reference to the global object needs to be +// kept identical to the way it is obtained in runtime.js +var g = + typeof global === "object" ? global : + typeof window === "object" ? window : + typeof self === "object" ? self : this; + +// Use `getOwnPropertyNames` because not all browsers support calling +// `hasOwnProperty` on the global `self` object in a worker. See #183. +var hadRuntime = g.regeneratorRuntime && + Object.getOwnPropertyNames(g).indexOf("regeneratorRuntime") >= 0; + +// Save the old regeneratorRuntime in case it needs to be restored later. 
+var oldRuntime = hadRuntime && g.regeneratorRuntime; + +// Force reevalutation of runtime.js. +g.regeneratorRuntime = undefined; + +module.exports = require("./runtime"); + +if (hadRuntime) { + // Restore the original runtime. + g.regeneratorRuntime = oldRuntime; +} else { + // Remove the global property added by runtime.js. + try { + delete g.regeneratorRuntime; + } catch(e) { + g.regeneratorRuntime = undefined; + } +} diff --git a/node_modules/regenerator-runtime/runtime.js b/node_modules/regenerator-runtime/runtime.js new file mode 100644 index 0000000..b6f0f39 --- /dev/null +++ b/node_modules/regenerator-runtime/runtime.js @@ -0,0 +1,668 @@ +/** + * Copyright (c) 2014, Facebook, Inc. + * All rights reserved. + * + * This source code is licensed under the BSD-style license found in the + * https://raw.github.com/facebook/regenerator/master/LICENSE file. An + * additional grant of patent rights can be found in the PATENTS file in + * the same directory. + */ + +!(function(global) { + "use strict"; + + var hasOwn = Object.prototype.hasOwnProperty; + var undefined; // More compressible than void 0. + var $Symbol = typeof Symbol === "function" ? Symbol : {}; + var iteratorSymbol = $Symbol.iterator || "@@iterator"; + var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag"; + + var inModule = typeof module === "object"; + var runtime = global.regeneratorRuntime; + if (runtime) { + if (inModule) { + // If regeneratorRuntime is defined globally and we're in a module, + // make the exports object identical to regeneratorRuntime. + module.exports = runtime; + } + // Don't bother evaluating the rest of this file if the runtime was + // already defined globally. + return; + } + + // Define the runtime globally (as expected by generated code) as either + // module.exports (if we're in a module) or a new, empty object. + runtime = global.regeneratorRuntime = inModule ? module.exports : {}; + + function wrap(innerFn, outerFn, self, tryLocsList) { + // If outerFn provided, then outerFn.prototype instanceof Generator. + var generator = Object.create((outerFn || Generator).prototype); + var context = new Context(tryLocsList || []); + + // The ._invoke method unifies the implementations of the .next, + // .throw, and .return methods. + generator._invoke = makeInvokeMethod(innerFn, self, context); + + return generator; + } + runtime.wrap = wrap; + + // Try/catch helper to minimize deoptimizations. Returns a completion + // record like context.tryEntries[i].completion. This interface could + // have been (and was previously) designed to take a closure to be + // invoked without arguments, but in all the cases we care about we + // already have an existing method we want to call, so there's no need + // to create a new function object. We can even get away with assuming + // the method takes exactly one argument, since that happens to be true + // in every case, so we don't have to touch the arguments object. The + // only additional allocation required is the completion record, which + // has a stable shape and so hopefully should be cheap to allocate. 
+ function tryCatch(fn, obj, arg) { + try { + return { type: "normal", arg: fn.call(obj, arg) }; + } catch (err) { + return { type: "throw", arg: err }; + } + } + + var GenStateSuspendedStart = "suspendedStart"; + var GenStateSuspendedYield = "suspendedYield"; + var GenStateExecuting = "executing"; + var GenStateCompleted = "completed"; + + // Returning this object from the innerFn has the same effect as + // breaking out of the dispatch switch statement. + var ContinueSentinel = {}; + + // Dummy constructor functions that we use as the .constructor and + // .constructor.prototype properties for functions that return Generator + // objects. For full spec compliance, you may wish to configure your + // minifier not to mangle the names of these two functions. + function Generator() {} + function GeneratorFunction() {} + function GeneratorFunctionPrototype() {} + + var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype; + GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype; + GeneratorFunctionPrototype.constructor = GeneratorFunction; + GeneratorFunctionPrototype[toStringTagSymbol] = GeneratorFunction.displayName = "GeneratorFunction"; + + // Helper for defining the .next, .throw, and .return methods of the + // Iterator interface in terms of a single ._invoke method. + function defineIteratorMethods(prototype) { + ["next", "throw", "return"].forEach(function(method) { + prototype[method] = function(arg) { + return this._invoke(method, arg); + }; + }); + } + + runtime.isGeneratorFunction = function(genFun) { + var ctor = typeof genFun === "function" && genFun.constructor; + return ctor + ? ctor === GeneratorFunction || + // For the native GeneratorFunction constructor, the best we can + // do is to check its .name property. + (ctor.displayName || ctor.name) === "GeneratorFunction" + : false; + }; + + runtime.mark = function(genFun) { + if (Object.setPrototypeOf) { + Object.setPrototypeOf(genFun, GeneratorFunctionPrototype); + } else { + genFun.__proto__ = GeneratorFunctionPrototype; + if (!(toStringTagSymbol in genFun)) { + genFun[toStringTagSymbol] = "GeneratorFunction"; + } + } + genFun.prototype = Object.create(Gp); + return genFun; + }; + + // Within the body of any async function, `await x` is transformed to + // `yield regeneratorRuntime.awrap(x)`, so that the runtime can test + // `value instanceof AwaitArgument` to determine if the yielded value is + // meant to be awaited. Some may consider the name of this method too + // cutesy, but they are curmudgeons. + runtime.awrap = function(arg) { + return new AwaitArgument(arg); + }; + + function AwaitArgument(arg) { + this.arg = arg; + } + + function AsyncIterator(generator) { + function invoke(method, arg, resolve, reject) { + var record = tryCatch(generator[method], generator, arg); + if (record.type === "throw") { + reject(record.arg); + } else { + var result = record.arg; + var value = result.value; + if (value instanceof AwaitArgument) { + return Promise.resolve(value.arg).then(function(value) { + invoke("next", value, resolve, reject); + }, function(err) { + invoke("throw", err, resolve, reject); + }); + } + + return Promise.resolve(value).then(function(unwrapped) { + // When a yielded Promise is resolved, its final value becomes + // the .value of the Promise<{value,done}> result for the + // current iteration. If the Promise is rejected, however, the + // result for this iteration will be rejected with the same + // reason. 
Note that rejections of yielded Promises are not + // thrown back into the generator function, as is the case + // when an awaited Promise is rejected. This difference in + // behavior between yield and await is important, because it + // allows the consumer to decide what to do with the yielded + // rejection (swallow it and continue, manually .throw it back + // into the generator, abandon iteration, whatever). With + // await, by contrast, there is no opportunity to examine the + // rejection reason outside the generator function, so the + // only option is to throw it from the await expression, and + // let the generator function handle the exception. + result.value = unwrapped; + resolve(result); + }, reject); + } + } + + if (typeof process === "object" && process.domain) { + invoke = process.domain.bind(invoke); + } + + var previousPromise; + + function enqueue(method, arg) { + function callInvokeWithMethodAndArg() { + return new Promise(function(resolve, reject) { + invoke(method, arg, resolve, reject); + }); + } + + return previousPromise = + // If enqueue has been called before, then we want to wait until + // all previous Promises have been resolved before calling invoke, + // so that results are always delivered in the correct order. If + // enqueue has not been called before, then it is important to + // call invoke immediately, without waiting on a callback to fire, + // so that the async generator function has the opportunity to do + // any necessary setup in a predictable way. This predictability + // is why the Promise constructor synchronously invokes its + // executor callback, and why async functions synchronously + // execute code before the first await. Since we implement simple + // async functions in terms of async generators, it is especially + // important to get this right, even though it requires care. + previousPromise ? previousPromise.then( + callInvokeWithMethodAndArg, + // Avoid propagating failures to Promises returned by later + // invocations of the iterator. + callInvokeWithMethodAndArg + ) : callInvokeWithMethodAndArg(); + } + + // Define the unified helper method that is used to implement .next, + // .throw, and .return (see defineIteratorMethods). + this._invoke = enqueue; + } + + defineIteratorMethods(AsyncIterator.prototype); + + // Note that simple async functions are implemented on top of + // AsyncIterator objects; they just return a Promise for the value of + // the final result produced by the iterator. + runtime.async = function(innerFn, outerFn, self, tryLocsList) { + var iter = new AsyncIterator( + wrap(innerFn, outerFn, self, tryLocsList) + ); + + return runtime.isGeneratorFunction(outerFn) + ? iter // If outerFn is a generator, return the full iterator. + : iter.next().then(function(result) { + return result.done ? 
result.value : iter.next(); + }); + }; + + function makeInvokeMethod(innerFn, self, context) { + var state = GenStateSuspendedStart; + + return function invoke(method, arg) { + if (state === GenStateExecuting) { + throw new Error("Generator is already running"); + } + + if (state === GenStateCompleted) { + if (method === "throw") { + throw arg; + } + + // Be forgiving, per 25.3.3.3.3 of the spec: + // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume + return doneResult(); + } + + while (true) { + var delegate = context.delegate; + if (delegate) { + if (method === "return" || + (method === "throw" && delegate.iterator[method] === undefined)) { + // A return or throw (when the delegate iterator has no throw + // method) always terminates the yield* loop. + context.delegate = null; + + // If the delegate iterator has a return method, give it a + // chance to clean up. + var returnMethod = delegate.iterator["return"]; + if (returnMethod) { + var record = tryCatch(returnMethod, delegate.iterator, arg); + if (record.type === "throw") { + // If the return method threw an exception, let that + // exception prevail over the original return or throw. + method = "throw"; + arg = record.arg; + continue; + } + } + + if (method === "return") { + // Continue with the outer return, now that the delegate + // iterator has been terminated. + continue; + } + } + + var record = tryCatch( + delegate.iterator[method], + delegate.iterator, + arg + ); + + if (record.type === "throw") { + context.delegate = null; + + // Like returning generator.throw(uncaught), but without the + // overhead of an extra function call. + method = "throw"; + arg = record.arg; + continue; + } + + // Delegate generator ran and handled its own exceptions so + // regardless of what the method was, we continue as if it is + // "next" with an undefined arg. + method = "next"; + arg = undefined; + + var info = record.arg; + if (info.done) { + context[delegate.resultName] = info.value; + context.next = delegate.nextLoc; + } else { + state = GenStateSuspendedYield; + return info; + } + + context.delegate = null; + } + + if (method === "next") { + // Setting context._sent for legacy support of Babel's + // function.sent implementation. + context.sent = context._sent = arg; + + } else if (method === "throw") { + if (state === GenStateSuspendedStart) { + state = GenStateCompleted; + throw arg; + } + + if (context.dispatchException(arg)) { + // If the dispatched exception was caught by a catch block, + // then let that catch block handle the exception normally. + method = "next"; + arg = undefined; + } + + } else if (method === "return") { + context.abrupt("return", arg); + } + + state = GenStateExecuting; + + var record = tryCatch(innerFn, self, context); + if (record.type === "normal") { + // If an exception is thrown from innerFn, we leave state === + // GenStateExecuting and loop back for another invocation. + state = context.done + ? GenStateCompleted + : GenStateSuspendedYield; + + var info = { + value: record.arg, + done: context.done + }; + + if (record.arg === ContinueSentinel) { + if (context.delegate && method === "next") { + // Deliberately forget the last sent value so that we don't + // accidentally pass it on to the delegate. + arg = undefined; + } + } else { + return info; + } + + } else if (record.type === "throw") { + state = GenStateCompleted; + // Dispatch the exception by looping back around to the + // context.dispatchException(arg) call above. 
+ method = "throw"; + arg = record.arg; + } + } + }; + } + + // Define Generator.prototype.{next,throw,return} in terms of the + // unified ._invoke helper method. + defineIteratorMethods(Gp); + + Gp[iteratorSymbol] = function() { + return this; + }; + + Gp[toStringTagSymbol] = "Generator"; + + Gp.toString = function() { + return "[object Generator]"; + }; + + function pushTryEntry(locs) { + var entry = { tryLoc: locs[0] }; + + if (1 in locs) { + entry.catchLoc = locs[1]; + } + + if (2 in locs) { + entry.finallyLoc = locs[2]; + entry.afterLoc = locs[3]; + } + + this.tryEntries.push(entry); + } + + function resetTryEntry(entry) { + var record = entry.completion || {}; + record.type = "normal"; + delete record.arg; + entry.completion = record; + } + + function Context(tryLocsList) { + // The root entry object (effectively a try statement without a catch + // or a finally block) gives us a place to store values thrown from + // locations where there is no enclosing try statement. + this.tryEntries = [{ tryLoc: "root" }]; + tryLocsList.forEach(pushTryEntry, this); + this.reset(true); + } + + runtime.keys = function(object) { + var keys = []; + for (var key in object) { + keys.push(key); + } + keys.reverse(); + + // Rather than returning an object with a next method, we keep + // things simple and return the next function itself. + return function next() { + while (keys.length) { + var key = keys.pop(); + if (key in object) { + next.value = key; + next.done = false; + return next; + } + } + + // To avoid creating an additional object, we just hang the .value + // and .done properties off the next function object itself. This + // also ensures that the minifier will not anonymize the function. + next.done = true; + return next; + }; + }; + + function values(iterable) { + if (iterable) { + var iteratorMethod = iterable[iteratorSymbol]; + if (iteratorMethod) { + return iteratorMethod.call(iterable); + } + + if (typeof iterable.next === "function") { + return iterable; + } + + if (!isNaN(iterable.length)) { + var i = -1, next = function next() { + while (++i < iterable.length) { + if (hasOwn.call(iterable, i)) { + next.value = iterable[i]; + next.done = false; + return next; + } + } + + next.value = undefined; + next.done = true; + + return next; + }; + + return next.next = next; + } + } + + // Return an iterator with no values. + return { next: doneResult }; + } + runtime.values = values; + + function doneResult() { + return { value: undefined, done: true }; + } + + Context.prototype = { + constructor: Context, + + reset: function(skipTempReset) { + this.prev = 0; + this.next = 0; + // Resetting context._sent for legacy support of Babel's + // function.sent implementation. 
+ this.sent = this._sent = undefined; + this.done = false; + this.delegate = null; + + this.tryEntries.forEach(resetTryEntry); + + if (!skipTempReset) { + for (var name in this) { + // Not sure about the optimal order of these conditions: + if (name.charAt(0) === "t" && + hasOwn.call(this, name) && + !isNaN(+name.slice(1))) { + this[name] = undefined; + } + } + } + }, + + stop: function() { + this.done = true; + + var rootEntry = this.tryEntries[0]; + var rootRecord = rootEntry.completion; + if (rootRecord.type === "throw") { + throw rootRecord.arg; + } + + return this.rval; + }, + + dispatchException: function(exception) { + if (this.done) { + throw exception; + } + + var context = this; + function handle(loc, caught) { + record.type = "throw"; + record.arg = exception; + context.next = loc; + return !!caught; + } + + for (var i = this.tryEntries.length - 1; i >= 0; --i) { + var entry = this.tryEntries[i]; + var record = entry.completion; + + if (entry.tryLoc === "root") { + // Exception thrown outside of any try block that could handle + // it, so set the completion value of the entire function to + // throw the exception. + return handle("end"); + } + + if (entry.tryLoc <= this.prev) { + var hasCatch = hasOwn.call(entry, "catchLoc"); + var hasFinally = hasOwn.call(entry, "finallyLoc"); + + if (hasCatch && hasFinally) { + if (this.prev < entry.catchLoc) { + return handle(entry.catchLoc, true); + } else if (this.prev < entry.finallyLoc) { + return handle(entry.finallyLoc); + } + + } else if (hasCatch) { + if (this.prev < entry.catchLoc) { + return handle(entry.catchLoc, true); + } + + } else if (hasFinally) { + if (this.prev < entry.finallyLoc) { + return handle(entry.finallyLoc); + } + + } else { + throw new Error("try statement without catch or finally"); + } + } + } + }, + + abrupt: function(type, arg) { + for (var i = this.tryEntries.length - 1; i >= 0; --i) { + var entry = this.tryEntries[i]; + if (entry.tryLoc <= this.prev && + hasOwn.call(entry, "finallyLoc") && + this.prev < entry.finallyLoc) { + var finallyEntry = entry; + break; + } + } + + if (finallyEntry && + (type === "break" || + type === "continue") && + finallyEntry.tryLoc <= arg && + arg <= finallyEntry.finallyLoc) { + // Ignore the finally entry if control is not jumping to a + // location outside the try/catch block. + finallyEntry = null; + } + + var record = finallyEntry ? 
finallyEntry.completion : {}; + record.type = type; + record.arg = arg; + + if (finallyEntry) { + this.next = finallyEntry.finallyLoc; + } else { + this.complete(record); + } + + return ContinueSentinel; + }, + + complete: function(record, afterLoc) { + if (record.type === "throw") { + throw record.arg; + } + + if (record.type === "break" || + record.type === "continue") { + this.next = record.arg; + } else if (record.type === "return") { + this.rval = record.arg; + this.next = "end"; + } else if (record.type === "normal" && afterLoc) { + this.next = afterLoc; + } + }, + + finish: function(finallyLoc) { + for (var i = this.tryEntries.length - 1; i >= 0; --i) { + var entry = this.tryEntries[i]; + if (entry.finallyLoc === finallyLoc) { + this.complete(entry.completion, entry.afterLoc); + resetTryEntry(entry); + return ContinueSentinel; + } + } + }, + + "catch": function(tryLoc) { + for (var i = this.tryEntries.length - 1; i >= 0; --i) { + var entry = this.tryEntries[i]; + if (entry.tryLoc === tryLoc) { + var record = entry.completion; + if (record.type === "throw") { + var thrown = record.arg; + resetTryEntry(entry); + } + return thrown; + } + } + + // The context.catch method must only be called with a location + // argument that corresponds to a known catch block. + throw new Error("illegal catch attempt"); + }, + + delegateYield: function(iterable, resultName, nextLoc) { + this.delegate = { + iterator: values(iterable), + resultName: resultName, + nextLoc: nextLoc + }; + + return ContinueSentinel; + } + }; +})( + // Among the various tricks for obtaining a reference to the global + // object, this seems to be the most reliable technique that does not + // use indirect eval (which violates Content Security Policy). + typeof global === "object" ? global : + typeof window === "object" ? window : + typeof self === "object" ? self : this +); diff --git a/node_modules/regex-cache/LICENSE b/node_modules/regex-cache/LICENSE new file mode 100644 index 0000000..1e49edf --- /dev/null +++ b/node_modules/regex-cache/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
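
For orientation, here is a minimal usage sketch of the `regenerate` API vendored above (the `add`, `addRange`, `remove`, `contains`, `toString` and `toRegExp` methods defined in `node_modules/regenerate/regenerate.js`). It is an illustrative example, not part of the patch, and the exact character-class output is left out because it depends on the BMP/astral splitting shown in the source.

```js
// Illustrative sketch only (not part of the patch): exercising the vendored
// regenerate API from node_modules/regenerate/regenerate.js.
var regenerate = require('regenerate');

// Internally the set is the flat array of half-open (start, end) pairs that
// dataAdd/dataAddRange maintain.
var set = regenerate()
  .add('a', 'b', 'c')      // individual symbols or code points
  .addRange(0x61, 0x7A)    // U+0061 'a' through U+007A 'z'
  .remove('d')             // punch a hole in the range
  .add(0x1D306);           // an astral code point (needs a surrogate pair)

console.log(set.contains('q'));                       // true
console.log(set.toString());                          // BMP-safe classes joined with '|'
console.log(set.toString({ hasUnicodeFlag: true }));  // uses \u{...} escapes instead
console.log(set.toRegExp('u').test('\uD834\uDF06'));  // true (U+1D306 as a surrogate pair)
```
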
diff --git a/node_modules/regex-cache/README.md b/node_modules/regex-cache/README.md new file mode 100644 index 0000000..ab19174 --- /dev/null +++ b/node_modules/regex-cache/README.md @@ -0,0 +1,160 @@ +# regex-cache [![NPM version](https://img.shields.io/npm/v/regex-cache.svg?style=flat)](https://www.npmjs.com/package/regex-cache) [![NPM downloads](https://img.shields.io/npm/dm/regex-cache.svg?style=flat)](https://npmjs.org/package/regex-cache) [![Build Status](https://img.shields.io/travis/jonschlinkert/regex-cache.svg?style=flat)](https://travis-ci.org/jonschlinkert/regex-cache) + +> Memoize the results of a call to the RegExp constructor, avoiding repetitious runtime compilation of the same string and options, resulting in suprising performance improvements. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install regex-cache --save +``` + +* Read [what this does](#what-this-does). +* See [the benchmarks](#benchmarks) + +## Usage + +Wrap a function like this: + +```js +var cache = require('regex-cache'); +var someRegex = cache(require('some-regex-lib')); +``` + +**Caching a regex** + +If you want to cache a regex after calling `new RegExp()`, or you're requiring a module that returns a regex, wrap it with a function first: + +```js +var cache = require('regex-cache'); + +function yourRegex(str, opts) { + // do stuff to str and opts + return new RegExp(str, opts.flags); +} + +var regex = cache(yourRegex); +``` + +## Recommendations + +### Use this when... + +* **No options are passed** to the function that creates the regex. Regardless of how big or small the regex is, when zero options are passed, caching will be faster than not. +* **A few options are passed**, and the values are primitives. The limited benchmarks I did show that caching is beneficial when up to 8 or 9 options are passed. + +### Do not use this when... + +* **The values of options are not primitives**. When non-primitives must be compared for equality, the time to compare the options is most likely as long or longer than the time to just create a new regex. 
+ +### Example benchmarks + +Performance results, with and without regex-cache: + +```bash +# no args passed (defaults) + with-cache x 8,699,231 ops/sec ±0.86% (93 runs sampled) + without-cache x 2,777,551 ops/sec ±0.63% (95 runs sampled) + +# string and six options passed + with-cache x 1,885,934 ops/sec ±0.80% (93 runs sampled) + without-cache x 1,256,893 ops/sec ±0.65% (97 runs sampled) + +# string only + with-cache x 7,723,256 ops/sec ±0.87% (92 runs sampled) + without-cache x 2,303,060 ops/sec ±0.47% (99 runs sampled) + +# one option passed + with-cache x 4,179,877 ops/sec ±0.53% (100 runs sampled) + without-cache x 2,198,422 ops/sec ±0.47% (95 runs sampled) + +# two options passed + with-cache x 3,256,222 ops/sec ±0.51% (99 runs sampled) + without-cache x 2,121,401 ops/sec ±0.79% (97 runs sampled) + +# six options passed + with-cache x 1,816,018 ops/sec ±1.08% (96 runs sampled) + without-cache x 1,157,176 ops/sec ±0.53% (100 runs sampled) + +# +# diminishing returns happen about here +# + +# ten options passed + with-cache x 1,210,598 ops/sec ±0.56% (92 runs sampled) + without-cache x 1,665,588 ops/sec ±1.07% (100 runs sampled) + +# twelve options passed + with-cache x 1,042,096 ops/sec ±0.68% (92 runs sampled) + without-cache x 1,389,414 ops/sec ±0.68% (97 runs sampled) + +# twenty options passed + with-cache x 661,125 ops/sec ±0.80% (93 runs sampled) + without-cache x 1,208,757 ops/sec ±0.65% (97 runs sampled) + +# +# when non-primitive values are compared +# + +# single value on the options is an object + with-cache x 1,398,313 ops/sec ±1.05% (95 runs sampled) + without-cache x 2,228,281 ops/sec ±0.56% (99 runs sampled) +``` + +## Run benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm run benchmarks +``` + +## What this does + +If you're using `new RegExp('foo')` instead of a regex literal, it's probably because you need to dyamically generate a regex based on user options or some other potentially changing factors. + +When your function creates a string based on user inputs and passes it to the `RegExp` constructor, regex-cache caches the results. The next time the function is called if the key of a cached regex matches the user input (or no input was given), the cached regex is returned, avoiding unnecessary runtime compilation. + +Using the RegExp constructor offers a lot of flexibility, but the runtime compilation comes at a price - it's slow. Not specifically because of the call to the RegExp constructor, but **because you have to build up the string before `new RegExp()` is even called**. +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/regex-cache/issues/new). + +## Building docs + +Generate readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install verb && npm run docs +``` + +Or, if [verb](https://github.com/verbose/verb) is installed globally: + +```sh +$ verb +``` + +## Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +## Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/regex-cache/blob/master/LICENSE). 
+ +*** + +_This file was generated by [verb](https://github.com/verbose/verb), v, on April 01, 2016._ \ No newline at end of file diff --git a/node_modules/regex-cache/index.js b/node_modules/regex-cache/index.js new file mode 100644 index 0000000..13d2022 --- /dev/null +++ b/node_modules/regex-cache/index.js @@ -0,0 +1,69 @@ +/*! + * regex-cache + * + * Copyright (c) 2015 Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +var isPrimitive = require('is-primitive'); +var equal = require('is-equal-shallow'); +var basic = {}; +var cache = {}; + +/** + * Expose `regexCache` + */ + +module.exports = regexCache; + +/** + * Memoize the results of a call to the new RegExp constructor. + * + * @param {Function} fn [description] + * @param {String} str [description] + * @param {Options} options [description] + * @param {Boolean} nocompare [description] + * @return {RegExp} + */ + +function regexCache(fn, str, opts) { + var key = '_default_', regex, cached; + + if (!str && !opts) { + if (typeof fn !== 'function') { + return fn; + } + return basic[key] || (basic[key] = fn(str)); + } + + var isString = typeof str === 'string'; + if (isString) { + if (!opts) { + return basic[str] || (basic[str] = fn(str)); + } + key = str; + } else { + opts = str; + } + + cached = cache[key]; + if (cached && equal(cached.opts, opts)) { + return cached.regex; + } + + memo(key, opts, (regex = fn(str, opts))); + return regex; +} + +function memo(key, opts, regex) { + cache[key] = {regex: regex, opts: opts}; +} + +/** + * Expose `cache` + */ + +module.exports.cache = cache; +module.exports.basic = basic; diff --git a/node_modules/regex-cache/package.json b/node_modules/regex-cache/package.json new file mode 100644 index 0000000..9507d58 --- /dev/null +++ b/node_modules/regex-cache/package.json @@ -0,0 +1,61 @@ +{ + "name": "regex-cache", + "description": "Memoize the results of a call to the RegExp constructor, avoiding repetitious runtime compilation of the same string and options, resulting in suprising performance improvements.", + "version": "0.4.3", + "homepage": "https://github.com/jonschlinkert/regex-cache", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/regex-cache", + "bugs": { + "url": "https://github.com/jonschlinkert/regex-cache/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha", + "benchmarks": "node benchmark" + }, + "dependencies": { + "is-equal-shallow": "^0.1.3", + "is-primitive": "^2.0.0" + }, + "devDependencies": { + "benchmarked": "^0.1.5", + "chalk": "^1.1.3", + "gulp-format-md": "^0.1.7", + "micromatch": "^2.3.7", + "should": "^8.3.0" + }, + "keywords": [ + "cache", + "expression", + "regex", + "regexp", + "regular", + "regular expression", + "store", + "to-regex" + ], + "verb": { + "run": true, + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "reflinks": [ + "verb" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/regexpu-core/LICENSE-MIT.txt b/node_modules/regexpu-core/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/regexpu-core/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including 
+without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/regexpu-core/README.md b/node_modules/regexpu-core/README.md new file mode 100644 index 0000000..1f80cd8 --- /dev/null +++ b/node_modules/regexpu-core/README.md @@ -0,0 +1,62 @@ +# regexpu-core [![Build status](https://travis-ci.org/mathiasbynens/regexpu-core.svg?branch=master)](https://travis-ci.org/mathiasbynens/regexpu-core) [![Code coverage status](http://img.shields.io/coveralls/mathiasbynens/regexpu-core/master.svg)](https://coveralls.io/r/mathiasbynens/regexpu-core) [![Dependency status](https://gemnasium.com/mathiasbynens/regexpu-core.svg)](https://gemnasium.com/mathiasbynens/regexpu-core) + +_regexpu_ is a source code transpiler that enables the use of ES6 Unicode regular expressions in JavaScript-of-today (ES5). + +_regexpu-core_ contains _regexpu_’s core functionality, i.e. `rewritePattern(pattern, flag)`, which enables rewriting regular expressions that make use of [the ES6 `u` flag](https://mathiasbynens.be/notes/es6-unicode-regex) into equivalent ES5-compatible regular expression patterns. + +## Installation + +To use _regexpu-core_ programmatically, install it as a dependency via [npm](https://www.npmjs.com/): + +```bash +npm install regexpu-core --save-dev +``` + +Then, `require` it: + +```js +const rewritePattern = require('regexpu-core'); +``` + +## API + +This module exports a single function named `rewritePattern`. + +### `rewritePattern(pattern, flags)` + +This function takes a string that represents a regular expression pattern as well as a string representing its flags, and returns an ES5-compatible version of the pattern. 
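One common way to consume the returned pattern string (a sketch; the helper name below is illustrative and not part of regexpu-core) is to strip the `u` flag and hand the rewritten source to the ES5 `RegExp` constructor:

```js
const rewritePattern = require('regexpu-core');

// Illustrative helper (not part of the package): rewrite a `u`-flagged pattern
// into ES5-compatible source, then drop the `u` flag so the result can be
// constructed and executed in an ES5 engine.
function createEs5Regex(pattern, flags) {
  const rewritten = rewritePattern(pattern, flags);
  return new RegExp(rewritten, flags.replace('u', ''));
}

const re = createEs5Regex('[\\u{1D306}-\\u{1D308}]', 'gu');
console.log(re.test('\uD834\uDF06')); // → true, even without native `u` support
```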
+ +```js +rewritePattern('foo.bar', 'u'); +// → 'foo(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\uDC00-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF])bar' + +rewritePattern('[\\u{1D306}-\\u{1D308}a-z]', 'u'); +// → '(?:[a-z]|\\uD834[\\uDF06-\\uDF08])' + +rewritePattern('[\\u{1D306}-\\u{1D308}a-z]', 'ui'); +// → '(?:[a-z\\u017F\\u212A]|\\uD834[\\uDF06-\\uDF08])' +``` + +_regexpu-core_ can rewrite non-ES6 regular expressions too, which is useful to demonstrate how their behavior changes once the `u` and `i` flags are added: + +```js +// In ES5, the dot operator only matches BMP symbols: +rewritePattern('foo.bar'); +// → 'foo(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uFFFF])bar' + +// But with the ES6 `u` flag, it matches astral symbols too: +rewritePattern('foo.bar', 'u'); +// → 'foo(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\uDC00-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF])bar' +``` + +`rewritePattern` uses [regjsgen](https://github.com/d10/regjsgen), [regjsparser](https://github.com/jviereck/regjsparser), and [regenerate](https://github.com/mathiasbynens/regenerate) as internal dependencies. + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +_regexpu-core_ is available under the [MIT](https://mths.be/mit) license. diff --git a/node_modules/regexpu-core/data/character-class-escape-sets.js b/node_modules/regexpu-core/data/character-class-escape-sets.js new file mode 100644 index 0000000..fcc482e --- /dev/null +++ b/node_modules/regexpu-core/data/character-class-escape-sets.js @@ -0,0 +1,101 @@ +// Generated by `/scripts/character-class-escape-sets.js`. Do not edit. 
+var regenerate = require('regenerate'); + +exports.REGULAR = { + 'd': regenerate() + .addRange(0x30, 0x39), + 'D': regenerate() + .addRange(0x0, 0x2F) + .addRange(0x3A, 0xFFFF), + 's': regenerate(0x20, 0xA0, 0x1680, 0x202F, 0x205F, 0x3000, 0xFEFF) + .addRange(0x9, 0xD) + .addRange(0x2000, 0x200A) + .addRange(0x2028, 0x2029), + 'S': regenerate() + .addRange(0x0, 0x8) + .addRange(0xE, 0x1F) + .addRange(0x21, 0x9F) + .addRange(0xA1, 0x167F) + .addRange(0x1681, 0x1FFF) + .addRange(0x200B, 0x2027) + .addRange(0x202A, 0x202E) + .addRange(0x2030, 0x205E) + .addRange(0x2060, 0x2FFF) + .addRange(0x3001, 0xFEFE) + .addRange(0xFF00, 0xFFFF), + 'w': regenerate(0x5F) + .addRange(0x30, 0x39) + .addRange(0x41, 0x5A) + .addRange(0x61, 0x7A), + 'W': regenerate(0x60) + .addRange(0x0, 0x2F) + .addRange(0x3A, 0x40) + .addRange(0x5B, 0x5E) + .addRange(0x7B, 0xFFFF) +}; + +exports.UNICODE = { + 'd': regenerate() + .addRange(0x30, 0x39), + 'D': regenerate() + .addRange(0x0, 0x2F) + .addRange(0x3A, 0x10FFFF), + 's': regenerate(0x20, 0xA0, 0x1680, 0x202F, 0x205F, 0x3000, 0xFEFF) + .addRange(0x9, 0xD) + .addRange(0x2000, 0x200A) + .addRange(0x2028, 0x2029), + 'S': regenerate() + .addRange(0x0, 0x8) + .addRange(0xE, 0x1F) + .addRange(0x21, 0x9F) + .addRange(0xA1, 0x167F) + .addRange(0x1681, 0x1FFF) + .addRange(0x200B, 0x2027) + .addRange(0x202A, 0x202E) + .addRange(0x2030, 0x205E) + .addRange(0x2060, 0x2FFF) + .addRange(0x3001, 0xFEFE) + .addRange(0xFF00, 0x10FFFF), + 'w': regenerate(0x5F) + .addRange(0x30, 0x39) + .addRange(0x41, 0x5A) + .addRange(0x61, 0x7A), + 'W': regenerate(0x60) + .addRange(0x0, 0x2F) + .addRange(0x3A, 0x40) + .addRange(0x5B, 0x5E) + .addRange(0x7B, 0x10FFFF) +}; + +exports.UNICODE_IGNORE_CASE = { + 'd': regenerate() + .addRange(0x30, 0x39), + 'D': regenerate() + .addRange(0x0, 0x2F) + .addRange(0x3A, 0x10FFFF), + 's': regenerate(0x20, 0xA0, 0x1680, 0x202F, 0x205F, 0x3000, 0xFEFF) + .addRange(0x9, 0xD) + .addRange(0x2000, 0x200A) + .addRange(0x2028, 0x2029), + 'S': regenerate() + .addRange(0x0, 0x8) + .addRange(0xE, 0x1F) + .addRange(0x21, 0x9F) + .addRange(0xA1, 0x167F) + .addRange(0x1681, 0x1FFF) + .addRange(0x200B, 0x2027) + .addRange(0x202A, 0x202E) + .addRange(0x2030, 0x205E) + .addRange(0x2060, 0x2FFF) + .addRange(0x3001, 0xFEFE) + .addRange(0xFF00, 0x10FFFF), + 'w': regenerate(0x5F, 0x17F, 0x212A) + .addRange(0x30, 0x39) + .addRange(0x41, 0x5A) + .addRange(0x61, 0x7A), + 'W': regenerate(0x4B, 0x53, 0x60) + .addRange(0x0, 0x2F) + .addRange(0x3A, 0x40) + .addRange(0x5B, 0x5E) + .addRange(0x7B, 0x10FFFF) +}; diff --git a/node_modules/regexpu-core/data/iu-mappings.json b/node_modules/regexpu-core/data/iu-mappings.json new file mode 100644 index 0000000..dbf36a2 --- /dev/null +++ b/node_modules/regexpu-core/data/iu-mappings.json @@ -0,0 +1,296 @@ +{ + "75": 8490, + "83": 383, + "107": 8490, + "115": 383, + "181": 924, + "197": 8491, + "383": 83, + "452": 453, + "453": 452, + "455": 456, + "456": 455, + "458": 459, + "459": 458, + "497": 498, + "498": 497, + "837": 8126, + "914": 976, + "917": 1013, + "920": 1012, + "921": 8126, + "922": 1008, + "924": 181, + "928": 982, + "929": 1009, + "931": 962, + "934": 981, + "937": 8486, + "962": 931, + "976": 914, + "977": 1012, + "981": 934, + "982": 928, + "1008": 922, + "1009": 929, + "1012": [ + 920, + 977 + ], + "1013": 917, + "7776": 7835, + "7835": 7776, + "8126": [ + 837, + 921 + ], + "8486": 937, + "8490": 75, + "8491": 197, + "66560": 66600, + "66561": 66601, + "66562": 66602, + "66563": 66603, + "66564": 66604, + "66565": 66605, + "66566": 
66606, + "66567": 66607, + "66568": 66608, + "66569": 66609, + "66570": 66610, + "66571": 66611, + "66572": 66612, + "66573": 66613, + "66574": 66614, + "66575": 66615, + "66576": 66616, + "66577": 66617, + "66578": 66618, + "66579": 66619, + "66580": 66620, + "66581": 66621, + "66582": 66622, + "66583": 66623, + "66584": 66624, + "66585": 66625, + "66586": 66626, + "66587": 66627, + "66588": 66628, + "66589": 66629, + "66590": 66630, + "66591": 66631, + "66592": 66632, + "66593": 66633, + "66594": 66634, + "66595": 66635, + "66596": 66636, + "66597": 66637, + "66598": 66638, + "66599": 66639, + "66600": 66560, + "66601": 66561, + "66602": 66562, + "66603": 66563, + "66604": 66564, + "66605": 66565, + "66606": 66566, + "66607": 66567, + "66608": 66568, + "66609": 66569, + "66610": 66570, + "66611": 66571, + "66612": 66572, + "66613": 66573, + "66614": 66574, + "66615": 66575, + "66616": 66576, + "66617": 66577, + "66618": 66578, + "66619": 66579, + "66620": 66580, + "66621": 66581, + "66622": 66582, + "66623": 66583, + "66624": 66584, + "66625": 66585, + "66626": 66586, + "66627": 66587, + "66628": 66588, + "66629": 66589, + "66630": 66590, + "66631": 66591, + "66632": 66592, + "66633": 66593, + "66634": 66594, + "66635": 66595, + "66636": 66596, + "66637": 66597, + "66638": 66598, + "66639": 66599, + "68736": 68800, + "68737": 68801, + "68738": 68802, + "68739": 68803, + "68740": 68804, + "68741": 68805, + "68742": 68806, + "68743": 68807, + "68744": 68808, + "68745": 68809, + "68746": 68810, + "68747": 68811, + "68748": 68812, + "68749": 68813, + "68750": 68814, + "68751": 68815, + "68752": 68816, + "68753": 68817, + "68754": 68818, + "68755": 68819, + "68756": 68820, + "68757": 68821, + "68758": 68822, + "68759": 68823, + "68760": 68824, + "68761": 68825, + "68762": 68826, + "68763": 68827, + "68764": 68828, + "68765": 68829, + "68766": 68830, + "68767": 68831, + "68768": 68832, + "68769": 68833, + "68770": 68834, + "68771": 68835, + "68772": 68836, + "68773": 68837, + "68774": 68838, + "68775": 68839, + "68776": 68840, + "68777": 68841, + "68778": 68842, + "68779": 68843, + "68780": 68844, + "68781": 68845, + "68782": 68846, + "68783": 68847, + "68784": 68848, + "68785": 68849, + "68786": 68850, + "68800": 68736, + "68801": 68737, + "68802": 68738, + "68803": 68739, + "68804": 68740, + "68805": 68741, + "68806": 68742, + "68807": 68743, + "68808": 68744, + "68809": 68745, + "68810": 68746, + "68811": 68747, + "68812": 68748, + "68813": 68749, + "68814": 68750, + "68815": 68751, + "68816": 68752, + "68817": 68753, + "68818": 68754, + "68819": 68755, + "68820": 68756, + "68821": 68757, + "68822": 68758, + "68823": 68759, + "68824": 68760, + "68825": 68761, + "68826": 68762, + "68827": 68763, + "68828": 68764, + "68829": 68765, + "68830": 68766, + "68831": 68767, + "68832": 68768, + "68833": 68769, + "68834": 68770, + "68835": 68771, + "68836": 68772, + "68837": 68773, + "68838": 68774, + "68839": 68775, + "68840": 68776, + "68841": 68777, + "68842": 68778, + "68843": 68779, + "68844": 68780, + "68845": 68781, + "68846": 68782, + "68847": 68783, + "68848": 68784, + "68849": 68785, + "68850": 68786, + "71840": 71872, + "71841": 71873, + "71842": 71874, + "71843": 71875, + "71844": 71876, + "71845": 71877, + "71846": 71878, + "71847": 71879, + "71848": 71880, + "71849": 71881, + "71850": 71882, + "71851": 71883, + "71852": 71884, + "71853": 71885, + "71854": 71886, + "71855": 71887, + "71856": 71888, + "71857": 71889, + "71858": 71890, + "71859": 71891, + "71860": 71892, + "71861": 71893, + 
"71862": 71894, + "71863": 71895, + "71864": 71896, + "71865": 71897, + "71866": 71898, + "71867": 71899, + "71868": 71900, + "71869": 71901, + "71870": 71902, + "71871": 71903, + "71872": 71840, + "71873": 71841, + "71874": 71842, + "71875": 71843, + "71876": 71844, + "71877": 71845, + "71878": 71846, + "71879": 71847, + "71880": 71848, + "71881": 71849, + "71882": 71850, + "71883": 71851, + "71884": 71852, + "71885": 71853, + "71886": 71854, + "71887": 71855, + "71888": 71856, + "71889": 71857, + "71890": 71858, + "71891": 71859, + "71892": 71860, + "71893": 71861, + "71894": 71862, + "71895": 71863, + "71896": 71864, + "71897": 71865, + "71898": 71866, + "71899": 71867, + "71900": 71868, + "71901": 71869, + "71902": 71870, + "71903": 71871 +} diff --git a/node_modules/regexpu-core/node_modules/.bin/regjsparser b/node_modules/regexpu-core/node_modules/.bin/regjsparser new file mode 120000 index 0000000..2e1b492 --- /dev/null +++ b/node_modules/regexpu-core/node_modules/.bin/regjsparser @@ -0,0 +1 @@ +../../../regjsparser/bin/parser \ No newline at end of file diff --git a/node_modules/regexpu-core/package.json b/node_modules/regexpu-core/package.json new file mode 100644 index 0000000..ef69fa3 --- /dev/null +++ b/node_modules/regexpu-core/package.json @@ -0,0 +1,61 @@ +{ + "name": "regexpu-core", + "version": "2.0.0", + "description": "regexpu’s core functionality (i.e. `rewritePattern(pattern, flag)`), capable of translating ES6 Unicode regular expressions to ES5.", + "homepage": "https://mths.be/regexpu", + "main": "rewrite-pattern.js", + "keywords": [ + "codegen", + "desugaring", + "ecmascript", + "es5", + "es6", + "harmony", + "javascript", + "refactoring", + "regex", + "regexp", + "regular expressions", + "rewriting", + "syntax", + "transformation", + "transpile", + "transpiler", + "unicode" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/regexpu-core.git" + }, + "bugs": "https://github.com/mathiasbynens/regexpu-core/issues", + "files": [ + "LICENSE-MIT.txt", + "rewrite-pattern.js", + "data/character-class-escape-sets.js", + "data/iu-mappings.json" + ], + "scripts": { + "build": "node scripts/iu-mappings.js && node scripts/character-class-escape-sets.js", + "test": "mocha tests", + "coverage": "istanbul cover --report html node_modules/.bin/_mocha tests/tests.js -- -u exports -R spec" + }, + "dependencies": { + "regenerate": "^1.2.1", + "regjsgen": "^0.2.0", + "regjsparser": "^0.1.4" + }, + "devDependencies": { + "coveralls": "^2.11.2", + "istanbul": "^0.4.0", + "jsesc": "^0.5.0", + "lodash": "^3.6.0", + "mocha": "^2.2.1", + "regexpu-fixtures": "^2.0.0", + "unicode-8.0.0": "^0.1.5" + } +} diff --git a/node_modules/regexpu-core/rewrite-pattern.js b/node_modules/regexpu-core/rewrite-pattern.js new file mode 100644 index 0000000..47a7854 --- /dev/null +++ b/node_modules/regexpu-core/rewrite-pattern.js @@ -0,0 +1,193 @@ +var generate = require('regjsgen').generate; +var parse = require('regjsparser').parse; +var regenerate = require('regenerate'); +var iuMappings = require('./data/iu-mappings.json'); +var ESCAPE_SETS = require('./data/character-class-escape-sets.js'); + +function getCharacterClassEscapeSet(character) { + if (unicode) { + if (ignoreCase) { + return ESCAPE_SETS.UNICODE_IGNORE_CASE[character]; + } + return ESCAPE_SETS.UNICODE[character]; + } + return ESCAPE_SETS.REGULAR[character]; +} + +var object = {}; +var hasOwnProperty = 
object.hasOwnProperty; +function has(object, property) { + return hasOwnProperty.call(object, property); +} + +// Prepare a Regenerate set containing all code points, used for negative +// character classes (if any). +var UNICODE_SET = regenerate().addRange(0x0, 0x10FFFF); +// Without the `u` flag, the range stops at 0xFFFF. +// https://mths.be/es6#sec-pattern-semantics +var BMP_SET = regenerate().addRange(0x0, 0xFFFF); + +// Prepare a Regenerate set containing all code points that are supposed to be +// matched by `/./u`. https://mths.be/es6#sec-atom +var DOT_SET_UNICODE = UNICODE_SET.clone() // all Unicode code points + .remove( + // minus `LineTerminator`s (https://mths.be/es6#sec-line-terminators): + 0x000A, // Line Feed + 0x000D, // Carriage Return + 0x2028, // Line Separator + 0x2029 // Paragraph Separator + ); +// Prepare a Regenerate set containing all code points that are supposed to be +// matched by `/./` (only BMP code points). +var DOT_SET = DOT_SET_UNICODE.clone() + .intersection(BMP_SET); + +// Add a range of code points + any case-folded code points in that range to a +// set. +regenerate.prototype.iuAddRange = function(min, max) { + var $this = this; + do { + var folded = caseFold(min); + if (folded) { + $this.add(folded); + } + } while (++min <= max); + return $this; +}; + +function assign(target, source) { + for (var key in source) { + // Note: `hasOwnProperty` is not needed here. + target[key] = source[key]; + } +} + +function update(item, pattern) { + // TODO: Test if memoizing `pattern` here is worth the effort. + if (!pattern) { + return; + } + var tree = parse(pattern, ''); + switch (tree.type) { + case 'characterClass': + case 'group': + case 'value': + // No wrapping needed. + break; + default: + // Wrap the pattern in a non-capturing group. + tree = wrap(tree, pattern); + } + assign(item, tree); +} + +function wrap(tree, pattern) { + // Wrap the pattern in a non-capturing group. + return { + 'type': 'group', + 'behavior': 'ignore', + 'body': [tree], + 'raw': '(?:' + pattern + ')' + }; +} + +function caseFold(codePoint) { + return has(iuMappings, codePoint) ? iuMappings[codePoint] : false; +} + +var ignoreCase = false; +var unicode = false; +function processCharacterClass(characterClassItem) { + var set = regenerate(); + var body = characterClassItem.body.forEach(function(item) { + switch (item.type) { + case 'value': + set.add(item.codePoint); + if (ignoreCase && unicode) { + var folded = caseFold(item.codePoint); + if (folded) { + set.add(folded); + } + } + break; + case 'characterClassRange': + var min = item.min.codePoint; + var max = item.max.codePoint; + set.addRange(min, max); + if (ignoreCase && unicode) { + set.iuAddRange(min, max); + } + break; + case 'characterClassEscape': + set.add(getCharacterClassEscapeSet(item.value)); + break; + // The `default` clause is only here as a safeguard; it should never be + // reached. Code coverage tools should ignore it. + /* istanbul ignore next */ + default: + throw Error('Unknown term type: ' + item.type); + } + }); + if (characterClassItem.negative) { + set = (unicode ? UNICODE_SET : BMP_SET).clone().remove(set); + } + update(characterClassItem, set.toString()); + return characterClassItem; +} + +function processTerm(item) { + switch (item.type) { + case 'dot': + update( + item, + (unicode ? 
DOT_SET_UNICODE : DOT_SET).toString() + ); + break; + case 'characterClass': + item = processCharacterClass(item); + break; + case 'characterClassEscape': + update( + item, + getCharacterClassEscapeSet(item.value).toString() + ); + break; + case 'alternative': + case 'disjunction': + case 'group': + case 'quantifier': + item.body = item.body.map(processTerm); + break; + case 'value': + var codePoint = item.codePoint; + var set = regenerate(codePoint); + if (ignoreCase && unicode) { + var folded = caseFold(codePoint); + if (folded) { + set.add(folded); + } + } + update(item, set.toString()); + break; + case 'anchor': + case 'empty': + case 'group': + case 'reference': + // Nothing to do here. + break; + // The `default` clause is only here as a safeguard; it should never be + // reached. Code coverage tools should ignore it. + /* istanbul ignore next */ + default: + throw Error('Unknown term type: ' + item.type); + } + return item; +}; + +module.exports = function(pattern, flags) { + var tree = parse(pattern, flags); + ignoreCase = flags ? flags.indexOf('i') > -1 : false; + unicode = flags ? flags.indexOf('u') > -1 : false; + assign(tree, processTerm(tree)); + return generate(tree); +}; diff --git a/node_modules/regjsgen/LICENSE.txt b/node_modules/regjsgen/LICENSE.txt new file mode 100644 index 0000000..2aab83e --- /dev/null +++ b/node_modules/regjsgen/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright 2014 Benjamin Tan (https://d10.github.io/) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/regjsgen/README.md b/node_modules/regjsgen/README.md new file mode 100644 index 0000000..0a4dcd0 --- /dev/null +++ b/node_modules/regjsgen/README.md @@ -0,0 +1,60 @@ +# RegJSGen + +Generate `RegExp`s from [RegJSParser](https://github.com/jviereck/regjsparser)’s AST. + +## Installation + +```bash +npm install --save regjsgen +``` + +## Usage + +```js +var regjsgen = require('regjsgen'); +// With `regjsparser` +var regjsparser = require('regjsparser'); +var regex = '^a$'; +var ast = regjsparser.parse(regex); +// Modify AST +// ... +// Regenerate `RegExp` +regex = regjsgen.generate(ast); +``` + +## See Also + + * [RegJSParser](https://github.com/jviereck/regjsparser) + * [RegExp.js](https://github.com/jviereck/regexp.js) + +## Testing + +Run the command + +```bash +npm test +``` + +To create a new reference file, execute + +```bash +node test/update-fixture.js +``` + +from the repo top directory. + +## Support + +Tested in Node.js 0.8.26~0.10.30. 
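Expanding on the Usage section above, a small sketch (variable names illustrative) of the parse → modify → regenerate round trip that regjsgen is designed for:

```js
var parse = require('regjsparser').parse;
var generate = require('regjsgen').generate;

// Parse a pattern into an AST, then regenerate equivalent source.
var ast = parse('foo|[a-z]+', '');
console.log(generate(ast)); // → 'foo|[a-z]+'

// A tiny, illustrative AST tweak: reorder the alternatives of a disjunction
// before regenerating. regexpu-core's rewrite-pattern.js (above) follows the
// same approach, rewriting nodes in place and then calling `generate`.
if (ast.type === 'disjunction') {
  ast.body.reverse();
}
console.log(generate(ast)); // → '[a-z]+|foo'
```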
+ +## Author + +| [![twitter/demoneaux](http://gravatar.com/avatar/029b19dba521584d83398ada3ecf6131?s=70)](https://twitter.com/demoneaux "Follow @demoneaux on Twitter") | +|---| +| [Benjamin Tan](http://d10.github.io/) | + +## Contributors + +| [![twitter/mathias](http://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](http://mathiasbynens.be/) | diff --git a/node_modules/regjsgen/package.json b/node_modules/regjsgen/package.json new file mode 100644 index 0000000..92c6853 --- /dev/null +++ b/node_modules/regjsgen/package.json @@ -0,0 +1,33 @@ +{ + "name": "regjsgen", + "version": "0.2.0", + "description": "Generate `RegExp`s from RegJSParser’s AST", + "homepage": "https://github.com/d10/regjsgen", + "license": "MIT", + "main": "regjsgen.js", + "keywords": [ + "ast", + "generate", + "regex", + "regexp", + "regular expressions" + ], + "author": "Benjamin Tan (https://d10.github.io/)", + "contributors": [ + "Benjamin Tan (https://d10.github.io/)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "d10/regjsgen", + "scripts": { + "test": "node test/test.js" + }, + "files": [ + "LICENSE.txt", + "regjsgen.js", + "README.md" + ], + "devDependencies": { + "got": "~1.2.0", + "jsesc": "~0.5.0" + } +} diff --git a/node_modules/regjsgen/regjsgen.js b/node_modules/regjsgen/regjsgen.js new file mode 100644 index 0000000..58ff151 --- /dev/null +++ b/node_modules/regjsgen/regjsgen.js @@ -0,0 +1,408 @@ +/*! + * RegJSGen + * Copyright 2014 Benjamin Tan + * Available under MIT license + */ +;(function() { + 'use strict'; + + /** Used to determine if values are of the language type `Object` */ + var objectTypes = { + 'function': true, + 'object': true + }; + + /** Used as a reference to the global object */ + var root = (objectTypes[typeof window] && window) || this; + + /** Backup possible global object */ + var oldRoot = root; + + /** Detect free variable `exports` */ + var freeExports = objectTypes[typeof exports] && exports; + + /** Detect free variable `module` */ + var freeModule = objectTypes[typeof module] && module && !module.nodeType && module; + + /** Detect free variable `global` from Node.js or Browserified code and use it as `root` */ + var freeGlobal = freeExports && freeModule && typeof global == 'object' && global; + if (freeGlobal && (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal || freeGlobal.self === freeGlobal)) { + root = freeGlobal; + } + + /*--------------------------------------------------------------------------*/ + + /*! 
Based on https://mths.be/fromcodepoint v0.2.0 by @mathias */ + + var stringFromCharCode = String.fromCharCode; + var floor = Math.floor; + function fromCodePoint() { + var MAX_SIZE = 0x4000; + var codeUnits = []; + var highSurrogate; + var lowSurrogate; + var index = -1; + var length = arguments.length; + if (!length) { + return ''; + } + var result = ''; + while (++index < length) { + var codePoint = Number(arguments[index]); + if ( + !isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity` + codePoint < 0 || // not a valid Unicode code point + codePoint > 0x10FFFF || // not a valid Unicode code point + floor(codePoint) != codePoint // not an integer + ) { + throw RangeError('Invalid code point: ' + codePoint); + } + if (codePoint <= 0xFFFF) { + // BMP code point + codeUnits.push(codePoint); + } else { + // Astral code point; split in surrogate halves + // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint -= 0x10000; + highSurrogate = (codePoint >> 10) + 0xD800; + lowSurrogate = (codePoint % 0x400) + 0xDC00; + codeUnits.push(highSurrogate, lowSurrogate); + } + if (index + 1 == length || codeUnits.length > MAX_SIZE) { + result += stringFromCharCode.apply(null, codeUnits); + codeUnits.length = 0; + } + } + return result; + } + + function assertType(type, expected) { + if (expected.indexOf('|') == -1) { + if (type == expected) { + return; + } + + throw Error('Invalid node type: ' + type); + } + + expected = assertType.hasOwnProperty(expected) + ? assertType[expected] + : (assertType[expected] = RegExp('^(?:' + expected + ')$')); + + if (expected.test(type)) { + return; + } + + throw Error('Invalid node type: ' + type); + } + + /*--------------------------------------------------------------------------*/ + + function generate(node) { + var type = node.type; + + if (generate.hasOwnProperty(type) && typeof generate[type] == 'function') { + return generate[type](node); + } + + throw Error('Invalid node type: ' + type); + } + + /*--------------------------------------------------------------------------*/ + + function generateAlternative(node) { + assertType(node.type, 'alternative'); + + var terms = node.body, + length = terms ? terms.length : 0; + + if (length == 1) { + return generateTerm(terms[0]); + } else { + var i = -1, + result = ''; + + while (++i < length) { + result += generateTerm(terms[i]); + } + + return result; + } + } + + function generateAnchor(node) { + assertType(node.type, 'anchor'); + + switch (node.kind) { + case 'start': + return '^'; + case 'end': + return '$'; + case 'boundary': + return '\\b'; + case 'not-boundary': + return '\\B'; + default: + throw Error('Invalid assertion'); + } + } + + function generateAtom(node) { + assertType(node.type, 'anchor|characterClass|characterClassEscape|dot|group|reference|value'); + + return generate(node); + } + + function generateCharacterClass(node) { + assertType(node.type, 'characterClass'); + + var classRanges = node.body, + length = classRanges ? 
classRanges.length : 0; + + var i = -1, + result = '['; + + if (node.negative) { + result += '^'; + } + + while (++i < length) { + result += generateClassAtom(classRanges[i]); + } + + result += ']'; + + return result; + } + + function generateCharacterClassEscape(node) { + assertType(node.type, 'characterClassEscape'); + + return '\\' + node.value; + } + + function generateCharacterClassRange(node) { + assertType(node.type, 'characterClassRange'); + + var min = node.min, + max = node.max; + + if (min.type == 'characterClassRange' || max.type == 'characterClassRange') { + throw Error('Invalid character class range'); + } + + return generateClassAtom(min) + '-' + generateClassAtom(max); + } + + function generateClassAtom(node) { + assertType(node.type, 'anchor|characterClassEscape|characterClassRange|dot|value'); + + return generate(node); + } + + function generateDisjunction(node) { + assertType(node.type, 'disjunction'); + + var body = node.body, + length = body ? body.length : 0; + + if (length == 0) { + throw Error('No body'); + } else if (length == 1) { + return generate(body[0]); + } else { + var i = -1, + result = ''; + + while (++i < length) { + if (i != 0) { + result += '|'; + } + result += generate(body[i]); + } + + return result; + } + } + + function generateDot(node) { + assertType(node.type, 'dot'); + + return '.'; + } + + function generateGroup(node) { + assertType(node.type, 'group'); + + var result = '('; + + switch (node.behavior) { + case 'normal': + break; + case 'ignore': + result += '?:'; + break; + case 'lookahead': + result += '?='; + break; + case 'negativeLookahead': + result += '?!'; + break; + default: + throw Error('Invalid behaviour: ' + node.behaviour); + } + + var body = node.body, + length = body ? body.length : 0; + + if (length == 1) { + result += generate(body[0]); + } else { + var i = -1; + + while (++i < length) { + result += generate(body[i]); + } + } + + result += ')'; + + return result; + } + + function generateQuantifier(node) { + assertType(node.type, 'quantifier'); + + var quantifier = '', + min = node.min, + max = node.max; + + switch (max) { + case undefined: + case null: + switch (min) { + case 0: + quantifier = '*' + break; + case 1: + quantifier = '+'; + break; + default: + quantifier = '{' + min + ',}'; + break; + } + break; + default: + if (min == max) { + quantifier = '{' + min + '}'; + } + else if (min == 0 && max == 1) { + quantifier = '?'; + } else { + quantifier = '{' + min + ',' + max + '}'; + } + break; + } + + if (!node.greedy) { + quantifier += '?'; + } + + return generateAtom(node.body[0]) + quantifier; + } + + function generateReference(node) { + assertType(node.type, 'reference'); + + return '\\' + node.matchIndex; + } + + function generateTerm(node) { + assertType(node.type, 'anchor|characterClass|characterClassEscape|empty|group|quantifier|reference|value'); + + return generate(node); + } + + function generateValue(node) { + assertType(node.type, 'value'); + + var kind = node.kind, + codePoint = node.codePoint; + + switch (kind) { + case 'controlLetter': + return '\\c' + fromCodePoint(codePoint + 64); + case 'hexadecimalEscape': + return '\\x' + ('00' + codePoint.toString(16).toUpperCase()).slice(-2); + case 'identifier': + return '\\' + fromCodePoint(codePoint); + case 'null': + return '\\' + codePoint; + case 'octal': + return '\\' + codePoint.toString(8); + case 'singleEscape': + switch (codePoint) { + case 0x0008: + return '\\b'; + case 0x009: + return '\\t'; + case 0x00A: + return '\\n'; + case 0x00B: + return '\\v'; + case 
0x00C: + return '\\f'; + case 0x00D: + return '\\r'; + default: + throw Error('Invalid codepoint: ' + codePoint); + } + case 'symbol': + return fromCodePoint(codePoint); + case 'unicodeEscape': + return '\\u' + ('0000' + codePoint.toString(16).toUpperCase()).slice(-4); + case 'unicodeCodePointEscape': + return '\\u{' + codePoint.toString(16).toUpperCase() + '}'; + default: + throw Error('Unsupported node kind: ' + kind); + } + } + + /*--------------------------------------------------------------------------*/ + + generate.alternative = generateAlternative; + generate.anchor = generateAnchor; + generate.characterClass = generateCharacterClass; + generate.characterClassEscape = generateCharacterClassEscape; + generate.characterClassRange = generateCharacterClassRange; + generate.disjunction = generateDisjunction; + generate.dot = generateDot; + generate.group = generateGroup; + generate.quantifier = generateQuantifier; + generate.reference = generateReference; + generate.value = generateValue; + + /*--------------------------------------------------------------------------*/ + + // export regjsgen + // some AMD build optimizers, like r.js, check for condition patterns like the following: + if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { + // define as an anonymous module so, through path mapping, it can be aliased + define(function() { + return { + 'generate': generate + }; + }); + } + // check for `exports` after `define` in case a build optimizer adds an `exports` object + else if (freeExports && freeModule) { + // in Narwhal, Node.js, Rhino -require, or RingoJS + freeExports.generate = generate; + } + // in a browser or Rhino + else { + root.regjsgen = { + 'generate': generate + }; + } +}.call(this)); diff --git a/node_modules/regjsparser/CHANGELOG b/node_modules/regjsparser/CHANGELOG new file mode 100644 index 0000000..d2fe483 --- /dev/null +++ b/node_modules/regjsparser/CHANGELOG @@ -0,0 +1,12 @@ +2014-08-31: Version 0.1.2 + * Change the field ref to matchIndex on the type=refernce node (issue #67) + +2014-08-30: Version 0.1.1 + * Only handled unicode code point escapes if 'u' flag is set (issue #56) + * Removed `matchIdx` from the AST + * References like /\1/ were broken (issue #57) + * Renamed type `ref` to `reference` in the AST + * Update regex to match identifier and include script to generate regex + +2014-06-29: Version 0.1.0 + * first tagged release diff --git a/node_modules/regjsparser/LICENSE.BSD b/node_modules/regjsparser/LICENSE.BSD new file mode 100644 index 0000000..3e580c3 --- /dev/null +++ b/node_modules/regjsparser/LICENSE.BSD @@ -0,0 +1,19 @@ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/regjsparser/README.md b/node_modules/regjsparser/README.md new file mode 100644 index 0000000..83f67e9 --- /dev/null +++ b/node_modules/regjsparser/README.md @@ -0,0 +1,34 @@ +# RegJSParser + +Parsing the JavaScript's RegExp in JavaScript. + +## Installation + +```bash +npm install regjsparser +``` + +## Usage + +```js +var parse = require('regjsparser').parse; + +var parseTree = parse('^a'); // /^a/ +console.log(parseTree); +``` + +## Testing + +To run the tests, run the following command: + +```bash +npm test +``` + +To create a new reference file, execute… + +```bash +node test/update-fixtures.js +``` + +…from the repo top directory. diff --git a/node_modules/regjsparser/bin/parser b/node_modules/regjsparser/bin/parser new file mode 100755 index 0000000..09a6036 --- /dev/null +++ b/node_modules/regjsparser/bin/parser @@ -0,0 +1,50 @@ +#!/usr/bin/env node +(function() { + + var fs = require('fs'); + var parse = require('../parser').parse; + var jsesc = require('jsesc'); + var regexes = process.argv.splice(2); + var first = regexes[0]; + var data; + var log = console.log; + var main = function() { + if (/^(?:-h|--help|undefined)$/.test(first)) { + log([ + '\nUsage:\n', + '\tregjsparser [regex ...]', + '\tregjsparser [-h | --help]', + '\nExamples:\n', + '\tregjsparser \'^foo.bar$\'', + '\tregjsparser \'[a-zA-Z0-9]\'' + ].join('\n')); + return process.exit(1); + } + + regexes.forEach(function(snippet) { + var result; + try { + result = parse(snippet); + log(jsesc(result, { + 'json': true, + 'compact': false, + 'indent': '\t' + })); + } catch(error) { + log(error.message + '\n'); + log('Error: failed to parse. Make sure the regular expression is valid.'); + log('If you think this is a bug in regjsparser, please report it:'); + log('\thttps://github.com/jviereck/regjsparser/issues/new'); + log('\nStack trace:\n'); + log(error.stack); + return process.exit(1); + } + }); + // Return with exit status 0 outside of the `forEach` loop, in case + // multiple regular expressions were passed in. 
+ return process.exit(0); + }; + + main(); + +}()); diff --git a/node_modules/regjsparser/node_modules/.bin/jsesc b/node_modules/regjsparser/node_modules/.bin/jsesc new file mode 120000 index 0000000..7237604 --- /dev/null +++ b/node_modules/regjsparser/node_modules/.bin/jsesc @@ -0,0 +1 @@ +../jsesc/bin/jsesc \ No newline at end of file diff --git a/node_modules/regjsparser/node_modules/jsesc/LICENSE-MIT.txt b/node_modules/regjsparser/node_modules/jsesc/LICENSE-MIT.txt new file mode 100644 index 0000000..97067e5 --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/regjsparser/node_modules/jsesc/README.md b/node_modules/regjsparser/node_modules/jsesc/README.md new file mode 100644 index 0000000..7a083c7 --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/README.md @@ -0,0 +1,375 @@ +# jsesc [![Build status](https://travis-ci.org/mathiasbynens/jsesc.svg?branch=master)](https://travis-ci.org/mathiasbynens/jsesc) [![Code coverage status](http://img.shields.io/coveralls/mathiasbynens/jsesc/master.svg)](https://coveralls.io/r/mathiasbynens/jsesc) [![Dependency status](https://gemnasium.com/mathiasbynens/jsesc.svg)](https://gemnasium.com/mathiasbynens/jsesc) + +This is a JavaScript library for [escaping JavaScript strings](http://mathiasbynens.be/notes/javascript-escapes) while generating the shortest possible valid ASCII-only output. [Here’s an online demo.](http://mothereff.in/js-escapes) + +This can be used to avoid [mojibake](http://en.wikipedia.org/wiki/Mojibake) and other encoding issues, or even to [avoid errors](https://twitter.com/annevk/status/380000829643571200) when passing JSON-formatted data (which may contain U+2028 LINE SEPARATOR, U+2029 PARAGRAPH SEPARATOR, or [lone surrogates](http://esdiscuss.org/topic/code-points-vs-unicode-scalar-values#content-14)) to a JavaScript parser or an UTF-8 encoder, respectively. + +Feel free to fork if you see possible improvements! 
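To make the motivation above concrete, a small sketch: U+2028 and U+2029 are valid inside JSON strings but are line terminators in pre-ES2019 JavaScript, so inlining raw JSON into a script can break, while jsesc's output stays safe (the data object below is illustrative):

```js
var jsesc = require('jsesc');

// U+2028 LINE SEPARATOR is legal inside a JSON string, but it terminates a
// line in older JavaScript engines, so naively inlining the raw JSON into a
// <script> block or eval() can throw a SyntaxError there.
var data = { 'note': 'line one\u2028line two' };
var raw = JSON.stringify(data); // contains a literal U+2028
// eval('var x = ' + raw); // would throw in pre-ES2019 engines

// jsesc escapes the separator (and any other non-ASCII symbols), producing
// output that is both valid JSON and safe to embed as JavaScript source:
console.log(jsesc(data, { 'json': true }));
// → '{"note":"line one\\u2028line two"}'
```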
+ +## Installation + +Via [Bower](http://bower.io/): + +```bash +bower install jsesc +``` + +Via [Component](https://github.com/component/component): + +```bash +component install mathiasbynens/jsesc +``` + +Via [npm](http://npmjs.org/): + +```bash +npm install jsesc +``` + +In a browser: + +```html + +``` + +In [Node.js](http://nodejs.org/) and [RingoJS](http://ringojs.org/): + +```js +var jsesc = require('jsesc'); +``` + +In [Narwhal](http://narwhaljs.org/): + +```js +var jsesc = require('jsesc').jsesc; +``` + +In [Rhino](http://www.mozilla.org/rhino/): + +```js +load('jsesc.js'); +``` + +Using an AMD loader like [RequireJS](http://requirejs.org/): + +```js +require( + { + 'paths': { + 'jsesc': 'path/to/jsesc' + } + }, + ['jsesc'], + function(jsesc) { + console.log(jsesc); + } +); +``` + +## API + +### `jsesc(value, options)` + +This function takes a value and returns an escaped version of the value where any characters that are not printable ASCII symbols are escaped using the shortest possible (but valid) [escape sequences for use in JavaScript strings](http://mathiasbynens.be/notes/javascript-escapes). The first supported value type is strings: + +```js +jsesc('Ich ♥ Bücher'); +// → 'Ich \\u2665 B\\xFCcher' + +jsesc('foo 𝌆 bar'); +// → 'foo \\uD834\\uDF06 bar' +``` + +Instead of a string, the `value` can also be an array, or an object. In such cases, `jsesc` will return a stringified version of the value where any characters that are not printable ASCII symbols are escaped in the same way. + +```js +// Escaping an array +jsesc([ + 'Ich ♥ Bücher', 'foo 𝌆 bar' +]); +// → '[\'Ich \\u2665 B\\xFCcher\',\'foo \\uD834\\uDF06 bar\']' + +// Escaping an object +jsesc({ + 'Ich ♥ Bücher': 'foo 𝌆 bar' +}); +// → '{\'Ich \\u2665 B\\xFCcher\':\'foo \\uD834\\uDF06 bar\'}' +``` + +The optional `options` argument accepts an object with the following options: + +#### `quotes` + +The default value for the `quotes` option is `'single'`. This means that any occurences of `'` in the input string will be escaped as `\'`, so that the output can be used in a string literal wrapped in single quotes. + +```js +jsesc('Lorem ipsum "dolor" sit \'amet\' etc.'); +// → 'Lorem ipsum "dolor" sit \\\'amet\\\' etc.' + +jsesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'single' +}); +// → 'Lorem ipsum "dolor" sit \\\'amet\\\' etc.' +// → "Lorem ipsum \"dolor\" sit \\'amet\\' etc." +``` + +If you want to use the output as part of a string literal wrapped in double quotes, set the `quotes` option to `'double'`. + +```js +jsesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'double' +}); +// → 'Lorem ipsum \\"dolor\\" sit \'amet\' etc.' +// → "Lorem ipsum \\\"dolor\\\" sit 'amet' etc." +``` + +This setting also affects the output for arrays and objects: + +```js +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'quotes': 'double' +}); +// → '{"Ich \\u2665 B\\xFCcher":"foo \\uD834\\uDF06 bar"}' + +jsesc([ 'Ich ♥ Bücher', 'foo 𝌆 bar' ], { + 'quotes': 'double' +}); +// → '["Ich \\u2665 B\\xFCcher","foo \\uD834\\uDF06 bar"]' +``` + +#### `wrap` + +The `wrap` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). When enabled, the output will be a valid JavaScript string literal wrapped in quotes. The type of quotes can be specified through the `quotes` setting. 
+ +```js +jsesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'single', + 'wrap': true +}); +// → '\'Lorem ipsum "dolor" sit \\\'amet\\\' etc.\'' +// → "\'Lorem ipsum \"dolor\" sit \\\'amet\\\' etc.\'" + +jsesc('Lorem ipsum "dolor" sit \'amet\' etc.', { + 'quotes': 'double', + 'wrap': true +}); +// → '"Lorem ipsum \\"dolor\\" sit \'amet\' etc."' +// → "\"Lorem ipsum \\\"dolor\\\" sit \'amet\' etc.\"" +``` + +#### `es6` + +The `es6` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). When enabled, any astral Unicode symbols in the input will be escaped using [ECMAScript 6 Unicode code point escape sequences](http://mathiasbynens.be/notes/javascript-escapes#unicode-code-point) instead of using separate escape sequences for each surrogate half. If backwards compatibility with ES5 environments is a concern, don’t enable this setting. If the `json` setting is enabled, the value for the `es6` setting is ignored (as if it was `false`). + +```js +// By default, the `es6` option is disabled: +jsesc('foo 𝌆 bar 💩 baz'); +// → 'foo \\uD834\\uDF06 bar \\uD83D\\uDCA9 baz' + +// To explicitly disable it: +jsesc('foo 𝌆 bar 💩 baz', { + 'es6': false +}); +// → 'foo \\uD834\\uDF06 bar \\uD83D\\uDCA9 baz' + +// To enable it: +jsesc('foo 𝌆 bar 💩 baz', { + 'es6': true +}); +// → 'foo \\u{1D306} bar \\u{1F4A9} baz' +``` + +#### `escapeEverything` + +The `escapeEverything` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). When enabled, all the symbols in the output will be escaped, even printable ASCII symbols. + +```js +jsesc('lolwat"foo\'bar', { + 'escapeEverything': true +}); +// → '\\x6C\\x6F\\x6C\\x77\\x61\\x74\\"\\x66\\x6F\\x6F\\\'\\x62\\x61\\x72' +// → "\\x6C\\x6F\\x6C\\x77\\x61\\x74\\\"\\x66\\x6F\\x6F\\'\\x62\\x61\\x72" +``` + +This setting also affects the output for arrays and objects: + +```js +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'escapeEverything': true +}); +// → '{\'\x49\x63\x68\x20\u2665\x20\x42\xFC\x63\x68\x65\x72\':\'\x66\x6F\x6F\x20\uD834\uDF06\x20\x62\x61\x72\'}' +// → "{'\x49\x63\x68\x20\u2665\x20\x42\xFC\x63\x68\x65\x72':'\x66\x6F\x6F\x20\uD834\uDF06\x20\x62\x61\x72'}" + +jsesc([ 'Ich ♥ Bücher': 'foo 𝌆 bar' ], { + 'escapeEverything': true +}); +// → '[\'\x49\x63\x68\x20\u2665\x20\x42\xFC\x63\x68\x65\x72\',\'\x66\x6F\x6F\x20\uD834\uDF06\x20\x62\x61\x72\']' +``` + +#### `compact` + +The `compact` option takes a boolean value (`true` or `false`), and defaults to `true` (enabled). When enabled, the output for arrays and objects will be as compact as possible; it won’t be formatted nicely. + +```js +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'compact': true // this is the default +}); +// → '{\'Ich \u2665 B\xFCcher\':\'foo \uD834\uDF06 bar\'}' + +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'compact': false +}); +// → '{\n\t\'Ich \u2665 B\xFCcher\': \'foo \uD834\uDF06 bar\'\n}' + +jsesc([ 'Ich ♥ Bücher', 'foo 𝌆 bar' ], { + 'compact': false +}); +// → '[\n\t\'Ich \u2665 B\xFCcher\',\n\t\'foo \uD834\uDF06 bar\'\n]' +``` + +This setting has no effect on the output for strings. + +#### `indent` + +The `indent` option takes a string value, and defaults to `'\t'`. When the `compact` setting is enabled (`true`), the value of the `indent` option is used to format the output for arrays and objects. 
+ +```js +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'compact': false, + 'indent': '\t' // this is the default +}); +// → '{\n\t\'Ich \u2665 B\xFCcher\': \'foo \uD834\uDF06 bar\'\n}' + +jsesc({ 'Ich ♥ Bücher': 'foo 𝌆 bar' }, { + 'compact': false, + 'indent': ' ' +}); +// → '{\n \'Ich \u2665 B\xFCcher\': \'foo \uD834\uDF06 bar\'\n}' + +jsesc([ 'Ich ♥ Bücher', 'foo 𝌆 bar' ], { + 'compact': false, + 'indent': ' ' +}); +// → '[\n \'Ich \u2665 B\xFCcher\',\n\ t\'foo \uD834\uDF06 bar\'\n]' +``` + +This setting has no effect on the output for strings. + +#### `json` + +The `json` option takes a boolean value (`true` or `false`), and defaults to `false` (disabled). When enabled, the output is valid JSON. [Hexadecimal character escape sequences](http://mathiasbynens.be/notes/javascript-escapes#hexadecimal) and [the `\v` or `\0` escape sequences](http://mathiasbynens.be/notes/javascript-escapes#single) will not be used. Setting `json: true` implies `quotes: 'double', wrap: true, es6: false`, although these values can still be overridden if needed — but in such cases, the output won’t be valid JSON anymore. + +```js +jsesc('foo\x00bar\xFF\uFFFDbaz', { + 'json': true +}); +// → '"foo\\u0000bar\\u00FF\\uFFFDbaz"' + +jsesc({ 'foo\x00bar\xFF\uFFFDbaz': 'foo\x00bar\xFF\uFFFDbaz' }, { + 'json': true +}); +// → '{"foo\\u0000bar\\u00FF\\uFFFDbaz":"foo\\u0000bar\\u00FF\\uFFFDbaz"}' + +jsesc([ 'foo\x00bar\xFF\uFFFDbaz', 'foo\x00bar\xFF\uFFFDbaz' ], { + 'json': true +}); +// → '["foo\\u0000bar\\u00FF\\uFFFDbaz","foo\\u0000bar\\u00FF\\uFFFDbaz"]' + +// Values that are acceptable in JSON but aren’t strings, arrays, or object +// literals can’t be escaped, so they’ll just be preserved: +jsesc([ 'foo\x00bar', [1, '©', { 'foo': true, 'qux': null }], 42 ], { + 'json': true +}); +// → '["foo\\u0000bar",[1,"\\u00A9",{"foo":true,"qux":null}],42]' +// Values that aren’t allowed in JSON are run through `JSON.stringify()`: +jsesc([ undefined, -Infinity ], { + 'json': true +}); +// → '[null,null]' +``` + +**Note:** Using this option on objects or arrays that contain non-string values relies on `JSON.stringify()`. For legacy environments like IE ≤ 7, use [a `JSON` polyfill](http://bestiejs.github.io/json3/). + +### `jsesc.version` + +A string representing the semantic version number. + +### Using the `jsesc` binary + +To use the `jsesc` binary in your shell, simply install jsesc globally using npm: + +```bash +npm install -g jsesc +``` + +After that you will be able to escape strings from the command line: + +```bash +$ jsesc 'föo ♥ bår 𝌆 baz' +f\xF6o \u2665 b\xE5r \uD834\uDF06 baz +``` + +To escape arrays or objects containing string values, use the `-o`/`--object` option: + +```bash +$ jsesc --object '{ "föo": "♥", "bår": "𝌆 baz" }' +{'f\xF6o':'\u2665','b\xE5r':'\uD834\uDF06 baz'} +``` + +To prettify the output in such cases, use the `-p`/`--pretty` option: + +```bash +$ jsesc --pretty '{ "föo": "♥", "bår": "𝌆 baz" }' +{ + 'f\xF6o': '\u2665', + 'b\xE5r': '\uD834\uDF06 baz' +} +``` + +For valid JSON output, use the `-j`/`--json` option: + +```bash +$ jsesc --json --pretty '{ "föo": "♥", "bår": "𝌆 baz" }' +{ + "f\u00F6o": "\u2665", + "b\u00E5r": "\uD834\uDF06 baz" +} +``` + +Read a local JSON file, escape any non-ASCII symbols, and save the result to a new file: + +```bash +$ jsesc --json --object < data-raw.json > data-escaped.json +``` + +Or do the same with an online JSON file: + +```bash +$ curl -sL "http://git.io/aorKgQ" | jsesc --json --object > data-escaped.json +``` + +See `jsesc --help` for the full list of options. 
+ +## Support + +This library has been tested in at least Chrome 27-29, Firefox 3-22, Safari 4-6, Opera 10-12, IE 6-10, Node.js v0.10.0, Narwhal 0.3.2, RingoJS 0.8-0.9, PhantomJS 1.9.0, and Rhino 1.7RC4. + +**Note:** Using the `json` option on objects or arrays that contain non-string values relies on `JSON.parse()`. For legacy environments like IE ≤ 7, use [a `JSON` polyfill](http://bestiejs.github.io/json3/). + +## Unit tests & code coverage + +After cloning this repository, run `npm install` to install the dependencies needed for development and testing. You may want to install Istanbul _globally_ using `npm install istanbul -g`. + +Once that’s done, you can run the unit tests in Node using `npm test` or `node tests/tests.js`. To run the tests in Rhino, Ringo, Narwhal, and web browsers as well, use `grunt test`. + +To generate the code coverage report, use `grunt cover`. + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](http://mathiasbynens.be/) | + +## License + +This library is available under the [MIT](http://mths.be/mit) license. diff --git a/node_modules/regjsparser/node_modules/jsesc/bin/jsesc b/node_modules/regjsparser/node_modules/jsesc/bin/jsesc new file mode 100755 index 0000000..5900dd4 --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/bin/jsesc @@ -0,0 +1,138 @@ +#!/usr/bin/env node +(function() { + + var fs = require('fs'); + var stringEscape = require('../jsesc.js'); + var strings = process.argv.splice(2); + var stdin = process.stdin; + var data; + var timeout; + var isObject = false; + var options = {}; + var log = console.log; + + var main = function() { + var option = strings[0]; + + if (/^(?:-h|--help|undefined)$/.test(option)) { + log( + 'jsesc v%s - http://mths.be/jsesc', + stringEscape.version + ); + log([ + '\nUsage:\n', + '\tjsesc [string]', + '\tjsesc [-s | --single-quotes] [string]', + '\tjsesc [-d | --double-quotes] [string]', + '\tjsesc [-w | --wrap] [string]', + '\tjsesc [-e | --escape-everything] [string]', + '\tjsesc [-6 | --es6] [string]', + '\tjsesc [-j | --json] [string]', + '\tjsesc [-o | --object] [stringified_object]', // `JSON.parse()` the argument + '\tjsesc [-p | --pretty] [string]', // `compact: false` + '\tjsesc [-v | --version]', + '\tjsesc [-h | --help]', + '\nExamples:\n', + '\tjsesc \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --json \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --json --escape-everything \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tjsesc --double-quotes --wrap \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\techo \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\' | jsesc' + ].join('\n')); + return process.exit(1); + } + + if (/^(?:-v|--version)$/.test(option)) { + log('v%s', stringEscape.version); + return process.exit(1); + } + + strings.forEach(function(string) { + // Process options + if (/^(?:-s|--single-quotes)$/.test(string)) { + options.quotes = 'single'; + return; + } + if (/^(?:-d|--double-quotes)$/.test(string)) { + options.quotes = 'double'; + return; + } + if (/^(?:-w|--wrap)$/.test(string)) { + options.wrap = true; + return; + } + if (/^(?:-6|--es6)$/.test(string)) { + options.es6 = true; + return; + } + if (/^(?:-e|--escape-everything)$/.test(string)) { + options.escapeEverything = true; + return; + } + if (/^(?:-j|--json)$/.test(string)) { + options.json = true; + return; + } + if (/^(?:-o|--object)$/.test(string)) { + isObject = true; + 
return; + } + if (/^(?:-p|--pretty)$/.test(string)) { + isObject = true; + options.compact = false; + return; + } + + // Process string(s) + var result; + try { + if (isObject) { + string = JSON.parse(string); + } + result = stringEscape(string, options); + log(result); + } catch(error) { + log(error.message + '\n'); + log('Error: failed to escape.'); + log('If you think this is a bug in jsesc, please report it:'); + log('https://github.com/mathiasbynens/jsesc/issues/new'); + log( + '\nStack trace using jsesc@%s:\n', + stringEscape.version + ); + log(error.stack); + return process.exit(1); + } + }); + // Return with exit status 0 outside of the `forEach` loop, in case + // multiple strings were passed in. + return process.exit(0); + + }; + + if (stdin.isTTY) { + // handle shell arguments + main(); + } else { + // Either the script is called from within a non-TTY context, + // or `stdin` content is being piped in. + if (!process.stdout.isTTY) { // called from a non-TTY context + timeout = setTimeout(function() { + // if no piped data arrived after a while, handle shell arguments + main(); + }, 250); + } + + data = ''; + stdin.on('data', function(chunk) { + clearTimeout(timeout); + data += chunk; + }); + stdin.on('end', function() { + strings.push(data.trim()); + main(); + }); + stdin.resume(); + } + +}()); diff --git a/node_modules/regjsparser/node_modules/jsesc/jsesc.js b/node_modules/regjsparser/node_modules/jsesc/jsesc.js new file mode 100644 index 0000000..03bbd68 --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/jsesc.js @@ -0,0 +1,265 @@ +/*! http://mths.be/jsesc v0.5.0 by @mathias */ +;(function(root) { + + // Detect free variables `exports` + var freeExports = typeof exports == 'object' && exports; + + // Detect free variable `module` + var freeModule = typeof module == 'object' && module && + module.exports == freeExports && module; + + // Detect free variable `global`, from Node.js or Browserified code, + // and use it as `root` + var freeGlobal = typeof global == 'object' && global; + if (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal) { + root = freeGlobal; + } + + /*--------------------------------------------------------------------------*/ + + var object = {}; + var hasOwnProperty = object.hasOwnProperty; + var forOwn = function(object, callback) { + var key; + for (key in object) { + if (hasOwnProperty.call(object, key)) { + callback(key, object[key]); + } + } + }; + + var extend = function(destination, source) { + if (!source) { + return destination; + } + forOwn(source, function(key, value) { + destination[key] = value; + }); + return destination; + }; + + var forEach = function(array, callback) { + var length = array.length; + var index = -1; + while (++index < length) { + callback(array[index]); + } + }; + + var toString = object.toString; + var isArray = function(value) { + return toString.call(value) == '[object Array]'; + }; + var isObject = function(value) { + // This is a very simple check, but it’s good enough for what we need. + return toString.call(value) == '[object Object]'; + }; + var isString = function(value) { + return typeof value == 'string' || + toString.call(value) == '[object String]'; + }; + var isFunction = function(value) { + // In a perfect world, the `typeof` check would be sufficient. However, + // in Chrome 1–12, `typeof /x/ == 'object'`, and in IE 6–8 + // `typeof alert == 'object'` and similar for other host objects. 
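+ // Also accept values whose `Object.prototype.toString` tag is `[object Function]`.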
+ return typeof value == 'function' || + toString.call(value) == '[object Function]'; + }; + + /*--------------------------------------------------------------------------*/ + + // http://mathiasbynens.be/notes/javascript-escapes#single + var singleEscapes = { + '"': '\\"', + '\'': '\\\'', + '\\': '\\\\', + '\b': '\\b', + '\f': '\\f', + '\n': '\\n', + '\r': '\\r', + '\t': '\\t' + // `\v` is omitted intentionally, because in IE < 9, '\v' == 'v'. + // '\v': '\\x0B' + }; + var regexSingleEscape = /["'\\\b\f\n\r\t]/; + + var regexDigit = /[0-9]/; + var regexWhitelist = /[ !#-&\(-\[\]-~]/; + + var jsesc = function(argument, options) { + // Handle options + var defaults = { + 'escapeEverything': false, + 'quotes': 'single', + 'wrap': false, + 'es6': false, + 'json': false, + 'compact': true, + 'indent': '\t', + '__indent__': '' + }; + var json = options && options.json; + if (json) { + defaults.quotes = 'double'; + defaults.wrap = true; + } + options = extend(defaults, options); + if (options.quotes != 'single' && options.quotes != 'double') { + options.quotes = 'single'; + } + var quote = options.quotes == 'double' ? '"' : '\''; + var compact = options.compact; + var indent = options.indent; + var oldIndent; + var newLine = compact ? '' : '\n'; + var result; + var isEmpty = true; + + if (json && argument && isFunction(argument.toJSON)) { + argument = argument.toJSON(); + } + + if (!isString(argument)) { + if (isArray(argument)) { + result = []; + options.wrap = true; + oldIndent = options.__indent__; + indent += oldIndent; + options.__indent__ = indent; + forEach(argument, function(value) { + isEmpty = false; + result.push( + (compact ? '' : indent) + + jsesc(value, options) + ); + }); + if (isEmpty) { + return '[]'; + } + return '[' + newLine + result.join(',' + newLine) + newLine + + (compact ? '' : oldIndent) + ']'; + } else if (!isObject(argument)) { + if (json) { + // For some values (e.g. `undefined`, `function` objects), + // `JSON.stringify(value)` returns `undefined` (which isn’t valid + // JSON) instead of `'null'`. + return JSON.stringify(argument) || 'null'; + } + return String(argument); + } else { // it’s an object + result = []; + options.wrap = true; + oldIndent = options.__indent__; + indent += oldIndent; + options.__indent__ = indent; + forOwn(argument, function(key, value) { + isEmpty = false; + result.push( + (compact ? '' : indent) + + jsesc(key, options) + ':' + + (compact ? '' : ' ') + + jsesc(value, options) + ); + }); + if (isEmpty) { + return '{}'; + } + return '{' + newLine + result.join(',' + newLine) + newLine + + (compact ? 
'' : oldIndent) + '}'; + } + } + + var string = argument; + // Loop over each code unit in the string and escape it + var index = -1; + var length = string.length; + var first; + var second; + var codePoint; + result = ''; + while (++index < length) { + var character = string.charAt(index); + if (options.es6) { + first = string.charCodeAt(index); + if ( // check if it’s the start of a surrogate pair + first >= 0xD800 && first <= 0xDBFF && // high surrogate + length > index + 1 // there is a next code unit + ) { + second = string.charCodeAt(index + 1); + if (second >= 0xDC00 && second <= 0xDFFF) { // low surrogate + // http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + codePoint = (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + result += '\\u{' + codePoint.toString(16).toUpperCase() + '}'; + index++; + continue; + } + } + } + if (!options.escapeEverything) { + if (regexWhitelist.test(character)) { + // It’s a printable ASCII character that is not `"`, `'` or `\`, + // so don’t escape it. + result += character; + continue; + } + if (character == '"') { + result += quote == character ? '\\"' : character; + continue; + } + if (character == '\'') { + result += quote == character ? '\\\'' : character; + continue; + } + } + if ( + character == '\0' && + !json && + !regexDigit.test(string.charAt(index + 1)) + ) { + result += '\\0'; + continue; + } + if (regexSingleEscape.test(character)) { + // no need for a `hasOwnProperty` check here + result += singleEscapes[character]; + continue; + } + var charCode = character.charCodeAt(0); + var hexadecimal = charCode.toString(16).toUpperCase(); + var longhand = hexadecimal.length > 2 || json; + var escaped = '\\' + (longhand ? 'u' : 'x') + + ('0000' + hexadecimal).slice(longhand ? -4 : -2); + result += escaped; + continue; + } + if (options.wrap) { + result = quote + result + quote; + } + return result; + }; + + jsesc.version = '0.5.0'; + + /*--------------------------------------------------------------------------*/ + + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define(function() { + return jsesc; + }); + } else if (freeExports && !freeExports.nodeType) { + if (freeModule) { // in Node.js or RingoJS v0.8.0+ + freeModule.exports = jsesc; + } else { // in Narwhal or RingoJS v0.7.0- + freeExports.jsesc = jsesc; + } + } else { // in Rhino or a web browser + root.jsesc = jsesc; + } + +}(this)); diff --git a/node_modules/regjsparser/node_modules/jsesc/man/jsesc.1 b/node_modules/regjsparser/node_modules/jsesc/man/jsesc.1 new file mode 100644 index 0000000..5257768 --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/man/jsesc.1 @@ -0,0 +1,90 @@ +.Dd October 25, 2013 +.Dt jsesc 1 +.Sh NAME +.Nm jsesc +.Nd escape strings for use in JavaScript string literals +.Sh SYNOPSIS +.Nm +.Op Fl s | -single-quotes Ar string +.br +.Op Fl d | -double-quotes Ar string +.br +.Op Fl w | -wrap Ar string +.br +.Op Fl 6 | -es6 Ar string +.br +.Op Fl e | -escape-everything Ar string +.br +.Op Fl j | -json Ar string +.br +.Op Fl p | -object Ar string +.br +.Op Fl p | -pretty Ar string +.br +.Op Fl v | -version +.br +.Op Fl h | -help +.Sh DESCRIPTION +.Nm +escapes strings for use in JavaScript string literals while generating the shortest possible valid ASCII-only output. 
+.Sh OPTIONS +.Bl -ohang -offset +.It Sy "-s, --single-quotes" +Escape any occurrences of ' in the input string as \\', so that the output can be used in a JavaScript string literal wrapped in single quotes. +.It Sy "-d, --double-quotes" +Escape any occurrences of " in the input string as \\", so that the output can be used in a JavaScript string literal wrapped in double quotes. +.It Sy "-w, --wrap" +Make sure the output is a valid JavaScript string literal wrapped in quotes. The type of quotes can be specified using the +.Ar -s | --single-quotes +or +.Ar -d | --double-quotes +settings. +.It Sy "-6, --es6" +Escape any astral Unicode symbols using ECMAScript 6 Unicode code point escape sequences. +.It Sy "-e, --escape-everything" +Escape all the symbols in the output, even printable ASCII symbols. +.It Sy "-j, --json" +Make sure the output is valid JSON. Hexadecimal character escape sequences and the \\v or \\0 escape sequences will not be used. Setting this flag enables the +.Ar -d | --double-quotes +and +.Ar -w | --wrap +settings. +.It Sy "-o, --object" +Treat the input as a JavaScript object rather than a string. Accepted values are flat arrays containing only string values, and flat objects containing only string values. +.It Sy "-p, --pretty" +Pretty-print the output for objects, using whitespace to make it more readable. Setting this flag enables the +.Ar -o | --object +setting. +.It Sy "-v, --version" +Print jsesc's version. +.It Sy "-h, --help" +Show the help screen. +.El +.Sh EXIT STATUS +The +.Nm jsesc +utility exits with one of the following values: +.Pp +.Bl -tag -width flag -compact +.It Li 0 +.Nm +successfully escaped the given string and printed the result. +.It Li 1 +.Nm +wasn't instructed to escape anything (for example, the +.Ar --help +flag was set); or, an error occurred. +.El +.Sh EXAMPLES +.Bl -ohang -offset +.It Sy "jsesc 'foo bar baz'" +Print an escaped version of the given string. +.It Sy echo\ 'foo bar baz'\ |\ jsesc +Print an escaped version of the string that gets piped in. +.El +.Sh BUGS +jsesc's bug tracker is located at .
+.Sh AUTHOR +Mathias Bynens +.Sh WWW + diff --git a/node_modules/regjsparser/node_modules/jsesc/package.json b/node_modules/regjsparser/node_modules/jsesc/package.json new file mode 100644 index 0000000..1216eef --- /dev/null +++ b/node_modules/regjsparser/node_modules/jsesc/package.json @@ -0,0 +1,55 @@ +{ + "name": "jsesc", + "version": "0.5.0", + "description": "A JavaScript library for escaping JavaScript strings while generating the shortest possible valid output.", + "homepage": "http://mths.be/jsesc", + "main": "jsesc.js", + "bin": "bin/jsesc", + "man": "man/jsesc.1", + "keywords": [ + "string", + "escape", + "javascript", + "tool" + ], + "licenses": [ + { + "type": "MIT", + "url": "http://mths.be/mit" + } + ], + "author": { + "name": "Mathias Bynens", + "url": "http://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/jsesc.git" + }, + "bugs": { + "url": "https://github.com/mathiasbynens/jsesc/issues" + }, + "files": [ + "LICENSE-MIT.txt", + "jsesc.js", + "bin/", + "man/" + ], + "directories": { + "test": "tests" + }, + "scripts": { + "test": "node tests/tests.js" + }, + "devDependencies": { + "coveralls": "^2.10.0", + "grunt": "^0.4.5", + "grunt-shell": "^0.7.0", + "grunt-template": "^0.2.3", + "istanbul": "^0.3.0", + "qunit-extras": "^1.2.0", + "qunitjs": "~1.11.0", + "regenerate": "^0.6.2", + "requirejs": "^2.1.14" + } +} diff --git a/node_modules/regjsparser/package.json b/node_modules/regjsparser/package.json new file mode 100644 index 0000000..5c5e65f --- /dev/null +++ b/node_modules/regjsparser/package.json @@ -0,0 +1,29 @@ +{ + "name": "regjsparser", + "version": "0.1.5", + "author": "'Julian Viereck' ", + "license": "BSD", + "main": "./parser", + "bin": "bin/parser", + "homepage": "https://github.com/jviereck/regjsparser", + "repository": { + "type": "git", + "url": "git@github.com:jviereck/regjsparser.git" + }, + "scripts": { + "test": "node test/index.js" + }, + "files": [ + "bin/", + "LICENSE.BSD", + "parser.js", + "README.md" + ], + "dependencies": { + "jsesc": "~0.5.0" + }, + "devDependencies": { + "regenerate": "~1.0.1", + "unicode-7.0.0": "~0.1.5" + } +} diff --git a/node_modules/regjsparser/parser.js b/node_modules/regjsparser/parser.js new file mode 100644 index 0000000..ae414a3 --- /dev/null +++ b/node_modules/regjsparser/parser.js @@ -0,0 +1,962 @@ +// regjsparser +// +// ================================================================== +// +// See ECMA-262 Standard: 15.10.1 +// +// NOTE: The ECMA-262 standard uses the term "Assertion" for /^/. Here the +// term "Anchor" is used. +// +// Pattern :: +// Disjunction +// +// Disjunction :: +// Alternative +// Alternative | Disjunction +// +// Alternative :: +// [empty] +// Alternative Term +// +// Term :: +// Anchor +// Atom +// Atom Quantifier +// +// Anchor :: +// ^ +// $ +// \ b +// \ B +// ( ? = Disjunction ) +// ( ? ! Disjunction ) +// +// Quantifier :: +// QuantifierPrefix +// QuantifierPrefix ? +// +// QuantifierPrefix :: +// * +// + +// ? +// { DecimalDigits } +// { DecimalDigits , } +// { DecimalDigits , DecimalDigits } +// +// Atom :: +// PatternCharacter +// . +// \ AtomEscape +// CharacterClass +// ( Disjunction ) +// ( ? : Disjunction ) +// +// PatternCharacter :: +// SourceCharacter but not any of: ^ $ \ . * + ? 
( ) [ ] { } | +// +// AtomEscape :: +// DecimalEscape +// CharacterEscape +// CharacterClassEscape +// +// CharacterEscape[U] :: +// ControlEscape +// c ControlLetter +// HexEscapeSequence +// RegExpUnicodeEscapeSequence[?U] (ES6) +// IdentityEscape[?U] +// +// ControlEscape :: +// one of f n r t v +// ControlLetter :: +// one of +// a b c d e f g h i j k l m n o p q r s t u v w x y z +// A B C D E F G H I J K L M N O P Q R S T U V W X Y Z +// +// IdentityEscape :: +// SourceCharacter but not IdentifierPart +// +// +// +// DecimalEscape :: +// DecimalIntegerLiteral [lookahead ∉ DecimalDigit] +// +// CharacterClassEscape :: +// one of d D s S w W +// +// CharacterClass :: +// [ [lookahead ∉ {^}] ClassRanges ] +// [ ^ ClassRanges ] +// +// ClassRanges :: +// [empty] +// NonemptyClassRanges +// +// NonemptyClassRanges :: +// ClassAtom +// ClassAtom NonemptyClassRangesNoDash +// ClassAtom - ClassAtom ClassRanges +// +// NonemptyClassRangesNoDash :: +// ClassAtom +// ClassAtomNoDash NonemptyClassRangesNoDash +// ClassAtomNoDash - ClassAtom ClassRanges +// +// ClassAtom :: +// - +// ClassAtomNoDash +// +// ClassAtomNoDash :: +// SourceCharacter but not one of \ or ] or - +// \ ClassEscape +// +// ClassEscape :: +// DecimalEscape +// b +// CharacterEscape +// CharacterClassEscape + +(function() { + + function parse(str, flags) { + function addRaw(node) { + node.raw = str.substring(node.range[0], node.range[1]); + return node; + } + + function updateRawStart(node, start) { + node.range[0] = start; + return addRaw(node); + } + + function createAnchor(kind, rawLength) { + return addRaw({ + type: 'anchor', + kind: kind, + range: [ + pos - rawLength, + pos + ] + }); + } + + function createValue(kind, codePoint, from, to) { + return addRaw({ + type: 'value', + kind: kind, + codePoint: codePoint, + range: [from, to] + }); + } + + function createEscaped(kind, codePoint, value, fromOffset) { + fromOffset = fromOffset || 0; + return createValue(kind, codePoint, pos - (value.length + fromOffset), pos); + } + + function createCharacter(matches) { + var _char = matches[0]; + var first = _char.charCodeAt(0); + if (hasUnicodeFlag) { + var second; + if (_char.length === 1 && first >= 0xD800 && first <= 0xDBFF) { + second = lookahead().charCodeAt(0); + if (second >= 0xDC00 && second <= 0xDFFF) { + // Unicode surrogate pair + pos++; + return createValue( + 'symbol', + (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000, + pos - 2, pos); + } + } + } + return createValue('symbol', first, pos - 1, pos); + } + + function createDisjunction(alternatives, from, to) { + return addRaw({ + type: 'disjunction', + body: alternatives, + range: [ + from, + to + ] + }); + } + + function createDot() { + return addRaw({ + type: 'dot', + range: [ + pos - 1, + pos + ] + }); + } + + function createCharacterClassEscape(value) { + return addRaw({ + type: 'characterClassEscape', + value: value, + range: [ + pos - 2, + pos + ] + }); + } + + function createReference(matchIndex) { + return addRaw({ + type: 'reference', + matchIndex: parseInt(matchIndex, 10), + range: [ + pos - 1 - matchIndex.length, + pos + ] + }); + } + + function createGroup(behavior, disjunction, from, to) { + return addRaw({ + type: 'group', + behavior: behavior, + body: disjunction, + range: [ + from, + to + ] + }); + } + + function createQuantifier(min, max, from, to) { + if (to == null) { + from = pos - 1; + to = pos; + } + + return addRaw({ + type: 'quantifier', + min: min, + max: max, + greedy: true, + body: null, // set later on + range: [ + from, + to + ] + }); 
+ } + + function createAlternative(terms, from, to) { + return addRaw({ + type: 'alternative', + body: terms, + range: [ + from, + to + ] + }); + } + + function createCharacterClass(classRanges, negative, from, to) { + return addRaw({ + type: 'characterClass', + body: classRanges, + negative: negative, + range: [ + from, + to + ] + }); + } + + function createClassRange(min, max, from, to) { + // See 15.10.2.15: + if (min.codePoint > max.codePoint) { + bail('invalid range in character class', min.raw + '-' + max.raw, from, to); + } + + return addRaw({ + type: 'characterClassRange', + min: min, + max: max, + range: [ + from, + to + ] + }); + } + + function flattenBody(body) { + if (body.type === 'alternative') { + return body.body; + } else { + return [body]; + } + } + + function isEmpty(obj) { + return obj.type === 'empty'; + } + + function incr(amount) { + amount = (amount || 1); + var res = str.substring(pos, pos + amount); + pos += (amount || 1); + return res; + } + + function skip(value) { + if (!match(value)) { + bail('character', value); + } + } + + function match(value) { + if (str.indexOf(value, pos) === pos) { + return incr(value.length); + } + } + + function lookahead() { + return str[pos]; + } + + function current(value) { + return str.indexOf(value, pos) === pos; + } + + function next(value) { + return str[pos + 1] === value; + } + + function matchReg(regExp) { + var subStr = str.substring(pos); + var res = subStr.match(regExp); + if (res) { + res.range = []; + res.range[0] = pos; + incr(res[0].length); + res.range[1] = pos; + } + return res; + } + + function parseDisjunction() { + // Disjunction :: + // Alternative + // Alternative | Disjunction + var res = [], from = pos; + res.push(parseAlternative()); + + while (match('|')) { + res.push(parseAlternative()); + } + + if (res.length === 1) { + return res[0]; + } + + return createDisjunction(res, from, pos); + } + + function parseAlternative() { + var res = [], from = pos; + var term; + + // Alternative :: + // [empty] + // Alternative Term + while (term = parseTerm()) { + res.push(term); + } + + if (res.length === 1) { + return res[0]; + } + + return createAlternative(res, from, pos); + } + + function parseTerm() { + // Term :: + // Anchor + // Atom + // Atom Quantifier + + if (pos >= str.length || current('|') || current(')')) { + return null; /* Means: The term is empty */ + } + + var anchor = parseAnchor(); + + if (anchor) { + return anchor; + } + + var atom = parseAtom(); + if (!atom) { + bail('Expected atom'); + } + var quantifier = parseQuantifier() || false; + if (quantifier) { + quantifier.body = flattenBody(atom); + // The quantifier contains the atom. Therefore, the beginning of the + // quantifier range is given by the beginning of the atom. + updateRawStart(quantifier, atom.range[0]); + return quantifier; + } + return atom; + } + + function parseGroup(matchA, typeA, matchB, typeB) { + var type = null, from = pos; + + if (match(matchA)) { + type = typeA; + } else if (match(matchB)) { + type = typeB; + } else { + return false; + } + + var body = parseDisjunction(); + if (!body) { + bail('Expected disjunction'); + } + skip(')'); + var group = createGroup(type, flattenBody(body), from, pos); + + if (type == 'normal') { + // Keep track of the number of closed groups. This is required for + // parseDecimalEscape(). In case the string is parsed a second time the + // value already holds the total count and no incrementation is required. 
+ if (firstIteration) { + closedCaptureCounter++; + } + } + return group; + } + + function parseAnchor() { + // Anchor :: + // ^ + // $ + // \ b + // \ B + // ( ? = Disjunction ) + // ( ? ! Disjunction ) + var res, from = pos; + + if (match('^')) { + return createAnchor('start', 1 /* rawLength */); + } else if (match('$')) { + return createAnchor('end', 1 /* rawLength */); + } else if (match('\\b')) { + return createAnchor('boundary', 2 /* rawLength */); + } else if (match('\\B')) { + return createAnchor('not-boundary', 2 /* rawLength */); + } else { + return parseGroup('(?=', 'lookahead', '(?!', 'negativeLookahead'); + } + } + + function parseQuantifier() { + // Quantifier :: + // QuantifierPrefix + // QuantifierPrefix ? + // + // QuantifierPrefix :: + // * + // + + // ? + // { DecimalDigits } + // { DecimalDigits , } + // { DecimalDigits , DecimalDigits } + + var res, from = pos; + var quantifier; + var min, max; + + if (match('*')) { + quantifier = createQuantifier(0); + } + else if (match('+')) { + quantifier = createQuantifier(1); + } + else if (match('?')) { + quantifier = createQuantifier(0, 1); + } + else if (res = matchReg(/^\{([0-9]+)\}/)) { + min = parseInt(res[1], 10); + quantifier = createQuantifier(min, min, res.range[0], res.range[1]); + } + else if (res = matchReg(/^\{([0-9]+),\}/)) { + min = parseInt(res[1], 10); + quantifier = createQuantifier(min, undefined, res.range[0], res.range[1]); + } + else if (res = matchReg(/^\{([0-9]+),([0-9]+)\}/)) { + min = parseInt(res[1], 10); + max = parseInt(res[2], 10); + if (min > max) { + bail('numbers out of order in {} quantifier', '', from, pos); + } + quantifier = createQuantifier(min, max, res.range[0], res.range[1]); + } + + if (quantifier) { + if (match('?')) { + quantifier.greedy = false; + quantifier.range[1] += 1; + } + } + + return quantifier; + } + + function parseAtom() { + // Atom :: + // PatternCharacter + // . + // \ AtomEscape + // CharacterClass + // ( Disjunction ) + // ( ? : Disjunction ) + + var res; + + // jviereck: allow ']', '}' here as well to be compatible with browser's + // implementations: ']'.match(/]/); + // if (res = matchReg(/^[^^$\\.*+?()[\]{}|]/)) { + if (res = matchReg(/^[^^$\\.*+?(){[|]/)) { + // PatternCharacter + return createCharacter(res); + } + else if (match('.')) { + // . + return createDot(); + } + else if (match('\\')) { + // \ AtomEscape + res = parseAtomEscape(); + if (!res) { + bail('atomEscape'); + } + return res; + } + else if (res = parseCharacterClass()) { + return res; + } + else { + // ( Disjunction ) + // ( ? 
: Disjunction ) + return parseGroup('(?:', 'ignore', '(', 'normal'); + } + } + + function parseUnicodeSurrogatePairEscape(firstEscape) { + if (hasUnicodeFlag) { + var first, second; + if (firstEscape.kind == 'unicodeEscape' && + (first = firstEscape.codePoint) >= 0xD800 && first <= 0xDBFF && + current('\\') && next('u') ) { + var prevPos = pos; + pos++; + var secondEscape = parseClassEscape(); + if (secondEscape.kind == 'unicodeEscape' && + (second = secondEscape.codePoint) >= 0xDC00 && second <= 0xDFFF) { + // Unicode surrogate pair + firstEscape.range[1] = secondEscape.range[1]; + firstEscape.codePoint = (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + firstEscape.type = 'value'; + firstEscape.kind = 'unicodeCodePointEscape'; + addRaw(firstEscape); + } + else { + pos = prevPos; + } + } + } + return firstEscape; + } + + function parseClassEscape() { + return parseAtomEscape(true); + } + + function parseAtomEscape(insideCharacterClass) { + // AtomEscape :: + // DecimalEscape + // CharacterEscape + // CharacterClassEscape + + var res, from = pos; + + res = parseDecimalEscape(); + if (res) { + return res; + } + + // For ClassEscape + if (insideCharacterClass) { + if (match('b')) { + // 15.10.2.19 + // The production ClassEscape :: b evaluates by returning the + // CharSet containing the one character (Unicode value 0008). + return createEscaped('singleEscape', 0x0008, '\\b'); + } else if (match('B')) { + bail('\\B not possible inside of CharacterClass', '', from); + } + } + + res = parseCharacterEscape(); + + return res; + } + + + function parseDecimalEscape() { + // DecimalEscape :: + // DecimalIntegerLiteral [lookahead ∉ DecimalDigit] + // CharacterClassEscape :: one of d D s S w W + + var res, match; + + if (res = matchReg(/^(?!0)\d+/)) { + match = res[0]; + var refIdx = parseInt(res[0], 10); + if (refIdx <= closedCaptureCounter) { + // If the number is smaller than the normal-groups found so + // far, then it is a reference... + return createReference(res[0]); + } else { + // ... otherwise it needs to be interpreted as a octal (if the + // number is in an octal format). If it is NOT octal format, + // then the slash is ignored and the number is matched later + // as normal characters. + + // Recall the negative decision to decide if the input must be parsed + // a second time with the total normal-groups. + backrefDenied.push(refIdx); + + // Reset the position again, as maybe only parts of the previous + // matched numbers are actual octal numbers. E.g. in '019' only + // the '01' should be matched. + incr(-res[0].length); + if (res = matchReg(/^[0-7]{1,3}/)) { + return createEscaped('octal', parseInt(res[0], 8), res[0], 1); + } else { + // If we end up here, we have a case like /\91/. Then the + // first slash is to be ignored and the 9 & 1 to be treated + // like ordinary characters. Create a character for the + // first number only here - other number-characters + // (if available) will be matched later. + res = createCharacter(matchReg(/^[89]/)); + return updateRawStart(res, res.range[0] - 1); + } + } + } + // Only allow octal numbers in the following. All matched numbers start + // with a zero (if the do not, the previous if-branch is executed). + // If the number is not octal format and starts with zero (e.g. `091`) + // then only the zeros `0` is treated here and the `91` are ordinary + // characters. 
+ // Example: + // /\091/.exec('\091')[0].length === 3 + else if (res = matchReg(/^[0-7]{1,3}/)) { + match = res[0]; + if (/^0{1,3}$/.test(match)) { + // If they are all zeros, then only take the first one. + return createEscaped('null', 0x0000, '0', match.length + 1); + } else { + return createEscaped('octal', parseInt(match, 8), match, 1); + } + } else if (res = matchReg(/^[dDsSwW]/)) { + return createCharacterClassEscape(res[0]); + } + return false; + } + + function parseCharacterEscape() { + // CharacterEscape :: + // ControlEscape + // c ControlLetter + // HexEscapeSequence + // UnicodeEscapeSequence + // IdentityEscape + + var res; + if (res = matchReg(/^[fnrtv]/)) { + // ControlEscape + var codePoint = 0; + switch (res[0]) { + case 't': codePoint = 0x009; break; + case 'n': codePoint = 0x00A; break; + case 'v': codePoint = 0x00B; break; + case 'f': codePoint = 0x00C; break; + case 'r': codePoint = 0x00D; break; + } + return createEscaped('singleEscape', codePoint, '\\' + res[0]); + } else if (res = matchReg(/^c([a-zA-Z])/)) { + // c ControlLetter + return createEscaped('controlLetter', res[1].charCodeAt(0) % 32, res[1], 2); + } else if (res = matchReg(/^x([0-9a-fA-F]{2})/)) { + // HexEscapeSequence + return createEscaped('hexadecimalEscape', parseInt(res[1], 16), res[1], 2); + } else if (res = matchReg(/^u([0-9a-fA-F]{4})/)) { + // UnicodeEscapeSequence + return parseUnicodeSurrogatePairEscape( + createEscaped('unicodeEscape', parseInt(res[1], 16), res[1], 2) + ); + } else if (hasUnicodeFlag && (res = matchReg(/^u\{([0-9a-fA-F]+)\}/))) { + // RegExpUnicodeEscapeSequence (ES6 Unicode code point escape) + return createEscaped('unicodeCodePointEscape', parseInt(res[1], 16), res[1], 4); + } else { + // IdentityEscape + return parseIdentityEscape(); + } + } + + // Taken from the Esprima parser. + function isIdentifierPart(ch) { + // Generated by `tools/generate-identifier-regex.js`. 
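+ // The check below accepts `$`, `_`, `\`, ASCII letters and digits, plus any
+ // non-ASCII character matched by the generated regex.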
+ var NonAsciiIdentifierPart = new RegExp('[\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B2\u08E4-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58\u0C59\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D60-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1380-\u138F\u13A0-\u13F4\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19D9\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\
u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u2E2F\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u3096\u3099\u309A\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA69D\uA69F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA78E\uA790-\uA7AD\uA7B0\uA7B1\uA7F7-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uA9E0-\uA9FE\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB5F\uAB64\uAB65\uABC0-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE2D\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]'); + + return (ch === 36) || (ch === 95) || // $ (dollar) and _ (underscore) + (ch >= 65 && ch <= 90) || // A..Z + (ch >= 97 && ch <= 122) || // a..z + (ch >= 48 && ch <= 57) || // 0..9 + (ch === 92) || // \ (backslash) + ((ch >= 0x80) && NonAsciiIdentifierPart.test(String.fromCharCode(ch))); + } + + function parseIdentityEscape() { + // IdentityEscape :: + // SourceCharacter but not IdentifierPart + // + // + + var ZWJ = '\u200C'; + var ZWNJ = '\u200D'; + + var tmp; + + if (!isIdentifierPart(lookahead())) { + tmp = incr(); + return createEscaped('identifier', tmp.charCodeAt(0), tmp, 1); + } + + if (match(ZWJ)) { + // + return createEscaped('identifier', 0x200C, ZWJ); + } else if (match(ZWNJ)) { + // + return createEscaped('identifier', 0x200D, ZWNJ); + } + + return null; + } + + function parseCharacterClass() { + // CharacterClass :: + // [ [lookahead ∉ {^}] ClassRanges ] + // [ ^ ClassRanges ] + + var res, from = pos; + if (res = matchReg(/^\[\^/)) { + res = parseClassRanges(); + skip(']'); + return createCharacterClass(res, true, from, pos); + } else if (match('[')) { + res = parseClassRanges(); + skip(']'); + return createCharacterClass(res, false, from, pos); + } + + return null; + } + + function parseClassRanges() { + // ClassRanges :: + // [empty] + // NonemptyClassRanges + + var res; + if (current(']')) { + // Empty array means nothing insinde of the ClassRange. 
+ return []; + } else { + res = parseNonemptyClassRanges(); + if (!res) { + bail('nonEmptyClassRanges'); + } + return res; + } + } + + function parseHelperClassRanges(atom) { + var from, to, res; + if (current('-') && !next(']')) { + // ClassAtom - ClassAtom ClassRanges + skip('-'); + + res = parseClassAtom(); + if (!res) { + bail('classAtom'); + } + to = pos; + var classRanges = parseClassRanges(); + if (!classRanges) { + bail('classRanges'); + } + from = atom.range[0]; + if (classRanges.type === 'empty') { + return [createClassRange(atom, res, from, to)]; + } + return [createClassRange(atom, res, from, to)].concat(classRanges); + } + + res = parseNonemptyClassRangesNoDash(); + if (!res) { + bail('nonEmptyClassRangesNoDash'); + } + + return [atom].concat(res); + } + + function parseNonemptyClassRanges() { + // NonemptyClassRanges :: + // ClassAtom + // ClassAtom NonemptyClassRangesNoDash + // ClassAtom - ClassAtom ClassRanges + + var atom = parseClassAtom(); + if (!atom) { + bail('classAtom'); + } + + if (current(']')) { + // ClassAtom + return [atom]; + } + + // ClassAtom NonemptyClassRangesNoDash + // ClassAtom - ClassAtom ClassRanges + return parseHelperClassRanges(atom); + } + + function parseNonemptyClassRangesNoDash() { + // NonemptyClassRangesNoDash :: + // ClassAtom + // ClassAtomNoDash NonemptyClassRangesNoDash + // ClassAtomNoDash - ClassAtom ClassRanges + + var res = parseClassAtom(); + if (!res) { + bail('classAtom'); + } + if (current(']')) { + // ClassAtom + return res; + } + + // ClassAtomNoDash NonemptyClassRangesNoDash + // ClassAtomNoDash - ClassAtom ClassRanges + return parseHelperClassRanges(res); + } + + function parseClassAtom() { + // ClassAtom :: + // - + // ClassAtomNoDash + if (match('-')) { + return createCharacter('-'); + } else { + return parseClassAtomNoDash(); + } + } + + function parseClassAtomNoDash() { + // ClassAtomNoDash :: + // SourceCharacter but not one of \ or ] or - + // \ ClassEscape + + var res; + if (res = matchReg(/^[^\\\]-]/)) { + return createCharacter(res[0]); + } else if (match('\\')) { + res = parseClassEscape(); + if (!res) { + bail('classEscape'); + } + + return parseUnicodeSurrogatePairEscape(res); + } + } + + function bail(message, details, from, to) { + from = from == null ? pos : from; + to = to == null ? from : to; + + var contextStart = Math.max(0, from - 10); + var contextEnd = Math.min(to + 10, str.length); + + // Output a bit of context and a line pointing to where our error is. + // + // We are assuming that there are no actual newlines in the content as this is a regular expression. + var context = ' ' + str.substring(contextStart, contextEnd); + var pointer = ' ' + new Array(from - contextStart + 1).join(' ') + '^'; + + throw SyntaxError(message + ' at position ' + from + (details ? ': ' + details : '') + '\n' + context + '\n' + pointer); + } + + var backrefDenied = []; + var closedCaptureCounter = 0; + var firstIteration = true; + var hasUnicodeFlag = (flags || "").indexOf("u") !== -1; + var pos = 0; + + // Convert the input to a string and treat the empty string special. + str = String(str); + if (str === '') { + str = '(?:)'; + } + + var result = parseDisjunction(); + + if (result.range[1] !== str.length) { + bail('Could not parse entire input - got stuck', '', result.range[1]); + } + + // The spec requires to interpret the `\2` in `/\2()()/` as backreference. 
+ // As the parser collects the number of capture groups as the string is + // parsed it is impossible to make these decisions at the point when the + // `\2` is handled. In case the local decision turns out to be wrong after + // the parsing has finished, the input string is parsed a second time with + // the total number of capture groups set. + // + // SEE: https://github.com/jviereck/regjsparser/issues/70 + for (var i = 0; i < backrefDenied.length; i++) { + if (backrefDenied[i] <= closedCaptureCounter) { + // Parse the input a second time. + pos = 0; + firstIteration = false; + return parseDisjunction(); + } + } + + return result; + } + + var regjsparser = { + parse: parse + }; + + if (typeof module !== 'undefined' && module.exports) { + module.exports = regjsparser; + } else { + window.regjsparser = regjsparser; + } + +}()); diff --git a/node_modules/repeat-element/LICENSE b/node_modules/repeat-element/LICENSE new file mode 100644 index 0000000..33754da --- /dev/null +++ b/node_modules/repeat-element/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/repeat-element/README.md b/node_modules/repeat-element/README.md new file mode 100644 index 0000000..008e20e --- /dev/null +++ b/node_modules/repeat-element/README.md @@ -0,0 +1,71 @@ +# repeat-element [![NPM version](https://badge.fury.io/js/repeat-element.svg)](http://badge.fury.io/js/repeat-element) + +> Create an array by repeating the given value n times. + +## Install + +Install with [npm](https://www.npmjs.com/) + +```bash +npm i repeat-element --save +``` + +## Usage + +```js +var repeat = require('repeat-element'); + +repeat('a', 5); +//=> ['a', 'a', 'a', 'a', 'a'] + +repeat('a', 1); +//=> ['a'] + +repeat('a', 0); +//=> [] + +repeat(null, 5) +//» [ null, null, null, null, null ] + +repeat({some: 'object'}, 5) +//» [ { some: 'object' }, +// { some: 'object' }, +// { some: 'object' }, +// { some: 'object' }, +// { some: 'object' } ] + +repeat(5, 5) +//» [ 5, 5, 5, 5, 5 ] +``` + +## Related projects + +[repeat-string](https://github.com/jonschlinkert/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. + +## Running tests + +Install dev dependencies: + +```bash +npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. 
For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/repeat-element/issues) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright (c) 2015 Jon Schlinkert +Released under the MIT license. + +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on May 06, 2015._ diff --git a/node_modules/repeat-element/index.js b/node_modules/repeat-element/index.js new file mode 100644 index 0000000..0ad45ab --- /dev/null +++ b/node_modules/repeat-element/index.js @@ -0,0 +1,18 @@ +/*! + * repeat-element + * + * Copyright (c) 2015 Jon Schlinkert. + * Licensed under the MIT license. + */ + +'use strict'; + +module.exports = function repeat(ele, num) { + var arr = new Array(num); + + for (var i = 0; i < num; i++) { + arr[i] = ele; + } + + return arr; +}; diff --git a/node_modules/repeat-element/package.json b/node_modules/repeat-element/package.json new file mode 100644 index 0000000..5b359b0 --- /dev/null +++ b/node_modules/repeat-element/package.json @@ -0,0 +1,44 @@ +{ + "name": "repeat-element", + "description": "Create an array by repeating the given value n times.", + "version": "1.1.2", + "homepage": "https://github.com/jonschlinkert/repeat-element", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/repeat-element.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/repeat-element/issues" + }, + "license": { + "type": "MIT", + "url": "https://github.com/jonschlinkert/repeat-element/blob/master/LICENSE" + }, + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "files": [ + "index.js" + ], + "keywords": [ + "array", + "element", + "repeat", + "string" + ], + "devDependencies": { + "benchmarked": "^0.1.4", + "chalk": "^1.0.0", + "glob": "^5.0.5", + "minimist": "^1.1.1", + "mocha": "^2.2.4" + } +} diff --git a/node_modules/repeat-string/LICENSE b/node_modules/repeat-string/LICENSE new file mode 100644 index 0000000..39245ac --- /dev/null +++ b/node_modules/repeat-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
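A note on the `repeat-element` implementation shown above: the returned array holds `num` references to the *same* element, so repeating a mutable object does not clone it. A small illustration (assuming `repeat-element` is installed):

```js
var repeat = require('repeat-element');

var arr = repeat({ count: 0 }, 3); // three slots, one shared object
arr[0].count++;

console.log(arr[1].count);      // → 1 (the same object is visible in every slot)
console.log(arr[2] === arr[0]); // → true
```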
diff --git a/node_modules/repeat-string/README.md b/node_modules/repeat-string/README.md new file mode 100644 index 0000000..aaa5e91 --- /dev/null +++ b/node_modules/repeat-string/README.md @@ -0,0 +1,136 @@ +# repeat-string [![NPM version](https://img.shields.io/npm/v/repeat-string.svg?style=flat)](https://www.npmjs.com/package/repeat-string) [![NPM monthly downloads](https://img.shields.io/npm/dm/repeat-string.svg?style=flat)](https://npmjs.org/package/repeat-string) [![NPM total downloads](https://img.shields.io/npm/dt/repeat-string.svg?style=flat)](https://npmjs.org/package/repeat-string) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/repeat-string.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/repeat-string) + +> Repeat the given string n times. Fastest implementation for repeating a string. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save repeat-string +``` + +## Usage + +### [repeat](index.js#L41) + +Repeat the given `string` the specified `number` of times. + +**Example:** + +**Example** + +```js +var repeat = require('repeat-string'); +repeat('A', 5); +//=> AAAAA +``` + +**Params** + +* `string` **{String}**: The string to repeat +* `number` **{Number}**: The number of times to repeat the string +* `returns` **{String}**: Repeated string + +## Benchmarks + +Repeat string is significantly faster than the native method (which is itself faster than [repeating](https://github.com/sindresorhus/repeating)): + +```sh +# 2x +repeat-string █████████████████████████ (26,953,977 ops/sec) +repeating █████████ (9,855,695 ops/sec) +native ██████████████████ (19,453,895 ops/sec) + +# 3x +repeat-string █████████████████████████ (19,445,252 ops/sec) +repeating ███████████ (8,661,565 ops/sec) +native ████████████████████ (16,020,598 ops/sec) + +# 10x +repeat-string █████████████████████████ (23,792,521 ops/sec) +repeating █████████ (8,571,332 ops/sec) +native ███████████████ (14,582,955 ops/sec) + +# 50x +repeat-string █████████████████████████ (23,640,179 ops/sec) +repeating █████ (5,505,509 ops/sec) +native ██████████ (10,085,557 ops/sec) + +# 250x +repeat-string █████████████████████████ (23,489,618 ops/sec) +repeating ████ (3,962,937 ops/sec) +native ████████ (7,724,892 ops/sec) + +# 2000x +repeat-string █████████████████████████ (20,315,172 ops/sec) +repeating ████ (3,297,079 ops/sec) +native ███████ (6,203,331 ops/sec) + +# 20000x +repeat-string █████████████████████████ (23,382,915 ops/sec) +repeating ███ (2,980,058 ops/sec) +native █████ (5,578,808 ops/sec) +``` + +**Run the benchmarks** + +Install dev dependencies: + +```sh +npm i -d && node benchmark +``` + +## About + +### Related projects + +[repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Contributors + +| **Commits** | **Contributor**
| +| --- | --- | +| 51 | [jonschlinkert](https://github.com/jonschlinkert) | +| 2 | [LinusU](https://github.com/LinusU) | +| 2 | [tbusser](https://github.com/tbusser) | +| 1 | [doowb](https://github.com/doowb) | +| 1 | [wooorm](https://github.com/wooorm) | + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](http://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/repeat-string/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.2.0, on October 23, 2016._ \ No newline at end of file diff --git a/node_modules/repeat-string/index.js b/node_modules/repeat-string/index.js new file mode 100644 index 0000000..4459afd --- /dev/null +++ b/node_modules/repeat-string/index.js @@ -0,0 +1,70 @@ +/*! + * repeat-string + * + * Copyright (c) 2014-2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +/** + * Results cache + */ + +var res = ''; +var cache; + +/** + * Expose `repeat` + */ + +module.exports = repeat; + +/** + * Repeat the given `string` the specified `number` + * of times. + * + * **Example:** + * + * ```js + * var repeat = require('repeat-string'); + * repeat('A', 5); + * //=> AAAAA + * ``` + * + * @param {String} `string` The string to repeat + * @param {Number} `number` The number of times to repeat the string + * @return {String} Repeated string + * @api public + */ + +function repeat(str, num) { + if (typeof str !== 'string') { + throw new TypeError('expected a string'); + } + + // cover common, quick use cases + if (num === 1) return str; + if (num === 2) return str + str; + + var max = str.length * num; + if (cache !== str || typeof cache === 'undefined') { + cache = str; + res = ''; + } else if (res.length >= max) { + return res.substr(0, max); + } + + while (max > res.length && num > 1) { + if (num & 1) { + res += str; + } + + num >>= 1; + str += str; + } + + res += str; + res = res.substr(0, max); + return res; +} diff --git a/node_modules/repeat-string/package.json b/node_modules/repeat-string/package.json new file mode 100644 index 0000000..09f8892 --- /dev/null +++ b/node_modules/repeat-string/package.json @@ -0,0 +1,77 @@ +{ + "name": "repeat-string", + "description": "Repeat the given string n times. 
Fastest implementation for repeating a string.", + "version": "1.6.1", + "homepage": "https://github.com/jonschlinkert/repeat-string", + "author": "Jon Schlinkert (http://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://github.com/doowb)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Linus Unnebäck (http://linus.unnebäck.se)", + "Thijs Busser (http://tbusser.net)", + "Titus (wooorm.com)" + ], + "repository": "jonschlinkert/repeat-string", + "bugs": { + "url": "https://github.com/jonschlinkert/repeat-string/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi-cyan": "^0.1.1", + "benchmarked": "^0.2.5", + "gulp-format-md": "^0.1.11", + "isobject": "^2.1.0", + "mocha": "^3.1.2", + "repeating": "^3.0.0", + "text-table": "^0.2.0", + "yargs-parser": "^4.0.2" + }, + "keywords": [ + "fast", + "fastest", + "fill", + "left", + "left-pad", + "multiple", + "pad", + "padding", + "repeat", + "repeating", + "repetition", + "right", + "right-pad", + "string", + "times" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "repeat-element" + ] + }, + "helpers": [ + "./benchmark/helper.js" + ], + "reflinks": [ + "verb" + ] + } +} diff --git a/node_modules/repeating/index.js b/node_modules/repeating/index.js new file mode 100644 index 0000000..ccae0d7 --- /dev/null +++ b/node_modules/repeating/index.js @@ -0,0 +1,24 @@ +'use strict'; +var isFinite = require('is-finite'); + +module.exports = function (str, n) { + if (typeof str !== 'string') { + throw new TypeError('Expected `input` to be a string'); + } + + if (n < 0 || !isFinite(n)) { + throw new TypeError('Expected `count` to be a positive finite number'); + } + + var ret = ''; + + do { + if (n & 1) { + ret += str; + } + + str += str; + } while ((n >>= 1)); + + return ret; +}; diff --git a/node_modules/repeating/license b/node_modules/repeating/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/repeating/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
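Both `repeat-string` and `repeating` above use the same square-and-multiply idea applied to strings: the source string is doubled on every pass, and a copy is appended whenever the low bit of the remaining count is set, so only O(log n) concatenations are performed. A commented trace of that loop (illustrative only, not part of either package):

```js
// Same doubling technique as repeating/index.js, traced for n = 5:
//   n = 5 (binary 101): low bit set   → ret = 'ab';           str = 'abab'
//   n = 2 (binary  10): low bit clear                          str = 'abababab'
//   n = 1 (binary   1): low bit set   → ret = 'ababababab';   loop ends
function repeatByDoubling(str, n) {
  var ret = '';
  do {
    if (n & 1) {
      ret += str;
    }
    str += str;
  } while ((n >>= 1));
  return ret;
}

console.log(repeatByDoubling('ab', 5)); // → 'ababababab'
```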
diff --git a/node_modules/repeating/package.json b/node_modules/repeating/package.json new file mode 100644 index 0000000..0247a23 --- /dev/null +++ b/node_modules/repeating/package.json @@ -0,0 +1,37 @@ +{ + "name": "repeating", + "version": "2.0.1", + "description": "Repeat a string - fast", + "license": "MIT", + "repository": "sindresorhus/repeating", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "repeat", + "string", + "repeating", + "str", + "text", + "fill", + "pad" + ], + "dependencies": { + "is-finite": "^1.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/repeating/readme.md b/node_modules/repeating/readme.md new file mode 100644 index 0000000..06f0b69 --- /dev/null +++ b/node_modules/repeating/readme.md @@ -0,0 +1,30 @@ +# repeating [![Build Status](https://travis-ci.org/sindresorhus/repeating.svg?branch=master)](https://travis-ci.org/sindresorhus/repeating) + +> Repeat a string - fast + + +## Install + +``` +$ npm install --save repeating +``` + + +## Usage + +```js +const repeating = require('repeating'); + +repeating('unicorn ', 100); +//=> 'unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn ' +``` + + +## Related + +- [repeating-cli](https://github.com/sindresorhus/repeating-cli) - CLI for this module + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/request/.eslintrc b/node_modules/request/.eslintrc new file mode 100644 index 0000000..5a59481 --- /dev/null +++ b/node_modules/request/.eslintrc @@ -0,0 +1,45 @@ +{ + "env": { + "node": true + }, + "rules": { + // 2-space indentation + "indent": [2, 2, {"SwitchCase": 1}], + // Disallow semi-colons, unless needed to disambiguate statement + "semi": [2, "never"], + // Require strings to use single quotes + "quotes": [2, "single"], + // Require curly braces for all control statements + "curly": 2, + // Disallow using variables and functions before they've been defined + "no-use-before-define": 2, + // Allow any case for variable naming + "camelcase": 0, + // Disallow unused variables, except as function arguments + "no-unused-vars": [2, {"args":"none"}], + // Allow leading underscores for method names + // REASON: we use underscores to denote private methods + "no-underscore-dangle": 0, + // Allow multi spaces around operators since they are + // used for alignment. This is not consistent in the + // code. 
+ "no-multi-spaces": 0, + // Style rule is: most objects use { beforeColon: false, afterColon: true }, unless aligning which uses: + // + // { + // beforeColon : true, + // afterColon : true + // } + // + // eslint can't handle this, so the check is disabled. + "key-spacing": 0, + // Allow shadowing vars in outer scope (needs discussion) + "no-shadow": 0, + // Use if () { } + // ^ space + "keyword-spacing": [2, {"after": true}], + // Use if () { } + // ^ space + "space-before-blocks": [2, "always"] + } +} diff --git a/node_modules/request/.npmignore b/node_modules/request/.npmignore new file mode 100644 index 0000000..67fe11c --- /dev/null +++ b/node_modules/request/.npmignore @@ -0,0 +1,6 @@ +coverage +tests +node_modules +examples +release.sh +disabled.appveyor.yml diff --git a/node_modules/request/.travis.yml b/node_modules/request/.travis.yml new file mode 100644 index 0000000..9be8247 --- /dev/null +++ b/node_modules/request/.travis.yml @@ -0,0 +1,21 @@ + +language: node_js + +node_js: + - node + - 6 + - 4 + - 0.12 + +after_script: + - npm run test-cov + - codecov + - cat ./coverage/lcov.info | coveralls + +webhooks: + urls: https://webhooks.gitter.im/e/237280ed4796c19cc626 + on_success: change # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: false # default: false + +sudo: false diff --git a/node_modules/request/CHANGELOG.md b/node_modules/request/CHANGELOG.md new file mode 100644 index 0000000..c8f4545 --- /dev/null +++ b/node_modules/request/CHANGELOG.md @@ -0,0 +1,650 @@ +## Change Log + +### v2.76.0 (2016/10/25) +- [#2424](https://github.com/request/request/pull/2424) Handle buffers directly instead of using "bl" (@zertosh) +- [#2415](https://github.com/request/request/pull/2415) Re-enable timeout tests on Travis + other fixes (@mscdex) +- [#2431](https://github.com/request/request/pull/2431) Improve timeouts accuracy and node v6.8.0+ compatibility (@mscdex, @greenkeeperio-bot) +- [#2428](https://github.com/request/request/pull/2428) Update qs to version 6.3.0 🚀 (@greenkeeperio-bot) +- [#2420](https://github.com/request/request/pull/2420) change .on to .once, remove possible memory leaks (@duereg) +- [#2426](https://github.com/request/request/pull/2426) Remove "isFunction" helper in favor of "typeof" check (@zertosh) +- [#2425](https://github.com/request/request/pull/2425) Simplify "defer" helper creation (@zertosh) +- [#2402](https://github.com/request/request/pull/2402) form-data@2.1.1 breaks build 🚨 (@greenkeeperio-bot) +- [#2393](https://github.com/request/request/pull/2393) Update form-data to version 2.1.0 🚀 (@greenkeeperio-bot) + +### v2.75.0 (2016/09/17) +- [#2381](https://github.com/request/request/pull/2381) Drop support for Node 0.10 (@simov) +- [#2377](https://github.com/request/request/pull/2377) Update form-data to version 2.0.0 🚀 (@greenkeeperio-bot) +- [#2353](https://github.com/request/request/pull/2353) Add greenkeeper ignored packages (@simov) +- [#2351](https://github.com/request/request/pull/2351) Update karma-tap to version 3.0.1 🚀 (@greenkeeperio-bot) +- [#2348](https://github.com/request/request/pull/2348) form-data@1.0.1 breaks build 🚨 (@greenkeeperio-bot) +- [#2349](https://github.com/request/request/pull/2349) Check error type instead of string (@scotttrinh) + +### v2.74.0 (2016/07/22) +- [#2295](https://github.com/request/request/pull/2295) Update tough-cookie to 2.3.0 (@stash-sfdc) +- [#2280](https://github.com/request/request/pull/2280) Update karma-tap to version 2.0.1 
🚀 (@greenkeeperio-bot) + +### v2.73.0 (2016/07/09) +- [#2240](https://github.com/request/request/pull/2240) Remove connectionErrorHandler to fix #1903 (@zarenner) +- [#2251](https://github.com/request/request/pull/2251) tape@4.6.0 breaks build 🚨 (@greenkeeperio-bot) +- [#2225](https://github.com/request/request/pull/2225) Update docs (@ArtskydJ) +- [#2203](https://github.com/request/request/pull/2203) Update browserify to version 13.0.1 🚀 (@greenkeeperio-bot) +- [#2275](https://github.com/request/request/pull/2275) Update karma to version 1.1.1 🚀 (@greenkeeperio-bot) +- [#2204](https://github.com/request/request/pull/2204) Add codecov.yml and disable PR comments (@simov) +- [#2212](https://github.com/request/request/pull/2212) Fix link to http.IncomingMessage documentation (@nazieb) +- [#2208](https://github.com/request/request/pull/2208) Update to form-data RC4 and pass null values to it (@simov) +- [#2207](https://github.com/request/request/pull/2207) Move aws4 require statement to the top (@simov) +- [#2199](https://github.com/request/request/pull/2199) Update karma-coverage to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#2206](https://github.com/request/request/pull/2206) Update qs to version 6.2.0 🚀 (@greenkeeperio-bot) +- [#2205](https://github.com/request/request/pull/2205) Use server-destroy to close hanging sockets in tests (@simov) +- [#2200](https://github.com/request/request/pull/2200) Update karma-cli to version 1.0.0 🚀 (@greenkeeperio-bot) + +### v2.72.0 (2016/04/17) +- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov) +- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou) +- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz) +- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov) +- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov) +- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo) + +### v2.71.0 (2016/04/12) +- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov) + +### v2.70.0 (2016/04/05) +- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot) +- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. 
(@elyobo) +- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot) +- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot) +- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot) +- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov) +- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov) +- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot) +- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov) +- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm) +- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov) +- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot) + +### v2.69.0 (2016/01/27) +- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg) + +### v2.68.0 (2016/01/27) +- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods) +- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross) +- [#2024](https://github.com/request/request/pull/2024) Update har-validator dependency for nsp advisory #76 (@TylerDixon) +- [#2016](https://github.com/request/request/pull/2016) Update qs to version 6.0.2 🚀 (@greenkeeperio-bot) +- [#2007](https://github.com/request/request/pull/2007) Use the `extend` module instead of util._extend (@simov) +- [#2003](https://github.com/request/request/pull/2003) Update browserify to version 13.0.0 🚀 (@greenkeeperio-bot) +- [#1989](https://github.com/request/request/pull/1989) Update buffer-equal to version 1.0.0 🚀 (@greenkeeperio-bot) +- [#1956](https://github.com/request/request/pull/1956) Check form-data content-length value before setting up the header (@jongyoonlee) +- [#1958](https://github.com/request/request/pull/1958) Use IncomingMessage.destroy method (@simov) +- [#1952](https://github.com/request/request/pull/1952) Adds example for Tor proxy (@prometheansacrifice) +- [#1943](https://github.com/request/request/pull/1943) Update eslint to version 1.10.3 🚀 (@simov, @greenkeeperio-bot) +- [#1924](https://github.com/request/request/pull/1924) Update eslint to version 1.10.1 🚀 (@greenkeeperio-bot) +- [#1915](https://github.com/request/request/pull/1915) Remove content-length and transfer-encoding headers from defaultProxyHeaderWhiteList (@yaxia) + +### v2.67.0 (2015/11/19) +- [#1913](https://github.com/request/request/pull/1913) Update http-signature to version 1.1.0 🚀 (@greenkeeperio-bot) + +### v2.66.0 (2015/11/18) +- [#1906](https://github.com/request/request/pull/1906) Update README URLs based on HTTP redirects (@ReadmeCritic) +- [#1905](https://github.com/request/request/pull/1905) Convert typed arrays into regular buffers (@simov) +- [#1902](https://github.com/request/request/pull/1902) node-uuid@1.4.7 breaks build 🚨 (@greenkeeperio-bot) +- [#1894](https://github.com/request/request/pull/1894) Fix tunneling after redirection from https (Original: #1881) (@simov, @falms) +- [#1893](https://github.com/request/request/pull/1893) 
Update eslint to version 1.9.0 🚀 (@greenkeeperio-bot) +- [#1852](https://github.com/request/request/pull/1852) Update eslint to version 1.7.3 🚀 (@simov, @greenkeeperio-bot, @paulomcnally, @michelsalib, @arbaaz, @vladimirich, @LoicMahieu, @JoshWillik, @jzaefferer, @ryanwholey, @djchie, @thisconnect, @mgenereu, @acroca, @Sebmaster, @KoltesDigital) +- [#1876](https://github.com/request/request/pull/1876) Implement loose matching for har mime types (@simov) +- [#1875](https://github.com/request/request/pull/1875) Update bluebird to version 3.0.2 🚀 (@simov, @greenkeeperio-bot) +- [#1871](https://github.com/request/request/pull/1871) Update browserify to version 12.0.1 🚀 (@greenkeeperio-bot) +- [#1866](https://github.com/request/request/pull/1866) Add missing quotes on x-token property in README (@miguelmota) +- [#1874](https://github.com/request/request/pull/1874) Fix typo in README.md (@gswalden) +- [#1860](https://github.com/request/request/pull/1860) Improve referer header tests and docs (@simov) +- [#1861](https://github.com/request/request/pull/1861) Remove redundant call to Stream constructor (@watson) +- [#1857](https://github.com/request/request/pull/1857) Fix Referer header to point to the original host name (@simov) +- [#1850](https://github.com/request/request/pull/1850) Update karma-coverage to version 0.5.3 🚀 (@greenkeeperio-bot) +- [#1847](https://github.com/request/request/pull/1847) Use node's latest version when building (@simov) +- [#1836](https://github.com/request/request/pull/1836) Tunnel: fix wrong property name (@KoltesDigital) +- [#1820](https://github.com/request/request/pull/1820) Set href as request.js uses it (@mgenereu) +- [#1840](https://github.com/request/request/pull/1840) Update http-signature to version 1.0.2 🚀 (@greenkeeperio-bot) +- [#1845](https://github.com/request/request/pull/1845) Update istanbul to version 0.4.0 🚀 (@greenkeeperio-bot) + +### v2.65.0 (2015/10/11) +- [#1833](https://github.com/request/request/pull/1833) Update aws-sign2 to version 0.6.0 🚀 (@greenkeeperio-bot) +- [#1811](https://github.com/request/request/pull/1811) Enable loose cookie parsing in tough-cookie (@Sebmaster) +- [#1830](https://github.com/request/request/pull/1830) Bring back tilde ranges for all dependencies (@simov) +- [#1821](https://github.com/request/request/pull/1821) Implement support for RFC 2617 MD5-sess algorithm. 
(@BigDSK) +- [#1828](https://github.com/request/request/pull/1828) Updated qs dependency to 5.2.0 (@acroca) +- [#1818](https://github.com/request/request/pull/1818) Extract `readResponseBody` method out of `onRequestResponse` (@pvoisin) +- [#1819](https://github.com/request/request/pull/1819) Run stringify once (@mgenereu) +- [#1814](https://github.com/request/request/pull/1814) Updated har-validator to version 2.0.2 (@greenkeeperio-bot) +- [#1807](https://github.com/request/request/pull/1807) Updated tough-cookie to version 2.1.0 (@greenkeeperio-bot) +- [#1800](https://github.com/request/request/pull/1800) Add caret ranges for devDependencies, except eslint (@simov) +- [#1799](https://github.com/request/request/pull/1799) Updated karma-browserify to version 4.4.0 (@greenkeeperio-bot) +- [#1797](https://github.com/request/request/pull/1797) Updated tape to version 4.2.0 (@greenkeeperio-bot) +- [#1788](https://github.com/request/request/pull/1788) Pinned all dependencies (@greenkeeperio-bot) + +### v2.64.0 (2015/09/25) +- [#1787](https://github.com/request/request/pull/1787) npm ignore examples, release.sh and disabled.appveyor.yml (@thisconnect) +- [#1775](https://github.com/request/request/pull/1775) Fix typo in README.md (@djchie) +- [#1776](https://github.com/request/request/pull/1776) Changed word 'conjuction' to read 'conjunction' in README.md (@ryanwholey) +- [#1785](https://github.com/request/request/pull/1785) Revert: Set default application/json content-type when using json option #1772 (@simov) + +### v2.63.0 (2015/09/21) +- [#1772](https://github.com/request/request/pull/1772) Set default application/json content-type when using json option (@jzaefferer) + +### v2.62.0 (2015/09/15) +- [#1768](https://github.com/request/request/pull/1768) Add node 4.0 to the list of build targets (@simov) +- [#1767](https://github.com/request/request/pull/1767) Query strings now cooperate with unix sockets (@JoshWillik) +- [#1750](https://github.com/request/request/pull/1750) Revert doc about installation of tough-cookie added in #884 (@LoicMahieu) +- [#1746](https://github.com/request/request/pull/1746) Missed comma in Readme (@vladimirich) +- [#1743](https://github.com/request/request/pull/1743) Fix options not being initialized in defaults method (@simov) + +### v2.61.0 (2015/08/19) +- [#1721](https://github.com/request/request/pull/1721) Minor fix in README.md (@arbaaz) +- [#1733](https://github.com/request/request/pull/1733) Avoid useless Buffer transformation (@michelsalib) +- [#1726](https://github.com/request/request/pull/1726) Update README.md (@paulomcnally) +- [#1715](https://github.com/request/request/pull/1715) Fix forever option in node > 0.10 #1709 (@calibr) +- [#1716](https://github.com/request/request/pull/1716) Do not create Buffer from Object in setContentLength(iojs v3.0 issue) (@calibr) +- [#1711](https://github.com/request/request/pull/1711) Add ability to detect connect timeouts (@kevinburke) +- [#1712](https://github.com/request/request/pull/1712) Set certificate expiration to August 2, 2018 (@kevinburke) +- [#1700](https://github.com/request/request/pull/1700) debug() when JSON.parse() on a response body fails (@phillipj) + +### v2.60.0 (2015/07/21) +- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews) + +### v2.59.0 (2015/07/20) +- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options. 
Forever option defaults to using http(s).Agent in node 0.12+ (@simov) +- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman) +- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme) +- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov) +- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov) +- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x) +- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov) +- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles) + +### v2.58.0 (2015/06/16) +- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov) +- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov) +- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov) +- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov) + +### v2.57.0 (2015/05/31) +- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. (@ChALkeR) + +### v2.56.0 (2015/05/28) +- [#1610](https://github.com/request/request/pull/1610) Bump module dependencies (@simov) +- [#1600](https://github.com/request/request/pull/1600) Extract the querystring logic into separate module (@simov) +- [#1607](https://github.com/request/request/pull/1607) Re-generate certificates (@simov) +- [#1599](https://github.com/request/request/pull/1599) Move getProxyFromURI logic below the check for Invaild URI (#1595) (@simov) +- [#1598](https://github.com/request/request/pull/1598) Fix the way http verbs are defined in order to please intellisense IDEs (@simov, @flannelJesus) +- [#1591](https://github.com/request/request/pull/1591) A few minor fixes: (@simov) +- [#1584](https://github.com/request/request/pull/1584) Refactor test-default tests (according to comments in #1430) (@simov) +- [#1585](https://github.com/request/request/pull/1585) Fixing documentation regarding TLS options (#1583) (@mainakae) +- [#1574](https://github.com/request/request/pull/1574) Refresh the oauth_nonce on redirect (#1573) (@simov) +- [#1570](https://github.com/request/request/pull/1570) Discovered tests that weren't properly running (@seanstrom) +- [#1569](https://github.com/request/request/pull/1569) Fix pause before response arrives (@kevinoid) +- [#1558](https://github.com/request/request/pull/1558) Emit error instead of throw (@simov) +- [#1568](https://github.com/request/request/pull/1568) Fix stall when piping gzipped response (@kevinoid) +- [#1560](https://github.com/request/request/pull/1560) Update combined-stream (@apechimp) +- [#1543](https://github.com/request/request/pull/1543) Initial support for oauth_body_hash on json payloads (@simov, @aesopwolf) +- [#1541](https://github.com/request/request/pull/1541) Fix coveralls (@simov) +- [#1540](https://github.com/request/request/pull/1540) Fix recursive defaults for convenience methods (@simov) +- [#1536](https://github.com/request/request/pull/1536) More eslint style rules (@froatsnook) +- [#1533](https://github.com/request/request/pull/1533) Adding dependency status bar to 
README.md (@YasharF) +- [#1539](https://github.com/request/request/pull/1539) ensure the latest version of har-validator is included (@ahmadnassri) +- [#1516](https://github.com/request/request/pull/1516) forever+pool test (@devTristan) + +### v2.55.0 (2015/04/05) +- [#1520](https://github.com/request/request/pull/1520) Refactor defaults (@simov) +- [#1525](https://github.com/request/request/pull/1525) Delete request headers with undefined value. (@froatsnook) +- [#1521](https://github.com/request/request/pull/1521) Add promise tests (@simov) +- [#1518](https://github.com/request/request/pull/1518) Fix defaults (@simov) +- [#1515](https://github.com/request/request/pull/1515) Allow static invoking of convenience methods (@simov) +- [#1505](https://github.com/request/request/pull/1505) Fix multipart boundary extraction regexp (@simov) +- [#1510](https://github.com/request/request/pull/1510) Fix basic auth form data (@simov) + +### v2.54.0 (2015/03/24) +- [#1501](https://github.com/request/request/pull/1501) HTTP Archive 1.2 support (@ahmadnassri) +- [#1486](https://github.com/request/request/pull/1486) Add a test for the forever agent (@akshayp) +- [#1500](https://github.com/request/request/pull/1500) Adding handling for no auth method and null bearer (@philberg) +- [#1498](https://github.com/request/request/pull/1498) Add table of contents in readme (@simov) +- [#1477](https://github.com/request/request/pull/1477) Add support for qs options via qsOptions key (@simov) +- [#1496](https://github.com/request/request/pull/1496) Parameters encoded to base 64 should be decoded as UTF-8, not ASCII. (@albanm) +- [#1494](https://github.com/request/request/pull/1494) Update eslint (@froatsnook) +- [#1474](https://github.com/request/request/pull/1474) Require Colon in Basic Auth (@erykwalder) +- [#1481](https://github.com/request/request/pull/1481) Fix baseUrl and redirections. (@burningtree) +- [#1469](https://github.com/request/request/pull/1469) Feature/base url (@froatsnook) +- [#1459](https://github.com/request/request/pull/1459) Add option to time request/response cycle (including rollup of redirects) (@aaron-em) +- [#1468](https://github.com/request/request/pull/1468) Re-enable io.js/node 0.12 build (@simov, @mikeal, @BBB) +- [#1442](https://github.com/request/request/pull/1442) Fixed the issue with strictSSL tests on 0.12 & io.js by explicitly setting a cipher that matches the cert. 
(@BBB, @nickmccurdy, @demohi, @simov, @0x4139) +- [#1460](https://github.com/request/request/pull/1460) localAddress or proxy config is lost when redirecting (@simov, @0x4139) +- [#1453](https://github.com/request/request/pull/1453) Test on Node.js 0.12 and io.js with allowed failures (@nickmccurdy, @demohi) +- [#1426](https://github.com/request/request/pull/1426) Fixing tests to pass on io.js and node 0.12 (only test-https.js stiff failing) (@mikeal) +- [#1446](https://github.com/request/request/pull/1446) Missing HTTP referer header with redirects Fixes #1038 (@simov, @guimon) +- [#1428](https://github.com/request/request/pull/1428) Deprecate Node v0.8.x (@nylen) +- [#1436](https://github.com/request/request/pull/1436) Add ability to set a requester without setting default options (@tikotzky) +- [#1435](https://github.com/request/request/pull/1435) dry up verb methods (@sethpollack) +- [#1423](https://github.com/request/request/pull/1423) Allow fully qualified multipart content-type header (@simov) +- [#1430](https://github.com/request/request/pull/1430) Fix recursive requester (@tikotzky) +- [#1429](https://github.com/request/request/pull/1429) Throw error when making HEAD request with a body (@tikotzky) +- [#1419](https://github.com/request/request/pull/1419) Add note that the project is broken in 0.12.x (@nylen) +- [#1413](https://github.com/request/request/pull/1413) Fix basic auth (@simov) +- [#1397](https://github.com/request/request/pull/1397) Improve pipe-from-file tests (@nylen) + +### v2.53.0 (2015/02/02) +- [#1396](https://github.com/request/request/pull/1396) Do not rfc3986 escape JSON bodies (@nylen, @simov) +- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson) + +### v2.52.0 (2015/02/02) +- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen) +- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao) +- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom) +- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen) +- [#1376](https://github.com/request/request/pull/1376) Use karma-browserify for tests. Add browser test coverage reporter. (@eiriksm) +- [#1366](https://github.com/request/request/pull/1366) Refactor OAuth into separate module (@simov) +- [#1373](https://github.com/request/request/pull/1373) Rewrite tunnel test to be pure Node.js (@nylen) +- [#1371](https://github.com/request/request/pull/1371) Upgrade test reporter (@nylen) +- [#1360](https://github.com/request/request/pull/1360) Refactor basic, bearer, digest auth logic into separate class (@simov) +- [#1354](https://github.com/request/request/pull/1354) Remove circular dependency from debugging code (@nylen) +- [#1351](https://github.com/request/request/pull/1351) Move digest auth into private prototype method (@simov) +- [#1352](https://github.com/request/request/pull/1352) Update hawk dependency to ~2.3.0 (@mridgway) +- [#1353](https://github.com/request/request/pull/1353) Correct travis-ci badge (@dogancelik) +- [#1349](https://github.com/request/request/pull/1349) Make sure we return on errored browser requests. 
(@eiriksm) +- [#1346](https://github.com/request/request/pull/1346) getProxyFromURI Extraction Refactor (@seanstrom) +- [#1337](https://github.com/request/request/pull/1337) Standardize test ports on 6767 (@nylen) +- [#1341](https://github.com/request/request/pull/1341) Emit FormData error events as Request error events (@nylen, @rwky) +- [#1343](https://github.com/request/request/pull/1343) Clean up readme badges, and add Travis and Coveralls badges (@nylen) +- [#1345](https://github.com/request/request/pull/1345) Update README.md (@Aaron-Hartwig) +- [#1338](https://github.com/request/request/pull/1338) Always wait for server.close() callback in tests (@nylen) +- [#1342](https://github.com/request/request/pull/1342) Add mock https server and redo start of browser tests for this purpose. (@eiriksm) +- [#1339](https://github.com/request/request/pull/1339) Improve auth docs (@nylen) +- [#1335](https://github.com/request/request/pull/1335) Add support for OAuth plaintext signature method (@simov) +- [#1332](https://github.com/request/request/pull/1332) Add clean script to remove test-browser.js after the tests run (@seanstrom) +- [#1327](https://github.com/request/request/pull/1327) Fix errors generating coverage reports. (@nylen) +- [#1330](https://github.com/request/request/pull/1330) Return empty buffer upon empty response body and encoding is set to null (@seanstrom) +- [#1326](https://github.com/request/request/pull/1326) Use faster container-based infrastructure on Travis (@nylen) +- [#1315](https://github.com/request/request/pull/1315) Implement rfc3986 option (@simov, @nylen, @apoco, @DullReferenceException, @mmalecki, @oliamb, @cliffcrosland, @LewisJEllis, @eiriksm, @poislagarde) +- [#1314](https://github.com/request/request/pull/1314) Detect urlencoded form data header via regex (@simov) +- [#1317](https://github.com/request/request/pull/1317) Improve OAuth1.0 server side flow example (@simov) + +### v2.51.0 (2014/12/10) +- [#1310](https://github.com/request/request/pull/1310) Revert changes introduced in https://github.com/request/request/pull/1282 (@simov) + +### v2.50.0 (2014/12/09) +- [#1308](https://github.com/request/request/pull/1308) Add browser test to keep track of browserify compability. (@eiriksm) +- [#1299](https://github.com/request/request/pull/1299) Add optional support for jsonReviver (@poislagarde) +- [#1277](https://github.com/request/request/pull/1277) Add Coveralls configuration (@simov) +- [#1307](https://github.com/request/request/pull/1307) Upgrade form-data, add back browserify compability. Fixes #455. 
(@eiriksm) +- [#1305](https://github.com/request/request/pull/1305) Fix typo in README.md (@LewisJEllis) +- [#1288](https://github.com/request/request/pull/1288) Update README.md to explain custom file use case (@cliffcrosland) + +### v2.49.0 (2014/11/28) +- [#1295](https://github.com/request/request/pull/1295) fix(proxy): no-proxy false positive (@oliamb) +- [#1292](https://github.com/request/request/pull/1292) Upgrade `caseless` to 0.8.1 (@mmalecki) +- [#1276](https://github.com/request/request/pull/1276) Set transfer encoding for multipart/related to chunked by default (@simov) +- [#1275](https://github.com/request/request/pull/1275) Fix multipart content-type headers detection (@simov) +- [#1269](https://github.com/request/request/pull/1269) adds streams example for review (@tbuchok) +- [#1238](https://github.com/request/request/pull/1238) Add examples README.md (@simov) + +### v2.48.0 (2014/11/12) +- [#1263](https://github.com/request/request/pull/1263) Fixed a syntax error / typo in README.md (@xna2) +- [#1253](https://github.com/request/request/pull/1253) Add multipart chunked flag (@simov, @nylen) +- [#1251](https://github.com/request/request/pull/1251) Clarify that defaults() does not modify global defaults (@nylen) +- [#1250](https://github.com/request/request/pull/1250) Improve documentation for pool and maxSockets options (@nylen) +- [#1237](https://github.com/request/request/pull/1237) Documenting error handling when using streams (@vmattos) +- [#1244](https://github.com/request/request/pull/1244) Finalize changelog command (@nylen) +- [#1241](https://github.com/request/request/pull/1241) Fix typo (@alexanderGugel) +- [#1223](https://github.com/request/request/pull/1223) Show latest version number instead of "upcoming" in changelog (@nylen) +- [#1236](https://github.com/request/request/pull/1236) Document how to use custom CA in README (#1229) (@hypesystem) +- [#1228](https://github.com/request/request/pull/1228) Support for oauth with RSA-SHA1 signing (@nylen) +- [#1216](https://github.com/request/request/pull/1216) Made json and multipart options coexist (@nylen, @simov) +- [#1225](https://github.com/request/request/pull/1225) Allow header white/exclusive lists in any case. (@RReverser) + +### v2.47.0 (2014/10/26) +- [#1222](https://github.com/request/request/pull/1222) Move from mikeal/request to request/request (@nylen) +- [#1220](https://github.com/request/request/pull/1220) update qs dependency to 2.3.1 (@FredKSchott) +- [#1212](https://github.com/request/request/pull/1212) Improve tests/test-timeout.js (@nylen) +- [#1219](https://github.com/request/request/pull/1219) remove old globalAgent workaround for node 0.4 (@request) +- [#1214](https://github.com/request/request/pull/1214) Remove cruft left over from optional dependencies (@nylen) +- [#1215](https://github.com/request/request/pull/1215) Add proxyHeaderExclusiveList option for proxy-only headers. 
(@RReverser) +- [#1211](https://github.com/request/request/pull/1211) Allow 'Host' header instead of 'host' and remember case across redirects (@nylen) +- [#1208](https://github.com/request/request/pull/1208) Improve release script (@nylen) +- [#1213](https://github.com/request/request/pull/1213) Support for custom cookie store (@nylen, @mitsuru) +- [#1197](https://github.com/request/request/pull/1197) Clean up some code around setting the agent (@FredKSchott) +- [#1209](https://github.com/request/request/pull/1209) Improve multipart form append test (@simov) +- [#1207](https://github.com/request/request/pull/1207) Update changelog (@nylen) +- [#1185](https://github.com/request/request/pull/1185) Stream multipart/related bodies (@simov) + +### v2.46.0 (2014/10/23) +- [#1198](https://github.com/request/request/pull/1198) doc for TLS/SSL protocol options (@shawnzhu) +- [#1200](https://github.com/request/request/pull/1200) Add a Gitter chat badge to README.md (@gitter-badger) +- [#1196](https://github.com/request/request/pull/1196) Upgrade taper test reporter to v0.3.0 (@nylen) +- [#1199](https://github.com/request/request/pull/1199) Fix lint error: undeclared var i (@nylen) +- [#1191](https://github.com/request/request/pull/1191) Move self.proxy decision logic out of init and into a helper (@FredKSchott) +- [#1190](https://github.com/request/request/pull/1190) Move _buildRequest() logic back into init (@FredKSchott) +- [#1186](https://github.com/request/request/pull/1186) Support Smarter Unix URL Scheme (@FredKSchott) +- [#1178](https://github.com/request/request/pull/1178) update form documentation for new usage (@FredKSchott) +- [#1180](https://github.com/request/request/pull/1180) Enable no-mixed-requires linting rule (@nylen) +- [#1184](https://github.com/request/request/pull/1184) Don't forward authorization header across redirects to different hosts (@nylen) +- [#1183](https://github.com/request/request/pull/1183) Correct README about pre and postamble CRLF using multipart and not mult... 
(@netpoetica) +- [#1179](https://github.com/request/request/pull/1179) Lint tests directory (@nylen) +- [#1169](https://github.com/request/request/pull/1169) add metadata for form-data file field (@dotcypress) +- [#1173](https://github.com/request/request/pull/1173) remove optional dependencies (@seanstrom) +- [#1165](https://github.com/request/request/pull/1165) Cleanup event listeners and remove function creation from init (@FredKSchott) +- [#1174](https://github.com/request/request/pull/1174) update the request.cookie docs to have a valid cookie example (@seanstrom) +- [#1168](https://github.com/request/request/pull/1168) create a detach helper and use detach helper in replace of nextTick (@seanstrom) +- [#1171](https://github.com/request/request/pull/1171) in post can send form data and use callback (@MiroRadenovic) +- [#1159](https://github.com/request/request/pull/1159) accept charset for x-www-form-urlencoded content-type (@seanstrom) +- [#1157](https://github.com/request/request/pull/1157) Update README.md: body with json=true (@Rob--W) +- [#1164](https://github.com/request/request/pull/1164) Disable tests/test-timeout.js on Travis (@nylen) +- [#1153](https://github.com/request/request/pull/1153) Document how to run a single test (@nylen) +- [#1144](https://github.com/request/request/pull/1144) adds documentation for the "response" event within the streaming section (@tbuchok) +- [#1162](https://github.com/request/request/pull/1162) Update eslintrc file to no longer allow past errors (@FredKSchott) +- [#1155](https://github.com/request/request/pull/1155) Support/use self everywhere (@seanstrom) +- [#1161](https://github.com/request/request/pull/1161) fix no-use-before-define lint warnings (@emkay) +- [#1156](https://github.com/request/request/pull/1156) adding curly brackets to get rid of lint errors (@emkay) +- [#1151](https://github.com/request/request/pull/1151) Fix localAddress test on OS X (@nylen) +- [#1145](https://github.com/request/request/pull/1145) documentation: fix outdated reference to setCookieSync old name in README (@FredKSchott) +- [#1131](https://github.com/request/request/pull/1131) Update pool documentation (@FredKSchott) +- [#1143](https://github.com/request/request/pull/1143) Rewrite all tests to use tape (@nylen) +- [#1137](https://github.com/request/request/pull/1137) Add ability to specifiy querystring lib in options. 
(@jgrund) +- [#1138](https://github.com/request/request/pull/1138) allow hostname and port in place of host on uri (@cappslock) +- [#1134](https://github.com/request/request/pull/1134) Fix multiple redirects and `self.followRedirect` (@blakeembrey) +- [#1130](https://github.com/request/request/pull/1130) documentation fix: add note about npm test for contributing (@FredKSchott) +- [#1120](https://github.com/request/request/pull/1120) Support/refactor request setup tunnel (@seanstrom) +- [#1129](https://github.com/request/request/pull/1129) linting fix: convert double quote strings to use single quotes (@FredKSchott) +- [#1124](https://github.com/request/request/pull/1124) linting fix: remove unneccesary semi-colons (@FredKSchott) + +### v2.45.0 (2014/10/06) +- [#1128](https://github.com/request/request/pull/1128) Add test for setCookie regression (@nylen) +- [#1127](https://github.com/request/request/pull/1127) added tests around using objects as values in a query string (@bcoe) +- [#1103](https://github.com/request/request/pull/1103) Support/refactor request constructor (@nylen, @seanstrom) +- [#1119](https://github.com/request/request/pull/1119) add basic linting to request library (@FredKSchott) +- [#1121](https://github.com/request/request/pull/1121) Revert "Explicitly use sync versions of cookie functions" (@nylen) +- [#1118](https://github.com/request/request/pull/1118) linting fix: Restructure bad empty if statement (@FredKSchott) +- [#1117](https://github.com/request/request/pull/1117) Fix a bad check for valid URIs (@FredKSchott) +- [#1113](https://github.com/request/request/pull/1113) linting fix: space out operators (@FredKSchott) +- [#1116](https://github.com/request/request/pull/1116) Fix typo in `noProxyHost` definition (@FredKSchott) +- [#1114](https://github.com/request/request/pull/1114) linting fix: Added a `new` operator that was missing when creating and throwing a new error (@FredKSchott) +- [#1096](https://github.com/request/request/pull/1096) No_proxy support (@samcday) +- [#1107](https://github.com/request/request/pull/1107) linting-fix: remove unused variables (@FredKSchott) +- [#1112](https://github.com/request/request/pull/1112) linting fix: Make return values consistent and more straitforward (@FredKSchott) +- [#1111](https://github.com/request/request/pull/1111) linting fix: authPieces was getting redeclared (@FredKSchott) +- [#1105](https://github.com/request/request/pull/1105) Use strict mode in request (@FredKSchott) +- [#1110](https://github.com/request/request/pull/1110) linting fix: replace lazy '==' with more strict '===' (@FredKSchott) +- [#1109](https://github.com/request/request/pull/1109) linting fix: remove function call from if-else conditional statement (@FredKSchott) +- [#1102](https://github.com/request/request/pull/1102) Fix to allow setting a `requester` on recursive calls to `request.defaults` (@tikotzky) +- [#1095](https://github.com/request/request/pull/1095) Tweaking engines in package.json (@pdehaan) +- [#1082](https://github.com/request/request/pull/1082) Forward the socket event from the httpModule request (@seanstrom) +- [#972](https://github.com/request/request/pull/972) Clarify gzip handling in the README (@kevinoid) +- [#1089](https://github.com/request/request/pull/1089) Mention that encoding defaults to utf8, not Buffer (@stuartpb) +- [#1088](https://github.com/request/request/pull/1088) Fix cookie example in README.md and make it more clear (@pipi32167) +- [#1027](https://github.com/request/request/pull/1027) Add support for 
multipart form data in request options. (@crocket) +- [#1076](https://github.com/request/request/pull/1076) use Request.abort() to abort the request when the request has timed-out (@seanstrom) +- [#1068](https://github.com/request/request/pull/1068) add optional postamble required by .NET multipart requests (@netpoetica) + +### v2.43.0 (2014/09/18) +- [#1057](https://github.com/request/request/pull/1057) Defaults should not overwrite defined options (@davidwood) +- [#1046](https://github.com/request/request/pull/1046) Propagate datastream errors, useful in case gzip fails. (@ZJONSSON, @Janpot) +- [#1063](https://github.com/request/request/pull/1063) copy the input headers object #1060 (@finnp) +- [#1031](https://github.com/request/request/pull/1031) Explicitly use sync versions of cookie functions (@ZJONSSON) +- [#1056](https://github.com/request/request/pull/1056) Fix redirects when passing url.parse(x) as URL to convenience method (@nylen) + +### v2.42.0 (2014/09/04) +- [#1053](https://github.com/request/request/pull/1053) Fix #1051 Parse auth properly when using non-tunneling proxy (@isaacs) + +### v2.41.0 (2014/09/04) +- [#1050](https://github.com/request/request/pull/1050) Pass whitelisted headers to tunneling proxy. Organize all tunneling logic. (@isaacs, @Feldhacker) +- [#1035](https://github.com/request/request/pull/1035) souped up nodei.co badge (@rvagg) +- [#1048](https://github.com/request/request/pull/1048) Aws is now possible over a proxy (@steven-aerts) +- [#1039](https://github.com/request/request/pull/1039) extract out helper functions to a helper file (@seanstrom) +- [#1021](https://github.com/request/request/pull/1021) Support/refactor indexjs (@seanstrom) +- [#1033](https://github.com/request/request/pull/1033) Improve and document debug options (@nylen) +- [#1034](https://github.com/request/request/pull/1034) Fix readme headings (@nylen) +- [#1030](https://github.com/request/request/pull/1030) Allow recursive request.defaults (@tikotzky) +- [#1029](https://github.com/request/request/pull/1029) Fix a couple of typos (@nylen) +- [#675](https://github.com/request/request/pull/675) Checking for SSL fault on connection before reading SSL properties (@VRMink) +- [#989](https://github.com/request/request/pull/989) Added allowRedirect function. Should return true if redirect is allowed or false otherwise (@doronin) +- [#1025](https://github.com/request/request/pull/1025) [fixes #1023] Set self._ended to true once response has ended (@mridgway) +- [#1020](https://github.com/request/request/pull/1020) Add back removed debug metadata (@FredKSchott) +- [#1008](https://github.com/request/request/pull/1008) Moving to module instead of cutomer buffer concatenation. (@mikeal) +- [#770](https://github.com/request/request/pull/770) Added dependency badge for README file; (@timgluz, @mafintosh, @lalitkapoor, @stash, @bobyrizov) +- [#1016](https://github.com/request/request/pull/1016) toJSON no longer results in an infinite loop, returns simple objects (@FredKSchott) +- [#1018](https://github.com/request/request/pull/1018) Remove pre-0.4.4 HTTPS fix (@mmalecki) +- [#1006](https://github.com/request/request/pull/1006) Migrate to caseless, fixes #1001 (@mikeal) +- [#995](https://github.com/request/request/pull/995) Fix parsing array of objects (@sjonnet19) +- [#999](https://github.com/request/request/pull/999) Fix fallback for browserify for optional modules. 
(@eiriksm) +- [#996](https://github.com/request/request/pull/996) Wrong oauth signature when multiple same param keys exist [updated] (@bengl, @hyjin) + +### v2.40.0 (2014/08/06) +- [#992](https://github.com/request/request/pull/992) Fix security vulnerability. Update qs (@poeticninja) +- [#988](https://github.com/request/request/pull/988) “--” -> “—” (@upisfree) +- [#987](https://github.com/request/request/pull/987) Show optional modules as being loaded by the module that reqeusted them (@iarna) + +### v2.39.0 (2014/07/24) +- [#976](https://github.com/request/request/pull/976) Update README.md (@pvoznenko) + +### v2.38.0 (2014/07/22) +- [#952](https://github.com/request/request/pull/952) Adding support to client certificate with proxy use case (@ofirshaked) +- [#884](https://github.com/request/request/pull/884) Documented tough-cookie installation. (@wbyoung) +- [#935](https://github.com/request/request/pull/935) Correct repository url (@fritx) +- [#963](https://github.com/request/request/pull/963) Update changelog (@nylen) +- [#960](https://github.com/request/request/pull/960) Support gzip with encoding on node pre-v0.9.4 (@kevinoid) +- [#953](https://github.com/request/request/pull/953) Add async Content-Length computation when using form-data (@LoicMahieu) +- [#844](https://github.com/request/request/pull/844) Add support for HTTP[S]_PROXY environment variables. Fixes #595. (@jvmccarthy) +- [#946](https://github.com/request/request/pull/946) defaults: merge headers (@aj0strow) + +### v2.37.0 (2014/07/07) +- [#957](https://github.com/request/request/pull/957) Silence EventEmitter memory leak warning #311 (@watson) +- [#955](https://github.com/request/request/pull/955) check for content-length header before setting it in nextTick (@camilleanne) +- [#951](https://github.com/request/request/pull/951) Add support for gzip content decoding (@kevinoid) +- [#949](https://github.com/request/request/pull/949) Manually enter querystring in form option (@charlespwd) +- [#944](https://github.com/request/request/pull/944) Make request work with browserify (@eiriksm) +- [#943](https://github.com/request/request/pull/943) New mime module (@eiriksm) +- [#927](https://github.com/request/request/pull/927) Bump version of hawk dep. 
(@samccone) +- [#907](https://github.com/request/request/pull/907) append secureOptions to poolKey (@medovob) + +### v2.35.0 (2014/05/17) +- [#901](https://github.com/request/request/pull/901) Fixes #555 (@pigulla) +- [#897](https://github.com/request/request/pull/897) merge with default options (@vohof) +- [#891](https://github.com/request/request/pull/891) fixes 857 - options object is mutated by calling request (@lalitkapoor) +- [#869](https://github.com/request/request/pull/869) Pipefilter test (@tgohn) +- [#866](https://github.com/request/request/pull/866) Fix typo (@dandv) +- [#861](https://github.com/request/request/pull/861) Add support for RFC 6750 Bearer Tokens (@phedny) +- [#809](https://github.com/request/request/pull/809) upgrade tunnel-proxy to 0.4.0 (@ksato9700) +- [#850](https://github.com/request/request/pull/850) Fix word consistency in readme (@0xNobody) +- [#810](https://github.com/request/request/pull/810) add some exposition to mpu example in README.md (@mikermcneil) +- [#840](https://github.com/request/request/pull/840) improve error reporting for invalid protocols (@FND) +- [#821](https://github.com/request/request/pull/821) added secureOptions back (@nw) +- [#815](https://github.com/request/request/pull/815) Create changelog based on pull requests (@lalitkapoor) + +### v2.34.0 (2014/02/18) +- [#516](https://github.com/request/request/pull/516) UNIX Socket URL Support (@lyuzashi) +- [#801](https://github.com/request/request/pull/801) 794 ignore cookie parsing and domain errors (@lalitkapoor) +- [#802](https://github.com/request/request/pull/802) Added the Apache license to the package.json. (@keskival) +- [#793](https://github.com/request/request/pull/793) Adds content-length calculation when submitting forms using form-data li... (@Juul) +- [#785](https://github.com/request/request/pull/785) Provide ability to override content-type when `json` option used (@vvo) +- [#781](https://github.com/request/request/pull/781) simpler isReadStream function (@joaojeronimo) + +### v2.32.0 (2014/01/16) +- [#767](https://github.com/request/request/pull/767) Use tough-cookie CookieJar sync API (@stash) +- [#764](https://github.com/request/request/pull/764) Case-insensitive authentication scheme (@bobyrizov) +- [#763](https://github.com/request/request/pull/763) Upgrade tough-cookie to 0.10.0 (@stash) +- [#744](https://github.com/request/request/pull/744) Use Cookie.parse (@lalitkapoor) +- [#757](https://github.com/request/request/pull/757) require aws-sign2 (@mafintosh) + +### v2.31.0 (2014/01/08) +- [#645](https://github.com/request/request/pull/645) update twitter api url to v1.1 (@mick) +- [#746](https://github.com/request/request/pull/746) README: Markdown code highlight (@weakish) +- [#745](https://github.com/request/request/pull/745) updating setCookie example to make it clear that the callback is required (@emkay) +- [#742](https://github.com/request/request/pull/742) Add note about JSON output body type (@iansltx) +- [#741](https://github.com/request/request/pull/741) README example is using old cookie jar api (@emkay) +- [#736](https://github.com/request/request/pull/736) Fix callback arguments documentation (@mmalecki) + +### v2.30.0 (2013/12/13) +- [#732](https://github.com/request/request/pull/732) JSHINT: Creating global 'for' variable. Should be 'for (var ...'. 
(@Fritz-Lium) +- [#730](https://github.com/request/request/pull/730) better HTTP DIGEST support (@dai-shi) +- [#728](https://github.com/request/request/pull/728) Fix TypeError when calling request.cookie (@scarletmeow) + +### v2.29.0 (2013/12/06) +- [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris) + +### v2.28.0 (2013/12/04) +- [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort) +- [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit) +- [#715](https://github.com/request/request/pull/715) Request.multipart no longer crashes when header 'Content-type' present (@pastaclub) +- [#710](https://github.com/request/request/pull/710) Fixing listing in callback part of docs. (@lukasz-zak) +- [#696](https://github.com/request/request/pull/696) Edited README.md for formatting and clarity of phrasing (@Zearin) +- [#694](https://github.com/request/request/pull/694) Typo in README (@VRMink) +- [#690](https://github.com/request/request/pull/690) Handle blank password in basic auth. (@diversario) +- [#682](https://github.com/request/request/pull/682) Optional dependencies (@Turbo87) +- [#683](https://github.com/request/request/pull/683) Travis CI support (@Turbo87) +- [#674](https://github.com/request/request/pull/674) change cookie module,to tough-cookie.please check it . (@sxyizhiren) +- [#666](https://github.com/request/request/pull/666) make `ciphers` and `secureProtocol` to work in https request (@richarddong) +- [#656](https://github.com/request/request/pull/656) Test case for #304. (@diversario) +- [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar) +- [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm) +- [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl) + +### v2.27.0 (2013/08/15) +- [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo) + +### v2.26.0 (2013/08/07) +- [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander) +- [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker) +- [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath) +- [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK) +- [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko) +- [#589](https://github.com/request/request/pull/589) Prevent setting headers after they are sent (@geek) +- [#587](https://github.com/request/request/pull/587) Global cookie jar disabled by default (@threepointone) +- [#544](https://github.com/request/request/pull/544) Update http-signature version. (@davidlehn) +- [#581](https://github.com/request/request/pull/581) Fix spelling of "ignoring." 
(@bigeasy) +- [#568](https://github.com/request/request/pull/568) use agentOptions to create agent when specified in request (@SamPlacette) +- [#564](https://github.com/request/request/pull/564) Fix redirections (@criloz) +- [#541](https://github.com/request/request/pull/541) The exported request function doesn't have an auth method (@tschaub) +- [#542](https://github.com/request/request/pull/542) Expose Request class (@regality) +- [#536](https://github.com/request/request/pull/536) Allow explicitly empty user field for basic authentication. (@mikeando) +- [#532](https://github.com/request/request/pull/532) fix typo (@fredericosilva) +- [#497](https://github.com/request/request/pull/497) Added redirect event (@Cauldrath) +- [#503](https://github.com/request/request/pull/503) Fix basic auth for passwords that contain colons (@tonistiigi) +- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway421) +- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka) +- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway421) +- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway421) +- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun) +- [#510](https://github.com/request/request/pull/510) Add HTTP Signature support. (@davidlehn) +- [#502](https://github.com/request/request/pull/502) Fix POST (and probably other) requests that are retried after 401 Unauthorized (@nylen) +- [#508](https://github.com/request/request/pull/508) Honor the .strictSSL option when using proxies (tunnel-agent) (@jhs) +- [#512](https://github.com/request/request/pull/512) Make password optional to support the format: http://username@hostname/ (@pajato1) +- [#513](https://github.com/request/request/pull/513) add 'localAddress' support (@yyfrankyy) +- [#498](https://github.com/request/request/pull/498) Moving response emit above setHeaders on destination streams (@kenperkins) +- [#490](https://github.com/request/request/pull/490) Empty response body (3-rd argument) must be passed to callback as an empty string (@Olegas) +- [#479](https://github.com/request/request/pull/479) Changing so if Accept header is explicitly set, sending json does not ov... (@RoryH) +- [#475](https://github.com/request/request/pull/475) Use `unescape` from `querystring` (@shimaore) +- [#473](https://github.com/request/request/pull/473) V0.10 compat (@isaacs) +- [#471](https://github.com/request/request/pull/471) Using querystring library from visionmedia (@kbackowski) +- [#461](https://github.com/request/request/pull/461) Strip the UTF8 BOM from a UTF encoded response (@kppullin) +- [#460](https://github.com/request/request/pull/460) hawk 0.10.0 (@hueniverse) +- [#462](https://github.com/request/request/pull/462) if query params are empty, then request path shouldn't end with a '?' (merges cleanly now) (@jaipandya) +- [#456](https://github.com/request/request/pull/456) hawk 0.9.0 (@hueniverse) +- [#429](https://github.com/request/request/pull/429) Copy options before adding callback. 
(@nrn, @nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) +- [#454](https://github.com/request/request/pull/454) Destroy the response if present when destroying the request (clean merge) (@mafintosh) +- [#310](https://github.com/request/request/pull/310) Twitter Oauth Stuff Out of Date; Now Updated (@joemccann, @isaacs, @mscdex) +- [#413](https://github.com/request/request/pull/413) rename googledoodle.png to .jpg (@nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki) +- [#448](https://github.com/request/request/pull/448) Convenience method for PATCH (@mloar) +- [#444](https://github.com/request/request/pull/444) protect against double callbacks on error path (@spollack) +- [#433](https://github.com/request/request/pull/433) Added support for HTTPS cert & key (@mmalecki) +- [#430](https://github.com/request/request/pull/430) Respect specified {Host,host} headers, not just {host} (@andrewschaaf) +- [#415](https://github.com/request/request/pull/415) Fixed a typo. (@jerem) +- [#338](https://github.com/request/request/pull/338) Add more auth options, including digest support (@nylen) +- [#403](https://github.com/request/request/pull/403) Optimize environment lookup to happen once only (@mmalecki) +- [#398](https://github.com/request/request/pull/398) Add more reporting to tests (@mmalecki) +- [#388](https://github.com/request/request/pull/388) Ensure "safe" toJSON doesn't break EventEmitters (@othiym23) +- [#381](https://github.com/request/request/pull/381) Resolving "Invalid signature. Expected signature base string: " (@landeiro) +- [#380](https://github.com/request/request/pull/380) Fixes missing host header on retried request when using forever agent (@mac-) +- [#376](https://github.com/request/request/pull/376) Headers lost on redirect (@kapetan) +- [#375](https://github.com/request/request/pull/375) Fix for missing oauth_timestamp parameter (@jplock) +- [#374](https://github.com/request/request/pull/374) Correct Host header for proxy tunnel CONNECT (@youurayy) +- [#370](https://github.com/request/request/pull/370) Twitter reverse auth uses x_auth_mode not x_auth_type (@drudge) +- [#369](https://github.com/request/request/pull/369) Don't remove x_auth_mode for Twitter reverse auth (@drudge) +- [#344](https://github.com/request/request/pull/344) Make AWS auth signing find headers correctly (@nlf) +- [#363](https://github.com/request/request/pull/363) rfc3986 on base_uri, now passes tests (@jeffmarshall) +- [#362](https://github.com/request/request/pull/362) Running `rfc3986` on `base_uri` in `oauth.hmacsign` instead of just `encodeURIComponent` (@jeffmarshall) +- [#361](https://github.com/request/request/pull/361) Don't create a Content-Length header if we already have it set (@danjenkins) +- [#360](https://github.com/request/request/pull/360) Delete self._form along with everything else on redirect (@jgautier) +- [#355](https://github.com/request/request/pull/355) stop sending erroneous headers on redirected requests (@azylman) +- [#332](https://github.com/request/request/pull/332) Fix #296 - Only set Content-Type if body exists (@Marsup) +- [#343](https://github.com/request/request/pull/343) Allow AWS to work in more situations, added a note in the README on its usage (@nlf) +- [#320](https://github.com/request/request/pull/320) request.defaults() doesn't need to wrap jar() (@StuartHarris) +- [#322](https://github.com/request/request/pull/322) Fix + test for piped into request bumped into redirect. 
#321 (@alexindigo) +- [#326](https://github.com/request/request/pull/326) Do not try to remove listener from an undefined connection (@CartoDB) +- [#318](https://github.com/request/request/pull/318) Pass servername to tunneling secure socket creation (@isaacs) +- [#317](https://github.com/request/request/pull/317) Workaround for #313 (@isaacs) +- [#293](https://github.com/request/request/pull/293) Allow parser errors to bubble up to request (@mscdex) +- [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs) +- [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1) +- [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs) +- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas, @vpulim) +- [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike) +- [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. (@bcherry) +- [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek) +- [#282](https://github.com/request/request/pull/282) OAuth Authorization header contains non-"oauth_" parameters (@jplock) +- [#279](https://github.com/request/request/pull/279) fix tests with boundary by injecting boundry from header (@benatkin) +- [#273](https://github.com/request/request/pull/273) Pipe back pressure issue (@mafintosh) +- [#268](https://github.com/request/request/pull/268) I'm not OCD seriously (@TehShrike) +- [#263](https://github.com/request/request/pull/263) Bug in OAuth key generation for sha1 (@nanodocumet) +- [#265](https://github.com/request/request/pull/265) uncaughtException when redirected to invalid URI (@naholyr) +- [#262](https://github.com/request/request/pull/262) JSON test should check for equality (@timshadel) +- [#261](https://github.com/request/request/pull/261) Setting 'pool' to 'false' does NOT disable Agent pooling (@timshadel) +- [#249](https://github.com/request/request/pull/249) Fix for the fix of your (closed) issue #89 where self.headers[content-length] is set to 0 for all methods (@sethbridges, @polotek, @zephrax, @jeromegn) +- [#255](https://github.com/request/request/pull/255) multipart allow body === '' ( the empty string ) (@Filirom1) +- [#260](https://github.com/request/request/pull/260) fixed just another leak of 'i' (@sreuter) +- [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn) +- [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax) +- [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek) +- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso, @vpulim) +- [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom) +- [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup) +- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@vpulim) +- [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs) +- [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits 
(@isaacs) +- [#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs) +- [#193](https://github.com/request/request/pull/193) Fixes GH-119 (@goatslacker) +- [#188](https://github.com/request/request/pull/188) Add abort support to the returned request (@itay) +- [#176](https://github.com/request/request/pull/176) Querystring option (@csainty) +- [#182](https://github.com/request/request/pull/182) Fix request.defaults to support (uri, options, callback) api (@twilson63) +- [#180](https://github.com/request/request/pull/180) Modified the post, put, head and del shortcuts to support uri optional param (@twilson63) +- [#179](https://github.com/request/request/pull/179) fix to add opts in .pipe(stream, opts) (@substack) +- [#177](https://github.com/request/request/pull/177) Issue #173 Support uri as first and optional config as second argument (@twilson63) +- [#170](https://github.com/request/request/pull/170) can't create a cookie in a wrapped request (defaults) (@fabianonunes) +- [#168](https://github.com/request/request/pull/168) Picking off an EasyFix by adding some missing mimetypes. (@serby) +- [#161](https://github.com/request/request/pull/161) Fix cookie jar/headers.cookie collision (#125) (@papandreou) +- [#162](https://github.com/request/request/pull/162) Fix issue #159 (@dpetukhov) +- [#90](https://github.com/request/request/pull/90) add option followAllRedirects to follow post/put redirects (@jroes) +- [#148](https://github.com/request/request/pull/148) Retry Agent (@thejh) +- [#146](https://github.com/request/request/pull/146) Multipart should respect content-type if previously set (@apeace) +- [#144](https://github.com/request/request/pull/144) added "form" option to readme (@petejkim) +- [#133](https://github.com/request/request/pull/133) Fixed cookies parsing (@afanasy) +- [#135](https://github.com/request/request/pull/135) host vs hostname (@iangreenleaf) +- [#132](https://github.com/request/request/pull/132) return the body as a Buffer when encoding is set to null (@jahewson) +- [#112](https://github.com/request/request/pull/112) Support using a custom http-like module (@jhs) +- [#104](https://github.com/request/request/pull/104) Cookie handling contains bugs (@janjongboom) +- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman) +- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden) +- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs) +- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer) +- [#105](https://github.com/request/request/pull/105) added test for proxy option. 
(@dominictarr) +- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex) +- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs) +- [#96](https://github.com/request/request/pull/96) Authless parsed url host support (@isaacs) +- [#81](https://github.com/request/request/pull/81) Enhance redirect handling (@danmactough) +- [#78](https://github.com/request/request/pull/78) Don't try to do strictSSL for non-ssl connections (@isaacs) +- [#76](https://github.com/request/request/pull/76) Bug when a request fails and a timeout is set (@Marsup) +- [#70](https://github.com/request/request/pull/70) add test script to package.json (@isaacs, @aheckmann) +- [#73](https://github.com/request/request/pull/73) Fix #71 Respect the strictSSL flag (@isaacs) +- [#69](https://github.com/request/request/pull/69) Flatten chunked requests properly (@isaacs) +- [#67](https://github.com/request/request/pull/67) fixed global variable leaks (@aheckmann) +- [#66](https://github.com/request/request/pull/66) Do not overwrite established content-type headers for read stream deliver (@voodootikigod) +- [#53](https://github.com/request/request/pull/53) Parse json: Issue #51 (@benatkin) +- [#45](https://github.com/request/request/pull/45) Added timeout option (@mbrevoort) +- [#35](https://github.com/request/request/pull/35) The "end" event isn't emitted for some responses (@voxpelli) +- [#31](https://github.com/request/request/pull/31) Error on piping a request to a destination (@tobowers) \ No newline at end of file diff --git a/node_modules/request/CONTRIBUTING.md b/node_modules/request/CONTRIBUTING.md new file mode 100644 index 0000000..8aa6999 --- /dev/null +++ b/node_modules/request/CONTRIBUTING.md @@ -0,0 +1,81 @@ + +# Contributing to Request + +:+1::tada: First off, thanks for taking the time to contribute! :tada::+1: + +The following is a set of guidelines for contributing to Request and its packages, which are hosted in the [Request Organization](https://github.com/request) on GitHub. +These are just guidelines, not rules, use your best judgment and feel free to propose changes to this document in a pull request. + + +## Submitting an Issue + +1. Provide a small self **sufficient** code example to **reproduce** the issue. +2. Run your test code using [request-debug](https://github.com/request/request-debug) and copy/paste the results inside the issue. +3. You should **always** use fenced code blocks when submitting code examples or any other formatted output: +
+  ```js
+  put your javascript code here
+  ```
+
+  ```
+  put any other formatted output here,
+  like for example the one returned from using request-debug
+  ```
+  
+ +If the problem cannot be reliably reproduced, the issue will be marked as `Not enough info (see CONTRIBUTING.md)`. + +If the problem is not related to request the issue will be marked as `Help (please use Stackoverflow)`. + + +## Submitting a Pull Request + +1. In almost all of the cases your PR **needs tests**. Make sure you have any. +2. Run `npm test` locally. Fix any errors before pushing to GitHub. +3. After submitting the PR a build will be triggered on TravisCI. Wait for it to ends and make sure all jobs are passing. + + +----------------------------------------- + + +## Becoming a Contributor + +Individuals making significant and valuable contributions are given +commit-access to the project to contribute as they see fit. This project is +more like an open wiki than a standard guarded open source project. + + +## Rules + +There are a few basic ground-rules for contributors: + +1. **No `--force` pushes** or modifying the Git history in any way. +1. **Non-master branches** ought to be used for ongoing work. +1. **Any** change should be added through Pull Request. +1. **External API changes and significant modifications** ought to be subject + to an **internal pull-request** to solicit feedback from other contributors. +1. Internal pull-requests to solicit feedback are *encouraged* for any other + non-trivial contribution but left to the discretion of the contributor. +1. For significant changes wait a full 24 hours before merging so that active + contributors who are distributed throughout the world have a chance to weigh + in. +1. Contributors should attempt to adhere to the prevailing code-style. +1. Run `npm test` locally before submitting your PR, to catch any easy to miss + style & testing issues. To diagnose test failures, there are two ways to + run a single test file: + - `node_modules/.bin/taper tests/test-file.js` - run using the default + [`taper`](https://github.com/nylen/taper) test reporter. + - `node tests/test-file.js` - view the raw + [tap](https://testanything.org/) output. + + +## Releases + +Declaring formal releases remains the prerogative of the project maintainer. + + +## Changes to this arrangement + +This is an experiment and feedback is welcome! This document may also be +subject to pull-requests or changes by contributors where you believe you have +something valuable to add or change. diff --git a/node_modules/request/LICENSE b/node_modules/request/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/request/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
+ +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/request/README.md b/node_modules/request/README.md new file mode 100644 index 0000000..6eaaa05 --- /dev/null +++ b/node_modules/request/README.md @@ -0,0 +1,1097 @@ + +# Request - Simplified HTTP client + +[![npm package](https://nodei.co/npm/request.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/request/) + +[![Build status](https://img.shields.io/travis/request/request/master.svg?style=flat-square)](https://travis-ci.org/request/request) +[![Coverage](https://img.shields.io/codecov/c/github/request/request.svg?style=flat-square)](https://codecov.io/github/request/request?branch=master) +[![Coverage](https://img.shields.io/coveralls/request/request.svg?style=flat-square)](https://coveralls.io/r/request/request) +[![Dependency Status](https://img.shields.io/david/request/request.svg?style=flat-square)](https://david-dm.org/request/request) +[![Known Vulnerabilities](https://snyk.io/test/npm/request/badge.svg?style=flat-square)](https://snyk.io/test/npm/request) +[![Gitter](https://img.shields.io/badge/gitter-join_chat-blue.svg?style=flat-square)](https://gitter.im/request/request?utm_source=badge) + + +## Super simple to use + +Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default. + +```js +var request = require('request'); +request('http://www.google.com', function (error, response, body) { + if (!error && response.statusCode == 200) { + console.log(body) // Show the HTML for the Google homepage. 
+ } +}) +``` + + +## Table of contents + +- [Streaming](#streaming) +- [Forms](#forms) +- [HTTP Authentication](#http-authentication) +- [Custom HTTP Headers](#custom-http-headers) +- [OAuth Signing](#oauth-signing) +- [Proxies](#proxies) +- [Unix Domain Sockets](#unix-domain-sockets) +- [TLS/SSL Protocol](#tlsssl-protocol) +- [Support for HAR 1.2](#support-for-har-12) +- [**All Available Options**](#requestoptions-callback) + +Request also offers [convenience methods](#convenience-methods) like +`request.defaults` and `request.post`, and there are +lots of [usage examples](#examples) and several +[debugging techniques](#debugging). + + +--- + + +## Streaming + +You can stream any response to a file stream. + +```js +request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png')) +``` + +You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one). + +```js +fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json')) +``` + +Request can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers. + +```js +request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png')) +``` + +Request emits a "response" event when a response is received. The `response` argument will be an instance of [http.IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage). + +```js +request + .get('http://google.com/img.png') + .on('response', function(response) { + console.log(response.statusCode) // 200 + console.log(response.headers['content-type']) // 'image/png' + }) + .pipe(request.put('http://mysite.com/img.png')) +``` + +To easily handle errors when streaming requests, listen to the `error` event before piping: + +```js +request + .get('http://mysite.com/doodle.png') + .on('error', function(err) { + console.log(err) + }) + .pipe(fs.createWriteStream('doodle.png')) +``` + +Now let’s get fancy. + +```js +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + if (req.method === 'PUT') { + req.pipe(request.put('http://mysite.com/doodle.png')) + } else if (req.method === 'GET' || req.method === 'HEAD') { + request.get('http://mysite.com/doodle.png').pipe(resp) + } + } +}) +``` + +You can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent. Which means that, if you don't really care about security, you can do: + +```js +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + var x = request('http://mysite.com/doodle.png') + req.pipe(x) + x.pipe(resp) + } +}) +``` + +And since `pipe()` returns the destination stream in ≥ Node 0.5.x you can do one line proxying. :) + +```js +req.pipe(request('http://mysite.com/doodle.png')).pipe(resp) +``` + +Also, none of this new functionality conflicts with requests previous features, it just expands them. + +```js +var r = request.defaults({'proxy':'http://localproxy.com'}) + +http.createServer(function (req, resp) { + if (req.url === '/doodle.png') { + r.get('http://google.com/doodle.png').pipe(resp) + } +}) +``` + +You can still use intermediate proxies, the requests will still follow HTTP forwards, etc. 
+ +[back to top](#table-of-contents) + + +--- + + +## Forms + +`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API. + + +#### application/x-www-form-urlencoded (URL-Encoded Forms) + +URL-encoded forms are simple. + +```js +request.post('http://service.com/upload', {form:{key:'value'}}) +// or +request.post('http://service.com/upload').form({key:'value'}) +// or +request.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ }) +``` + + +#### multipart/form-data (Multipart Form Uploads) + +For `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). For the most cases, you can pass your upload form data via the `formData` option. + + +```js +var formData = { + // Pass a simple key-value pair + my_field: 'my_value', + // Pass data via Buffers + my_buffer: new Buffer([1, 2, 3]), + // Pass data via Streams + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), + // Pass multiple values /w an Array + attachments: [ + fs.createReadStream(__dirname + '/attachment1.jpg'), + fs.createReadStream(__dirname + '/attachment2.jpg') + ], + // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS} + // Use case: for some types of streams, you'll need to provide "file"-related information manually. + // See the `form-data` README for more information about options: https://github.com/form-data/form-data + custom_file: { + value: fs.createReadStream('/dev/urandom'), + options: { + filename: 'topsecret.jpg', + contentType: 'image/jpg' + } + } +}; +request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that this calling `form()` will clear the currently set form data for that request.) + +```js +// NOTE: Advanced use-case, for normal use see 'formData' usage above +var r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...}) +var form = r.form(); +form.append('my_field', 'my_value'); +form.append('my_buffer', new Buffer([1, 2, 3])); +form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'}); +``` +See the [form-data README](https://github.com/form-data/form-data) for more information & examples. + + +#### multipart/related + +Some variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preambleCRLF or postamble by passing them as `true` to your request options. 
+ +```js + request({ + method: 'PUT', + preambleCRLF: true, + postambleCRLF: true, + uri: 'http://service.com/upload', + multipart: [ + { + 'content-type': 'application/json', + body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + }, + { body: 'I am an attachment' }, + { body: fs.createReadStream('image.png') } + ], + // alternatively pass an object containing additional options + multipart: { + chunked: false, + data: [ + { + 'content-type': 'application/json', + body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + }, + { body: 'I am an attachment' } + ] + } + }, + function (error, response, body) { + if (error) { + return console.error('upload failed:', error); + } + console.log('Upload successful! Server responded with:', body); + }) +``` + +[back to top](#table-of-contents) + + +--- + + +## HTTP Authentication + +```js +request.get('http://some.server.com/').auth('username', 'password', false); +// or +request.get('http://some.server.com/', { + 'auth': { + 'user': 'username', + 'pass': 'password', + 'sendImmediately': false + } +}); +// or +request.get('http://some.server.com/').auth(null, null, true, 'bearerToken'); +// or +request.get('http://some.server.com/', { + 'auth': { + 'bearer': 'bearerToken' + } +}); +``` + +If passed as an option, `auth` should be a hash containing values: + +- `user` || `username` +- `pass` || `password` +- `sendImmediately` (optional) +- `bearer` (optional) + +The method form takes parameters +`auth(username, password, sendImmediately, bearer)`. + +`sendImmediately` defaults to `true`, which causes a basic or bearer +authentication header to be sent. If `sendImmediately` is `false`, then +`request` will retry with a proper authentication header after receiving a +`401` response from the server (which must contain a `WWW-Authenticate` header +indicating the required authentication method). + +Note that you can also specify basic authentication using the URL itself, as +detailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). Simply pass the +`user:password` before the host with an `@` sign: + +```js +var username = 'username', + password = 'password', + url = 'http://' + username + ':' + password + '@some.server.com'; + +request({url: url}, function (error, response, body) { + // Do more stuff with 'body' here +}); +``` + +Digest authentication is supported, but it only works with `sendImmediately` +set to `false`; otherwise `request` will send basic authentication on the +initial request, which will probably cause the request to fail. + +Bearer authentication is supported, and is activated when the `bearer` value is +available. The value may be either a `String` or a `Function` returning a +`String`. Using a function to supply the bearer token is particularly useful if +used in conjunction with `defaults` to allow a single function to supply the +last known token at the time of sending a request, or to compute one on the fly. + +[back to top](#table-of-contents) + + +--- + + +## Custom HTTP Headers + +HTTP Headers, such as `User-Agent`, can be set in the `options` object. +In the example below, we call the github API to find out the number +of stars and forks for the request repository. This requires a +custom `User-Agent` header as well as https. 
+ +```js +var request = require('request'); + +var options = { + url: 'https://api.github.com/repos/request/request', + headers: { + 'User-Agent': 'request' + } +}; + +function callback(error, response, body) { + if (!error && response.statusCode == 200) { + var info = JSON.parse(body); + console.log(info.stargazers_count + " Stars"); + console.log(info.forks_count + " Forks"); + } +} + +request(options, callback); +``` + +[back to top](#table-of-contents) + + +--- + + +## OAuth Signing + +[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. The +default signing algorithm is +[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2): + +```js +// OAuth1.0 - 3-legged server side flow (Twitter example) +// step 1 +var qs = require('querystring') + , oauth = + { callback: 'http://mysite.com/callback/' + , consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + } + , url = 'https://api.twitter.com/oauth/request_token' + ; +request.post({url:url, oauth:oauth}, function (e, r, body) { + // Ideally, you would take the body in the response + // and construct a URL that a user clicks on (like a sign in button). + // The verifier is only available in the response after a user has + // verified with twitter that they are authorizing your app. + + // step 2 + var req_data = qs.parse(body) + var uri = 'https://api.twitter.com/oauth/authenticate' + + '?' + qs.stringify({oauth_token: req_data.oauth_token}) + // redirect the user to the authorize uri + + // step 3 + // after the user is redirected back to your server + var auth_data = qs.parse(body) + , oauth = + { consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + , token: auth_data.oauth_token + , token_secret: req_data.oauth_token_secret + , verifier: auth_data.oauth_verifier + } + , url = 'https://api.twitter.com/oauth/access_token' + ; + request.post({url:url, oauth:oauth}, function (e, r, body) { + // ready to make signed requests on behalf of the user + var perm_data = qs.parse(body) + , oauth = + { consumer_key: CONSUMER_KEY + , consumer_secret: CONSUMER_SECRET + , token: perm_data.oauth_token + , token_secret: perm_data.oauth_token_secret + } + , url = 'https://api.twitter.com/1.1/users/show.json' + , qs = + { screen_name: perm_data.screen_name + , user_id: perm_data.user_id + } + ; + request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) { + console.log(user) + }) + }) +}) +``` + +For [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make +the following changes to the OAuth options object: +* Pass `signature_method : 'RSA-SHA1'` +* Instead of `consumer_secret`, specify a `private_key` string in + [PEM format](http://how2ssl.com/articles/working_with_pem_files/) + +For [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make +the following changes to the OAuth options object: +* Pass `signature_method : 'PLAINTEXT'` + +To send OAuth parameters via query params or in a post body as described in The +[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param) +section of the oauth1 spec: +* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth + options object. 
+* `transport_method` defaults to `'header'` + +To use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either +* Manually generate the body hash and pass it as a string `body_hash: '...'` +* Automatically generate the body hash by passing `body_hash: true` + +[back to top](#table-of-contents) + + +--- + + +## Proxies + +If you specify a `proxy` option, then the request (and any subsequent +redirects) will be sent via a connection to the proxy server. + +If your endpoint is an `https` url, and you are using a proxy, then +request will send a `CONNECT` request to the proxy server *first*, and +then use the supplied connection to connect to the endpoint. + +That is, first it will make a request like: + +``` +HTTP/1.1 CONNECT endpoint-server.com:80 +Host: proxy-server.com +User-Agent: whatever user agent you specify +``` + +and then the proxy server make a TCP connection to `endpoint-server` +on port `80`, and return a response that looks like: + +``` +HTTP/1.1 200 OK +``` + +At this point, the connection is left open, and the client is +communicating directly with the `endpoint-server.com` machine. + +See [the wikipedia page on HTTP Tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel) +for more information. + +By default, when proxying `http` traffic, request will simply make a +standard proxied `http` request. This is done by making the `url` +section of the initial line of the request a fully qualified url to +the endpoint. + +For example, it will make a single request that looks like: + +``` +HTTP/1.1 GET http://endpoint-server.com/some-url +Host: proxy-server.com +Other-Headers: all go here + +request body or whatever +``` + +Because a pure "http over http" tunnel offers no additional security +or other features, it is generally simpler to go with a +straightforward HTTP proxy in this case. However, if you would like +to force a tunneling proxy, you may set the `tunnel` option to `true`. + +You can also make a standard proxied `http` request by explicitly setting +`tunnel : false`, but **note that this will allow the proxy to see the traffic +to/from the destination server**. + +If you are using a tunneling proxy, you may set the +`proxyHeaderWhiteList` to share certain headers with the proxy. + +You can also set the `proxyHeaderExclusiveList` to share certain +headers only with the proxy and not with destination host. + +By default, this set is: + +``` +accept +accept-charset +accept-encoding +accept-language +accept-ranges +cache-control +content-encoding +content-language +content-length +content-location +content-md5 +content-range +content-type +connection +date +expect +max-forwards +pragma +proxy-authorization +referer +te +transfer-encoding +user-agent +via +``` + +Note that, when using a tunneling proxy, the `proxy-authorization` +header and any headers from custom `proxyHeaderExclusiveList` are +*never* sent to the endpoint server, but only to the proxy server. + + +### Controlling proxy behaviour using environment variables + +The following environment variables are respected by `request`: + + * `HTTP_PROXY` / `http_proxy` + * `HTTPS_PROXY` / `https_proxy` + * `NO_PROXY` / `no_proxy` + +When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. 
It is valid to define a proxy in one of the environment variables, but then override it for a specific request using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request. + +`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. Each should contain a comma-separated list of hosts to exclude from proxying. It is also possible to opt out of proxying when a particular destination port is used. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables. + +Here are some examples of valid `no_proxy` values: + + * `google.com` - don't proxy HTTP/HTTPS requests to Google. + * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google. + * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo! + * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether. + +[back to top](#table-of-contents) + + +--- + + +## UNIX Domain Sockets + +`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme: + +```js +/* Pattern */ 'http://unix:SOCKET:PATH' +/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path') +``` + +Note: The `SOCKET` path is assumed to be absolute to the root of the host file system. + +[back to top](#table-of-contents) + + +--- + + +## TLS/SSL Protocol + +TLS/SSL Protocol options, such as `cert`, `key` and `passphrase`, can be +set directly in the `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows for a slightly wider range of configurations, the recommended way is via the `options` object directly, since `agentOptions` and `https.globalAgent.options` are not applied in the same way in proxied environments (as data travels through a TLS connection instead of an http/https agent). 
+ +```js +var fs = require('fs') + , path = require('path') + , certFile = path.resolve(__dirname, 'ssl/client.crt') + , keyFile = path.resolve(__dirname, 'ssl/client.key') + , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem') + , request = require('request'); + +var options = { + url: 'https://api.some-server.com/', + cert: fs.readFileSync(certFile), + key: fs.readFileSync(keyFile), + passphrase: 'password', + ca: fs.readFileSync(caFile) +}; + +request.get(options); +``` + +### Using `options.agentOptions` + +In the example below, we call an API that requires a client-side SSL certificate +(in PEM format) with a passphrase-protected private key (in PEM format) and disable the SSLv3 protocol: + +```js +var fs = require('fs') + , path = require('path') + , certFile = path.resolve(__dirname, 'ssl/client.crt') + , keyFile = path.resolve(__dirname, 'ssl/client.key') + , request = require('request'); + +var options = { + url: 'https://api.some-server.com/', + agentOptions: { + cert: fs.readFileSync(certFile), + key: fs.readFileSync(keyFile), + // Or use the `pfx` property (replacing `cert` and `key`) when the private key, certificate and CA certs are in PFX or PKCS12 format: + // pfx: fs.readFileSync(pfxFilePath), + passphrase: 'password', + securityOptions: 'SSL_OP_NO_SSLv3' + } +}; + +request.get(options); +``` + +It is also possible to force SSLv3 only by specifying `secureProtocol`: + +```js +request.get({ + url: 'https://api.some-server.com/', + agentOptions: { + secureProtocol: 'SSLv3_method' + } +}); +``` + +It is possible to accept certificates other than those signed by generally allowed Certificate Authorities (CAs). +This can be useful, for example, when using self-signed certificates. +To require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`. +The certificate the domain presents must be signed by the root certificate specified: + +```js +request.get({ + url: 'https://api.some-server.com/', + agentOptions: { + ca: fs.readFileSync('ca.cert.pem') + } +}); +``` + +[back to top](#table-of-contents) + + +--- + +## Support for HAR 1.2 + +The `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`. + +A validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if it does not match. + +```js + var request = require('request') + request({ + // will be ignored + method: 'GET', + uri: 'http://www.google.com', + + // HTTP Archive Request Object + har: { + url: 'http://www.mockbin.com/har', + method: 'POST', + headers: [ + { + name: 'content-type', + value: 'application/x-www-form-urlencoded' + } + ], + postData: { + mimeType: 'application/x-www-form-urlencoded', + params: [ + { + name: 'foo', + value: 'bar' + }, + { + name: 'hello', + value: 'world' + } + ] + } + } + }) + + // a POST request will be sent to http://www.mockbin.com + // with an application/x-www-form-urlencoded body: + // foo=bar&hello=world +``` + +[back to top](#table-of-contents) + + +--- + +## request(options, callback) + +The first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional. + +- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()` +- `baseUrl` - fully qualified uri string used as the base url. 
Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string. +- `method` - http method (default: `"GET"`) +- `headers` - http headers (default: `{}`) + +--- + +- `qs` - object containing querystring values to be appended to the `uri` +- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}` +- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat` +- `useQuerystring` - If true, use `querystring` to stringify and parse + querystrings, otherwise use `qs` (default: `false`). Set this option to + `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the + default `foo[0]=bar&foo[1]=baz`. + +--- + +- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object. +- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See "Forms" section above. +- `formData` - Data to pass for a `multipart/form-data` request. See + [Forms](#forms) section above. +- `multipart` - array of objects which contain their own headers and `body` + attributes. Sends a `multipart/related` request. See [Forms](#forms) section + above. + - Alternatively you can pass in an object `{chunked: false, data: []}` where + `chunked` is used to specify whether the request is sent in + [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding) + In non-chunked requests, data items with body streams are not allowed. +- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request. +- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request. +- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON. +- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body. +- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body. + +--- + +- `auth` - A hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). 
See documentation above. +- `oauth` - Options for OAuth HMAC-SHA1 signing. See documentation above. +- `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example). +- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`. Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. **Note:** you need to `npm install aws4` first. +- `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options. + +--- + +- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise. +- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`) +- `maxRedirects` - the maximum number of redirects to follow (default: `10`) +- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if true, referer header set in the initial request is preserved during redirect chain. + +--- + +- `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.) +- `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below. +- `jar` - If `true`, remember cookies for future use (or define your custom cookie jar; see examples section) + +--- + +- `agent` - `http(s).Agent` instance to use +- `agentClass` - alternatively specify your agent's class name +- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions). +- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+ +- `pool` - An object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. 
If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified. + - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`). + - Note that if you are sending multiple requests in a loop and creating + multiple new `pool` objects, `maxSockets` will not work as intended. To + work around this, either use [`request.defaults`](#requestdefaultsoptions) + with your pool options or create the pool object with the `maxSockets` + property outside of the loop. +- `timeout` - Integer containing the number of milliseconds to wait for a +server to send response headers (and start the response body) before aborting +the request. Note that if the underlying TCP connection cannot be established, +the OS-wide TCP connection timeout will overrule the `timeout` option ([the +default in Linux can be anywhere from 20-120 seconds][linux-timeout]). + +[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout + +--- + +- `localAddress` - Local interface to bind for network connections. +- `proxy` - An HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`) +- `strictSSL` - If `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option. +- `tunnel` - controls the behavior of + [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling) + as follows: + - `undefined` (default) - `true` if the destination is `https`, `false` otherwise + - `true` - always tunnel to the destination by making a `CONNECT` request to + the proxy + - `false` - request the destination as a `GET` request. +- `proxyHeaderWhiteList` - A whitelist of headers to send to a + tunneling proxy. +- `proxyHeaderExclusiveList` - A whitelist of headers to send + exclusively to a tunneling proxy and not to destination. + +--- + +- `time` - If `true`, the request-response cycle (including all redirects) is timed at millisecond resolution, and the result provided on the response's `elapsedTime` property. The `responseStartTime` property is also available to indicate the timestamp when the response begins. +- `har` - A [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-1.2) for details)* +- `callback` - alternatively pass the request's callback in the options object + +The callback argument gets 3 arguments: + +1. An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object) +2. An [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) object +3. The third is the `response` body (`String` or `Buffer`, or JSON object if the `json` option is supplied) + +[back to top](#table-of-contents) + + +--- + +## Convenience methods + +There are also shorthand methods for different HTTP METHODs and some other conveniences. + + +### request.defaults(options) + +This method **returns a wrapper** around the normal request API that defaults +to whatever options you pass to it. 
+ +**Note:** `request.defaults()` **does not** modify the global request API; +instead, it **returns a wrapper** that has your default settings applied to it. + +**Note:** You can call `.defaults()` on the wrapper that is returned from +`request.defaults` to add/override defaults that were previously defaulted. + +For example: +```js +//requests using baseRequest() will set the 'x-token' header +var baseRequest = request.defaults({ + headers: {'x-token': 'my-token'} +}) + +//requests using specialRequest() will include the 'x-token' header set in +//baseRequest and will also include the 'special' header +var specialRequest = baseRequest.defaults({ + headers: {special: 'special value'} +}) +``` + +### request.put + +Same as `request()`, but defaults to `method: "PUT"`. + +```js +request.put(url) +``` + +### request.patch + +Same as `request()`, but defaults to `method: "PATCH"`. + +```js +request.patch(url) +``` + +### request.post + +Same as `request()`, but defaults to `method: "POST"`. + +```js +request.post(url) +``` + +### request.head + +Same as `request()`, but defaults to `method: "HEAD"`. + +```js +request.head(url) +``` + +### request.del / request.delete + +Same as `request()`, but defaults to `method: "DELETE"`. + +```js +request.del(url) +request.delete(url) +``` + +### request.get + +Same as `request()` (for uniformity). + +```js +request.get(url) +``` +### request.cookie + +Function that creates a new cookie. + +```js +request.cookie('key1=value1') +``` +### request.jar() + +Function that creates a new cookie jar. + +```js +request.jar() +``` + +[back to top](#table-of-contents) + + +--- + + +## Debugging + +There are at least three ways to debug the operation of `request`: + +1. Launch the node process like `NODE_DEBUG=request node script.js` + (`lib,request,otherlib` works too). + +2. Set `require('request').debug = true` at any time (this does the same thing + as #1). + +3. Use the [request-debug module](https://github.com/request/request-debug) to + view request and response headers and bodies. + +[back to top](#table-of-contents) + + +--- + +## Timeouts + +Most requests to external servers should have a timeout attached, in case the +server is not responding in a timely manner. Without a timeout, your code may +have a socket open/consume resources for minutes or more. + +There are two main types of timeouts: **connection timeouts** and **read +timeouts**. A connect timeout occurs if the timeout is hit while your client is +attempting to establish a connection to a remote machine (corresponding to the +[connect() call][connect] on the socket). A read timeout occurs any time the +server is too slow to send back a part of the response. + +These two situations have widely different implications for what went wrong +with the request, so it's useful to be able to distinguish them. You can detect +timeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you +can detect whether the timeout was a connection timeout by checking if the +`err.connect` property is set to `true`. + +```js +request.get('http://10.255.255.1', {timeout: 1500}, function(err) { + console.log(err.code === 'ETIMEDOUT'); + // Set to `true` if the timeout was a connection timeout, `false` or + // `undefined` otherwise. 
+ console.log(err.connect === true); + process.exit(0); +}); +``` + +[connect]: http://linux.die.net/man/2/connect + +## Examples: + +```js + var request = require('request') + , rand = Math.floor(Math.random()*100000000).toString() + ; + request( + { method: 'PUT' + , uri: 'http://mikeal.iriscouch.com/testjs/' + rand + , multipart: + [ { 'content-type': 'application/json' + , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}}) + } + , { body: 'I am an attachment' } + ] + } + , function (error, response, body) { + if(response.statusCode == 201){ + console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand) + } else { + console.log('error: '+ response.statusCode) + console.log(body) + } + } + ) +``` + +For backwards-compatibility, response compression is not supported by default. +To accept gzip-compressed responses, set the `gzip` option to `true`. Note +that the body data passed through `request` is automatically decompressed +while the response object is unmodified and will contain compressed data if +the server sent a compressed response. + +```js + var request = require('request') + request( + { method: 'GET' + , uri: 'http://www.google.com' + , gzip: true + } + , function (error, response, body) { + // body is the decompressed response body + console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity')) + console.log('the decoded data is: ' + body) + } + ).on('data', function(data) { + // decompressed data as it is received + console.log('decoded chunk: ' + data) + }) + .on('response', function(response) { + // unmodified http.IncomingMessage object + response.on('data', function(data) { + // compressed data as it is received + console.log('received ' + data.length + ' bytes of compressed data') + }) + }) +``` + +Cookies are disabled by default (else, they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`). + +```js +var request = request.defaults({jar: true}) +request('http://www.google.com', function () { + request('http://images.google.com') +}) +``` + +To use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`) + +```js +var j = request.jar() +var request = request.defaults({jar:j}) +request('http://www.google.com', function () { + request('http://images.google.com') +}) +``` + +OR + +```js +var j = request.jar(); +var cookie = request.cookie('key1=value1'); +var url = 'http://www.google.com'; +j.setCookie(cookie, url); +request({url: url, jar: j}, function () { + request('http://images.google.com') +}) +``` + +To use a custom cookie store (such as a +[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore) +which supports saving to and restoring from JSON files), pass it as a parameter +to `request.jar()`: + +```js +var FileCookieStore = require('tough-cookie-filestore'); +// NOTE - currently the 'cookies.json' file must already exist! +var j = request.jar(new FileCookieStore('cookies.json')); +request = request.defaults({ jar : j }) +request('http://www.google.com', function() { + request('http://images.google.com') +}) +``` + +The cookie store must be a +[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie) +store and it must support synchronous operations; see the +[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#cookiestore-api) +for details. 
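+
+As an illustration, a short sketch (assuming the bundled `tough-cookie` module
+can be required directly) that passes tough-cookie's synchronous in-memory
+`MemoryCookieStore` explicitly; it behaves like the default jar but keeps a
+reference to the store object:
+
+```js
+var request = require('request');
+var MemoryCookieStore = require('tough-cookie').MemoryCookieStore;
+
+// an explicit synchronous store; request.jar() with no argument uses an
+// equivalent in-memory store internally
+var store = new MemoryCookieStore();
+var j = request.jar(store);
+
+request({ url: 'http://www.google.com', jar: j }, function () {
+  // the same store instance now holds any cookies set by the response
+  request('http://images.google.com', { jar: j });
+});
+```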
+ +To inspect your cookie jar after a request: + +```js +var j = request.jar() +request({url: 'http://www.google.com', jar: j}, function () { + var cookie_string = j.getCookieString(url); // "key1=value1; key2=value2; ..." + var cookies = j.getCookies(url); + // [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...] +}) +``` + +[back to top](#table-of-contents) diff --git a/node_modules/request/codecov.yml b/node_modules/request/codecov.yml new file mode 100644 index 0000000..acd3f33 --- /dev/null +++ b/node_modules/request/codecov.yml @@ -0,0 +1,2 @@ + +comment: false diff --git a/node_modules/request/index.js b/node_modules/request/index.js new file mode 100755 index 0000000..9ec65ea --- /dev/null +++ b/node_modules/request/index.js @@ -0,0 +1,156 @@ +// Copyright 2010-2012 Mikeal Rogers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict' + +var extend = require('extend') + , cookies = require('./lib/cookies') + , helpers = require('./lib/helpers') + +var paramsHaveRequestBody = helpers.paramsHaveRequestBody + + +// organize params for patch, post, put, head, del +function initParams(uri, options, callback) { + if (typeof options === 'function') { + callback = options + } + + var params = {} + if (typeof options === 'object') { + extend(params, options, {uri: uri}) + } else if (typeof uri === 'string') { + extend(params, {uri: uri}) + } else { + extend(params, uri) + } + + params.callback = callback || params.callback + return params +} + +function request (uri, options, callback) { + if (typeof uri === 'undefined') { + throw new Error('undefined is not a valid uri or options object.') + } + + var params = initParams(uri, options, callback) + + if (params.method === 'HEAD' && paramsHaveRequestBody(params)) { + throw new Error('HTTP HEAD requests MUST NOT include a request body.') + } + + return new request.Request(params) +} + +function verbFunc (verb) { + var method = verb.toUpperCase() + return function (uri, options, callback) { + var params = initParams(uri, options, callback) + params.method = method + return request(params, params.callback) + } +} + +// define like this to please codeintel/intellisense IDEs +request.get = verbFunc('get') +request.head = verbFunc('head') +request.post = verbFunc('post') +request.put = verbFunc('put') +request.patch = verbFunc('patch') +request.del = verbFunc('delete') +request['delete'] = verbFunc('delete') + +request.jar = function (store) { + return cookies.jar(store) +} + +request.cookie = function (str) { + return cookies.parse(str) +} + +function wrapRequestMethod (method, options, requester, verb) { + + return function (uri, opts, callback) { + var params = initParams(uri, opts, callback) + + var target = {} + extend(true, target, options, params) + + target.pool = params.pool || options.pool + + if (verb) { + target.method = verb.toUpperCase() + } + + if (typeof requester === 'function') { + method = requester + } + + return method(target, target.callback) + } +} + +request.defaults = function (options, 
requester) { + var self = this + + options = options || {} + + if (typeof options === 'function') { + requester = options + options = {} + } + + var defaults = wrapRequestMethod(self, options, requester) + + var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete'] + verbs.forEach(function(verb) { + defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb) + }) + + defaults.cookie = wrapRequestMethod(self.cookie, options, requester) + defaults.jar = self.jar + defaults.defaults = self.defaults + return defaults +} + +request.forever = function (agentOptions, optionsArg) { + var options = {} + if (optionsArg) { + extend(options, optionsArg) + } + if (agentOptions) { + options.agentOptions = agentOptions + } + + options.forever = true + return request.defaults(options) +} + +// Exports + +module.exports = request +request.Request = require('./request') +request.initParams = initParams + +// Backwards compatibility for request.debug +Object.defineProperty(request, 'debug', { + enumerable : true, + get : function() { + return request.Request.debug + }, + set : function(debug) { + request.Request.debug = debug + } +}) diff --git a/node_modules/request/lib/auth.js b/node_modules/request/lib/auth.js new file mode 100644 index 0000000..1cb6952 --- /dev/null +++ b/node_modules/request/lib/auth.js @@ -0,0 +1,168 @@ +'use strict' + +var caseless = require('caseless') + , uuid = require('node-uuid') + , helpers = require('./helpers') + +var md5 = helpers.md5 + , toBase64 = helpers.toBase64 + + +function Auth (request) { + // define all public properties here + this.request = request + this.hasAuth = false + this.sentAuth = false + this.bearerToken = null + this.user = null + this.pass = null +} + +Auth.prototype.basic = function (user, pass, sendImmediately) { + var self = this + if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) { + self.request.emit('error', new Error('auth() received invalid user or password')) + } + self.user = user + self.pass = pass + self.hasAuth = true + var header = user + ':' + (pass || '') + if (sendImmediately || typeof sendImmediately === 'undefined') { + var authHeader = 'Basic ' + toBase64(header) + self.sentAuth = true + return authHeader + } +} + +Auth.prototype.bearer = function (bearer, sendImmediately) { + var self = this + self.bearerToken = bearer + self.hasAuth = true + if (sendImmediately || typeof sendImmediately === 'undefined') { + if (typeof bearer === 'function') { + bearer = bearer() + } + var authHeader = 'Bearer ' + (bearer || '') + self.sentAuth = true + return authHeader + } +} + +Auth.prototype.digest = function (method, path, authHeader) { + // TODO: More complete implementation of RFC 2617. + // - handle challenge.domain + // - support qop="auth-int" only + // - handle Authentication-Info (not necessarily?) + // - check challenge.stale (not necessarily?) + // - increase nc (not necessarily?) + // For reference: + // http://tools.ietf.org/html/rfc2617#section-3 + // https://github.com/bagder/curl/blob/master/lib/http_digest.c + + var self = this + + var challenge = {} + var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi + for (;;) { + var match = re.exec(authHeader) + if (!match) { + break + } + challenge[match[1]] = match[2] || match[3] + } + + /** + * RFC 2617: handle both MD5 and MD5-sess algorithms. 
+ * + * If the algorithm directive's value is "MD5" or unspecified, then HA1 is + * HA1=MD5(username:realm:password) + * If the algorithm directive's value is "MD5-sess", then HA1 is + * HA1=MD5(MD5(username:realm:password):nonce:cnonce) + */ + var ha1Compute = function (algorithm, user, realm, pass, nonce, cnonce) { + var ha1 = md5(user + ':' + realm + ':' + pass) + if (algorithm && algorithm.toLowerCase() === 'md5-sess') { + return md5(ha1 + ':' + nonce + ':' + cnonce) + } else { + return ha1 + } + } + + var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth' + var nc = qop && '00000001' + var cnonce = qop && uuid().replace(/-/g, '') + var ha1 = ha1Compute(challenge.algorithm, self.user, challenge.realm, self.pass, challenge.nonce, cnonce) + var ha2 = md5(method + ':' + path) + var digestResponse = qop + ? md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2) + : md5(ha1 + ':' + challenge.nonce + ':' + ha2) + var authValues = { + username: self.user, + realm: challenge.realm, + nonce: challenge.nonce, + uri: path, + qop: qop, + response: digestResponse, + nc: nc, + cnonce: cnonce, + algorithm: challenge.algorithm, + opaque: challenge.opaque + } + + authHeader = [] + for (var k in authValues) { + if (authValues[k]) { + if (k === 'qop' || k === 'nc' || k === 'algorithm') { + authHeader.push(k + '=' + authValues[k]) + } else { + authHeader.push(k + '="' + authValues[k] + '"') + } + } + } + authHeader = 'Digest ' + authHeader.join(', ') + self.sentAuth = true + return authHeader +} + +Auth.prototype.onRequest = function (user, pass, sendImmediately, bearer) { + var self = this + , request = self.request + + var authHeader + if (bearer === undefined && user === undefined) { + self.request.emit('error', new Error('no auth mechanism defined')) + } else if (bearer !== undefined) { + authHeader = self.bearer(bearer, sendImmediately) + } else { + authHeader = self.basic(user, pass, sendImmediately) + } + if (authHeader) { + request.setHeader('authorization', authHeader) + } +} + +Auth.prototype.onResponse = function (response) { + var self = this + , request = self.request + + if (!self.hasAuth || self.sentAuth) { return null } + + var c = caseless(response.headers) + + var authHeader = c.get('www-authenticate') + var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase() + request.debug('reauth', authVerb) + + switch (authVerb) { + case 'basic': + return self.basic(self.user, self.pass, true) + + case 'bearer': + return self.bearer(self.bearerToken, true) + + case 'digest': + return self.digest(request.method, request.path, authHeader) + } +} + +exports.Auth = Auth diff --git a/node_modules/request/lib/cookies.js b/node_modules/request/lib/cookies.js new file mode 100644 index 0000000..412c07d --- /dev/null +++ b/node_modules/request/lib/cookies.js @@ -0,0 +1,39 @@ +'use strict' + +var tough = require('tough-cookie') + +var Cookie = tough.Cookie + , CookieJar = tough.CookieJar + + +exports.parse = function(str) { + if (str && str.uri) { + str = str.uri + } + if (typeof str !== 'string') { + throw new Error('The cookie function only accepts STRING as param') + } + return Cookie.parse(str, {loose: true}) +} + +// Adapt the sometimes-Async api of tough.CookieJar to our requirements +function RequestJar(store) { + var self = this + self._jar = new CookieJar(store, {looseMode: true}) +} +RequestJar.prototype.setCookie = function(cookieOrStr, uri, options) { + var self = this + return self._jar.setCookieSync(cookieOrStr, uri, options || {}) +} 
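+// Synchronous read helpers: getCookieString returns the serialized Cookie
+// header value for a URI, getCookies returns the matching cookie objects.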
+RequestJar.prototype.getCookieString = function(uri) { + var self = this + return self._jar.getCookieStringSync(uri) +} +RequestJar.prototype.getCookies = function(uri) { + var self = this + return self._jar.getCookiesSync(uri) +} + +exports.jar = function(store) { + return new RequestJar(store) +} diff --git a/node_modules/request/lib/getProxyFromURI.js b/node_modules/request/lib/getProxyFromURI.js new file mode 100644 index 0000000..c2013a6 --- /dev/null +++ b/node_modules/request/lib/getProxyFromURI.js @@ -0,0 +1,79 @@ +'use strict' + +function formatHostname(hostname) { + // canonicalize the hostname, so that 'oogle.com' won't match 'google.com' + return hostname.replace(/^\.*/, '.').toLowerCase() +} + +function parseNoProxyZone(zone) { + zone = zone.trim().toLowerCase() + + var zoneParts = zone.split(':', 2) + , zoneHost = formatHostname(zoneParts[0]) + , zonePort = zoneParts[1] + , hasPort = zone.indexOf(':') > -1 + + return {hostname: zoneHost, port: zonePort, hasPort: hasPort} +} + +function uriInNoProxy(uri, noProxy) { + var port = uri.port || (uri.protocol === 'https:' ? '443' : '80') + , hostname = formatHostname(uri.hostname) + , noProxyList = noProxy.split(',') + + // iterate through the noProxyList until it finds a match. + return noProxyList.map(parseNoProxyZone).some(function(noProxyZone) { + var isMatchedAt = hostname.indexOf(noProxyZone.hostname) + , hostnameMatched = ( + isMatchedAt > -1 && + (isMatchedAt === hostname.length - noProxyZone.hostname.length) + ) + + if (noProxyZone.hasPort) { + return (port === noProxyZone.port) && hostnameMatched + } + + return hostnameMatched + }) +} + +function getProxyFromURI(uri) { + // Decide the proper request proxy to use based on the request URI object and the + // environmental variables (NO_PROXY, HTTP_PROXY, etc.) + // respect NO_PROXY environment variables (see: http://lynx.isc.org/current/breakout/lynx_help/keystrokes/environments.html) + + var noProxy = process.env.NO_PROXY || process.env.no_proxy || '' + + // if the noProxy is a wildcard then return null + + if (noProxy === '*') { + return null + } + + // if the noProxy is not empty and the uri is found return null + + if (noProxy !== '' && uriInNoProxy(uri, noProxy)) { + return null + } + + // Check for HTTP or HTTPS Proxy in environment Else default to null + + if (uri.protocol === 'http:') { + return process.env.HTTP_PROXY || + process.env.http_proxy || null + } + + if (uri.protocol === 'https:') { + return process.env.HTTPS_PROXY || + process.env.https_proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || null + } + + // if none of that works, return null + // (What uri protocol are you using then?) + + return null +} + +module.exports = getProxyFromURI diff --git a/node_modules/request/lib/har.js b/node_modules/request/lib/har.js new file mode 100644 index 0000000..3059574 --- /dev/null +++ b/node_modules/request/lib/har.js @@ -0,0 +1,215 @@ +'use strict' + +var fs = require('fs') +var qs = require('querystring') +var validate = require('har-validator') +var extend = require('extend') + +function Har (request) { + this.request = request +} + +Har.prototype.reducer = function (obj, pair) { + // new property ? + if (obj[pair.name] === undefined) { + obj[pair.name] = pair.value + return obj + } + + // existing? 
convert to array + var arr = [ + obj[pair.name], + pair.value + ] + + obj[pair.name] = arr + + return obj +} + +Har.prototype.prep = function (data) { + // construct utility properties + data.queryObj = {} + data.headersObj = {} + data.postData.jsonObj = false + data.postData.paramsObj = false + + // construct query objects + if (data.queryString && data.queryString.length) { + data.queryObj = data.queryString.reduce(this.reducer, {}) + } + + // construct headers objects + if (data.headers && data.headers.length) { + // loweCase header keys + data.headersObj = data.headers.reduceRight(function (headers, header) { + headers[header.name] = header.value + return headers + }, {}) + } + + // construct Cookie header + if (data.cookies && data.cookies.length) { + var cookies = data.cookies.map(function (cookie) { + return cookie.name + '=' + cookie.value + }) + + if (cookies.length) { + data.headersObj.cookie = cookies.join('; ') + } + } + + // prep body + function some (arr) { + return arr.some(function (type) { + return data.postData.mimeType.indexOf(type) === 0 + }) + } + + if (some([ + 'multipart/mixed', + 'multipart/related', + 'multipart/form-data', + 'multipart/alternative'])) { + + // reset values + data.postData.mimeType = 'multipart/form-data' + } + + else if (some([ + 'application/x-www-form-urlencoded'])) { + + if (!data.postData.params) { + data.postData.text = '' + } else { + data.postData.paramsObj = data.postData.params.reduce(this.reducer, {}) + + // always overwrite + data.postData.text = qs.stringify(data.postData.paramsObj) + } + } + + else if (some([ + 'text/json', + 'text/x-json', + 'application/json', + 'application/x-json'])) { + + data.postData.mimeType = 'application/json' + + if (data.postData.text) { + try { + data.postData.jsonObj = JSON.parse(data.postData.text) + } catch (e) { + this.request.debug(e) + + // force back to text/plain + data.postData.mimeType = 'text/plain' + } + } + } + + return data +} + +Har.prototype.options = function (options) { + // skip if no har property defined + if (!options.har) { + return options + } + + var har = {} + extend(har, options.har) + + // only process the first entry + if (har.log && har.log.entries) { + har = har.log.entries[0] + } + + // add optional properties to make validation successful + har.url = har.url || options.url || options.uri || options.baseUrl || '/' + har.httpVersion = har.httpVersion || 'HTTP/1.1' + har.queryString = har.queryString || [] + har.headers = har.headers || [] + har.cookies = har.cookies || [] + har.postData = har.postData || {} + har.postData.mimeType = har.postData.mimeType || 'application/octet-stream' + + har.bodySize = 0 + har.headersSize = 0 + har.postData.size = 0 + + if (!validate.request(har)) { + return options + } + + // clean up and get some utility properties + var req = this.prep(har) + + // construct new options + if (req.url) { + options.url = req.url + } + + if (req.method) { + options.method = req.method + } + + if (Object.keys(req.queryObj).length) { + options.qs = req.queryObj + } + + if (Object.keys(req.headersObj).length) { + options.headers = req.headersObj + } + + function test (type) { + return req.postData.mimeType.indexOf(type) === 0 + } + if (test('application/x-www-form-urlencoded')) { + options.form = req.postData.paramsObj + } + else if (test('application/json')) { + if (req.postData.jsonObj) { + options.body = req.postData.jsonObj + options.json = true + } + } + else if (test('multipart/form-data')) { + options.formData = {} + + req.postData.params.forEach(function 
(param) { + var attachment = {} + + if (!param.fileName && !param.fileName && !param.contentType) { + options.formData[param.name] = param.value + return + } + + // attempt to read from disk! + if (param.fileName && !param.value) { + attachment.value = fs.createReadStream(param.fileName) + } else if (param.value) { + attachment.value = param.value + } + + if (param.fileName) { + attachment.options = { + filename: param.fileName, + contentType: param.contentType ? param.contentType : null + } + } + + options.formData[param.name] = attachment + }) + } + else { + if (req.postData.text) { + options.body = req.postData.text + } + } + + return options +} + +exports.Har = Har diff --git a/node_modules/request/lib/helpers.js b/node_modules/request/lib/helpers.js new file mode 100644 index 0000000..f9d727e --- /dev/null +++ b/node_modules/request/lib/helpers.js @@ -0,0 +1,65 @@ +'use strict' + +var jsonSafeStringify = require('json-stringify-safe') + , crypto = require('crypto') + +var defer = typeof setImmediate === 'undefined' + ? process.nextTick + : setImmediate + +function paramsHaveRequestBody(params) { + return ( + params.body || + params.requestBodyStream || + (params.json && typeof params.json !== 'boolean') || + params.multipart + ) +} + +function safeStringify (obj, replacer) { + var ret + try { + ret = JSON.stringify(obj, replacer) + } catch (e) { + ret = jsonSafeStringify(obj, replacer) + } + return ret +} + +function md5 (str) { + return crypto.createHash('md5').update(str).digest('hex') +} + +function isReadStream (rs) { + return rs.readable && rs.path && rs.mode +} + +function toBase64 (str) { + return (new Buffer(str || '', 'utf8')).toString('base64') +} + +function copy (obj) { + var o = {} + Object.keys(obj).forEach(function (i) { + o[i] = obj[i] + }) + return o +} + +function version () { + var numbers = process.version.replace('v', '').split('.') + return { + major: parseInt(numbers[0], 10), + minor: parseInt(numbers[1], 10), + patch: parseInt(numbers[2], 10) + } +} + +exports.paramsHaveRequestBody = paramsHaveRequestBody +exports.safeStringify = safeStringify +exports.md5 = md5 +exports.isReadStream = isReadStream +exports.toBase64 = toBase64 +exports.copy = copy +exports.version = version +exports.defer = defer diff --git a/node_modules/request/lib/multipart.js b/node_modules/request/lib/multipart.js new file mode 100644 index 0000000..c128172 --- /dev/null +++ b/node_modules/request/lib/multipart.js @@ -0,0 +1,112 @@ +'use strict' + +var uuid = require('node-uuid') + , CombinedStream = require('combined-stream') + , isstream = require('isstream') + + +function Multipart (request) { + this.request = request + this.boundary = uuid() + this.chunked = false + this.body = null +} + +Multipart.prototype.isChunked = function (options) { + var self = this + , chunked = false + , parts = options.data || options + + if (!parts.forEach) { + self.request.emit('error', new Error('Argument error, options.multipart.')) + } + + if (options.chunked !== undefined) { + chunked = options.chunked + } + + if (self.request.getHeader('transfer-encoding') === 'chunked') { + chunked = true + } + + if (!chunked) { + parts.forEach(function (part) { + if (typeof part.body === 'undefined') { + self.request.emit('error', new Error('Body attribute missing in multipart.')) + } + if (isstream(part.body)) { + chunked = true + } + }) + } + + return chunked +} + +Multipart.prototype.setHeaders = function (chunked) { + var self = this + + if (chunked && !self.request.hasHeader('transfer-encoding')) { + 
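+    // the body will be streamed part by part, so advertise chunked transfer
+    // encoding unless the caller already set the header themselves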
self.request.setHeader('transfer-encoding', 'chunked') + } + + var header = self.request.getHeader('content-type') + + if (!header || header.indexOf('multipart') === -1) { + self.request.setHeader('content-type', 'multipart/related; boundary=' + self.boundary) + } else { + if (header.indexOf('boundary') !== -1) { + self.boundary = header.replace(/.*boundary=([^\s;]+).*/, '$1') + } else { + self.request.setHeader('content-type', header + '; boundary=' + self.boundary) + } + } +} + +Multipart.prototype.build = function (parts, chunked) { + var self = this + var body = chunked ? new CombinedStream() : [] + + function add (part) { + if (typeof part === 'number') { + part = part.toString() + } + return chunked ? body.append(part) : body.push(new Buffer(part)) + } + + if (self.request.preambleCRLF) { + add('\r\n') + } + + parts.forEach(function (part) { + var preamble = '--' + self.boundary + '\r\n' + Object.keys(part).forEach(function (key) { + if (key === 'body') { return } + preamble += key + ': ' + part[key] + '\r\n' + }) + preamble += '\r\n' + add(preamble) + add(part.body) + add('\r\n') + }) + add('--' + self.boundary + '--') + + if (self.request.postambleCRLF) { + add('\r\n') + } + + return body +} + +Multipart.prototype.onRequest = function (options) { + var self = this + + var chunked = self.isChunked(options) + , parts = options.data || options + + self.setHeaders(chunked) + self.chunked = chunked + self.body = self.build(parts, chunked) +} + +exports.Multipart = Multipart diff --git a/node_modules/request/lib/oauth.js b/node_modules/request/lib/oauth.js new file mode 100644 index 0000000..c24209b --- /dev/null +++ b/node_modules/request/lib/oauth.js @@ -0,0 +1,147 @@ +'use strict' + +var url = require('url') + , qs = require('qs') + , caseless = require('caseless') + , uuid = require('node-uuid') + , oauth = require('oauth-sign') + , crypto = require('crypto') + + +function OAuth (request) { + this.request = request + this.params = null +} + +OAuth.prototype.buildParams = function (_oauth, uri, method, query, form, qsLib) { + var oa = {} + for (var i in _oauth) { + oa['oauth_' + i] = _oauth[i] + } + if (!oa.oauth_version) { + oa.oauth_version = '1.0' + } + if (!oa.oauth_timestamp) { + oa.oauth_timestamp = Math.floor( Date.now() / 1000 ).toString() + } + if (!oa.oauth_nonce) { + oa.oauth_nonce = uuid().replace(/-/g, '') + } + if (!oa.oauth_signature_method) { + oa.oauth_signature_method = 'HMAC-SHA1' + } + + var consumer_secret_or_private_key = oa.oauth_consumer_secret || oa.oauth_private_key + delete oa.oauth_consumer_secret + delete oa.oauth_private_key + + var token_secret = oa.oauth_token_secret + delete oa.oauth_token_secret + + var realm = oa.oauth_realm + delete oa.oauth_realm + delete oa.oauth_transport_method + + var baseurl = uri.protocol + '//' + uri.host + uri.pathname + var params = qsLib.parse([].concat(query, form, qsLib.stringify(oa)).join('&')) + + oa.oauth_signature = oauth.sign( + oa.oauth_signature_method, + method, + baseurl, + params, + consumer_secret_or_private_key, + token_secret) + + if (realm) { + oa.realm = realm + } + + return oa +} + +OAuth.prototype.buildBodyHash = function(_oauth, body) { + if (['HMAC-SHA1', 'RSA-SHA1'].indexOf(_oauth.signature_method || 'HMAC-SHA1') < 0) { + this.request.emit('error', new Error('oauth: ' + _oauth.signature_method + + ' signature_method not supported with body_hash signing.')) + } + + var shasum = crypto.createHash('sha1') + shasum.update(body || '') + var sha1 = shasum.digest('hex') + + return new 
Buffer(sha1).toString('base64') +} + +OAuth.prototype.concatParams = function (oa, sep, wrap) { + wrap = wrap || '' + + var params = Object.keys(oa).filter(function (i) { + return i !== 'realm' && i !== 'oauth_signature' + }).sort() + + if (oa.realm) { + params.splice(0, 0, 'realm') + } + params.push('oauth_signature') + + return params.map(function (i) { + return i + '=' + wrap + oauth.rfc3986(oa[i]) + wrap + }).join(sep) +} + +OAuth.prototype.onRequest = function (_oauth) { + var self = this + self.params = _oauth + + var uri = self.request.uri || {} + , method = self.request.method || '' + , headers = caseless(self.request.headers) + , body = self.request.body || '' + , qsLib = self.request.qsLib || qs + + var form + , query + , contentType = headers.get('content-type') || '' + , formContentType = 'application/x-www-form-urlencoded' + , transport = _oauth.transport_method || 'header' + + if (contentType.slice(0, formContentType.length) === formContentType) { + contentType = formContentType + form = body + } + if (uri.query) { + query = uri.query + } + if (transport === 'body' && (method !== 'POST' || contentType !== formContentType)) { + self.request.emit('error', new Error('oauth: transport_method of body requires POST ' + + 'and content-type ' + formContentType)) + } + + if (!form && typeof _oauth.body_hash === 'boolean') { + _oauth.body_hash = self.buildBodyHash(_oauth, self.request.body.toString()) + } + + var oa = self.buildParams(_oauth, uri, method, query, form, qsLib) + + switch (transport) { + case 'header': + self.request.setHeader('Authorization', 'OAuth ' + self.concatParams(oa, ',', '"')) + break + + case 'query': + var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&') + self.request.uri = url.parse(href) + self.request.path = self.request.uri.path + break + + case 'body': + self.request.body = (form ? form + '&' : '') + self.concatParams(oa, '&') + break + + default: + self.request.emit('error', new Error('oauth: transport_method invalid')) + } +} + +exports.OAuth = OAuth diff --git a/node_modules/request/lib/querystring.js b/node_modules/request/lib/querystring.js new file mode 100644 index 0000000..baf5e80 --- /dev/null +++ b/node_modules/request/lib/querystring.js @@ -0,0 +1,51 @@ +'use strict' + +var qs = require('qs') + , querystring = require('querystring') + + +function Querystring (request) { + this.request = request + this.lib = null + this.useQuerystring = null + this.parseOptions = null + this.stringifyOptions = null +} + +Querystring.prototype.init = function (options) { + if (this.lib) {return} + + this.useQuerystring = options.useQuerystring + this.lib = (this.useQuerystring ? querystring : qs) + + this.parseOptions = options.qsParseOptions || {} + this.stringifyOptions = options.qsStringifyOptions || {} +} + +Querystring.prototype.stringify = function (obj) { + return (this.useQuerystring) + ? this.rfc3986(this.lib.stringify(obj, + this.stringifyOptions.sep || null, + this.stringifyOptions.eq || null, + this.stringifyOptions)) + : this.lib.stringify(obj, this.stringifyOptions) +} + +Querystring.prototype.parse = function (str) { + return (this.useQuerystring) + ? 
this.lib.parse(str, + this.parseOptions.sep || null, + this.parseOptions.eq || null, + this.parseOptions) + : this.lib.parse(str, this.parseOptions) +} + +Querystring.prototype.rfc3986 = function (str) { + return str.replace(/[!'()*]/g, function (c) { + return '%' + c.charCodeAt(0).toString(16).toUpperCase() + }) +} + +Querystring.prototype.unescape = querystring.unescape + +exports.Querystring = Querystring diff --git a/node_modules/request/lib/redirect.js b/node_modules/request/lib/redirect.js new file mode 100644 index 0000000..040dfe0 --- /dev/null +++ b/node_modules/request/lib/redirect.js @@ -0,0 +1,153 @@ +'use strict' + +var url = require('url') +var isUrl = /^https?:/ + +function Redirect (request) { + this.request = request + this.followRedirect = true + this.followRedirects = true + this.followAllRedirects = false + this.allowRedirect = function () {return true} + this.maxRedirects = 10 + this.redirects = [] + this.redirectsFollowed = 0 + this.removeRefererHeader = false +} + +Redirect.prototype.onRequest = function (options) { + var self = this + + if (options.maxRedirects !== undefined) { + self.maxRedirects = options.maxRedirects + } + if (typeof options.followRedirect === 'function') { + self.allowRedirect = options.followRedirect + } + if (options.followRedirect !== undefined) { + self.followRedirects = !!options.followRedirect + } + if (options.followAllRedirects !== undefined) { + self.followAllRedirects = options.followAllRedirects + } + if (self.followRedirects || self.followAllRedirects) { + self.redirects = self.redirects || [] + } + if (options.removeRefererHeader !== undefined) { + self.removeRefererHeader = options.removeRefererHeader + } +} + +Redirect.prototype.redirectTo = function (response) { + var self = this + , request = self.request + + var redirectTo = null + if (response.statusCode >= 300 && response.statusCode < 400 && response.caseless.has('location')) { + var location = response.caseless.get('location') + request.debug('redirect', location) + + if (self.followAllRedirects) { + redirectTo = location + } else if (self.followRedirects) { + switch (request.method) { + case 'PATCH': + case 'PUT': + case 'POST': + case 'DELETE': + // Do not follow redirects + break + default: + redirectTo = location + break + } + } + } else if (response.statusCode === 401) { + var authHeader = request._auth.onResponse(response) + if (authHeader) { + request.setHeader('authorization', authHeader) + redirectTo = request.uri + } + } + return redirectTo +} + +Redirect.prototype.onResponse = function (response) { + var self = this + , request = self.request + + var redirectTo = self.redirectTo(response) + if (!redirectTo || !self.allowRedirect.call(request, response)) { + return false + } + + request.debug('redirect to', redirectTo) + + // ignore any potential response body. it cannot possibly be useful + // to us at this point. + // response.resume should be defined, but check anyway before calling. Workaround for browserify. + if (response.resume) { + response.resume() + } + + if (self.redirectsFollowed >= self.maxRedirects) { + request.emit('error', new Error('Exceeded maxRedirects. 
Probably stuck in a redirect loop ' + request.uri.href)) + return false + } + self.redirectsFollowed += 1 + + if (!isUrl.test(redirectTo)) { + redirectTo = url.resolve(request.uri.href, redirectTo) + } + + var uriPrev = request.uri + request.uri = url.parse(redirectTo) + + // handle the case where we change protocol from https to http or vice versa + if (request.uri.protocol !== uriPrev.protocol) { + delete request.agent + } + + self.redirects.push( + { statusCode : response.statusCode + , redirectUri: redirectTo + } + ) + if (self.followAllRedirects && request.method !== 'HEAD' + && response.statusCode !== 401 && response.statusCode !== 307) { + request.method = 'GET' + } + // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215 + delete request.src + delete request.req + delete request._started + if (response.statusCode !== 401 && response.statusCode !== 307) { + // Remove parameters from the previous response, unless this is the second request + // for a server that requires digest authentication. + delete request.body + delete request._form + if (request.headers) { + request.removeHeader('host') + request.removeHeader('content-type') + request.removeHeader('content-length') + if (request.uri.hostname !== request.originalHost.split(':')[0]) { + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of curl: + // https://github.com/bagder/curl/blob/6beb0eee/lib/http.c#L710 + request.removeHeader('authorization') + } + } + } + + if (!self.removeRefererHeader) { + request.setHeader('referer', uriPrev.href) + } + + request.emit('redirect') + + request.init() + + return true +} + +exports.Redirect = Redirect diff --git a/node_modules/request/lib/tunnel.js b/node_modules/request/lib/tunnel.js new file mode 100644 index 0000000..bf96a8f --- /dev/null +++ b/node_modules/request/lib/tunnel.js @@ -0,0 +1,176 @@ +'use strict' + +var url = require('url') + , tunnel = require('tunnel-agent') + +var defaultProxyHeaderWhiteList = [ + 'accept', + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept-ranges', + 'cache-control', + 'content-encoding', + 'content-language', + 'content-location', + 'content-md5', + 'content-range', + 'content-type', + 'connection', + 'date', + 'expect', + 'max-forwards', + 'pragma', + 'referer', + 'te', + 'user-agent', + 'via' +] + +var defaultProxyHeaderExclusiveList = [ + 'proxy-authorization' +] + +function constructProxyHost(uriObject) { + var port = uriObject.port + , protocol = uriObject.protocol + , proxyHost = uriObject.hostname + ':' + + if (port) { + proxyHost += port + } else if (protocol === 'https:') { + proxyHost += '443' + } else { + proxyHost += '80' + } + + return proxyHost +} + +function constructProxyHeaderWhiteList(headers, proxyHeaderWhiteList) { + var whiteList = proxyHeaderWhiteList + .reduce(function (set, header) { + set[header.toLowerCase()] = true + return set + }, {}) + + return Object.keys(headers) + .filter(function (header) { + return whiteList[header.toLowerCase()] + }) + .reduce(function (set, header) { + set[header] = headers[header] + return set + }, {}) +} + +function constructTunnelOptions (request, proxyHeaders) { + var proxy = request.proxy + + var tunnelOptions = { + proxy : { + host : proxy.hostname, + port : +proxy.port, + proxyAuth : proxy.auth, + headers : proxyHeaders + }, + headers : request.headers, + ca : request.ca, + cert : request.cert, + key : request.key, + passphrase : request.passphrase, + pfx : 
request.pfx, + ciphers : request.ciphers, + rejectUnauthorized : request.rejectUnauthorized, + secureOptions : request.secureOptions, + secureProtocol : request.secureProtocol + } + + return tunnelOptions +} + +function constructTunnelFnName(uri, proxy) { + var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http') + var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http') + return [uriProtocol, proxyProtocol].join('Over') +} + +function getTunnelFn(request) { + var uri = request.uri + var proxy = request.proxy + var tunnelFnName = constructTunnelFnName(uri, proxy) + return tunnel[tunnelFnName] +} + + +function Tunnel (request) { + this.request = request + this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList + this.proxyHeaderExclusiveList = [] + if (typeof request.tunnel !== 'undefined') { + this.tunnelOverride = request.tunnel + } +} + +Tunnel.prototype.isEnabled = function () { + var self = this + , request = self.request + // Tunnel HTTPS by default. Allow the user to override this setting. + + // If self.tunnelOverride is set (the user specified a value), use it. + if (typeof self.tunnelOverride !== 'undefined') { + return self.tunnelOverride + } + + // If the destination is HTTPS, tunnel. + if (request.uri.protocol === 'https:') { + return true + } + + // Otherwise, do not use tunnel. + return false +} + +Tunnel.prototype.setup = function (options) { + var self = this + , request = self.request + + options = options || {} + + if (typeof request.proxy === 'string') { + request.proxy = url.parse(request.proxy) + } + + if (!request.proxy || !request.tunnel) { + return false + } + + // Setup Proxy Header Exclusive List and White List + if (options.proxyHeaderWhiteList) { + self.proxyHeaderWhiteList = options.proxyHeaderWhiteList + } + if (options.proxyHeaderExclusiveList) { + self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList + } + + var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList) + var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList) + + // Setup Proxy Headers and Proxy Headers Host + // Only send the Proxy White Listed Header names + var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList) + proxyHeaders.host = constructProxyHost(request.uri) + + proxyHeaderExclusiveList.forEach(request.removeHeader, request) + + // Set Agent from Tunnel Data + var tunnelFn = getTunnelFn(request) + var tunnelOptions = constructTunnelOptions(request, proxyHeaders) + request.agent = tunnelFn(tunnelOptions) + + return true +} + +Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList +Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList +exports.Tunnel = Tunnel diff --git a/node_modules/request/node_modules/.bin/har-validator b/node_modules/request/node_modules/.bin/har-validator new file mode 120000 index 0000000..36cec6d --- /dev/null +++ b/node_modules/request/node_modules/.bin/har-validator @@ -0,0 +1 @@ +../../../har-validator/bin/har-validator \ No newline at end of file diff --git a/node_modules/request/node_modules/.bin/uuid b/node_modules/request/node_modules/.bin/uuid new file mode 120000 index 0000000..56fb209 --- /dev/null +++ b/node_modules/request/node_modules/.bin/uuid @@ -0,0 +1 @@ +../../../node-uuid/bin/uuid \ No newline at end of file diff --git a/node_modules/request/node_modules/qs/.eslintignore b/node_modules/request/node_modules/qs/.eslintignore new file mode 100644 index 0000000..1521c8b --- /dev/null 
+++ b/node_modules/request/node_modules/qs/.eslintignore @@ -0,0 +1 @@ +dist diff --git a/node_modules/request/node_modules/qs/.eslintrc b/node_modules/request/node_modules/qs/.eslintrc new file mode 100644 index 0000000..e5c4f80 --- /dev/null +++ b/node_modules/request/node_modules/qs/.eslintrc @@ -0,0 +1,19 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "complexity": [2, 25], + "consistent-return": [1], + "id-length": [2, { "min": 1, "max": 25, "properties": "never" }], + "indent": [2, 4], + "max-params": [2, 11], + "max-statements": [2, 42], + "no-extra-parens": [1], + "no-continue": [1], + "no-magic-numbers": 0, + "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"], + "operator-linebreak": 1 + } +} diff --git a/node_modules/request/node_modules/qs/CHANGELOG.md b/node_modules/request/node_modules/qs/CHANGELOG.md new file mode 100644 index 0000000..351edd4 --- /dev/null +++ b/node_modules/request/node_modules/qs/CHANGELOG.md @@ -0,0 +1,144 @@ +## **6.3.0** +- [New] Add support for RFC 1738 (#174, #173) +- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159) +- [Fix] ensure `utils.merge` handles merging two arrays +- [Refactor] only constructors should be capitalized +- [Refactor] capitalized var names are for constructors only +- [Refactor] avoid using a sparse array +- [Robustness] `formats`: cache `String#replace` +- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest` +- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix +- [Tests] flesh out arrayLimit/arrayFormat tests (#107) +- [Tests] skip Object.create tests when null objects are not available +- [Tests] Turn on eslint for test files (#175) + +## **6.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values +- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call` +- [Tests] remove `parallelshell` since it does not reliably report failures +- [Tests] up to `node` `v6.3`, `v5.12` +- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv` + +## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed) +- [New] pass Buffers to the encoder/decoder directly (#161) +- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160) +- [Fix] fix compacting of nested sparse arrays (#150) + +## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed) +- [New] allowDots option for `stringify` (#151) +- [Fix] "sort" option should work at a depth of 3 or more (#151) +- [Fix] Restore `dist` directory; will be removed in v7 (#148) + +## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed) +- Revert ES6 requirement and restore support for node down to v0.8. 
+ +## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed) +- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json + +## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed) +- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4 + +## **5.2.1** +- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values + +## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed) +- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string + +## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed) +- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional +- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify + +## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed) +- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false +- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm + +## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed) +- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional + +## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed) +- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation" + +## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed) +- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties +- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost +- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing +- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object +- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option +- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects. +- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47 +- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986 +- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign +- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute + +## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed) +- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object # is not a function + +## [**2.4.0**](https://github.com/ljharb/qs/issues?milestone=19&state=closed) +- [**#70**](https://github.com/ljharb/qs/issues/70) Add arrayFormat option + +## [**2.3.3**](https://github.com/ljharb/qs/issues?milestone=18&state=closed) +- [**#59**](https://github.com/ljharb/qs/issues/59) make sure array indexes are >= 0, closes #57 +- [**#58**](https://github.com/ljharb/qs/issues/58) make qs usable for browser loader + +## [**2.3.2**](https://github.com/ljharb/qs/issues?milestone=17&state=closed) +- [**#55**](https://github.com/ljharb/qs/issues/55) allow merging a string into an object + +## [**2.3.1**](https://github.com/ljharb/qs/issues?milestone=16&state=closed) +- [**#52**](https://github.com/ljharb/qs/issues/52) Return "undefined" and "false" instead of throwing "TypeError". 
+ +## [**2.3.0**](https://github.com/ljharb/qs/issues?milestone=15&state=closed) +- [**#50**](https://github.com/ljharb/qs/issues/50) add option to omit array indices, closes #46 + +## [**2.2.5**](https://github.com/ljharb/qs/issues?milestone=14&state=closed) +- [**#39**](https://github.com/ljharb/qs/issues/39) Is there an alternative to Buffer.isBuffer? +- [**#49**](https://github.com/ljharb/qs/issues/49) refactor utils.merge, fixes #45 +- [**#41**](https://github.com/ljharb/qs/issues/41) avoid browserifying Buffer, for #39 + +## [**2.2.4**](https://github.com/ljharb/qs/issues?milestone=13&state=closed) +- [**#38**](https://github.com/ljharb/qs/issues/38) how to handle object keys beginning with a number + +## [**2.2.3**](https://github.com/ljharb/qs/issues?milestone=12&state=closed) +- [**#37**](https://github.com/ljharb/qs/issues/37) parser discards first empty value in array +- [**#36**](https://github.com/ljharb/qs/issues/36) Update to lab 4.x + +## [**2.2.2**](https://github.com/ljharb/qs/issues?milestone=11&state=closed) +- [**#33**](https://github.com/ljharb/qs/issues/33) Error when plain object in a value +- [**#34**](https://github.com/ljharb/qs/issues/34) use Object.prototype.hasOwnProperty.call instead of obj.hasOwnProperty +- [**#24**](https://github.com/ljharb/qs/issues/24) Changelog? Semver? + +## [**2.2.1**](https://github.com/ljharb/qs/issues?milestone=10&state=closed) +- [**#32**](https://github.com/ljharb/qs/issues/32) account for circular references properly, closes #31 +- [**#31**](https://github.com/ljharb/qs/issues/31) qs.parse stackoverflow on circular objects + +## [**2.2.0**](https://github.com/ljharb/qs/issues?milestone=9&state=closed) +- [**#26**](https://github.com/ljharb/qs/issues/26) Don't use Buffer global if it's not present +- [**#30**](https://github.com/ljharb/qs/issues/30) Bug when merging non-object values into arrays +- [**#29**](https://github.com/ljharb/qs/issues/29) Don't call Utils.clone at the top of Utils.merge +- [**#23**](https://github.com/ljharb/qs/issues/23) Ability to not limit parameters? + +## [**2.1.0**](https://github.com/ljharb/qs/issues?milestone=8&state=closed) +- [**#22**](https://github.com/ljharb/qs/issues/22) Enable using a RegExp as delimiter + +## [**2.0.0**](https://github.com/ljharb/qs/issues?milestone=7&state=closed) +- [**#18**](https://github.com/ljharb/qs/issues/18) Why is there arrayLimit? 
+- [**#20**](https://github.com/ljharb/qs/issues/20) Configurable parametersLimit +- [**#21**](https://github.com/ljharb/qs/issues/21) make all limits optional, for #18, for #20 + +## [**1.2.2**](https://github.com/ljharb/qs/issues?milestone=6&state=closed) +- [**#19**](https://github.com/ljharb/qs/issues/19) Don't overwrite null values + +## [**1.2.1**](https://github.com/ljharb/qs/issues?milestone=5&state=closed) +- [**#16**](https://github.com/ljharb/qs/issues/16) ignore non-string delimiters +- [**#15**](https://github.com/ljharb/qs/issues/15) Close code block + +## [**1.2.0**](https://github.com/ljharb/qs/issues?milestone=4&state=closed) +- [**#12**](https://github.com/ljharb/qs/issues/12) Add optional delim argument +- [**#13**](https://github.com/ljharb/qs/issues/13) fix #11: flattened keys in array are now correctly parsed + +## [**1.1.0**](https://github.com/ljharb/qs/issues?milestone=3&state=closed) +- [**#7**](https://github.com/ljharb/qs/issues/7) Empty values of a POST array disappear after being submitted +- [**#9**](https://github.com/ljharb/qs/issues/9) Should not omit equals signs (=) when value is null +- [**#6**](https://github.com/ljharb/qs/issues/6) Minor grammar fix in README + +## [**1.0.2**](https://github.com/ljharb/qs/issues?milestone=2&state=closed) +- [**#5**](https://github.com/ljharb/qs/issues/5) array holes incorrectly copied into object on large index diff --git a/node_modules/request/node_modules/qs/CONTRIBUTING.md b/node_modules/request/node_modules/qs/CONTRIBUTING.md new file mode 100644 index 0000000..8928361 --- /dev/null +++ b/node_modules/request/node_modules/qs/CONTRIBUTING.md @@ -0,0 +1 @@ +Please view our [hapijs contributing guide](https://github.com/hapijs/hapi/blob/master/CONTRIBUTING.md). diff --git a/node_modules/request/node_modules/qs/LICENSE b/node_modules/request/node_modules/qs/LICENSE new file mode 100644 index 0000000..d456948 --- /dev/null +++ b/node_modules/request/node_modules/qs/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2014 Nathan LaFreniere and other contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + * * * + +The complete list of contributors can be found at: https://github.com/hapijs/qs/graphs/contributors diff --git a/node_modules/request/node_modules/qs/README.md b/node_modules/request/node_modules/qs/README.md new file mode 100644 index 0000000..ac1e7f1 --- /dev/null +++ b/node_modules/request/node_modules/qs/README.md @@ -0,0 +1,398 @@ +# qs + +A querystring parsing and stringifying library with some added security. + +[![Build Status](https://api.travis-ci.org/ljharb/qs.svg)](http://travis-ci.org/ljharb/qs) + +Lead Maintainer: [Jordan Harband](https://github.com/ljharb) + +The **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring). + +## Usage + +```javascript +var qs = require('qs'); +var assert = require('assert'); + +var obj = qs.parse('a=c'); +assert.deepEqual(obj, { a: 'c' }); + +var str = qs.stringify(obj); +assert.equal(str, 'a=c'); +``` + +### Parsing Objects + +[](#preventEval) +```javascript +qs.parse(string, [options]); +``` + +**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`. +For example, the string `'foo[bar]=baz'` converts to: + +```javascript +assert.deepEqual(qs.parse('foo[bar]=baz'), { + foo: { + bar: 'baz' + } +}); +``` + +When using the `plainObjects` option the parsed value is returned as a null object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like: + +```javascript +var nullObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true }); +assert.deepEqual(nullObject, { a: { hasOwnProperty: 'b' } }); +``` + +By default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option. + +```javascript +var protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }); +assert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } }); +``` + +URI encoded strings work too: + +```javascript +assert.deepEqual(qs.parse('a%5Bb%5D=c'), { + a: { b: 'c' } +}); +``` + +You can also nest your objects, like `'foo[bar][baz]=foobarbaz'`: + +```javascript +assert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), { + foo: { + bar: { + baz: 'foobarbaz' + } + } +}); +``` + +By default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like +`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be: + +```javascript +var expected = { + a: { + b: { + c: { + d: { + e: { + f: { + '[g][h][i]': 'j' + } + } + } + } + } + } +}; +var string = 'a[b][c][d][e][f][g][h][i]=j'; +assert.deepEqual(qs.parse(string), expected); +``` + +This depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`: + +```javascript +var deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 }); +assert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } }); +``` + +The depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number. 
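+
+For instance, a small sketch (the input string is illustrative) of tightening
+the limit when the query string comes from an untrusted client:
+
+```javascript
+// anything nested deeper than two levels is kept as an opaque bracketed key
+// instead of allocating further nested objects for attacker-controlled input
+var shallow = qs.parse('a[b][c][d]=x', { depth: 2 });
+assert.deepEqual(shallow, { a: { b: { c: { '[d]': 'x' } } } });
+```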
+ +For similar reasons, by default **qs** will only parse up to 1000 parameters. This can be overridden by passing a `parameterLimit` option: + +```javascript +var limited = qs.parse('a=b&c=d', { parameterLimit: 1 }); +assert.deepEqual(limited, { a: 'b' }); +``` + +An optional delimiter can also be passed: + +```javascript +var delimited = qs.parse('a=b;c=d', { delimiter: ';' }); +assert.deepEqual(delimited, { a: 'b', c: 'd' }); +``` + +Delimiters can be a regular expression too: + +```javascript +var regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ }); +assert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' }); +``` + +Option `allowDots` can be used to enable dot notation: + +```javascript +var withDots = qs.parse('a.b=c', { allowDots: true }); +assert.deepEqual(withDots, { a: { b: 'c' } }); +``` + +### Parsing Arrays + +**qs** can also parse arrays using a similar `[]` notation: + +```javascript +var withArray = qs.parse('a[]=b&a[]=c'); +assert.deepEqual(withArray, { a: ['b', 'c'] }); +``` + +You may specify an index as well: + +```javascript +var withIndexes = qs.parse('a[1]=c&a[0]=b'); +assert.deepEqual(withIndexes, { a: ['b', 'c'] }); +``` + +Note that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number +to create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving +their order: + +```javascript +var noSparse = qs.parse('a[1]=b&a[15]=c'); +assert.deepEqual(noSparse, { a: ['b', 'c'] }); +``` + +Note that an empty string is also a value, and will be preserved: + +```javascript +var withEmptyString = qs.parse('a[]=&a[]=b'); +assert.deepEqual(withEmptyString, { a: ['', 'b'] }); + +var withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c'); +assert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] }); +``` + +**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will +instead be converted to an object with the index as the key: + +```javascript +var withMaxIndex = qs.parse('a[100]=b'); +assert.deepEqual(withMaxIndex, { a: { '100': 'b' } }); +``` + +This limit can be overridden by passing an `arrayLimit` option: + +```javascript +var withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 }); +assert.deepEqual(withArrayLimit, { a: { '1': 'b' } }); +``` + +To disable array parsing entirely, set `parseArrays` to `false`. + +```javascript +var noParsingArrays = qs.parse('a[]=b', { parseArrays: false }); +assert.deepEqual(noParsingArrays, { a: { '0': 'b' } }); +``` + +If you mix notations, **qs** will merge the two items into an object: + +```javascript +var mixedNotation = qs.parse('a[0]=b&a[b]=c'); +assert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } }); +``` + +You can also create arrays of objects: + +```javascript +var arraysOfObjects = qs.parse('a[][b]=c'); +assert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] }); +``` + +### Stringifying + +[](#preventEval) +```javascript +qs.stringify(object, [options]); +``` + +When stringifying, **qs** by default URI encodes output. 
Objects are stringified as you would expect: + +```javascript +assert.equal(qs.stringify({ a: 'b' }), 'a=b'); +assert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); +``` + +This encoding can be disabled by setting the `encode` option to `false`: + +```javascript +var unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false }); +assert.equal(unencoded, 'a[b]=c'); +``` + +This encoding can also be replaced by a custom encoding method set as `encoder` option: + +```javascript +var encoded = qs.stringify({ a: { b: 'c' } }, { encoder: function (str) { + // Passed in values `a`, `b`, `c` + return // Return encoded string +}}) +``` + +_(Note: the `encoder` option does not apply if `encode` is `false`)_ + +Analogue to the `encoder` there is a `decoder` option for `parse` to override decoding of properties and values: + +```javascript +var decoded = qs.parse('x=z', { decoder: function (str) { + // Passed in values `x`, `z` + return // Return decoded string +}}) +``` + +Examples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage. + +When arrays are stringified, by default they are given explicit indices: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }); +// 'a[0]=b&a[1]=c&a[2]=d' +``` + +You may override this by setting the `indices` option to `false`: + +```javascript +qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }); +// 'a=b&a=c&a=d' +``` + +You may use the `arrayFormat` option to specify the format of the output array + +```javascript +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }) +// 'a[0]=b&a[1]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }) +// 'a[]=b&a[]=c' +qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }) +// 'a=b&a=c' +``` + +Empty strings and null values will omit the value, but the equals sign (=) remains in place: + +```javascript +assert.equal(qs.stringify({ a: '' }), 'a='); +``` + +Properties that are set to `undefined` will be omitted entirely: + +```javascript +assert.equal(qs.stringify({ a: null, b: undefined }), 'a='); +``` + +The delimiter may be overridden with stringify as well: + +```javascript +assert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); +``` + +If you only want to override the serialization of `Date` objects, you can provide a `serializeDate` option: + +```javascript +var date = new Date(7); +assert.equal(qs.stringify({ a: date }), 'a=1970-01-01T00:00:00.007Z'.replace(/:/g, '%3A')); +assert.equal( + qs.stringify({ a: date }, { serializeDate: function (d) { return d.getTime(); } }), + 'a=7' +); +``` + +Finally, you can use the `filter` option to restrict which keys will be included in the stringified output. +If you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you +pass an array, it will be used to select properties and array indices for stringification: + +```javascript +function filterFunc(prefix, value) { + if (prefix == 'b') { + // Return an `undefined` value to omit a property. 
+        return;
+    }
+    if (prefix == 'e[f]') {
+        return value.getTime();
+    }
+    if (prefix == 'e[g][0]') {
+        return value * 2;
+    }
+    return value;
+}
+qs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc });
+// 'a=b&c=d&e[f]=123&e[g][0]=4'
+qs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] });
+// 'a=b&e=f'
+qs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] });
+// 'a[0]=b&a[2]=d'
+```
+
+### Handling of `null` values
+
+By default, `null` values are treated like empty strings:
+
+```javascript
+var withNull = qs.stringify({ a: null, b: '' });
+assert.equal(withNull, 'a=&b=');
+```
+
+Parsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings.
+
+```javascript
+var equalsInsensitive = qs.parse('a&b=');
+assert.deepEqual(equalsInsensitive, { a: '', b: '' });
+```
+
+To distinguish between `null` values and empty strings, use the `strictNullHandling` flag. In the resulting string the `null` values have no `=` sign:
+
+```javascript
+var strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true });
+assert.equal(strictNull, 'a&b=');
+```
+
+To parse values without `=` back to `null`, use the `strictNullHandling` flag:
+
+```javascript
+var parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true });
+assert.deepEqual(parsedStrictNull, { a: null, b: '' });
+```
+
+To completely skip rendering keys with `null` values, use the `skipNulls` flag:
+
+```javascript
+var nullsSkipped = qs.stringify({ a: 'b', c: null }, { skipNulls: true });
+assert.equal(nullsSkipped, 'a=b');
+```
+
+### Dealing with special character sets
+
+By default, the encoding and decoding of characters is done in `utf-8`. If you wish to encode querystrings to a different character set (e.g. [Shift JIS](https://en.wikipedia.org/wiki/Shift_JIS)) you can use the [`qs-iconv`](https://github.com/martinheidegger/qs-iconv) library:
+
+```javascript
+var encoder = require('qs-iconv/encoder')('shift_jis');
+var shiftJISEncoded = qs.stringify({ a: 'こんにちは!' }, { encoder: encoder });
+assert.equal(shiftJISEncoded, 'a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I');
+```
+
+This also works for decoding of query strings:
+
+```javascript
+var decoder = require('qs-iconv/decoder')('shift_jis');
+var obj = qs.parse('a=%82%B1%82%F1%82%C9%82%BF%82%CD%81I', { decoder: decoder });
+assert.deepEqual(obj, { a: 'こんにちは!' });
+```
+
+### RFC 3986 and RFC 1738 space encoding
+
+RFC 3986 is used as the default option and encodes ' ' to *%20*, which is backward compatible. At the same time, output can be stringified as per RFC 1738, with ' ' encoded as '+'.
+ +``` +assert.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC3986' }), 'a=b%20c'); +assert.equal(qs.stringify({ a: 'b c' }, { format : 'RFC1738' }), 'a=b+c'); +``` diff --git a/node_modules/request/node_modules/qs/dist/qs.js b/node_modules/request/node_modules/qs/dist/qs.js new file mode 100644 index 0000000..1a80d2d --- /dev/null +++ b/node_modules/request/node_modules/qs/dist/qs.js @@ -0,0 +1,574 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Qs = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? 
options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; + +},{"./utils":5}],4:[function(require,module,exports){ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var toISO = Date.prototype.toISOString; + +var defaults = { + delimiter: '&', + encode: true, + encoder: utils.encode, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots, serializeDate, formatter) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { + if (encoder) { + return [formatter(encoder(prefix)) + '=' + formatter(encoder(obj))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify( + obj[key], + generateArrayPrefix(prefix, key), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } else { + values = values.concat(stringify( + obj[key], + prefix + (allowDots ? '.' + key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? 
defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? (typeof options.encoder === 'function' ? options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var serializeDate = typeof options.serializeDate === 'function' ? options.serializeDate : defaults.serializeDate; + if (typeof options.format === 'undefined') { + options.format = formats.default; + } else if (!Object.prototype.hasOwnProperty.call(formats.formatters, options.format)) { + throw new TypeError('Unknown format option provided.'); + } + var formatter = formats.formatters[options.format]; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify( + obj[key], + key, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } + + return keys.join(delimiter); +}; + +},{"./formats":1,"./utils":5}],5:[function(require,module,exports){ +'use strict'; + +var has = Object.prototype.hasOwnProperty; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + if (Array.isArray(target) && Array.isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + if (target[i] && typeof target[i] === 'object') { + target[i] = exports.merge(target[i], item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . 
+ c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + keys.forEach(function (key) { + obj[key] = exports.compact(obj[key], refs); + }); + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +},{}]},{},[2])(2) +}); \ No newline at end of file diff --git a/node_modules/request/node_modules/qs/lib/formats.js b/node_modules/request/node_modules/qs/lib/formats.js new file mode 100644 index 0000000..df45997 --- /dev/null +++ b/node_modules/request/node_modules/qs/lib/formats.js @@ -0,0 +1,18 @@ +'use strict'; + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +module.exports = { + 'default': 'RFC3986', + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return value; + } + }, + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; diff --git a/node_modules/request/node_modules/qs/lib/index.js b/node_modules/request/node_modules/qs/lib/index.js new file mode 100755 index 0000000..0d6a97d --- /dev/null +++ b/node_modules/request/node_modules/qs/lib/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var stringify = require('./stringify'); +var parse = require('./parse'); +var formats = require('./formats'); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; diff --git a/node_modules/request/node_modules/qs/lib/parse.js b/node_modules/request/node_modules/qs/lib/parse.js new file mode 100755 index 0000000..97387a6 --- /dev/null +++ b/node_modules/request/node_modules/qs/lib/parse.js @@ -0,0 +1,166 @@ +'use strict'; + +var utils = require('./utils'); + +var has = Object.prototype.hasOwnProperty; + +var defaults = { + allowDots: false, + allowPrototypes: false, + arrayLimit: 20, + decoder: utils.decode, + delimiter: '&', + depth: 5, + parameterLimit: 1000, + plainObjects: false, + strictNullHandling: false +}; + +var parseValues = function parseValues(str, options) { + var obj = {}; + var parts = 
str.split(options.delimiter, options.parameterLimit === Infinity ? undefined : options.parameterLimit); + + for (var i = 0; i < parts.length; ++i) { + var part = parts[i]; + var pos = part.indexOf(']=') === -1 ? part.indexOf('=') : part.indexOf(']=') + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos)); + val = options.decoder(part.slice(pos + 1)); + } + if (has.call(obj, key)) { + obj[key] = [].concat(obj[key]).concat(val); + } else { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function parseObject(chain, val, options) { + if (!chain.length) { + return val; + } + + var root = chain.shift(); + + var obj; + if (root === '[]') { + obj = []; + obj = obj.concat(parseObject(chain, val, options)); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root[0] === '[' && root[root.length - 1] === ']' ? root.slice(1, root.length - 1) : root; + var index = parseInt(cleanRoot, 10); + if ( + !isNaN(index) && + root !== cleanRoot && + String(index) === cleanRoot && + index >= 0 && + (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = parseObject(chain, val, options); + } else { + obj[cleanRoot] = parseObject(chain, val, options); + } + } + + return obj; +}; + +var parseKeys = function parseKeys(givenKey, val, options) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^\.\[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var parent = /^([^\[\]]*)/; + var child = /(\[[^\[\]]*\])/g; + + // Get the parent + + var segment = parent.exec(key); + + // Stash the parent if it exists + + var keys = []; + if (segment[1]) { + // If we aren't using plain objects, optionally prefix keys + // that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, segment[1])) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(segment[1]); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while ((segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].replace(/\[|\]/g, ''))) { + if (!options.allowPrototypes) { + continue; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options); +}; + +module.exports = function (str, opts) { + var options = opts || {}; + + if (options.decoder !== null && options.decoder !== undefined && typeof options.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + options.delimiter = typeof options.delimiter === 'string' || utils.isRegExp(options.delimiter) ? options.delimiter : defaults.delimiter; + options.depth = typeof options.depth === 'number' ? options.depth : defaults.depth; + options.arrayLimit = typeof options.arrayLimit === 'number' ? options.arrayLimit : defaults.arrayLimit; + options.parseArrays = options.parseArrays !== false; + options.decoder = typeof options.decoder === 'function' ? options.decoder : defaults.decoder; + options.allowDots = typeof options.allowDots === 'boolean' ? options.allowDots : defaults.allowDots; + options.plainObjects = typeof options.plainObjects === 'boolean' ? 
options.plainObjects : defaults.plainObjects; + options.allowPrototypes = typeof options.allowPrototypes === 'boolean' ? options.allowPrototypes : defaults.allowPrototypes; + options.parameterLimit = typeof options.parameterLimit === 'number' ? options.parameterLimit : defaults.parameterLimit; + options.strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options); + obj = utils.merge(obj, newObj, options); + } + + return utils.compact(obj); +}; diff --git a/node_modules/request/node_modules/qs/lib/stringify.js b/node_modules/request/node_modules/qs/lib/stringify.js new file mode 100755 index 0000000..88cd791 --- /dev/null +++ b/node_modules/request/node_modules/qs/lib/stringify.js @@ -0,0 +1,187 @@ +'use strict'; + +var utils = require('./utils'); +var formats = require('./formats'); + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var toISO = Date.prototype.toISOString; + +var defaults = { + delimiter: '&', + encode: true, + encoder: utils.encode, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var stringify = function stringify(object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encoder, filter, sort, allowDots, serializeDate, formatter) { + var obj = object; + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (obj === null) { + if (strictNullHandling) { + return encoder ? encoder(prefix) : prefix; + } + + obj = ''; + } + + if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || utils.isBuffer(obj)) { + if (encoder) { + return [formatter(encoder(prefix)) + '=' + formatter(encoder(obj))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (Array.isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + if (Array.isArray(obj)) { + values = values.concat(stringify( + obj[key], + generateArrayPrefix(prefix, key), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } else { + values = values.concat(stringify( + obj[key], + prefix + (allowDots ? '.' 
+ key : '[' + key + ']'), + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } + } + + return values; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = opts || {}; + var delimiter = typeof options.delimiter === 'undefined' ? defaults.delimiter : options.delimiter; + var strictNullHandling = typeof options.strictNullHandling === 'boolean' ? options.strictNullHandling : defaults.strictNullHandling; + var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : defaults.skipNulls; + var encode = typeof options.encode === 'boolean' ? options.encode : defaults.encode; + var encoder = encode ? (typeof options.encoder === 'function' ? options.encoder : defaults.encoder) : null; + var sort = typeof options.sort === 'function' ? options.sort : null; + var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots; + var serializeDate = typeof options.serializeDate === 'function' ? options.serializeDate : defaults.serializeDate; + if (typeof options.format === 'undefined') { + options.format = formats.default; + } else if (!Object.prototype.hasOwnProperty.call(formats.formatters, options.format)) { + throw new TypeError('Unknown format option provided.'); + } + var formatter = formats.formatters[options.format]; + var objKeys; + var filter; + + if (options.encoder !== null && options.encoder !== undefined && typeof options.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (Array.isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var arrayFormat; + if (options.arrayFormat in arrayPrefixGenerators) { + arrayFormat = options.arrayFormat; + } else if ('indices' in options) { + arrayFormat = options.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = 'indices'; + } + + var generateArrayPrefix = arrayPrefixGenerators[arrayFormat]; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (sort) { + objKeys.sort(sort); + } + + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (skipNulls && obj[key] === null) { + continue; + } + + keys = keys.concat(stringify( + obj[key], + key, + generateArrayPrefix, + strictNullHandling, + skipNulls, + encoder, + filter, + sort, + allowDots, + serializeDate, + formatter + )); + } + + return keys.join(delimiter); +}; diff --git a/node_modules/request/node_modules/qs/lib/utils.js b/node_modules/request/node_modules/qs/lib/utils.js new file mode 100755 index 0000000..33c01c0 --- /dev/null +++ b/node_modules/request/node_modules/qs/lib/utils.js @@ -0,0 +1,180 @@ +'use strict'; + +var has = Object.prototype.hasOwnProperty; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +exports.arrayToObject = function (source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +exports.merge = function (target, source, options) { + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (Array.isArray(target)) { + target.push(source); + } else if (typeof target === 'object') { + target[source] = true; + } else { + return [target, source]; + } + + return target; + } + + if (typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (Array.isArray(target) && !Array.isArray(source)) { + mergeTarget = exports.arrayToObject(target, options); + } + + if (Array.isArray(target) && Array.isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + if (target[i] && typeof target[i] === 'object') { + target[i] = exports.merge(target[i], item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (Object.prototype.hasOwnProperty.call(acc, key)) { + acc[key] = exports.merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +exports.decode = function (str) { + try { + return decodeURIComponent(str.replace(/\+/g, ' ')); + } catch (e) { + return str; + } +}; + +exports.encode = function (str) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = typeof str === 'string' ? str : String(str); + + var out = ''; + for (var i = 0; i < string.length; ++i) { + var c = string.charCodeAt(i); + + if ( + c === 0x2D || // - + c === 0x2E || // . 
+ c === 0x5F || // _ + c === 0x7E || // ~ + (c >= 0x30 && c <= 0x39) || // 0-9 + (c >= 0x41 && c <= 0x5A) || // a-z + (c >= 0x61 && c <= 0x7A) // A-Z + ) { + out += string.charAt(i); + continue; + } + + if (c < 0x80) { + out = out + hexTable[c]; + continue; + } + + if (c < 0x800) { + out = out + (hexTable[0xC0 | (c >> 6)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + out = out + (hexTable[0xE0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]); + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (string.charCodeAt(i) & 0x3FF)); + out += hexTable[0xF0 | (c >> 18)] + hexTable[0x80 | ((c >> 12) & 0x3F)] + hexTable[0x80 | ((c >> 6) & 0x3F)] + hexTable[0x80 | (c & 0x3F)]; + } + + return out; +}; + +exports.compact = function (obj, references) { + if (typeof obj !== 'object' || obj === null) { + return obj; + } + + var refs = references || []; + var lookup = refs.indexOf(obj); + if (lookup !== -1) { + return refs[lookup]; + } + + refs.push(obj); + + if (Array.isArray(obj)) { + var compacted = []; + + for (var i = 0; i < obj.length; ++i) { + if (obj[i] && typeof obj[i] === 'object') { + compacted.push(exports.compact(obj[i], refs)); + } else if (typeof obj[i] !== 'undefined') { + compacted.push(obj[i]); + } + } + + return compacted; + } + + var keys = Object.keys(obj); + keys.forEach(function (key) { + obj[key] = exports.compact(obj[key], refs); + }); + + return obj; +}; + +exports.isRegExp = function (obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +exports.isBuffer = function (obj) { + if (obj === null || typeof obj === 'undefined') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; diff --git a/node_modules/request/node_modules/qs/package.json b/node_modules/request/node_modules/qs/package.json new file mode 100644 index 0000000..fc53357 --- /dev/null +++ b/node_modules/request/node_modules/qs/package.json @@ -0,0 +1,50 @@ +{ + "name": "qs", + "description": "A querystring parser that supports nesting and arrays, with a depth limit", + "homepage": "https://github.com/ljharb/qs", + "version": "6.3.0", + "repository": { + "type": "git", + "url": "https://github.com/ljharb/qs.git" + }, + "main": "lib/index.js", + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "keywords": [ + "querystring", + "qs" + ], + "engines": { + "node": ">=0.6" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^8.0.0", + "browserify": "^13.1.0", + "covert": "^1.1.0", + "eslint": "^3.8.0", + "evalmd": "^0.0.17", + "iconv-lite": "^0.4.13", + "mkdirp": "^0.5.1", + "parallelshell": "^2.0.0", + "qs-iconv": "^1.0.3", + "safe-publish-latest": "^1.1.1", + "tape": "^4.6.2" + }, + "scripts": { + "prepublish": "safe-publish-latest && npm run dist", + "pretest": "npm run --silent readme && npm run --silent lint", + "test": "npm run --silent coverage", + "tests-only": "node test", + "readme": "evalmd README.md", + "lint": "eslint lib/*.js test/*.js", + "coverage": "covert test", + "dist": "mkdirp dist && browserify --standalone Qs lib/index.js > dist/qs.js" + }, + "license": "BSD-3-Clause" +} diff --git a/node_modules/request/node_modules/qs/test/.eslintrc b/node_modules/request/node_modules/qs/test/.eslintrc new file mode 100644 index 0000000..7155394 --- /dev/null +++ b/node_modules/request/node_modules/qs/test/.eslintrc @@ -0,0 +1,9 @@ +{ + 
"rules": { + "max-lines": 0, + "max-nested-callbacks": [2, 3], + "max-statements": 0, + "no-extend-native": 0, + "sort-keys": 1 + } +} diff --git a/node_modules/request/node_modules/qs/test/index.js b/node_modules/request/node_modules/qs/test/index.js new file mode 100644 index 0000000..5e6bc8f --- /dev/null +++ b/node_modules/request/node_modules/qs/test/index.js @@ -0,0 +1,7 @@ +'use strict'; + +require('./parse'); + +require('./stringify'); + +require('./utils'); diff --git a/node_modules/request/node_modules/qs/test/parse.js b/node_modules/request/node_modules/qs/test/parse.js new file mode 100755 index 0000000..3a9611e --- /dev/null +++ b/node_modules/request/node_modules/qs/test/parse.js @@ -0,0 +1,459 @@ +'use strict'; + +var test = require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('parse()', function (t) { + t.test('parses a simple string', function (st) { + st.deepEqual(qs.parse('0=foo'), { 0: 'foo' }); + st.deepEqual(qs.parse('foo=c++'), { foo: 'c ' }); + st.deepEqual(qs.parse('a[>=]=23'), { a: { '>=': '23' } }); + st.deepEqual(qs.parse('a[<=>]==23'), { a: { '<=>': '=23' } }); + st.deepEqual(qs.parse('a[==]=23'), { a: { '==': '23' } }); + st.deepEqual(qs.parse('foo', { strictNullHandling: true }), { foo: null }); + st.deepEqual(qs.parse('foo'), { foo: '' }); + st.deepEqual(qs.parse('foo='), { foo: '' }); + st.deepEqual(qs.parse('foo=bar'), { foo: 'bar' }); + st.deepEqual(qs.parse(' foo = bar = baz '), { ' foo ': ' bar = baz ' }); + st.deepEqual(qs.parse('foo=bar=baz'), { foo: 'bar=baz' }); + st.deepEqual(qs.parse('foo=bar&bar=baz'), { foo: 'bar', bar: 'baz' }); + st.deepEqual(qs.parse('foo2=bar2&baz2='), { foo2: 'bar2', baz2: '' }); + st.deepEqual(qs.parse('foo=bar&baz', { strictNullHandling: true }), { foo: 'bar', baz: null }); + st.deepEqual(qs.parse('foo=bar&baz'), { foo: 'bar', baz: '' }); + st.deepEqual(qs.parse('cht=p3&chd=t:60,40&chs=250x100&chl=Hello|World'), { + cht: 'p3', + chd: 't:60,40', + chs: '250x100', + chl: 'Hello|World' + }); + st.end(); + }); + + t.test('allows enabling dot notation', function (st) { + st.deepEqual(qs.parse('a.b=c'), { 'a.b': 'c' }); + st.deepEqual(qs.parse('a.b=c', { allowDots: true }), { a: { b: 'c' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c'), { a: { b: 'c' } }, 'parses a single nested string'); + t.deepEqual(qs.parse('a[b][c]=d'), { a: { b: { c: 'd' } } }, 'parses a double nested string'); + t.deepEqual( + qs.parse('a[b][c][d][e][f][g][h]=i'), + { a: { b: { c: { d: { e: { f: { '[g][h]': 'i' } } } } } } }, + 'defaults to a depth of 5' + ); + + t.test('only parses one level when depth = 1', function (st) { + st.deepEqual(qs.parse('a[b][c]=d', { depth: 1 }), { a: { b: { '[c]': 'd' } } }); + st.deepEqual(qs.parse('a[b][c][d]=e', { depth: 1 }), { a: { b: { '[c][d]': 'e' } } }); + st.end(); + }); + + t.deepEqual(qs.parse('a=b&a=c'), { a: ['b', 'c'] }, 'parses a simple array'); + + t.test('parses an explicit array', function (st) { + st.deepEqual(qs.parse('a[]=b'), { a: ['b'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a[]=c&a[]=d'), { a: ['b', 'c', 'd'] }); + st.end(); + }); + + t.test('parses a mix of simple and explicit arrays', function (st) { + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[0]=b&a=c'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[0]=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a[1]=b&a=c', { arrayLimit: 20 }), { a: ['b', 
'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[]=b&a=c'), { a: ['b', 'c'] }); + + st.deepEqual(qs.parse('a=b&a[1]=c', { arrayLimit: 20 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c', { arrayLimit: 0 }), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a=b&a[]=c'), { a: ['b', 'c'] }); + + st.end(); + }); + + t.test('parses a nested array', function (st) { + st.deepEqual(qs.parse('a[b][]=c&a[b][]=d'), { a: { b: ['c', 'd'] } }); + st.deepEqual(qs.parse('a[>=]=25'), { a: { '>=': '25' } }); + st.end(); + }); + + t.test('allows to specify array indices', function (st) { + st.deepEqual(qs.parse('a[1]=c&a[0]=b&a[2]=d'), { a: ['b', 'c', 'd'] }); + st.deepEqual(qs.parse('a[1]=c&a[0]=b'), { a: ['b', 'c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 20 }), { a: ['c'] }); + st.deepEqual(qs.parse('a[1]=c', { arrayLimit: 0 }), { a: { 1: 'c' } }); + st.deepEqual(qs.parse('a[1]=c'), { a: ['c'] }); + st.end(); + }); + + t.test('limits specific array indices to arrayLimit', function (st) { + st.deepEqual(qs.parse('a[20]=a', { arrayLimit: 20 }), { a: ['a'] }); + st.deepEqual(qs.parse('a[21]=a', { arrayLimit: 20 }), { a: { 21: 'a' } }); + st.end(); + }); + + t.deepEqual(qs.parse('a[12b]=c'), { a: { '12b': 'c' } }, 'supports keys that begin with a number'); + + t.test('supports encoded = signs', function (st) { + st.deepEqual(qs.parse('he%3Dllo=th%3Dere'), { 'he=llo': 'th=ere' }); + st.end(); + }); + + t.test('is ok with url encoded strings', function (st) { + st.deepEqual(qs.parse('a[b%20c]=d'), { a: { 'b c': 'd' } }); + st.deepEqual(qs.parse('a[b]=c%20d'), { a: { b: 'c d' } }); + st.end(); + }); + + t.test('allows brackets in the value', function (st) { + st.deepEqual(qs.parse('pets=["tobi"]'), { pets: '["tobi"]' }); + st.deepEqual(qs.parse('operators=[">=", "<="]'), { operators: '[">=", "<="]' }); + st.end(); + }); + + t.test('allows empty values', function (st) { + st.deepEqual(qs.parse(''), {}); + st.deepEqual(qs.parse(null), {}); + st.deepEqual(qs.parse(undefined), {}); + st.end(); + }); + + t.test('transforms arrays to objects', function (st) { + st.deepEqual(qs.parse('foo[0]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[0]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar'), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo[bad]=baz'), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo[bad]=baz&foo[]=bar&foo[]=foo'), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0][a]=a&foo[0][b]=b&foo[1][a]=aa&foo[1][b]=bb'), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: false }), { a: { 0: 'b', c: true, t: 'u' } }); + st.deepEqual(qs.parse('a[]=b&a[t]=u&a[hasOwnProperty]=c', { allowPrototypes: true }), { a: { 0: 'b', t: 'u', hasOwnProperty: 'c' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: false }), { a: { 0: 'b', 1: 'c', x: 'y' } }); + st.deepEqual(qs.parse('a[]=b&a[hasOwnProperty]=c&a[x]=y', { allowPrototypes: true }), { a: { 0: 'b', hasOwnProperty: 'c', x: 'y' } }); + st.end(); + }); + + t.test('transforms arrays to objects (dot notation)', function (st) { + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [{ baz: 'bar' }], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz=bar&fool.bad.boo=baz', { allowDots: 
true }), { foo: [{ baz: 'bar' }], fool: { bad: { boo: 'baz' } } }); + st.deepEqual(qs.parse('foo[0][0].baz=bar&fool.bad=baz', { allowDots: true }), { foo: [[{ baz: 'bar' }]], fool: { bad: 'baz' } }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15'], bar: '2' }] }); + st.deepEqual(qs.parse('foo[0].baz[0]=15&foo[0].baz[1]=16&foo[0].bar=2', { allowDots: true }), { foo: [{ baz: ['15', '16'], bar: '2' }] }); + st.deepEqual(qs.parse('foo.bad=baz&foo[0]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar' } }); + st.deepEqual(qs.parse('foo[]=bar&foo.bad=baz', { allowDots: true }), { foo: { 0: 'bar', bad: 'baz' } }); + st.deepEqual(qs.parse('foo.bad=baz&foo[]=bar&foo[]=foo', { allowDots: true }), { foo: { bad: 'baz', 0: 'bar', 1: 'foo' } }); + st.deepEqual(qs.parse('foo[0].a=a&foo[0].b=b&foo[1].a=aa&foo[1].b=bb', { allowDots: true }), { foo: [{ a: 'a', b: 'b' }, { a: 'aa', b: 'bb' }] }); + st.end(); + }); + + t.deepEqual(qs.parse('a[b]=c&a=d'), { a: { b: 'c', d: true } }, 'can add keys to objects'); + + t.test('correctly prunes undefined values when converting an array to an object', function (st) { + st.deepEqual(qs.parse('a[2]=b&a[99999999]=c'), { a: { 2: 'b', 99999999: 'c' } }); + st.end(); + }); + + t.test('supports malformed uri characters', function (st) { + st.deepEqual(qs.parse('{%:%}', { strictNullHandling: true }), { '{%:%}': null }); + st.deepEqual(qs.parse('{%:%}='), { '{%:%}': '' }); + st.deepEqual(qs.parse('foo=%:%}'), { foo: '%:%}' }); + st.end(); + }); + + t.test('doesn\'t produce empty keys', function (st) { + st.deepEqual(qs.parse('_r=1&'), { _r: '1' }); + st.end(); + }); + + t.test('cannot access Object prototype', function (st) { + qs.parse('constructor[prototype][bad]=bad'); + qs.parse('bad[constructor][prototype][bad]=bad'); + st.equal(typeof Object.prototype.bad, 'undefined'); + st.end(); + }); + + t.test('parses arrays of objects', function (st) { + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + st.deepEqual(qs.parse('a[0][b]=c'), { a: [{ b: 'c' }] }); + st.end(); + }); + + t.test('allows for empty strings in arrays', function (st) { + st.deepEqual(qs.parse('a[]=b&a[]=&a[]=c'), { a: ['b', '', 'c'] }); + + st.deepEqual( + qs.parse('a[0]=b&a[1]&a[2]=c&a[19]=', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 20 + array indices: null then empty string works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]&a[]=c&a[]=', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', null, 'c', ''] }, + 'with arrayLimit 0 + array brackets: null then empty string works' + ); + + st.deepEqual( + qs.parse('a[0]=b&a[1]=&a[2]=c&a[19]', { strictNullHandling: true, arrayLimit: 20 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 20 + array indices: empty string then null works' + ); + st.deepEqual( + qs.parse('a[]=b&a[]=&a[]=c&a[]', { strictNullHandling: true, arrayLimit: 0 }), + { a: ['b', '', 'c', null] }, + 'with arrayLimit 0 + array brackets: empty string then null works' + ); + + st.deepEqual( + qs.parse('a[]=&a[]=b&a[]=c'), + { a: ['', 'b', 'c'] }, + 'array brackets: empty strings work' + ); + st.end(); + }); + + t.test('compacts sparse arrays', function (st) { + st.deepEqual(qs.parse('a[10]=1&a[2]=2', { arrayLimit: 20 }), { a: ['2', '1'] }); + st.deepEqual(qs.parse('a[1][b][2][c]=1', { arrayLimit: 20 }), { a: [{ b: [{ c: '1' }] }] }); + 
st.deepEqual(qs.parse('a[1][2][3][c]=1', { arrayLimit: 20 }), { a: [[[{ c: '1' }]]] }); + st.deepEqual(qs.parse('a[1][2][3][c][1]=1', { arrayLimit: 20 }), { a: [[[{ c: ['1'] }]]] }); + st.end(); + }); + + t.test('parses semi-parsed strings', function (st) { + st.deepEqual(qs.parse({ 'a[b]': 'c' }), { a: { b: 'c' } }); + st.deepEqual(qs.parse({ 'a[b]': 'c', 'a[d]': 'e' }), { a: { b: 'c', d: 'e' } }); + st.end(); + }); + + t.test('parses buffers correctly', function (st) { + var b = new Buffer('test'); + st.deepEqual(qs.parse({ a: b }), { a: b }); + st.end(); + }); + + t.test('continues parsing when no parent is found', function (st) { + st.deepEqual(qs.parse('[]=&a=b'), { 0: '', a: 'b' }); + st.deepEqual(qs.parse('[]&a=b', { strictNullHandling: true }), { 0: null, a: 'b' }); + st.deepEqual(qs.parse('[foo]=bar'), { foo: 'bar' }); + st.end(); + }); + + t.test('does not error when parsing a very long array', function (st) { + var str = 'a[]=a'; + while (Buffer.byteLength(str) < 128 * 1024) { + str = str + '&' + str; + } + + st.doesNotThrow(function () { + qs.parse(str); + }); + + st.end(); + }); + + t.test('should not throw when a native prototype has an enumerable property', { parallel: false }, function (st) { + Object.prototype.crash = ''; + Array.prototype.crash = ''; + st.doesNotThrow(qs.parse.bind(null, 'a=b')); + st.deepEqual(qs.parse('a=b'), { a: 'b' }); + st.doesNotThrow(qs.parse.bind(null, 'a[][b]=c')); + st.deepEqual(qs.parse('a[][b]=c'), { a: [{ b: 'c' }] }); + delete Object.prototype.crash; + delete Array.prototype.crash; + st.end(); + }); + + t.test('parses a string with an alternative string delimiter', function (st) { + st.deepEqual(qs.parse('a=b;c=d', { delimiter: ';' }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('parses a string with an alternative RegExp delimiter', function (st) { + st.deepEqual(qs.parse('a=b; c=d', { delimiter: /[;,] */ }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not use non-splittable objects as delimiters', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { delimiter: true }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding parameter limit', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: 1 }), { a: 'b' }); + st.end(); + }); + + t.test('allows setting the parameter limit to Infinity', function (st) { + st.deepEqual(qs.parse('a=b&c=d', { parameterLimit: Infinity }), { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('allows overriding array limit', function (st) { + st.deepEqual(qs.parse('a[0]=b', { arrayLimit: -1 }), { a: { 0: 'b' } }); + st.deepEqual(qs.parse('a[-1]=b', { arrayLimit: -1 }), { a: { '-1': 'b' } }); + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { arrayLimit: 0 }), { a: { 0: 'b', 1: 'c' } }); + st.end(); + }); + + t.test('allows disabling array parsing', function (st) { + st.deepEqual(qs.parse('a[0]=b&a[1]=c', { parseArrays: false }), { a: { 0: 'b', 1: 'c' } }); + st.end(); + }); + + t.test('parses an object', function (st) { + var input = { + 'user[name]': { 'pop[bob]': 3 }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('parses an object in dot notation', function (st) { + var input = { + 'user.name': { 'pop[bob]': 3 }, + 'user.email.': null + }; + + var expected = { + user: { + name: { 'pop[bob]': 3 }, + email: null + } + }; + + var result = qs.parse(input, { allowDots: true }); + + st.deepEqual(result, expected); + 
st.end(); + }); + + t.test('parses an object and not child values', function (st) { + var input = { + 'user[name]': { 'pop[bob]': { test: 3 } }, + 'user[email]': null + }; + + var expected = { + user: { + name: { 'pop[bob]': { test: 3 } }, + email: null + } + }; + + var result = qs.parse(input); + + st.deepEqual(result, expected); + st.end(); + }); + + t.test('does not blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.parse('a=b&c=d'); + global.Buffer = tempBuffer; + st.deepEqual(result, { a: 'b', c: 'd' }); + st.end(); + }); + + t.test('does not crash when parsing circular references', function (st) { + var a = {}; + a.b = a; + + var parsed; + + st.doesNotThrow(function () { + parsed = qs.parse({ 'foo[bar]': 'baz', 'foo[baz]': a }); + }); + + st.equal('foo' in parsed, true, 'parsed has "foo" property'); + st.equal('bar' in parsed.foo, true); + st.equal('baz' in parsed.foo, true); + st.equal(parsed.foo.bar, 'baz'); + st.deepEqual(parsed.foo.baz, a); + st.end(); + }); + + t.test('parses null objects correctly', { skip: !Object.create }, function (st) { + var a = Object.create(null); + a.b = 'c'; + + st.deepEqual(qs.parse(a), { b: 'c' }); + var result = qs.parse({ a: a }); + st.equal('a' in result, true, 'result has "a" property'); + st.deepEqual(result.a, a); + st.end(); + }); + + t.test('parses dates correctly', function (st) { + var now = new Date(); + st.deepEqual(qs.parse({ a: now }), { a: now }); + st.end(); + }); + + t.test('parses regular expressions correctly', function (st) { + var re = /^test$/; + st.deepEqual(qs.parse({ a: re }), { a: re }); + st.end(); + }); + + t.test('can allow overwriting prototype properties', function (st) { + st.deepEqual(qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true }), { a: { hasOwnProperty: 'b' } }, { prototype: false }); + st.deepEqual(qs.parse('hasOwnProperty=b', { allowPrototypes: true }), { hasOwnProperty: 'b' }, { prototype: false }); + st.end(); + }); + + t.test('can return null objects', { skip: !Object.create }, function (st) { + var expected = Object.create(null); + expected.a = Object.create(null); + expected.a.b = 'c'; + expected.a.hasOwnProperty = 'd'; + st.deepEqual(qs.parse('a[b]=c&a[hasOwnProperty]=d', { plainObjects: true }), expected); + st.deepEqual(qs.parse(null, { plainObjects: true }), Object.create(null)); + var expectedArray = Object.create(null); + expectedArray.a = Object.create(null); + expectedArray.a[0] = 'b'; + expectedArray.a.c = 'd'; + st.deepEqual(qs.parse('a[]=b&a[c]=d', { plainObjects: true }), expectedArray); + st.end(); + }); + + t.test('can parse with custom encoding', function (st) { + st.deepEqual(qs.parse('%8c%a7=%91%e5%8d%e3%95%7b', { + decoder: function (str) { + var reg = /%([0-9A-F]{2})/ig; + var result = []; + var parts = reg.exec(str); + while (parts) { + result.push(parseInt(parts[1], 16)); + parts = reg.exec(str); + } + return iconv.decode(new Buffer(result), 'shift_jis').toString(); + } + }), { 県: '大阪府' }); + st.end(); + }); + + t.test('throws error with wrong decoder', function (st) { + st.throws(function () { + qs.parse({}, { decoder: 'string' }); + }, new TypeError('Decoder has to be a function.')); + st.end(); + }); +}); diff --git a/node_modules/request/node_modules/qs/test/stringify.js b/node_modules/request/node_modules/qs/test/stringify.js new file mode 100755 index 0000000..8818eac --- /dev/null +++ b/node_modules/request/node_modules/qs/test/stringify.js @@ -0,0 +1,538 @@ +'use strict'; + +var test = 
require('tape'); +var qs = require('../'); +var iconv = require('iconv-lite'); + +test('stringify()', function (t) { + t.test('stringifies a querystring object', function (st) { + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: 1 }), 'a=1'); + st.equal(qs.stringify({ a: 1, b: 2 }), 'a=1&b=2'); + st.equal(qs.stringify({ a: 'A_Z' }), 'a=A_Z'); + st.equal(qs.stringify({ a: '€' }), 'a=%E2%82%AC'); + st.equal(qs.stringify({ a: '' }), 'a=%EE%80%80'); + st.equal(qs.stringify({ a: 'א' }), 'a=%D7%90'); + st.equal(qs.stringify({ a: '𐐷' }), 'a=%F0%90%90%B7'); + st.end(); + }); + + t.test('stringifies a nested object', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }), 'a%5Bb%5D%5Bc%5D%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies a nested object with dots notation', function (st) { + st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c'); + st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e'); + st.end(); + }); + + t.test('stringifies an array value', function (st) { + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'indices' }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }, { arrayFormat: 'brackets' }), + 'a%5B%5D=b&a%5B%5D=c&a%5B%5D=d', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: ['b', 'c', 'd'] }), + 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d', + 'default => indices' + ); + st.end(); + }); + + t.test('omits nulls when asked', function (st) { + st.equal(qs.stringify({ a: 'b', c: null }, { skipNulls: true }), 'a=b'); + st.end(); + }); + + t.test('omits nested nulls when asked', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: null } }, { skipNulls: true }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('omits array indices when asked', function (st) { + st.equal(qs.stringify({ a: ['b', 'c', 'd'] }, { indices: false }), 'a=b&a=c&a=d'); + st.end(); + }); + + t.test('stringifies a nested array value', function (st) { + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'indices' }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { arrayFormat: 'brackets' }), 'a%5Bb%5D%5B%5D=c&a%5Bb%5D%5B%5D=d'); + st.equal(qs.stringify({ a: { b: ['c', 'd'] } }), 'a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d'); + st.end(); + }); + + t.test('stringifies a nested array value with dots notation', function (st) { + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a.b[0]=c&a.b[1]=d', + 'indices: stringifies with dots + indices' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a.b[]=c&a.b[]=d', + 'brackets: stringifies with dots + brackets' + ); + st.equal( + qs.stringify( + { a: { b: ['c', 'd'] } }, + { allowDots: true, encode: false } + ), + 'a.b[0]=c&a.b[1]=d', + 'default: stringifies with dots + indices' + ); + st.end(); + }); + + t.test('stringifies an object inside an array', function (st) { + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D=c', + 'indices => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D=c', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 'c' }] }), + 'a%5B0%5D%5Bb%5D=c', + 'default => indices' + ); + + 
st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'indices' }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'indices => indices' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }, { arrayFormat: 'brackets' }), + 'a%5B%5D%5Bb%5D%5Bc%5D%5B%5D=1', + 'brackets => brackets' + ); + + st.equal( + qs.stringify({ a: [{ b: { c: [1] } }] }), + 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an array with mixed objects and primitives', function (st) { + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'indices' }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'indices => indices' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false, arrayFormat: 'brackets' }), + 'a[][b]=1&a[]=2&a[]=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify({ a: [{ b: 1 }, 2, 3] }, { encode: false }), + 'a[0][b]=1&a[1]=2&a[2]=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('stringifies an object inside an array with dots notation', function (st) { + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b=c', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b=c', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: 'c' }] }, + { allowDots: true, encode: false } + ), + 'a[0].b=c', + 'default => indices' + ); + + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'indices' } + ), + 'a[0].b.c[0]=1', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false, arrayFormat: 'brackets' } + ), + 'a[].b.c[]=1', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: [{ b: { c: [1] } }] }, + { allowDots: true, encode: false } + ), + 'a[0].b.c[0]=1', + 'default => indices' + ); + + st.end(); + }); + + t.test('does not omit object keys when indices = false', function (st) { + st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when indices=true', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { indices: true }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat is specified', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses indices notation for arrays when no arrayFormat=indices', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' }), 'a%5B0%5D=b&a%5B1%5D=c'); + st.end(); + }); + + t.test('uses repeat notation for arrays when no arrayFormat=repeat', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' }), 'a=b&a=c'); + st.end(); + }); + + t.test('uses brackets notation for arrays when no arrayFormat=brackets', function (st) { + st.equal(qs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' }), 'a%5B%5D=b&a%5B%5D=c'); + st.end(); + }); + + t.test('stringifies a complicated object', function (st) { + st.equal(qs.stringify({ a: { b: 'c', d: 'e' } }), 'a%5Bb%5D=c&a%5Bd%5D=e'); + st.end(); + }); + + t.test('stringifies an empty value', function (st) { + st.equal(qs.stringify({ a: '' }), 'a='); + st.equal(qs.stringify({ a: null }, { strictNullHandling: true }), 'a'); + + st.equal(qs.stringify({ a: 
'', b: '' }), 'a=&b='); + st.equal(qs.stringify({ a: null, b: '' }, { strictNullHandling: true }), 'a&b='); + + st.equal(qs.stringify({ a: { b: '' } }), 'a%5Bb%5D='); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: true }), 'a%5Bb%5D'); + st.equal(qs.stringify({ a: { b: null } }, { strictNullHandling: false }), 'a%5Bb%5D='); + + st.end(); + }); + + t.test('stringifies a null object', { skip: !Object.create }, function (st) { + var obj = Object.create(null); + obj.a = 'b'; + st.equal(qs.stringify(obj), 'a=b'); + st.end(); + }); + + t.test('returns an empty string for invalid input', function (st) { + st.equal(qs.stringify(undefined), ''); + st.equal(qs.stringify(false), ''); + st.equal(qs.stringify(null), ''); + st.equal(qs.stringify(''), ''); + st.end(); + }); + + t.test('stringifies an object with a null object as a child', { skip: !Object.create }, function (st) { + var obj = { a: Object.create(null) }; + + obj.a.b = 'c'; + st.equal(qs.stringify(obj), 'a%5Bb%5D=c'); + st.end(); + }); + + t.test('drops keys with a value of undefined', function (st) { + st.equal(qs.stringify({ a: undefined }), ''); + + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: true }), 'a%5Bc%5D'); + st.equal(qs.stringify({ a: { b: undefined, c: null } }, { strictNullHandling: false }), 'a%5Bc%5D='); + st.equal(qs.stringify({ a: { b: undefined, c: '' } }), 'a%5Bc%5D='); + st.end(); + }); + + t.test('url encodes values', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('stringifies a date', function (st) { + var now = new Date(); + var str = 'a=' + encodeURIComponent(now.toISOString()); + st.equal(qs.stringify({ a: now }), str); + st.end(); + }); + + t.test('stringifies the weird object from qs', function (st) { + st.equal(qs.stringify({ 'my weird field': '~q1!2"\'w$5&7/z8)?' 
}), 'my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F'); + st.end(); + }); + + t.test('skips properties that are part of the object prototype', function (st) { + Object.prototype.crash = 'test'; + st.equal(qs.stringify({ a: 'b' }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c'); + delete Object.prototype.crash; + st.end(); + }); + + t.test('stringifies boolean values', function (st) { + st.equal(qs.stringify({ a: true }), 'a=true'); + st.equal(qs.stringify({ a: { b: true } }), 'a%5Bb%5D=true'); + st.equal(qs.stringify({ b: false }), 'b=false'); + st.equal(qs.stringify({ b: { c: false } }), 'b%5Bc%5D=false'); + st.end(); + }); + + t.test('stringifies buffer values', function (st) { + st.equal(qs.stringify({ a: new Buffer('test') }), 'a=test'); + st.equal(qs.stringify({ a: { b: new Buffer('test') } }), 'a%5Bb%5D=test'); + st.end(); + }); + + t.test('stringifies an object using an alternative delimiter', function (st) { + st.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d'); + st.end(); + }); + + t.test('doesn\'t blow up when Buffer global is missing', function (st) { + var tempBuffer = global.Buffer; + delete global.Buffer; + var result = qs.stringify({ a: 'b', c: 'd' }); + global.Buffer = tempBuffer; + st.equal(result, 'a=b&c=d'); + st.end(); + }); + + t.test('selects properties when filter=array', function (st) { + st.equal(qs.stringify({ a: 'b' }, { filter: ['a'] }), 'a=b'); + st.equal(qs.stringify({ a: 1 }, { filter: [] }), ''); + + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'indices' } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'indices => indices' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2], arrayFormat: 'brackets' } + ), + 'a%5Bb%5D%5B%5D=1&a%5Bb%5D%5B%5D=3', + 'brackets => brackets' + ); + st.equal( + qs.stringify( + { a: { b: [1, 2, 3, 4], c: 'd' }, c: 'f' }, + { filter: ['a', 'b', 0, 2] } + ), + 'a%5Bb%5D%5B0%5D=1&a%5Bb%5D%5B2%5D=3', + 'default => indices' + ); + + st.end(); + }); + + t.test('supports custom representations when filter=function', function (st) { + var calls = 0; + var obj = { a: 'b', c: 'd', e: { f: new Date(1257894000000) } }; + var filterFunc = function (prefix, value) { + calls += 1; + if (calls === 1) { + st.equal(prefix, '', 'prefix is empty'); + st.equal(value, obj); + } else if (prefix === 'c') { + return; + } else if (value instanceof Date) { + st.equal(prefix, 'e[f]'); + return value.getTime(); + } + return value; + }; + + st.equal(qs.stringify(obj, { filter: filterFunc }), 'a=b&e%5Bf%5D=1257894000000'); + st.equal(calls, 5); + st.end(); + }); + + t.test('can disable uri encoding', function (st) { + st.equal(qs.stringify({ a: 'b' }, { encode: false }), 'a=b'); + st.equal(qs.stringify({ a: { b: 'c' } }, { encode: false }), 'a[b]=c'); + st.equal(qs.stringify({ a: 'b', c: null }, { strictNullHandling: true, encode: false }), 'a=b&c'); + st.end(); + }); + + t.test('can sort the keys', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal(qs.stringify({ a: 'c', z: 'y', b: 'f' }, { sort: sort }), 'a=c&b=f&z=y'); + st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a'); + st.end(); + }); + + t.test('can sort the keys at depth 3 or more too', function (st) { + var sort = function (a, b) { + return a.localeCompare(b); + }; + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', 
zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: sort, encode: false } + ), + 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb' + ); + st.equal( + qs.stringify( + { a: 'a', z: { zj: { zjb: 'zjb', zja: 'zja' }, zi: { zib: 'zib', zia: 'zia' } }, b: 'b' }, + { sort: null, encode: false } + ), + 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b' + ); + st.end(); + }); + + t.test('can stringify with custom encoding', function (st) { + st.equal(qs.stringify({ 県: '大阪府', '': '' }, { + encoder: function (str) { + if (str.length === 0) { + return ''; + } + var buf = iconv.encode(str, 'shiftjis'); + var result = []; + for (var i = 0; i < buf.length; ++i) { + result.push(buf.readUInt8(i).toString(16)); + } + return '%' + result.join('%'); + } + }), '%8c%a7=%91%e5%8d%e3%95%7b&='); + st.end(); + }); + + t.test('throws error with wrong encoder', function (st) { + st.throws(function () { + qs.stringify({}, { encoder: 'string' }); + }, new TypeError('Encoder has to be a function.')); + st.end(); + }); + + t.test('can use custom encoder for a buffer object', { skip: typeof Buffer === 'undefined' }, function (st) { + st.equal(qs.stringify({ a: new Buffer([1]) }, { + encoder: function (buffer) { + if (typeof buffer === 'string') { + return buffer; + } + return String.fromCharCode(buffer.readUInt8(0) + 97); + } + }), 'a=b'); + st.end(); + }); + + t.test('serializeDate option', function (st) { + var date = new Date(); + st.equal( + qs.stringify({ a: date }), + 'a=' + date.toISOString().replace(/:/g, '%3A'), + 'default is toISOString' + ); + + var mutatedDate = new Date(); + mutatedDate.toISOString = function () { + throw new SyntaxError(); + }; + st.throws(function () { + mutatedDate.toISOString(); + }, SyntaxError); + st.equal( + qs.stringify({ a: mutatedDate }), + 'a=' + Date.prototype.toISOString.call(mutatedDate).replace(/:/g, '%3A'), + 'toISOString works even when method is not locally present' + ); + + var specificDate = new Date(6); + st.equal( + qs.stringify( + { a: specificDate }, + { serializeDate: function (d) { return d.getTime() * 7; } } + ), + 'a=42', + 'custom serializeDate function called' + ); + + st.end(); + }); + + t.test('RFC 1738 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC1738 }), 'a=b+c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC1738 }), 'a+b=c+d'); + st.end(); + }); + + t.test('RFC 3986 spaces serialization', function (st) { + st.equal(qs.stringify({ a: 'b c' }, { format: qs.formats.RFC3986 }), 'a=b%20c'); + st.equal(qs.stringify({ 'a b': 'c d' }, { format: qs.formats.RFC3986 }), 'a%20b=c%20d'); + st.end(); + }); + + t.test('Backward compatibility to RFC 3986', function (st) { + st.equal(qs.stringify({ a: 'b c' }), 'a=b%20c'); + st.end(); + }); + + t.test('Edge cases and unknown formats', function (st) { + ['UFO1234', false, 1234, null, {}, []].forEach( + function (format) { + st.throws( + function () { + qs.stringify({ a: 'b c' }, { format: format }); + }, + new TypeError('Unknown format option provided.') + ); + } + ); + st.end(); + }); +}); diff --git a/node_modules/request/node_modules/qs/test/utils.js b/node_modules/request/node_modules/qs/test/utils.js new file mode 100755 index 0000000..0721dd8 --- /dev/null +++ b/node_modules/request/node_modules/qs/test/utils.js @@ -0,0 +1,22 @@ +'use strict'; + +var test = require('tape'); +var utils = require('../lib/utils'); + +test('merge()', function (t) { + t.deepEqual(utils.merge({ a: 'b' }, { a: 'c' 
}), { a: ['b', 'c'] }, 'merges two objects with the same key'); + + var oneMerged = utils.merge({ foo: 'bar' }, { foo: { first: '123' } }); + t.deepEqual(oneMerged, { foo: ['bar', { first: '123' }] }, 'merges a standalone and an object into an array'); + + var twoMerged = utils.merge({ foo: ['bar', { first: '123' }] }, { foo: { second: '456' } }); + t.deepEqual(twoMerged, { foo: { 0: 'bar', 1: { first: '123' }, second: '456' } }, 'merges a standalone and two objects into an array'); + + var sandwiched = utils.merge({ foo: ['bar', { first: '123', second: '456' }] }, { foo: 'baz' }); + t.deepEqual(sandwiched, { foo: ['bar', { first: '123', second: '456' }, 'baz'] }, 'merges an object sandwiched by two standalones into an array'); + + var nestedArrays = utils.merge({ foo: ['baz'] }, { foo: ['bar', 'xyzzy'] }); + t.deepEqual(nestedArrays, { foo: ['baz', 'bar', 'xyzzy'] }); + + t.end(); +}); diff --git a/node_modules/request/package.json b/node_modules/request/package.json new file mode 100644 index 0000000..73d8675 --- /dev/null +++ b/node_modules/request/package.json @@ -0,0 +1,82 @@ +{ + "name": "request", + "description": "Simplified HTTP request client.", + "tags": [ + "http", + "simple", + "util", + "utility" + ], + "version": "2.76.0", + "author": "Mikeal Rogers ", + "repository": { + "type": "git", + "url": "https://github.com/request/request.git" + }, + "bugs": { + "url": "http://github.com/request/request/issues" + }, + "license": "Apache-2.0", + "engines": { + "node": ">=0.8.0" + }, + "main": "index.js", + "dependencies": { + "aws-sign2": "~0.6.0", + "aws4": "^1.2.1", + "caseless": "~0.11.0", + "combined-stream": "~1.0.5", + "extend": "~3.0.0", + "forever-agent": "~0.6.1", + "form-data": "~2.1.1", + "har-validator": "~2.0.6", + "hawk": "~3.1.3", + "http-signature": "~1.1.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.7", + "node-uuid": "~1.4.7", + "oauth-sign": "~0.8.1", + "qs": "~6.3.0", + "stringstream": "~0.0.4", + "tough-cookie": "~2.3.0", + "tunnel-agent": "~0.4.1" + }, + "scripts": { + "test": "npm run lint && npm run test-ci && npm run test-browser", + "test-ci": "taper tests/test-*.js", + "test-cov": "istanbul cover tape tests/test-*.js", + "test-browser": "node tests/browser/start.js", + "lint": "eslint lib/ *.js tests/ && echo Lint passed." 
+ }, + "devDependencies": { + "bluebird": "^3.2.1", + "browserify": "^13.0.1", + "browserify-istanbul": "^2.0.0", + "buffer-equal": "^1.0.0", + "codecov": "^1.0.1", + "coveralls": "^2.11.4", + "eslint": "^2.5.3", + "function-bind": "^1.0.2", + "istanbul": "^0.4.0", + "karma": "^1.1.1", + "karma-browserify": "^5.0.1", + "karma-cli": "^1.0.0", + "karma-coverage": "^1.0.0", + "karma-phantomjs-launcher": "^1.0.0", + "karma-tap": "^3.0.1", + "phantomjs-prebuilt": "^2.1.3", + "rimraf": "^2.2.8", + "server-destroy": "^1.0.1", + "tape": "^4.6.0", + "taper": "^0.4.0" + }, + "greenkeeper": { + "ignore": [ + "eslint", + "hawk", + "har-validator" + ] + } +} diff --git a/node_modules/request/request.js b/node_modules/request/request.js new file mode 100644 index 0000000..8ed57b7 --- /dev/null +++ b/node_modules/request/request.js @@ -0,0 +1,1450 @@ +'use strict' + +var http = require('http') + , https = require('https') + , url = require('url') + , util = require('util') + , stream = require('stream') + , zlib = require('zlib') + , hawk = require('hawk') + , aws2 = require('aws-sign2') + , aws4 = require('aws4') + , httpSignature = require('http-signature') + , mime = require('mime-types') + , stringstream = require('stringstream') + , caseless = require('caseless') + , ForeverAgent = require('forever-agent') + , FormData = require('form-data') + , extend = require('extend') + , isstream = require('isstream') + , isTypedArray = require('is-typedarray').strict + , helpers = require('./lib/helpers') + , cookies = require('./lib/cookies') + , getProxyFromURI = require('./lib/getProxyFromURI') + , Querystring = require('./lib/querystring').Querystring + , Har = require('./lib/har').Har + , Auth = require('./lib/auth').Auth + , OAuth = require('./lib/oauth').OAuth + , Multipart = require('./lib/multipart').Multipart + , Redirect = require('./lib/redirect').Redirect + , Tunnel = require('./lib/tunnel').Tunnel + +var safeStringify = helpers.safeStringify + , isReadStream = helpers.isReadStream + , toBase64 = helpers.toBase64 + , defer = helpers.defer + , copy = helpers.copy + , version = helpers.version + , globalCookieJar = cookies.jar() + + +var globalPool = {} + +function filterForNonReserved(reserved, options) { + // Filter out properties that are not reserved. + // Reserved values are passed in at call site. + + var object = {} + for (var i in options) { + var notReserved = (reserved.indexOf(i) === -1) + if (notReserved) { + object[i] = options[i] + } + } + return object +} + +function filterOutReservedFunctions(reserved, options) { + // Filter out properties that are functions and are reserved. + // Reserved values are passed in at call site. 
+ + var object = {} + for (var i in options) { + var isReserved = !(reserved.indexOf(i) === -1) + var isFunction = (typeof options[i] === 'function') + if (!(isReserved && isFunction)) { + object[i] = options[i] + } + } + return object + +} + +// Return a simpler request object to allow serialization +function requestToJSON() { + var self = this + return { + uri: self.uri, + method: self.method, + headers: self.headers + } +} + +// Return a simpler response object to allow serialization +function responseToJSON() { + var self = this + return { + statusCode: self.statusCode, + body: self.body, + headers: self.headers, + request: requestToJSON.call(self.request) + } +} + +function Request (options) { + // if given the method property in options, set property explicitMethod to true + + // extend the Request instance with any non-reserved properties + // remove any reserved functions from the options object + // set Request instance to be readable and writable + // call init + + var self = this + + // start with HAR, then override with additional options + if (options.har) { + self._har = new Har(self) + options = self._har.options(options) + } + + stream.Stream.call(self) + var reserved = Object.keys(Request.prototype) + var nonReserved = filterForNonReserved(reserved, options) + + extend(self, nonReserved) + options = filterOutReservedFunctions(reserved, options) + + self.readable = true + self.writable = true + if (options.method) { + self.explicitMethod = true + } + self._qs = new Querystring(self) + self._auth = new Auth(self) + self._oauth = new OAuth(self) + self._multipart = new Multipart(self) + self._redirect = new Redirect(self) + self._tunnel = new Tunnel(self) + self.init(options) +} + +util.inherits(Request, stream.Stream) + +// Debugging +Request.debug = process.env.NODE_DEBUG && /\brequest\b/.test(process.env.NODE_DEBUG) +function debug() { + if (Request.debug) { + console.error('REQUEST %s', util.format.apply(util, arguments)) + } +} +Request.prototype.debug = debug + +Request.prototype.init = function (options) { + // init() contains all the code to setup the request object. + // the actual outgoing request is not started until start() is called + // this function is called from both the constructor and on redirect. + var self = this + if (!options) { + options = {} + } + self.headers = self.headers ? copy(self.headers) : {} + + // Delete headers with value undefined since they break + // ClientRequest.OutgoingMessage.setHeader in node 0.12 + for (var headerName in self.headers) { + if (typeof self.headers[headerName] === 'undefined') { + delete self.headers[headerName] + } + } + + caseless.httpify(self, self.headers) + + if (!self.method) { + self.method = options.method || 'GET' + } + if (!self.localAddress) { + self.localAddress = options.localAddress + } + + self._qs.init(options) + + debug(options) + if (!self.pool && self.pool !== false) { + self.pool = globalPool + } + self.dests = self.dests || [] + self.__isRequestRequest = true + + // Protect against double callback + if (!self._callback && self.callback) { + self._callback = self.callback + self.callback = function () { + if (self._callbackCalled) { + return // Print a warning maybe? 
+ } + self._callbackCalled = true + self._callback.apply(self, arguments) + } + self.on('error', self.callback.bind()) + self.on('complete', self.callback.bind(self, null)) + } + + // People use this property instead all the time, so support it + if (!self.uri && self.url) { + self.uri = self.url + delete self.url + } + + // If there's a baseUrl, then use it as the base URL (i.e. uri must be + // specified as a relative path and is appended to baseUrl). + if (self.baseUrl) { + if (typeof self.baseUrl !== 'string') { + return self.emit('error', new Error('options.baseUrl must be a string')) + } + + if (typeof self.uri !== 'string') { + return self.emit('error', new Error('options.uri must be a string when using options.baseUrl')) + } + + if (self.uri.indexOf('//') === 0 || self.uri.indexOf('://') !== -1) { + return self.emit('error', new Error('options.uri must be a path when using options.baseUrl')) + } + + // Handle all cases to make sure that there's only one slash between + // baseUrl and uri. + var baseUrlEndsWithSlash = self.baseUrl.lastIndexOf('/') === self.baseUrl.length - 1 + var uriStartsWithSlash = self.uri.indexOf('/') === 0 + + if (baseUrlEndsWithSlash && uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri.slice(1) + } else if (baseUrlEndsWithSlash || uriStartsWithSlash) { + self.uri = self.baseUrl + self.uri + } else if (self.uri === '') { + self.uri = self.baseUrl + } else { + self.uri = self.baseUrl + '/' + self.uri + } + delete self.baseUrl + } + + // A URI is needed by this point, emit error if we haven't been able to get one + if (!self.uri) { + return self.emit('error', new Error('options.uri is a required argument')) + } + + // If a string URI/URL was given, parse it into a URL object + if (typeof self.uri === 'string') { + self.uri = url.parse(self.uri) + } + + // Some URL objects are not from a URL parsed string and need href added + if (!self.uri.href) { + self.uri.href = url.format(self.uri) + } + + // DEPRECATED: Warning for users of the old Unix Sockets URL Scheme + if (self.uri.protocol === 'unix:') { + return self.emit('error', new Error('`unix://` URL scheme is no longer supported. Please use the format `http://unix:SOCKET:PATH`')) + } + + // Support Unix Sockets + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + if (self.strictSSL === false) { + self.rejectUnauthorized = false + } + + if (!self.uri.pathname) {self.uri.pathname = '/'} + + if (!(self.uri.host || (self.uri.hostname && self.uri.port)) && !self.uri.isUnix) { + // Invalid URI: it may generate lot of bad errors, like 'TypeError: Cannot call method `indexOf` of undefined' in CookieJar + // Detect and reject it as soon as possible + var faultyUri = url.format(self.uri) + var message = 'Invalid URI "' + faultyUri + '"' + if (Object.keys(options).length === 0) { + // No option ? This can be the sign of a redirect + // As this is a case where the user cannot do anything (they didn't call request directly with this URL) + // they should be warned that it can be caused by a redirection (can save some hair) + message += '. This can be caused by a crappy redirection.' 
+ } + // This error was fatal + self.abort() + return self.emit('error', new Error(message)) + } + + if (!self.hasOwnProperty('proxy')) { + self.proxy = getProxyFromURI(self.uri) + } + + self.tunnel = self._tunnel.isEnabled() + if (self.proxy) { + self._tunnel.setup(options) + } + + self._redirect.onRequest(options) + + self.setHost = false + if (!self.hasHeader('host')) { + var hostHeaderName = self.originalHostHeaderName || 'host' + self.setHeader(hostHeaderName, self.uri.hostname) + if (self.uri.port) { + if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') && + !(self.uri.port === 443 && self.uri.protocol === 'https:') ) { + self.setHeader(hostHeaderName, self.getHeader('host') + (':' + self.uri.port) ) + } + } + self.setHost = true + } + + self.jar(self._jar || options.jar) + + if (!self.uri.port) { + if (self.uri.protocol === 'http:') {self.uri.port = 80} + else if (self.uri.protocol === 'https:') {self.uri.port = 443} + } + + if (self.proxy && !self.tunnel) { + self.port = self.proxy.port + self.host = self.proxy.hostname + } else { + self.port = self.uri.port + self.host = self.uri.hostname + } + + if (options.form) { + self.form(options.form) + } + + if (options.formData) { + var formData = options.formData + var requestForm = self.form() + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty('value') && value.hasOwnProperty('options')) { + requestForm.append(key, value.value, value.options) + } else { + requestForm.append(key, value) + } + } + for (var formKey in formData) { + if (formData.hasOwnProperty(formKey)) { + var formValue = formData[formKey] + if (formValue instanceof Array) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]) + } + } else { + appendFormValue(formKey, formValue) + } + } + } + } + + if (options.qs) { + self.qs(options.qs) + } + + if (self.uri.path) { + self.path = self.uri.path + } else { + self.path = self.uri.pathname + (self.uri.search || '') + } + + if (self.path.length === 0) { + self.path = '/' + } + + // Auth must happen last in case signing is dependent on other headers + if (options.aws) { + self.aws(options.aws) + } + + if (options.hawk) { + self.hawk(options.hawk) + } + + if (options.httpSignature) { + self.httpSignature(options.httpSignature) + } + + if (options.auth) { + if (Object.prototype.hasOwnProperty.call(options.auth, 'username')) { + options.auth.user = options.auth.username + } + if (Object.prototype.hasOwnProperty.call(options.auth, 'password')) { + options.auth.pass = options.auth.password + } + + self.auth( + options.auth.user, + options.auth.pass, + options.auth.sendImmediately, + options.auth.bearer + ) + } + + if (self.gzip && !self.hasHeader('accept-encoding')) { + self.setHeader('accept-encoding', 'gzip, deflate') + } + + if (self.uri.auth && !self.hasHeader('authorization')) { + var uriAuthPieces = self.uri.auth.split(':').map(function(item) {return self._qs.unescape(item)}) + self.auth(uriAuthPieces[0], uriAuthPieces.slice(1).join(':'), true) + } + + if (!self.tunnel && self.proxy && self.proxy.auth && !self.hasHeader('proxy-authorization')) { + var proxyAuthPieces = self.proxy.auth.split(':').map(function(item) {return self._qs.unescape(item)}) + var authHeader = 'Basic ' + toBase64(proxyAuthPieces.join(':')) + self.setHeader('proxy-authorization', authHeader) + } + + if (self.proxy && !self.tunnel) { + self.path = (self.uri.protocol + '//' + self.uri.host + self.path) + } + + if (options.json) { + self.json(options.json) + } + if 
(options.multipart) { + self.multipart(options.multipart) + } + + if (options.time) { + self.timing = true + self.elapsedTime = self.elapsedTime || 0 + } + + function setContentLength () { + if (isTypedArray(self.body)) { + self.body = new Buffer(self.body) + } + + if (!self.hasHeader('content-length')) { + var length + if (typeof self.body === 'string') { + length = Buffer.byteLength(self.body) + } + else if (Array.isArray(self.body)) { + length = self.body.reduce(function (a, b) {return a + b.length}, 0) + } + else { + length = self.body.length + } + + if (length) { + self.setHeader('content-length', length) + } else { + self.emit('error', new Error('Argument error, options.body.')) + } + } + } + if (self.body && !isstream(self.body)) { + setContentLength() + } + + if (options.oauth) { + self.oauth(options.oauth) + } else if (self._oauth.params && self.hasHeader('authorization')) { + self.oauth(self._oauth.params) + } + + var protocol = self.proxy && !self.tunnel ? self.proxy.protocol : self.uri.protocol + , defaultModules = {'http:':http, 'https:':https} + , httpModules = self.httpModules || {} + + self.httpModule = httpModules[protocol] || defaultModules[protocol] + + if (!self.httpModule) { + return self.emit('error', new Error('Invalid protocol: ' + protocol)) + } + + if (options.ca) { + self.ca = options.ca + } + + if (!self.agent) { + if (options.agentOptions) { + self.agentOptions = options.agentOptions + } + + if (options.agentClass) { + self.agentClass = options.agentClass + } else if (options.forever) { + var v = version() + // use ForeverAgent in node 0.10- only + if (v.major === 0 && v.minor <= 10) { + self.agentClass = protocol === 'http:' ? ForeverAgent : ForeverAgent.SSL + } else { + self.agentClass = self.httpModule.Agent + self.agentOptions = self.agentOptions || {} + self.agentOptions.keepAlive = true + } + } else { + self.agentClass = self.httpModule.Agent + } + } + + if (self.pool === false) { + self.agent = false + } else { + self.agent = self.agent || self.getNewAgent() + } + + self.on('pipe', function (src) { + if (self.ntick && self._started) { + self.emit('error', new Error('You cannot pipe to this stream after the outbound request has started.')) + } + self.src = src + if (isReadStream(src)) { + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', mime.lookup(src.path)) + } + } else { + if (src.headers) { + for (var i in src.headers) { + if (!self.hasHeader(i)) { + self.setHeader(i, src.headers[i]) + } + } + } + if (self._json && !self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + if (src.method && !self.explicitMethod) { + self.method = src.method + } + } + + // self.on('pipe', function () { + // console.error('You have already piped to this stream. 
Pipeing twice is likely to break the request.') + // }) + }) + + defer(function () { + if (self._aborted) { + return + } + + var end = function () { + if (self._form) { + if (!self._auth.hasAuth) { + self._form.pipe(self) + } + else if (self._auth.hasAuth && self._auth.sentAuth) { + self._form.pipe(self) + } + } + if (self._multipart && self._multipart.chunked) { + self._multipart.body.pipe(self) + } + if (self.body) { + if (isstream(self.body)) { + self.body.pipe(self) + } else { + setContentLength() + if (Array.isArray(self.body)) { + self.body.forEach(function (part) { + self.write(part) + }) + } else { + self.write(self.body) + } + self.end() + } + } else if (self.requestBodyStream) { + console.warn('options.requestBodyStream is deprecated, please pass the request object to stream.pipe.') + self.requestBodyStream.pipe(self) + } else if (!self.src) { + if (self._auth.hasAuth && !self._auth.sentAuth) { + self.end() + return + } + if (self.method !== 'GET' && typeof self.method !== 'undefined') { + self.setHeader('content-length', 0) + } + self.end() + } + } + + if (self._form && !self.hasHeader('content-length')) { + // Before ending the request, we had to compute the length of the whole form, asyncly + self.setHeader(self._form.getHeaders(), true) + self._form.getLength(function (err, length) { + if (!err && !isNaN(length)) { + self.setHeader('content-length', length) + } + end() + }) + } else { + end() + } + + self.ntick = true + }) + +} + +Request.prototype.getNewAgent = function () { + var self = this + var Agent = self.agentClass + var options = {} + if (self.agentOptions) { + for (var i in self.agentOptions) { + options[i] = self.agentOptions[i] + } + } + if (self.ca) { + options.ca = self.ca + } + if (self.ciphers) { + options.ciphers = self.ciphers + } + if (self.secureProtocol) { + options.secureProtocol = self.secureProtocol + } + if (self.secureOptions) { + options.secureOptions = self.secureOptions + } + if (typeof self.rejectUnauthorized !== 'undefined') { + options.rejectUnauthorized = self.rejectUnauthorized + } + + if (self.cert && self.key) { + options.key = self.key + options.cert = self.cert + } + + if (self.pfx) { + options.pfx = self.pfx + } + + if (self.passphrase) { + options.passphrase = self.passphrase + } + + var poolKey = '' + + // different types of agents are in different pools + if (Agent !== self.httpModule.Agent) { + poolKey += Agent.name + } + + // ca option is only relevant if proxy or destination are https + var proxy = self.proxy + if (typeof proxy === 'string') { + proxy = url.parse(proxy) + } + var isHttps = (proxy && proxy.protocol === 'https:') || this.uri.protocol === 'https:' + + if (isHttps) { + if (options.ca) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ca + } + + if (typeof options.rejectUnauthorized !== 'undefined') { + if (poolKey) { + poolKey += ':' + } + poolKey += options.rejectUnauthorized + } + + if (options.cert) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.cert.toString('ascii') + options.key.toString('ascii') + } + + if (options.pfx) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.pfx.toString('ascii') + } + + if (options.ciphers) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.ciphers + } + + if (options.secureProtocol) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureProtocol + } + + if (options.secureOptions) { + if (poolKey) { + poolKey += ':' + } + poolKey += options.secureOptions + } + } + + if (self.pool === globalPool && !poolKey && 
Object.keys(options).length === 0 && self.httpModule.globalAgent) { + // not doing anything special. Use the globalAgent + return self.httpModule.globalAgent + } + + // we're using a stored agent. Make sure it's protocol-specific + poolKey = self.uri.protocol + poolKey + + // generate a new agent for this setting if none yet exists + if (!self.pool[poolKey]) { + self.pool[poolKey] = new Agent(options) + // properly set maxSockets on new agents + if (self.pool.maxSockets) { + self.pool[poolKey].maxSockets = self.pool.maxSockets + } + } + + return self.pool[poolKey] +} + +Request.prototype.start = function () { + // start() is called once we are ready to send the outgoing HTTP request. + // this is usually called on the first write(), end() or on nextTick() + var self = this + + if (self._aborted) { + return + } + + self._started = true + self.method = self.method || 'GET' + self.href = self.uri.href + + if (self.src && self.src.stat && self.src.stat.size && !self.hasHeader('content-length')) { + self.setHeader('content-length', self.src.stat.size) + } + if (self._aws) { + self.aws(self._aws, true) + } + + // We have a method named auth, which is completely different from the http.request + // auth option. If we don't remove it, we're gonna have a bad time. + var reqOptions = copy(self) + delete reqOptions.auth + + debug('make request', self.uri.href) + + // node v6.8.0 now supports a `timeout` value in `http.request()`, but we + // should delete it for now since we handle timeouts manually for better + // consistency with node versions before v6.8.0 + delete reqOptions.timeout + + try { + self.req = self.httpModule.request(reqOptions) + } catch (err) { + self.emit('error', err) + return + } + + if (self.timing) { + self.startTime = new Date().getTime() + } + + var timeout + if (self.timeout && !self.timeoutTimer) { + if (self.timeout < 0) { + timeout = 0 + } else if (typeof self.timeout === 'number' && isFinite(self.timeout)) { + timeout = self.timeout + } + } + + self.req.on('response', self.onRequestResponse.bind(self)) + self.req.on('error', self.onRequestError.bind(self)) + self.req.on('drain', function() { + self.emit('drain') + }) + self.req.on('socket', function(socket) { + if (timeout !== undefined) { + socket.once('connect', function() { + clearTimeout(self.timeoutTimer) + self.timeoutTimer = null + // Set an additional timeout on the socket, via the `setsockopt` syscall. + // This timeout sets the amount of time to wait *between* bytes sent + // from the server once connected. + // + // In particular, it's useful for erroring if the server fails to send + // data halfway through streaming a response. + self.req.setTimeout(timeout, function () { + if (self.req) { + self.abort() + var e = new Error('ESOCKETTIMEDOUT') + e.code = 'ESOCKETTIMEDOUT' + e.connect = false + self.emit('error', e) + } + }) + }) + + // Set a timeout in memory - this block will throw if the server takes more + // than `timeout` to write the HTTP status and headers (corresponding to + // the on('response') event on the client). NB: this measures wall-clock + // time, not the time between bytes sent by the server. 
+ self.timeoutTimer = setTimeout(function () { + self.abort() + var e = new Error('ETIMEDOUT') + e.code = 'ETIMEDOUT' + e.connect = true + self.emit('error', e) + }, timeout) + } + self.emit('socket', socket) + }) + + self.emit('request', self.req) +} + +Request.prototype.onRequestError = function (error) { + var self = this + if (self._aborted) { + return + } + if (self.req && self.req._reusedSocket && error.code === 'ECONNRESET' + && self.agent.addRequestNoreuse) { + self.agent = { addRequest: self.agent.addRequestNoreuse.bind(self.agent) } + self.start() + self.req.end() + return + } + if (self.timeout && self.timeoutTimer) { + clearTimeout(self.timeoutTimer) + self.timeoutTimer = null + } + self.emit('error', error) +} + +Request.prototype.onRequestResponse = function (response) { + var self = this + debug('onRequestResponse', self.uri.href, response.statusCode, response.headers) + response.on('end', function() { + if (self.timing) { + self.elapsedTime += (new Date().getTime() - self.startTime) + debug('elapsed time', self.elapsedTime) + response.elapsedTime = self.elapsedTime + } + debug('response end', self.uri.href, response.statusCode, response.headers) + }) + + if (self._aborted) { + debug('aborted', self.uri.href) + response.resume() + return + } + + self.response = response + response.request = self + response.toJSON = responseToJSON + + // XXX This is different on 0.10, because SSL is strict by default + if (self.httpModule === https && + self.strictSSL && (!response.hasOwnProperty('socket') || + !response.socket.authorized)) { + debug('strict ssl error', self.uri.href) + var sslErr = response.hasOwnProperty('socket') ? response.socket.authorizationError : self.uri.href + ' does not support SSL' + self.emit('error', new Error('SSL Error: ' + sslErr)) + return + } + + // Save the original host before any redirect (if it changes, we need to + // remove any authorization headers). Also remember the case of the header + // name because lots of broken servers expect Host instead of host and we + // want the caller to be able to specify this. + self.originalHost = self.getHeader('host') + if (!self.originalHostHeaderName) { + self.originalHostHeaderName = self.hasHeader('host') + } + if (self.setHost) { + self.removeHeader('host') + } + if (self.timeout && self.timeoutTimer) { + clearTimeout(self.timeoutTimer) + self.timeoutTimer = null + } + + var targetCookieJar = (self._jar && self._jar.setCookie) ? self._jar : globalCookieJar + var addCookie = function (cookie) { + //set the cookie if it's domain in the href's domain. + try { + targetCookieJar.setCookie(cookie, self.uri.href, {ignoreError: true}) + } catch (e) { + self.emit('error', e) + } + } + + response.caseless = caseless(response.headers) + + if (response.caseless.has('set-cookie') && (!self._disableCookies)) { + var headerName = response.caseless.has('set-cookie') + if (Array.isArray(response.headers[headerName])) { + response.headers[headerName].forEach(addCookie) + } else { + addCookie(response.headers[headerName]) + } + } + + if (self._redirect.onResponse(response)) { + return // Ignore the rest of the response + } else { + // Be a good stream and emit end when the response is finished. 
+ // Hack to emit end on close because of a core bug that never fires end + response.on('close', function () { + if (!self._ended) { + self.response.emit('end') + } + }) + + response.once('end', function () { + self._ended = true + }) + + var noBody = function (code) { + return ( + self.method === 'HEAD' + // Informational + || (code >= 100 && code < 200) + // No Content + || code === 204 + // Not Modified + || code === 304 + ) + } + + var responseContent + if (self.gzip && !noBody(response.statusCode)) { + var contentEncoding = response.headers['content-encoding'] || 'identity' + contentEncoding = contentEncoding.trim().toLowerCase() + + if (contentEncoding === 'gzip') { + responseContent = zlib.createGunzip() + response.pipe(responseContent) + } else if (contentEncoding === 'deflate') { + responseContent = zlib.createInflate() + response.pipe(responseContent) + } else { + // Since previous versions didn't check for Content-Encoding header, + // ignore any invalid values to preserve backwards-compatibility + if (contentEncoding !== 'identity') { + debug('ignoring unrecognized Content-Encoding ' + contentEncoding) + } + responseContent = response + } + } else { + responseContent = response + } + + if (self.encoding) { + if (self.dests.length !== 0) { + console.error('Ignoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.') + } else if (responseContent.setEncoding) { + responseContent.setEncoding(self.encoding) + } else { + // Should only occur on node pre-v0.9.4 (joyent/node@9b5abe5) with + // zlib streams. + // If/When support for 0.9.4 is dropped, this should be unnecessary. + responseContent = responseContent.pipe(stringstream(self.encoding)) + } + } + + if (self._paused) { + responseContent.pause() + } + + self.responseContent = responseContent + + self.emit('response', response) + + self.dests.forEach(function (dest) { + self.pipeDest(dest) + }) + + responseContent.on('data', function (chunk) { + if (self.timing && !self.responseStarted) { + self.responseStartTime = (new Date()).getTime() + response.responseStartTime = self.responseStartTime + } + self._destdata = true + self.emit('data', chunk) + }) + responseContent.once('end', function (chunk) { + self.emit('end', chunk) + }) + responseContent.on('error', function (error) { + self.emit('error', error) + }) + responseContent.on('close', function () {self.emit('close')}) + + if (self.callback) { + self.readResponseBody(response) + } + //if no callback + else { + self.on('end', function () { + if (self._aborted) { + debug('aborted', self.uri.href) + return + } + self.emit('complete', response) + }) + } + } + debug('finish init function', self.uri.href) +} + +Request.prototype.readResponseBody = function (response) { + var self = this + debug('reading response\'s body') + var buffers = [] + , bufferLength = 0 + , strings = [] + + self.on('data', function (chunk) { + if (!Buffer.isBuffer(chunk)) { + strings.push(chunk) + } else if (chunk.length) { + bufferLength += chunk.length + buffers.push(chunk) + } + }) + self.on('end', function () { + debug('end event', self.uri.href) + if (self._aborted) { + debug('aborted', self.uri.href) + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the request. + // This can lead to leaky behavior if the user retains a reference to the request object. 
+ buffers = [] + bufferLength = 0 + return + } + + if (bufferLength) { + debug('has body', self.uri.href, bufferLength) + response.body = Buffer.concat(buffers, bufferLength) + if (self.encoding !== null) { + response.body = response.body.toString(self.encoding) + } + // `buffer` is defined in the parent scope and used in a closure it exists for the life of the Request. + // This can lead to leaky behavior if the user retains a reference to the request object. + buffers = [] + bufferLength = 0 + } else if (strings.length) { + // The UTF8 BOM [0xEF,0xBB,0xBF] is converted to [0xFE,0xFF] in the JS UTC16/UCS2 representation. + // Strip this value out when the encoding is set to 'utf8', as upstream consumers won't expect it and it breaks JSON.parse(). + if (self.encoding === 'utf8' && strings[0].length > 0 && strings[0][0] === '\uFEFF') { + strings[0] = strings[0].substring(1) + } + response.body = strings.join('') + } + + if (self._json) { + try { + response.body = JSON.parse(response.body, self._jsonReviver) + } catch (e) { + debug('invalid JSON received', self.uri.href) + } + } + debug('emitting complete', self.uri.href) + if (typeof response.body === 'undefined' && !self._json) { + response.body = self.encoding === null ? new Buffer(0) : '' + } + self.emit('complete', response, response.body) + }) +} + +Request.prototype.abort = function () { + var self = this + self._aborted = true + + if (self.req) { + self.req.abort() + } + else if (self.response) { + self.response.destroy() + } + + self.emit('abort') +} + +Request.prototype.pipeDest = function (dest) { + var self = this + var response = self.response + // Called after the response is received + if (dest.headers && !dest.headersSent) { + if (response.caseless.has('content-type')) { + var ctname = response.caseless.has('content-type') + if (dest.setHeader) { + dest.setHeader(ctname, response.headers[ctname]) + } + else { + dest.headers[ctname] = response.headers[ctname] + } + } + + if (response.caseless.has('content-length')) { + var clname = response.caseless.has('content-length') + if (dest.setHeader) { + dest.setHeader(clname, response.headers[clname]) + } else { + dest.headers[clname] = response.headers[clname] + } + } + } + if (dest.setHeader && !dest.headersSent) { + for (var i in response.headers) { + // If the response content is being decoded, the Content-Encoding header + // of the response doesn't represent the piped content, so don't pass it. + if (!self.gzip || i !== 'content-encoding') { + dest.setHeader(i, response.headers[i]) + } + } + dest.statusCode = response.statusCode + } + if (self.pipefilter) { + self.pipefilter(response, dest) + } +} + +Request.prototype.qs = function (q, clobber) { + var self = this + var base + if (!clobber && self.uri.query) { + base = self._qs.parse(self.uri.query) + } else { + base = {} + } + + for (var i in q) { + base[i] = q[i] + } + + var qs = self._qs.stringify(base) + + if (qs === '') { + return self + } + + self.uri = url.parse(self.uri.href.split('?')[0] + '?' + qs) + self.url = self.uri + self.path = self.uri.path + + if (self.uri.host === 'unix') { + self.enableUnixSocket() + } + + return self +} +Request.prototype.form = function (form) { + var self = this + if (form) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.setHeader('content-type', 'application/x-www-form-urlencoded') + } + self.body = (typeof form === 'string') + ? 
self._qs.rfc3986(form.toString('utf8')) + : self._qs.stringify(form).toString('utf8') + return self + } + // create form-data object + self._form = new FormData() + self._form.on('error', function(err) { + err.message = 'form-data: ' + err.message + self.emit('error', err) + self.abort() + }) + return self._form +} +Request.prototype.multipart = function (multipart) { + var self = this + + self._multipart.onRequest(multipart) + + if (!self._multipart.chunked) { + self.body = self._multipart.body + } + + return self +} +Request.prototype.json = function (val) { + var self = this + + if (!self.hasHeader('accept')) { + self.setHeader('accept', 'application/json') + } + + if (typeof self.jsonReplacer === 'function') { + self._jsonReplacer = self.jsonReplacer + } + + self._json = true + if (typeof val === 'boolean') { + if (self.body !== undefined) { + if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) { + self.body = safeStringify(self.body, self._jsonReplacer) + } else { + self.body = self._qs.rfc3986(self.body) + } + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + } else { + self.body = safeStringify(val, self._jsonReplacer) + if (!self.hasHeader('content-type')) { + self.setHeader('content-type', 'application/json') + } + } + + if (typeof self.jsonReviver === 'function') { + self._jsonReviver = self.jsonReviver + } + + return self +} +Request.prototype.getHeader = function (name, headers) { + var self = this + var result, re, match + if (!headers) { + headers = self.headers + } + Object.keys(headers).forEach(function (key) { + if (key.length !== name.length) { + return + } + re = new RegExp(name, 'i') + match = key.match(re) + if (match) { + result = headers[key] + } + }) + return result +} +Request.prototype.enableUnixSocket = function () { + // Get the socket & request paths from the URL + var unixParts = this.uri.path.split(':') + , host = unixParts[0] + , path = unixParts[1] + // Apply unix properties to request + this.socketPath = host + this.uri.pathname = path + this.uri.path = path + this.uri.host = host + this.uri.hostname = host + this.uri.isUnix = true +} + + +Request.prototype.auth = function (user, pass, sendImmediately, bearer) { + var self = this + + self._auth.onRequest(user, pass, sendImmediately, bearer) + + return self +} +Request.prototype.aws = function (opts, now) { + var self = this + + if (!now) { + self._aws = opts + return self + } + + if (opts.sign_version == 4 || opts.sign_version == '4') { + // use aws4 + var options = { + host: self.uri.host, + path: self.uri.path, + method: self.method, + headers: { + 'content-type': self.getHeader('content-type') || '' + }, + body: self.body + } + var signRes = aws4.sign(options, { + accessKeyId: opts.key, + secretAccessKey: opts.secret + }) + self.setHeader('authorization', signRes.headers.Authorization) + self.setHeader('x-amz-date', signRes.headers['X-Amz-Date']) + } + else { + // default: use aws-sign2 + var date = new Date() + self.setHeader('date', date.toUTCString()) + var auth = + { key: opts.key + , secret: opts.secret + , verb: self.method.toUpperCase() + , date: date + , contentType: self.getHeader('content-type') || '' + , md5: self.getHeader('content-md5') || '' + , amazonHeaders: aws2.canonicalizeHeaders(self.headers) + } + var path = self.uri.path + if (opts.bucket && path) { + auth.resource = '/' + opts.bucket + path + } else if (opts.bucket && !path) { + auth.resource = '/' + opts.bucket + } else if (!opts.bucket && path) { 
+ auth.resource = path + } else if (!opts.bucket && !path) { + auth.resource = '/' + } + auth.resource = aws2.canonicalizeResource(auth.resource) + self.setHeader('authorization', aws2.authorization(auth)) + } + + return self +} +Request.prototype.httpSignature = function (opts) { + var self = this + httpSignature.signRequest({ + getHeader: function(header) { + return self.getHeader(header, self.headers) + }, + setHeader: function(header, value) { + self.setHeader(header, value) + }, + method: self.method, + path: self.path + }, opts) + debug('httpSignature authorization', self.getHeader('authorization')) + + return self +} +Request.prototype.hawk = function (opts) { + var self = this + self.setHeader('Authorization', hawk.client.header(self.uri, self.method, opts).field) +} +Request.prototype.oauth = function (_oauth) { + var self = this + + self._oauth.onRequest(_oauth) + + return self +} + +Request.prototype.jar = function (jar) { + var self = this + var cookies + + if (self._redirect.redirectsFollowed === 0) { + self.originalCookieHeader = self.getHeader('cookie') + } + + if (!jar) { + // disable cookies + cookies = false + self._disableCookies = true + } else { + var targetCookieJar = (jar && jar.getCookieString) ? jar : globalCookieJar + var urihref = self.uri.href + //fetch cookie in the Specified host + if (targetCookieJar) { + cookies = targetCookieJar.getCookieString(urihref) + } + } + + //if need cookie and cookie is not empty + if (cookies && cookies.length) { + if (self.originalCookieHeader) { + // Don't overwrite existing Cookie header + self.setHeader('cookie', self.originalCookieHeader + '; ' + cookies) + } else { + self.setHeader('cookie', cookies) + } + } + self._jar = jar + return self +} + + +// Stream API +Request.prototype.pipe = function (dest, opts) { + var self = this + + if (self.response) { + if (self._destdata) { + self.emit('error', new Error('You cannot pipe after data has been emitted from the response.')) + } else if (self._ended) { + self.emit('error', new Error('You cannot pipe after the response has been ended.')) + } else { + stream.Stream.prototype.pipe.call(self, dest, opts) + self.pipeDest(dest) + return dest + } + } else { + self.dests.push(dest) + stream.Stream.prototype.pipe.call(self, dest, opts) + return dest + } +} +Request.prototype.write = function () { + var self = this + if (self._aborted) {return} + + if (!self._started) { + self.start() + } + if (self.req) { + return self.req.write.apply(self.req, arguments) + } +} +Request.prototype.end = function (chunk) { + var self = this + if (self._aborted) {return} + + if (chunk) { + self.write(chunk) + } + if (!self._started) { + self.start() + } + if (self.req) { + self.req.end() + } +} +Request.prototype.pause = function () { + var self = this + if (!self.responseContent) { + self._paused = true + } else { + self.responseContent.pause.apply(self.responseContent, arguments) + } +} +Request.prototype.resume = function () { + var self = this + if (!self.responseContent) { + self._paused = false + } else { + self.responseContent.resume.apply(self.responseContent, arguments) + } +} +Request.prototype.destroy = function () { + var self = this + if (!self._ended) { + self.end() + } else if (self.response) { + self.response.destroy() + } +} + +Request.defaultProxyHeaderWhiteList = + Tunnel.defaultProxyHeaderWhiteList.slice() + +Request.defaultProxyHeaderExclusiveList = + Tunnel.defaultProxyHeaderExclusiveList.slice() + +// Exports + +Request.prototype.toJSON = requestToJSON +module.exports = Request diff 
--git a/node_modules/right-align/LICENSE b/node_modules/right-align/LICENSE new file mode 100644 index 0000000..65f90ac --- /dev/null +++ b/node_modules/right-align/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/right-align/README.md b/node_modules/right-align/README.md new file mode 100644 index 0000000..3abdfb3 --- /dev/null +++ b/node_modules/right-align/README.md @@ -0,0 +1,77 @@ +# right-align [![NPM version](https://badge.fury.io/js/right-align.svg)](http://badge.fury.io/js/right-align) + +> Right-align the text in a string. + +Install with [npm](https://www.npmjs.com/) + +```sh +$ npm i right-align --save +``` + +## Usage + +```js +var rightAlign = require('right-align'); +rightAlign(string); +``` + +**Example** + +If used on the following: + +``` +Lorem ipsum dolor sit amet, +consectetur adipiscing +elit, sed do eiusmod tempor incididunt +ut labore et dolore +magna aliqua. Ut enim ad minim +veniam, quis +``` + +The result would be: + +``` + Lorem ipsum dolor sit amet, + consectetur adipiscing +elit, sed do eiusmod tempor incididunt + ut labore et dolore + magna aliqua. Ut enim ad minim + veniam, quis +``` + +## Related projects + +* [align-text](https://github.com/jonschlinkert/align-text): Align the text in a string. +* [center-align](https://github.com/jonschlinkert/center-align): Center-align the text in a string. +* [justify](https://github.com/bahamas10/node-justify): Left or right (or both) justify text using a custom width and character +* [repeat-string](https://github.com/jonschlinkert/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. +* [repeat-element](https://github.com/jonschlinkert/repeat-element): Create an array by repeating the given value n times. +* [word-wrap](https://github.com/jonschlinkert/word-wrap): Wrap words to a specified length. + +## Running tests + +Install dev dependencies: + +```sh +$ npm i -d && npm test +``` + +## Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](https://github.com/jonschlinkert/right-align/issues/new) + +## Author + +**Jon Schlinkert** + ++ [github/jonschlinkert](https://github.com/jonschlinkert) ++ [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +## License + +Copyright © 2015 Jon Schlinkert +Released under the MIT license. 
+ +*** + +_This file was generated by [verb-cli](https://github.com/assemble/verb-cli) on June 09, 2015._ diff --git a/node_modules/right-align/index.js b/node_modules/right-align/index.js new file mode 100644 index 0000000..ad54f5f --- /dev/null +++ b/node_modules/right-align/index.js @@ -0,0 +1,16 @@ +/*! + * right-align + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +var align = require('align-text'); + +module.exports = function rightAlign(val) { + return align(val, function (len, longest) { + return longest - len; + }); +}; diff --git a/node_modules/right-align/package.json b/node_modules/right-align/package.json new file mode 100644 index 0000000..fe20d26 --- /dev/null +++ b/node_modules/right-align/package.json @@ -0,0 +1,45 @@ +{ + "name": "right-align", + "description": "Right-align the text in a string.", + "version": "0.1.3", + "homepage": "https://github.com/jonschlinkert/right-align", + "author": { + "name": "Jon Schlinkert", + "url": "https://github.com/jonschlinkert" + }, + "repository": { + "type": "git", + "url": "git://github.com/jonschlinkert/right-align.git" + }, + "bugs": { + "url": "https://github.com/jonschlinkert/right-align/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "align-text": "^0.1.1" + }, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "keywords": [ + "align", + "align-center", + "center", + "center-align", + "right", + "right-align", + "text", + "typography" + ] +} diff --git a/node_modules/rimraf/LICENSE b/node_modules/rimraf/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md new file mode 100644 index 0000000..423b8cf --- /dev/null +++ b/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). 
This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. + +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/node_modules/rimraf/bin.js b/node_modules/rimraf/bin.js new file mode 100755 index 0000000..1bd5a0d --- /dev/null +++ b/node_modules/rimraf/bin.js @@ -0,0 +1,40 @@ +#!/usr/bin/env node + +var rimraf = require('./') + +var help = false +var dashdash = false +var args = process.argv.slice(2).filter(function(arg) { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else + return !!arg +}); + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + var log = help ? 
console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + process.exit(help ? 0 : 1) +} else + go(0) + +function go (n) { + if (n >= args.length) + return + rimraf(args[n], function (er) { + if (er) + throw er + go(n+1) + }) +} diff --git a/node_modules/rimraf/package.json b/node_modules/rimraf/package.json new file mode 100644 index 0000000..c2cc045 --- /dev/null +++ b/node_modules/rimraf/package.json @@ -0,0 +1,26 @@ +{ + "name": "rimraf", + "version": "2.5.4", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.0.5" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^6.1.1" + } +} diff --git a/node_modules/rimraf/rimraf.js b/node_modules/rimraf/rimraf.js new file mode 100644 index 0000000..5d9a576 --- /dev/null +++ b/node_modules/rimraf/rimraf.js @@ -0,0 +1,343 @@ +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = require("assert") +var path = require("path") +var fs = require("fs") +var glob = require("glob") + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if (isWindows && (er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. 
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, 666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, 666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + options.rmdirSync(p, options) +} diff --git a/node_modules/ripemd160/.min-wd b/node_modules/ripemd160/.min-wd new file mode 100644 index 0000000..91c647b --- /dev/null +++ b/node_modules/ripemd160/.min-wd @@ -0,0 +1,7 @@ +{ + "hostname" : "localhost", + "port" : 4444, + "browsers" : [{ + "name" : "chrome" + }] +} \ No newline at end of file diff --git a/node_modules/ripemd160/.npmignore b/node_modules/ripemd160/.npmignore new file mode 100644 index 0000000..f547b9c --- /dev/null +++ b/node_modules/ripemd160/.npmignore @@ -0,0 +1,3 @@ +.gitignore +test/ +.DS_Store diff --git a/node_modules/ripemd160/CHANGELOG.md b/node_modules/ripemd160/CHANGELOG.md new file mode 100644 index 0000000..b408699 --- /dev/null +++ b/node_modules/ripemd160/CHANGELOG.md @@ -0,0 +1,19 @@ +0.2.0 / 2014-03-09 +------------------ +* removed bower.json and component.json +* changed 4 spacing to 2 +* returns `Buffer` type now, input must be Array, Uint8Array, Buffer, or string +* remove deps: `convert-hex` and `convert-string` + +0.1.0 / 2013-11-20 +------------------ +* changed package name +* removed AMD support + +0.0.2 / 2013-11-06 +------------------ +* fixed component.json file + +0.0.1 / 2013-11-03 +------------------ +* initial release \ No newline at end of file diff --git a/node_modules/ripemd160/Makefile b/node_modules/ripemd160/Makefile new file mode 100644 index 0000000..5836921 --- /dev/null +++ b/node_modules/ripemd160/Makefile @@ -0,0 +1,12 @@ +test: node-test browser-test + +node-test: + @./node_modules/.bin/mocha + +browser-test: + @./node_modules/.bin/mochify --wd -R spec + +browser-manual-test: + node browser-test.js + +.PHONY: node-test browser-test \ No newline at end of file diff --git a/node_modules/ripemd160/README.md b/node_modules/ripemd160/README.md new file mode 100644 index 0000000..89bdee7 --- /dev/null +++ b/node_modules/ripemd160/README.md @@ -0,0 +1,36 @@ +ripemd160 +========= + +JavaScript component to compute the RIPEMD160 hash of strings or bytes. + + +Install +------- + +### Node.js/Browserify + + npm install --save ripemd160 + + +Usage +----- + +### ripemd160(input) + +Input either a string or `Buffer`. Output is a `Buffer`. + +```js +console.log(ripemd160("hello").toString('hex')) // => 108f07b8382412612c048d07d13f814118445acd" +``` + + + +Credits +------- + +Most of the code from CryptoJS https://code.google.com/p/crypto-js/ + + + + + diff --git a/node_modules/ripemd160/lib/ripemd160.js b/node_modules/ripemd160/lib/ripemd160.js new file mode 100644 index 0000000..19f57a4 --- /dev/null +++ b/node_modules/ripemd160/lib/ripemd160.js @@ -0,0 +1,205 @@ + +module.exports = ripemd160 + + + +/* +CryptoJS v3.1.2 +code.google.com/p/crypto-js +(c) 2009-2013 by Jeff Mott. All rights reserved. +code.google.com/p/crypto-js/wiki/License +*/ +/** @preserve +(c) 2012 by Cédric Mesnil. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +// Constants table +var zl = [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, + 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, + 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, + 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13]; +var zr = [ + 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, + 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, + 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, + 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, + 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11]; +var sl = [ + 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, + 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, + 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, + 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, + 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6 ]; +var sr = [ + 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, + 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, + 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, + 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, + 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 ]; + +var hl = [ 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E]; +var hr = [ 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000]; + +var bytesToWords = function (bytes) { + var words = []; + for (var i = 0, b = 0; i < bytes.length; i++, b += 8) { + words[b >>> 5] |= bytes[i] << (24 - b % 32); + } + return words; +}; + +var wordsToBytes = function (words) { + var bytes = []; + for (var b = 0; b < words.length * 32; b += 8) { + bytes.push((words[b >>> 5] >>> (24 - b % 32)) & 0xFF); + } + return bytes; +}; + +var processBlock = function (H, M, offset) { + + // Swap endian + for (var i = 0; i < 16; i++) { + var offset_i = offset + i; + var M_offset_i = M[offset_i]; + + // Swap + M[offset_i] = ( + (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | + (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) + ); + } + + // Working variables + var al, bl, cl, dl, el; + var ar, br, cr, dr, er; + + ar = al = H[0]; + br = bl = H[1]; + cr = cl = H[2]; + dr = dl = H[3]; + er = el = H[4]; + // Computation + var t; + for (var i = 0; i < 80; i += 1) { + t = (al + 
M[offset+zl[i]])|0; + if (i<16){ + t += f1(bl,cl,dl) + hl[0]; + } else if (i<32) { + t += f2(bl,cl,dl) + hl[1]; + } else if (i<48) { + t += f3(bl,cl,dl) + hl[2]; + } else if (i<64) { + t += f4(bl,cl,dl) + hl[3]; + } else {// if (i<80) { + t += f5(bl,cl,dl) + hl[4]; + } + t = t|0; + t = rotl(t,sl[i]); + t = (t+el)|0; + al = el; + el = dl; + dl = rotl(cl, 10); + cl = bl; + bl = t; + + t = (ar + M[offset+zr[i]])|0; + if (i<16){ + t += f5(br,cr,dr) + hr[0]; + } else if (i<32) { + t += f4(br,cr,dr) + hr[1]; + } else if (i<48) { + t += f3(br,cr,dr) + hr[2]; + } else if (i<64) { + t += f2(br,cr,dr) + hr[3]; + } else {// if (i<80) { + t += f1(br,cr,dr) + hr[4]; + } + t = t|0; + t = rotl(t,sr[i]) ; + t = (t+er)|0; + ar = er; + er = dr; + dr = rotl(cr, 10); + cr = br; + br = t; + } + // Intermediate hash value + t = (H[1] + cl + dr)|0; + H[1] = (H[2] + dl + er)|0; + H[2] = (H[3] + el + ar)|0; + H[3] = (H[4] + al + br)|0; + H[4] = (H[0] + bl + cr)|0; + H[0] = t; +}; + +function f1(x, y, z) { + return ((x) ^ (y) ^ (z)); +} + +function f2(x, y, z) { + return (((x)&(y)) | ((~x)&(z))); +} + +function f3(x, y, z) { + return (((x) | (~(y))) ^ (z)); +} + +function f4(x, y, z) { + return (((x) & (z)) | ((y)&(~(z)))); +} + +function f5(x, y, z) { + return ((x) ^ ((y) |(~(z)))); +} + +function rotl(x,n) { + return (x<>>(32-n)); +} + +function ripemd160(message) { + var H = [0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]; + + if (typeof message == 'string') + message = new Buffer(message, 'utf8'); + + var m = bytesToWords(message); + + var nBitsLeft = message.length * 8; + var nBitsTotal = message.length * 8; + + // Add padding + m[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + m[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( + (((nBitsTotal << 8) | (nBitsTotal >>> 24)) & 0x00ff00ff) | + (((nBitsTotal << 24) | (nBitsTotal >>> 8)) & 0xff00ff00) + ); + + for (var i=0 ; i>> 24)) & 0x00ff00ff) | + (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); + } + + var digestbytes = wordsToBytes(H); + return new Buffer(digestbytes); +} + + diff --git a/node_modules/ripemd160/package.json b/node_modules/ripemd160/package.json new file mode 100644 index 0000000..e86e5c6 --- /dev/null +++ b/node_modules/ripemd160/package.json @@ -0,0 +1,25 @@ +{ + "name": "ripemd160", + "version": "0.2.0", + "description": "Compute RIPEMD160 of bytes or strings.", + "keywords": [ + "string", + "strings", + "ripemd160", + "ripe160", + "bitcoin", + "bytes", + "cryptography" + ], + "devDependencies": { + "mocha": "~1.17.1", + "terst": "~0.1.0", + "mochify": "~0.4.2" + }, + "repository": { + "url": "https://github.com/cryptocoinjs/ripemd160", + "type": "git" + }, + "main": "./lib/ripemd160.js", + "dependencies": {} +} diff --git a/node_modules/sax/LICENSE b/node_modules/sax/LICENSE new file mode 100644 index 0000000..ccffa08 --- /dev/null +++ b/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sax/LICENSE-W3C.html b/node_modules/sax/LICENSE-W3C.html new file mode 100644 index 0000000..a611e3f --- /dev/null +++ b/node_modules/sax/LICENSE-W3C.html @@ -0,0 +1,188 @@ + +W3C Software Notice and License
+This work (and included software, documentation such as READMEs, or other
+related items) is being provided by the copyright holders under the following
+license.
+
+License
+
+By obtaining, using and/or copying this work, you (the licensee) agree that
+you have read, understood, and will comply with the following terms and
+conditions.
+
+Permission to copy, modify, and distribute this software and its
+documentation, with or without modification, for any purpose and without fee
+or royalty is hereby granted, provided that you include the following on ALL
+copies of the software and documentation or portions thereof, including
+modifications:
+
+  • The full text of this NOTICE in a location viewable to users of the
+    redistributed or derivative work.
+  • Any pre-existing intellectual property disclaimers, notices, or terms
+    and conditions. If none exist, the W3C Software Short Notice should be
+    included (hypertext is preferred, text is permitted) within the body of
+    any redistributed or derivative code.
+  • Notice of any changes or modifications to the files, including the date
+    changes were made. (We recommend you provide URIs to the location from
+    which the code is derived.)
+
+Disclaimers
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS
+MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT
+LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR
+PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE
+ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS.
+
+COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR
+DOCUMENTATION.
+
+The name and trademarks of copyright holders may NOT be used in advertising
+or publicity pertaining to the software without specific, written prior
+permission. Title to copyright in this software and any associated
+documentation will at all times remain with copyright holders.
+
+Notes
+
+This version: http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231
+
+This formulation of W3C's notice and license became active on December 31
+2002. This version removes the copyright ownership notice such that this
+license can be used with materials other than those owned by the W3C,
+reflects that ERCIM is now a host of the W3C, includes references to this
+specific dated version of the license, and removes the ambiguous grant of
+"use". Otherwise, this version is the same as the previous version and is
+written so as to preserve the Free Software Foundation's assessment of GPL
+compatibility and OSI's certification under the Open Source Definition.
diff --git a/node_modules/sax/README.md b/node_modules/sax/README.md new file mode 100644 index 0000000..afcd3f3 --- /dev/null +++ b/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. If true, then lowercase tag names and attribute names + in loose mode, rather than uppercasing them. +* `xmlns` - Boolean. If true, then namespaces are supported. +* `position` - Boolean. If false, then don't track line/col/position. +* `strictEntities` - Boolean. If true, only parse [predefined XML + entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent) + (`&`, `'`, `>`, `<`, and `"`) + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at +once. You can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until +it is done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` +event. Then, when the error is taken care of, you can call `resume` to +continue parsing. Otherwise, the parser will not continue while in an error +state. + +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML +document where the parser currently is looking. + +`startTagPosition` - Indicates the position where the current tag starts. + +`closed` - Boolean indicating whether or not the parser can be written to. 
+If it's `true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +`tag` - The current tag being dealt with. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a +function to `on`. Functions get executed in the this-context of +the parser object. The list of supported events are also in the exported +`EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging +out on `parser.error`, and must be deleted before parsing can continue. By +listening to this event, you can keep an eye on that kind of stuff. Note: +this happens *much* more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: +object with `name` and `body` members. Attributes are not parsed, as +processing instructions have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` +would trigger this kind of event. This is a weird thing to support, so it +might go away at some point. SAX isn't intended to be used to parse SGML, +after all. + +`opentagstart` - Emitted immediately when the tag name is available, +but before any attributes are encountered. Argument: object with a +`name` field and an empty `attributes` set. Note that this is the +same object that will later be emitted in the `opentag` event. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. +In non-strict mode, tag names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, then it will contain +namespace binding information on the `ns` member, and will have a +`local`, `prefix`, and `uri` member. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their +parent closes. In strict mode, well-formedness is enforced. Note that +self-closing tags will have `closeTag` emitted immediately after `openTag`. +Argument: tag name. + +`attribute` - An attribute node. Argument: object with `name` and `value`. +In non-strict mode, attribute names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, it will also contains namespace +information. + +`comment` - A comment node. Argument: the string of the comment. + +`opencdata` - The opening tag of a ``) of a `` tags trigger a `"script"` +event, and their contents are not checked for special xml characters. +If you pass `noscript: true`, then this behavior is suppressed. + +## Reporting Problems + +It's best to write a failing test if you find an issue. I will always +accept pull requests with failing tests if they demonstrate intended +behavior, but it is very hard to figure out what issue you're describing +without a test. Writing a test is also the best way for you yourself +to figure out if you really understand the issue you think you have with +sax-js. 
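(The README's usage snippet above was partly lost when the markup was stripped; as a rough sketch of the documented `parser`/`write`/`close` API and event handlers it describes, with an arbitrary XML input chosen for illustration:)

```js
var sax = require("sax")
var parser = sax.parser(true) // strict mode

parser.onerror = function (e) {
  // the parser halts on errors until the error is cleared and resume() is called
  console.error("parse error:", e.message)
  parser.error = null
  parser.resume()
}
parser.onopentag = function (node) {
  // node carries `name` and `attributes`
  console.log("open:", node.name, node.attributes)
}
parser.ontext = function (text) {
  console.log("text:", text)
}
parser.onend = function () {
  console.log("done")
}

parser.write('<greeting lang="en">Hello, world!</greeting>').close()
```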
diff --git a/node_modules/sax/lib/sax.js b/node_modules/sax/lib/sax.js new file mode 100644 index 0000000..f125c5f --- /dev/null +++ b/node_modules/sax/lib/sax.js @@ -0,0 +1,1576 @@ +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. 
+ // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // character classes and tokens + var whitespace = '\r\n\t ' + + // this really needs to be replaced with character classes. 
+ // XML allows all manner of ridiculous numbers and digits. + var number = '0124356789' + var letter = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + + // (Letter | "_" | ":") + var quote = '\'"' + var attribEnd = whitespace + '>' + var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // turn all the string character sets into character class objects. + whitespace = charClass(whitespace) + number = charClass(number) + letter = charClass(letter) + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040\.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040\.\d-]/ + + quote = charClass(quote) + attribEnd = charClass(attribEnd) + + function charClass (str) { + return str.split('').reduce(function (s, c) { + s[c] = true + return s + }, {}) + } + + function isRegExp (c) { + return Object.prototype.toString.call(c) === '[object RegExp]' + } + + function is (charclass, c) { + return isRegExp(charclass) ? !!c.match(charclass) : charclass[c] + } + + function not (charclass, c) { + return !is(charclass, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, // + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/lib/compile.js.html b/node_modules/serverless-webpack/coverage/lcov-report/lib/compile.js.html new file mode 100644 index 0000000..cef7739 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/lib/compile.js.html @@ -0,0 +1,164 @@ + + + + Code coverage report for lib/compile.js + + + + + + + +
+all files / lib/ compile.js
+Statements: 100% (15/15), Branches: 100% (2/2), Functions: 100% (1/1), Lines: 100% (15/15)
+Source listing (HTML markup and line-number gutter omitted):
'use strict';
+ 
+const BbPromise = require('bluebird');
+const webpack = require('webpack');
+ 
+module.exports = {
+  compile() {
+    this.serverless.cli.log('Bundling with Webpack...');
+ 
+    const compiler = webpack(this.webpackConfig);
+ 
+    return BbPromise
+      .fromCallback(cb => compiler.run(cb))
+      .then(stats => {
+ 
+        this.serverless.cli.consoleLog(stats.toString({
+          colors: true,
+          hash: false,
+          version: false,
+          chunks: false,
+          children: false
+        }));
+        if (stats.compilation.errors.length) {
+          throw new Error('Webpack compilation error, see above');
+        }
+        const outputPath = stats.compilation.compiler.outputPath;
+        this.webpackOutputPath = outputPath;
+        this.originalServicePath = this.serverless.config.servicePath;
+        this.serverless.config.servicePath = outputPath;
+        return stats;
+      });
+  },
+};
+ 
+
+
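(A rough standalone sketch of the promisified compile pattern used in `compile()` above: bluebird's `fromCallback` wraps webpack's `compiler.run(cb)` and compilation errors are turned into a rejection. The `./webpack.config.js` path is only an assumed example.)

```js
const BbPromise = require('bluebird');
const webpack = require('webpack');

// hypothetical config path, for illustration only
const config = require('./webpack.config.js');

function bundle() {
  const compiler = webpack(config);
  // fromCallback turns compiler.run(cb) into a promise that resolves with `stats`
  return BbPromise.fromCallback(cb => compiler.run(cb))
    .then(stats => {
      if (stats.compilation.errors.length) {
        throw new Error('Webpack compilation error, see above');
      }
      return stats.compilation.compiler.outputPath;
    });
}

bundle().then(outputPath => console.log('bundled into', outputPath));
```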
+ + + + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/lib/index.html b/node_modules/serverless-webpack/coverage/lcov-report/lib/index.html new file mode 100644 index 0000000..730962c --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/lib/index.html @@ -0,0 +1,132 @@ + + + + Code coverage report for lib/ + + + + + + + +
+all files lib/
+Statements: 100% (140/140), Branches: 100% (41/41), Functions: 100% (14/14), Lines: 100% (139/139)
+
+File          Statements     Branches      Functions    Lines
+compile.js    100%  15/15    100%  2/2     100%  1/1    100%  15/15
+run.js        100%  43/43    100%  10/10   100%  5/5    100%  42/42
+serve.js      100%  62/62    100%  12/12   100%  7/7    100%  62/62
+validate.js   100%  20/20    100%  17/17   100%  1/1    100%  20/20
+
+
+ + + + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/lib/run.js.html b/node_modules/serverless-webpack/coverage/lcov-report/lib/run.js.html new file mode 100644 index 0000000..6474417 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/lib/run.js.html @@ -0,0 +1,359 @@ + + + + Code coverage report for lib/run.js + + + + + + + +
+all files / lib/ run.js
+Statements: 100% (43/43), Branches: 100% (10/10), Functions: 100% (5/5), Lines: 100% (42/42)
+Source listing (HTML markup, line-number gutter, and hit counts omitted):
'use strict';
+ 
+const BbPromise = require('bluebird');
+const path = require('path');
+const webpack = require('webpack');
+const utils = require('./utils');
+ 
+module.exports = {
+  loadHandler(stats, functionId, purge) {
+    const handler = this.serverless.service.functions[functionId].handler.split('.');
+    const moduleFileName = `${handler[0]}.js`;
+    const handlerFilePath = path.join(
+      path.resolve(stats.compilation.options.output.path),
+      moduleFileName
+    );
+    if (purge) {
+      utils.purgeCache(handlerFilePath);
+    }
+    const module = require(handlerFilePath);
+    const functionObjectPath = handler.slice(1);
+    let func = module;
+    for (let p of functionObjectPath) {
+      func = func[p];
+    }
+    return func;
+  },
+ 
+  getEvent() {
+    return this.options.path
+      ? this.serverless.utils.readFileSync(this.options.path)
+      : null; // TODO use get-stdin instead
+  },
+ 
+  getContext(functionName) {
+    return {
+      awsRequestId: utils.guid(),
+      invokeid: utils.guid(),
+      logGroupName: `/aws/lambda/${functionName}`,
+      logStreamName: '2016/02/14/[HEAD]13370a84ca4ed8b77c427af260',
+      functionVersion: '$LATEST',
+      isDefaultFunctionVersion: true,
+      functionName: functionName,
+      memoryLimitInMB: '1024',
+    };
+  },
+ 
+  run(stats) {
+    const functionName = this.options.function;
+ 
+    this.serverless.cli.log(`Run function ${functionName}...`);
+ 
+    const handler = this.loadHandler(stats, functionName);
+    const event = this.getEvent();
+    const context = this.getContext(functionName);
+ 
+    return new BbPromise((resolve, reject) => handler(
+      event,
+      context,
+      (err, res) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(res);
+        }
+      }
+    ));
+  },
+ 
+  watch() {
+    const functionName = this.options.function;
+    this.serverless.cli.log(`Watch function ${functionName}...`);
+ 
+    const compiler = webpack(this.webpackConfig);
+ 
+    compiler.watch({}, (err, stats) => {
+      if (err) {
+        throw err;
+      }
+      this.serverless.cli.log(`Run function ${functionName}...`);
+      const handler = this.loadHandler(stats, functionName, true);
+      const event = this.getEvent();
+      const context = this.getContext(functionName);
+      handler(
+        event,
+        context,
+        (err, res) => {
+          if (err) {
+            throw err;
+          } else {
+            this.serverless.cli.consoleLog(res);
+          }
+        }
+      )
+    });
+ 
+    return BbPromise.resolve();
+  },
+};
+ 
+
+
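(Both `run()` and `watch()` above reduce to requiring the bundled module and calling the exported handler with an event, a stubbed context, and a Node-style callback. A minimal sketch of that invocation, with a hypothetical bundle path and export name:)

```js
// hypothetical bundle path and export name, for illustration only
const handler = require('./.webpack/handler.js').hello;

const event = { body: '{}' };
const context = {
  functionName: 'hello',
  functionVersion: '$LATEST',
  memoryLimitInMB: '1024',
};

handler(event, context, (err, res) => {
  if (err) {
    console.error('handler failed:', err);
  } else {
    console.log('handler result:', res);
  }
});
```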
+ + + + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/lib/serve.js.html b/node_modules/serverless-webpack/coverage/lcov-report/lib/serve.js.html new file mode 100644 index 0000000..8bb7a26 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/lib/serve.js.html @@ -0,0 +1,446 @@ + + + + Code coverage report for lib/serve.js + + + + + + + +
+
+

+ all files / lib/ serve.js +

+
+
+ 100% + Statements + 62/62 +
+
+ 100% + Branches + 12/12 +
+
+ 100% + Functions + 7/7 +
+
+ 100% + Lines + 62/62 +
+
+
+
+

+
+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128  +  + + + + +  + +  + +  + + + + +  + + + + +  + + + +  +  +  +  + +  +  +  +  + +  +  +  + +  + +  + + + + + + +  + + + + + + +  + + +  +  +  + +  +  +  + +  +  +  + + + + + + + + + +  +  +  +  +  +  +  + +  +  +  +12× +  +  +  + + + + + +  +  +  +  + + + +  +  +  +  +  +  +  +  + + + + +  + +  +  +  +  +  +  + +  +  + 
'use strict';
+ 
+const BbPromise = require('bluebird');
+const webpack = require('webpack');
+const express = require('express');
+const bodyParser = require('body-parser');
+ 
+module.exports = {
+  serve() {
+    this.serverless.cli.log('Serving functions...');
+ 
+    const compiler = webpack(this.webpackConfig);
+    const funcConfs = this._getFuncConfigs();
+    const app = this._newExpressApp(funcConfs);
+    const port = this._getPort();
+ 
+    app.listen(port, () =>
+      compiler.watch({}, (err, stats) => {
+        if (err) {
+          throw err;
+        }
+        const loadedModules = [];
+        for (let funcConf of funcConfs) {
+          funcConf.handlerFunc = this.loadHandler(
+            stats,
+            funcConf.id,
+            loadedModules.indexOf(funcConf.moduleName) < 0
+          );
+          loadedModules.push(funcConf.moduleName);
+        }
+      })
+    );
+ 
+    return BbPromise.resolve();
+  },
+ 
+  _newExpressApp(funcConfs) {
+    const app = express();
+ 
+    app.use(bodyParser.json({ limit: '5mb' }));
+ 
+    for (let funcConf of funcConfs) {
+      for (let httpEvent of funcConf.events) {
+        const method = httpEvent.method.toLowerCase();
+        let endpoint = `/${httpEvent.path}`;
+        if (this.options.stage) {
+          endpoint = `/${this.options.stage}${endpoint}`;
+        }
+        const path = endpoint.replace(/\{(.+?)\}/g, ':$1');
+        let handler = this._handlerBase(funcConf);
+        let optionsHandler = this._optionsHandler;
+        if (httpEvent.cors) {
+          handler = this._handlerAddCors(handler);
+          optionsHandler = this._handlerAddCors(optionsHandler);
+        }
+        app.options(path, optionsHandler);
+        app[method](
+          path,
+          handler
+        );
+        this.serverless.cli.consoleLog(`  ${method.toUpperCase()} - http://localhost:${this._getPort()}${endpoint}`);
+      }
+    }
+ 
+    return app;
+  },
+ 
+  _getFuncConfigs() {
+    const funcConfs = [];
+    const inputfuncConfs = this.serverless.service.functions;
+    for (let funcName in inputfuncConfs) {
+      const funcConf = inputfuncConfs[funcName];
+      const httpEvents = funcConf.events
+        .filter(e => e.hasOwnProperty('http'))
+        .map(e => e.http);
+      if (httpEvents.length > 0) {
+        funcConfs.push(Object.assign({}, funcConf, {
+          id: funcName,
+          events: httpEvents,
+          moduleName: funcConf.handler.split('.')[0],
+          handlerFunc: null,
+        }));
+      }
+    }
+    return funcConfs;
+  },
+ 
+  _getPort() {
+    return this.options.port || 8000;
+  },
+ 
+  _handlerAddCors(handler) {
+    return (req, res, next) => {
+      res.header('Access-Control-Allow-Origin', '*');
+      res.header('Access-Control-Allow-Methods', 'GET,PUT,HEAD,PATCH,POST,DELETE,OPTIONS');
+      res.header('Access-Control-Allow-Headers', 'Authorization,Content-Type,x-amz-date,x-amz-security-token');
+      handler(req, res, next);
+    };
+  },
+ 
+  _handlerBase(funcConf) {
+    return (req, res) => {
+      const func = funcConf.handlerFunc;
+      const event = {
+        method: req.method,
+        headers: req.headers,
+        body: req.body,
+        path: req.params,
+        query: req.query,
+        // principalId,
+        // stageVariables,
+      };
+      const context = this.getContext(funcConf.id);
+      func(event, context, (err, resp) => {
+        if (err) {
+          res.status(500).send(err);
+        } else {
+          res.status(200).send(resp);
+        }
+      });
+    }
+  },
+ 
+  _optionsHandler(req, res) {
+    res.sendStatus(200);
+  },
+};
+ 
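Annotation: as a concrete illustration of the route translation in _newExpressApp above, an http event path using API Gateway-style placeholders is rewritten into an Express-style parameter; the values below are hypothetical:

    // with options.stage = 'dev' and an event path of 'users/{id}':
    const endpoint = '/dev/users/{id}';
    const route = endpoint.replace(/\{(.+?)\}/g, ':$1'); // '/dev/users/:id'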
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/lib/validate.js.html b/node_modules/serverless-webpack/coverage/lcov-report/lib/validate.js.html
new file mode 100644
index 0000000..8bc417b
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/lib/validate.js.html
@@ -0,0 +1,224 @@
+Code coverage report for lib/validate.js
+all files / lib/ validate.js
+Statements: 100% (20/20) | Branches: 100% (17/17) | Functions: 100% (1/1) | Lines: 100% (20/20)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const path = require('path');
+const fse = require('fs-extra');
+ 
+module.exports = {
+  validate() {
+    this.webpackConfig = (
+      this.serverless.service.custom &&
+      this.serverless.service.custom.webpack ||
+      'webpack.config.js'
+    );
+ 
+    if (typeof this.webpackConfig === 'string') {
+      const webpackConfigFilePath = path.join(this.serverless.config.servicePath, this.webpackConfig);
+      if (!this.serverless.utils.fileExistsSync(webpackConfigFilePath)) {
+        throw new this.serverless.classes
+          .Error('The webpack plugin could not find the configuration file at: ' + webpackConfigFilePath);
+      }
+      this.webpackConfig = require(webpackConfigFilePath);
+    }
+ 
+    // Default context
+    if (!this.webpackConfig.context) {
+      this.webpackConfig.context = this.serverless.config.servicePath;
+    }
+ 
+    // Default output
+    if (!this.webpackConfig.output) {
+      const outputPath = path.join(this.serverless.config.servicePath, '.webpack');
+      const outputFilename = (
+        Array.isArray(this.webpackConfig.entry)
+        ? this.webpackConfig.entry[this.webpackConfig.entry.length - 1]
+        : this.webpackConfig.entry
+      ) || 'handler.js';
+      this.webpackConfig.output = {
+        libraryTarget: 'commonjs',
+        path: outputPath,
+        filename: outputFilename,
+      };
+    }
+ 
+    // Custom output path
+    if (this.options.out) {
+      this.webpackConfig.output.path = path.join(this.serverless.config.servicePath, this.options.out);
+    }
+ 
+    fse.removeSync(this.webpackConfig.output.path);
+ 
+    return BbPromise.resolve();
+  },
+};
+ 
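Annotation: reading validate() above, a project-level webpack.config.js only has to provide an entry point; context and output are filled in with the defaults shown. A minimal sketch of such a config (the entry name and target are assumptions for illustration; the noted defaults mirror the fallbacks in the code):

    // webpack.config.js (hypothetical minimal example)
    module.exports = {
      entry: './handler.js', // bundled entry point (assumed name)
      target: 'node',        // assumption: the bundle runs on the Node.js Lambda runtime
      // output may be omitted: validate() then uses libraryTarget 'commonjs',
      // path '<servicePath>/.webpack', and the entry value (or 'handler.js') as filename
    };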
+
+
+ + + + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/prettify.css b/node_modules/serverless-webpack/coverage/lcov-report/prettify.css new file mode 100644 index 0000000..b317a7c --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/prettify.css @@ -0,0 +1 @@ +.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/node_modules/serverless-webpack/coverage/lcov-report/prettify.js b/node_modules/serverless-webpack/coverage/lcov-report/prettify.js new file mode 100644 index 0000000..ef51e03 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/prettify.js @@ -0,0 +1 @@ +window.PR_SHOULD_USE_CONTINUATION=true;(function(){var h=["break,continue,do,else,for,if,return,while"];var u=[h,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"];var p=[u,"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"];var l=[p,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"];var x=[p,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"];var R=[x,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"];var r="all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes";var w=[p,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"];var s="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END";var I=[h,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"];var f=[h,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"];var H=[h,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"];var A=[l,R,w,s+I,f,H];var e=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/;var C="str";var z="kwd";var j="com";var O="typ";var G="lit";var L="pun";var F="pln";var m="tag";var E="dec";var J="src";var P="atn";var n="atv";var N="nocode";var 
M="(?:^^\\.?|[+-]|\\!|\\!=|\\!==|\\#|\\%|\\%=|&|&&|&&=|&=|\\(|\\*|\\*=|\\+=|\\,|\\-=|\\->|\\/|\\/=|:|::|\\;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|\\?|\\@|\\[|\\^|\\^=|\\^\\^|\\^\\^=|\\{|\\||\\|=|\\|\\||\\|\\|=|\\~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*";function k(Z){var ad=0;var S=false;var ac=false;for(var V=0,U=Z.length;V122)){if(!(al<65||ag>90)){af.push([Math.max(65,ag)|32,Math.min(al,90)|32])}if(!(al<97||ag>122)){af.push([Math.max(97,ag)&~32,Math.min(al,122)&~32])}}}}af.sort(function(av,au){return(av[0]-au[0])||(au[1]-av[1])});var ai=[];var ap=[NaN,NaN];for(var ar=0;arat[0]){if(at[1]+1>at[0]){an.push("-")}an.push(T(at[1]))}}an.push("]");return an.join("")}function W(al){var aj=al.source.match(new RegExp("(?:\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]|\\\\u[A-Fa-f0-9]{4}|\\\\x[A-Fa-f0-9]{2}|\\\\[0-9]+|\\\\[^ux0-9]|\\(\\?[:!=]|[\\(\\)\\^]|[^\\x5B\\x5C\\(\\)\\^]+)","g"));var ah=aj.length;var an=[];for(var ak=0,am=0;ak=2&&ai==="["){aj[ak]=X(ag)}else{if(ai!=="\\"){aj[ak]=ag.replace(/[a-zA-Z]/g,function(ao){var ap=ao.charCodeAt(0);return"["+String.fromCharCode(ap&~32,ap|32)+"]"})}}}}return aj.join("")}var aa=[];for(var V=0,U=Z.length;V=0;){S[ac.charAt(ae)]=Y}}var af=Y[1];var aa=""+af;if(!ag.hasOwnProperty(aa)){ah.push(af);ag[aa]=null}}ah.push(/[\0-\uffff]/);V=k(ah)})();var X=T.length;var W=function(ah){var Z=ah.sourceCode,Y=ah.basePos;var ad=[Y,F];var af=0;var an=Z.match(V)||[];var aj={};for(var ae=0,aq=an.length;ae=5&&"lang-"===ap.substring(0,5);if(am&&!(ai&&typeof ai[1]==="string")){am=false;ap=J}if(!am){aj[ag]=ap}}var ab=af;af+=ag.length;if(!am){ad.push(Y+ab,ap)}else{var al=ai[1];var ak=ag.indexOf(al);var ac=ak+al.length;if(ai[2]){ac=ag.length-ai[2].length;ak=ac-al.length}var ar=ap.substring(5);B(Y+ab,ag.substring(0,ak),W,ad);B(Y+ab+ak,al,q(ar,al),ad);B(Y+ab+ac,ag.substring(ac),W,ad)}}ah.decorations=ad};return W}function i(T){var W=[],S=[];if(T.tripleQuotedStrings){W.push([C,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,"'\""])}else{if(T.multiLineStrings){W.push([C,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,"'\"`"])}else{W.push([C,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,"\"'"])}}if(T.verbatimStrings){S.push([C,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null])}var Y=T.hashComments;if(Y){if(T.cStyleComments){if(Y>1){W.push([j,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,"#"])}else{W.push([j,/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\r\n]*)/,null,"#"])}S.push([C,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,null])}else{W.push([j,/^#[^\r\n]*/,null,"#"])}}if(T.cStyleComments){S.push([j,/^\/\/[^\r\n]*/,null]);S.push([j,/^\/\*[\s\S]*?(?:\*\/|$)/,null])}if(T.regexLiterals){var X=("/(?=[^/*])(?:[^/\\x5B\\x5C]|\\x5C[\\s\\S]|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+/");S.push(["lang-regex",new RegExp("^"+M+"("+X+")")])}var V=T.types;if(V){S.push([O,V])}var U=(""+T.keywords).replace(/^ | $/g,"");if(U.length){S.push([z,new RegExp("^(?:"+U.replace(/[\s,]+/g,"|")+")\\b"),null])}W.push([F,/^\s+/,null," \r\n\t\xA0"]);S.push([G,/^@[a-z_$][a-z_$@0-9]*/i,null],[O,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[F,/^[a-z_$][a-z_$@0-9]*/i,null],[G,new 
RegExp("^(?:0x[a-f0-9]+|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)(?:e[+\\-]?\\d+)?)[a-z]*","i"),null,"0123456789"],[F,/^\\[\s\S]?/,null],[L,/^.[^\s\w\.$@\'\"\`\/\#\\]*/,null]);return g(W,S)}var K=i({keywords:A,hashComments:true,cStyleComments:true,multiLineStrings:true,regexLiterals:true});function Q(V,ag){var U=/(?:^|\s)nocode(?:\s|$)/;var ab=/\r\n?|\n/;var ac=V.ownerDocument;var S;if(V.currentStyle){S=V.currentStyle.whiteSpace}else{if(window.getComputedStyle){S=ac.defaultView.getComputedStyle(V,null).getPropertyValue("white-space")}}var Z=S&&"pre"===S.substring(0,3);var af=ac.createElement("LI");while(V.firstChild){af.appendChild(V.firstChild)}var W=[af];function ae(al){switch(al.nodeType){case 1:if(U.test(al.className)){break}if("BR"===al.nodeName){ad(al);if(al.parentNode){al.parentNode.removeChild(al)}}else{for(var an=al.firstChild;an;an=an.nextSibling){ae(an)}}break;case 3:case 4:if(Z){var am=al.nodeValue;var aj=am.match(ab);if(aj){var ai=am.substring(0,aj.index);al.nodeValue=ai;var ah=am.substring(aj.index+aj[0].length);if(ah){var ak=al.parentNode;ak.insertBefore(ac.createTextNode(ah),al.nextSibling)}ad(al);if(!ai){al.parentNode.removeChild(al)}}}break}}function ad(ak){while(!ak.nextSibling){ak=ak.parentNode;if(!ak){return}}function ai(al,ar){var aq=ar?al.cloneNode(false):al;var ao=al.parentNode;if(ao){var ap=ai(ao,1);var an=al.nextSibling;ap.appendChild(aq);for(var am=an;am;am=an){an=am.nextSibling;ap.appendChild(am)}}return aq}var ah=ai(ak.nextSibling,0);for(var aj;(aj=ah.parentNode)&&aj.nodeType===1;){ah=aj}W.push(ah)}for(var Y=0;Y=S){ah+=2}if(V>=ap){Z+=2}}}var t={};function c(U,V){for(var S=V.length;--S>=0;){var T=V[S];if(!t.hasOwnProperty(T)){t[T]=U}else{if(window.console){console.warn("cannot override language handler %s",T)}}}}function q(T,S){if(!(T&&t.hasOwnProperty(T))){T=/^\s*]*(?:>|$)/],[j,/^<\!--[\s\S]*?(?:-\->|$)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],[L,/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);c(g([[F,/^[\s]+/,null," 
\t\r\n"],[n,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,"\"'"]],[[m,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[P,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[L,/^[=<>\/]+/],["lang-js",/^on\w+\s*=\s*\"([^\"]+)\"/i],["lang-js",/^on\w+\s*=\s*\'([^\']+)\'/i],["lang-js",/^on\w+\s*=\s*([^\"\'>\s]+)/i],["lang-css",/^style\s*=\s*\"([^\"]+)\"/i],["lang-css",/^style\s*=\s*\'([^\']+)\'/i],["lang-css",/^style\s*=\s*([^\"\'>\s]+)/i]]),["in.tag"]);c(g([],[[n,/^[\s\S]+/]]),["uq.val"]);c(i({keywords:l,hashComments:true,cStyleComments:true,types:e}),["c","cc","cpp","cxx","cyc","m"]);c(i({keywords:"null,true,false"}),["json"]);c(i({keywords:R,hashComments:true,cStyleComments:true,verbatimStrings:true,types:e}),["cs"]);c(i({keywords:x,cStyleComments:true}),["java"]);c(i({keywords:H,hashComments:true,multiLineStrings:true}),["bsh","csh","sh"]);c(i({keywords:I,hashComments:true,multiLineStrings:true,tripleQuotedStrings:true}),["cv","py"]);c(i({keywords:s,hashComments:true,multiLineStrings:true,regexLiterals:true}),["perl","pl","pm"]);c(i({keywords:f,hashComments:true,multiLineStrings:true,regexLiterals:true}),["rb"]);c(i({keywords:w,cStyleComments:true,regexLiterals:true}),["js"]);c(i({keywords:r,hashComments:3,cStyleComments:true,multilineStrings:true,tripleQuotedStrings:true,regexLiterals:true}),["coffee"]);c(g([],[[C,/^[\s\S]+/]]),["regex"]);function d(V){var U=V.langExtension;try{var S=a(V.sourceNode);var T=S.sourceCode;V.sourceCode=T;V.spans=S.spans;V.basePos=0;q(U,T)(V);D(V)}catch(W){if("console" in window){console.log(W&&W.stack?W.stack:W)}}}function y(W,V,U){var S=document.createElement("PRE");S.innerHTML=W;if(U){Q(S,U)}var T={langExtension:V,numberLines:U,sourceNode:S};d(T);return S.innerHTML}function b(ad){function Y(af){return document.getElementsByTagName(af)}var ac=[Y("pre"),Y("code"),Y("xmp")];var T=[];for(var aa=0;aa=0){var ah=ai.match(ab);var am;if(!ah&&(am=o(aj))&&"CODE"===am.tagName){ah=am.className.match(ab)}if(ah){ah=ah[1]}var al=false;for(var ak=aj.parentNode;ak;ak=ak.parentNode){if((ak.tagName==="pre"||ak.tagName==="code"||ak.tagName==="xmp")&&ak.className&&ak.className.indexOf("prettyprint")>=0){al=true;break}}if(!al){var af=aj.className.match(/\blinenums\b(?::(\d+))?/);af=af?af[1]&&af[1].length?+af[1]:true:false;if(af){Q(aj,af)}S={langExtension:ah,sourceNode:aj,numberLines:af};d(S)}}}if(X]*(?:>|$)/],[PR.PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],[PR.PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-handlebars",/^]*type\s*=\s*['"]?text\/x-handlebars-template['"]?\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i],[PR.PR_DECLARATION,/^{{[#^>/]?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{&?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{{>?\s*[\w.][^}]*}}}/],[PR.PR_COMMENT,/^{{![^}]*}}/]]),["handlebars","hbs"]);PR.registerLangHandler(PR.createSimpleLexer([[PR.PR_PLAIN,/^[ \t\r\n\f]+/,null," \t\r\n\f"]],[[PR.PR_STRING,/^\"(?:[^\n\r\f\\\"]|\\(?:\r\n?|\n|\f)|\\[\s\S])*\"/,null],[PR.PR_STRING,/^\'(?:[^\n\r\f\\\']|\\(?:\r\n?|\n|\f)|\\[\s\S])*\'/,null],["lang-css-str",/^url\(([^\)\"\']*)\)/i],[PR.PR_KEYWORD,/^(?:url|rgb|\!important|@import|@page|@media|@charset|inherit)(?=[^\-\w]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|(?:\\[0-9a-f]+ ?))(?:[_a-z0-9\-]|\\(?:\\[0-9a-f]+ 
?))*)\s*:/i],[PR.PR_COMMENT,/^\/\*[^*]*\*+(?:[^\/*][^*]*\*+)*\//],[PR.PR_COMMENT,/^(?:)/],[PR.PR_LITERAL,/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],[PR.PR_LITERAL,/^#(?:[0-9a-f]{3}){1,2}/i],[PR.PR_PLAIN,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i],[PR.PR_PUNCTUATION,/^[^\s\w\'\"]+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_KEYWORD,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_STRING,/^[^\)\"\']+/]]),["css-str"]); diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.html new file mode 100644 index 0000000..6073c75 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.html @@ -0,0 +1,93 @@ + + + + Code coverage report for serverless-webpack/ + + + + + + + +
+all files serverless-webpack/
+Statements: 35% (7/20) | Branches: 100% (0/0) | Functions: 0% (0/1) | Lines: 35% (7/20)
+
+File     | Statements | Branches   | Functions | Lines
+index.js | 35% (7/20) | 100% (0/0) | 0% (0/1)  | 35% (7/20)
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.js.html
new file mode 100644
index 0000000..bcefd24
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/index.js.html
@@ -0,0 +1,425 @@
+Code coverage report for serverless-webpack/index.js
+all files / serverless-webpack/ index.js
+Statements: 35% (7/20) | Branches: 100% (0/0) | Functions: 0% (0/1) | Lines: 35% (7/20)
'use strict';
+ 
+const BbPromise = require('bluebird');
+ 
+const validate = require('./lib/validate');
+const compile = require('./lib/compile');
+const cleanup = require('./lib/cleanup');
+const run = require('./lib/run');
+const serve = require('./lib/serve');
+ 
+class ServerlessWebpack {
+  constructor(serverless, options) {
+    this.serverless = serverless;
+    this.options = options;
+ 
+    Object.assign(
+      this,
+      validate,
+      compile,
+      cleanup,
+      run,
+      serve
+    );
+ 
+    this.commands = {
+      webpack: {
+        usage: 'Bundle with Webpack',
+        lifecycleEvents: [
+          'validate',
+          'compile',
+        ],
+        options: {
+          out: {
+            usage: 'Path to output directory',
+            shortcut: 'o',
+          },
+        },
+        commands: {
+          invoke: {
+            usage: 'Run a function locally from the webpack output bundle',
+            lifecycleEvents: [
+              'invoke',
+            ],
+            options: {
+              function: {
+                usage: 'Name of the function',
+                shortcut: 'f',
+                required: true,
+              },
+              path: {
+                usage: 'Path to JSON file holding input data',
+                shortcut: 'p',
+              },
+            },
+          },
+          watch: {
+            usage: 'Run a function from the webpack output bundle every time the source is changed',
+            lifecycleEvents: [
+              'watch',
+            ],
+            options: {
+              function: {
+                usage: 'Name of the function',
+                shortcut: 'f',
+                required: true,
+              },
+              path: {
+                usage: 'Path to JSON file holding input data',
+                shortcut: 'p',
+              },
+            },
+          },
+          serve: {
+            usage: 'Simulate the API Gateway and serves lambdas locally',
+            lifecycleEvents: [
+              'serve',
+            ],
+            options: {
+              port: {
+                usage: 'The local server port',
+                shortcut: 'p',
+              },
+            },
+          },
+        },
+      },
+    };
+ 
+    this.hooks = {
+      'before:deploy:createDeploymentArtifacts': () => BbPromise.bind(this)
+        .then(this.validate)
+        .then(this.compile),
+ 
+      'after:deploy:createDeploymentArtifacts': () => BbPromise.bind(this)
+        .then(this.cleanup),
+ 
+      'webpack:validate': () => BbPromise.bind(this)
+        .then(this.validate),
+ 
+      'webpack:compile': () => BbPromise.bind(this)
+        .then(this.compile),
+ 
+      'webpack:invoke:invoke': () => BbPromise.bind(this)
+        .then(this.validate)
+        .then(this.compile)
+        .then(this.run)
+        .then(out => this.serverless.cli.consoleLog(out)),
+ 
+      'webpack:watch:watch': () => BbPromise.bind(this)
+        .then(this.validate)
+        .then(this.watch),
+ 
+      'webpack:serve:serve': () => BbPromise.bind(this)
+        .then(this.validate)
+        .then(this.serve),
+    };
+  }
+}
+ 
+module.exports = ServerlessWebpack;
+ 
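Annotation: the command tree defined above maps onto CLI invocations of roughly this shape (illustrative only; flags follow the option shortcuts declared in the constructor):

    serverless webpack --out dist
    serverless webpack invoke --function <functionName> --path event.json
    serverless webpack watch --function <functionName>
    serverless webpack serve --port 8000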
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/cleanup.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/cleanup.js.html
new file mode 100644
index 0000000..8f80fcd
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/cleanup.js.html
@@ -0,0 +1,194 @@
+Code coverage report for serverless-webpack/lib/cleanup.js
+all files / serverless-webpack/lib/ cleanup.js
+Statements: 22.22% (4/18) | Branches: 0% (0/6) | Functions: 0% (0/1) | Lines: 22.22% (4/18)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const path = require('path');
+const fse = require('fs-extra');
+ 
+module.exports = {
+  cleanup() {
+    const webpackOutputPath = this.webpackOutputPath;
+ 
+    const moveArtifact = new BbPromise((resolve, reject) => {
+      if (this.originalServicePath) {
+        this.serverless.config.servicePath = this.originalServicePath;
+        fse.move(
+          path.join(webpackOutputPath, '.serverless'),
+          path.join(this.serverless.config.servicePath, '.serverless'),
+          (err) => {
+            if (err) {
+              reject(err);
+            } else {
+              this.serverless.service.package.artifact = path.join(
+                this.serverless.config.servicePath,
+                '.serverless',
+                path.basename(this.serverless.service.package.artifact)
+              );
+              resolve();
+            }
+          }
+        );
+      } else {
+        resolve();
+      }
+    });
+ 
+    return moveArtifact
+      .then(() => {
+        if (this.serverless.utils.dirExistsSync(webpackOutputPath)) {
+          fse.removeSync(webpackOutputPath);
+        }
+      })
+      .then(() => BbPromise.resolve());
+  },
+};
+ 
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/compile.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/compile.js.html
new file mode 100644
index 0000000..582f87a
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/compile.js.html
@@ -0,0 +1,161 @@
+Code coverage report for serverless-webpack/lib/compile.js
+all files / serverless-webpack/lib/ compile.js
+Statements: 33.33% (5/15) | Branches: 0% (0/2) | Functions: 100% (1/1) | Lines: 33.33% (5/15)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const webpack = require('webpack');
+ 
+module.exports = {
+  compile() {
+    this.serverless.cli.log('Bundling with Webpack...');
+ 
+    const compiler = webpack(this.webpackConfig);
+ 
+    return BbPromise
+      .fromCallback(cb => compiler.run(cb))
+      .then(stats => {
+        this.serverless.cli.consoleLog(stats.toString({
+          colors: true,
+          hash: false,
+          version: false,
+          chunks: false,
+          children: false
+        }));
+        if (stats.compilation.errors.length) {
+          throw new Error('Webpack compilation error, see above');
+        }
+        const outputPath = stats.compilation.compiler.outputPath;
+        this.webpackOutputPath = outputPath;
+        this.originalServicePath = this.serverless.config.servicePath;
+        this.serverless.config.servicePath = outputPath;
+        return stats;
+      });
+  },
+};
+ 
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/index.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/index.html
new file mode 100644
index 0000000..3443554
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/index.html
@@ -0,0 +1,158 @@
+Code coverage report for serverless-webpack/lib/
+all files serverless-webpack/lib/
+Statements: 64.94% (113/174) | Branches: 56.6% (30/53) | Functions: 73.91% (17/23) | Lines: 64.74% (112/173)
+
+File        | Statements     | Branches      | Functions    | Lines
+cleanup.js  | 22.22% (4/18)  | 0% (0/6)      | 0% (0/1)     | 22.22% (4/18)
+compile.js  | 33.33% (5/15)  | 0% (0/2)      | 100% (1/1)   | 33.33% (5/15)
+run.js      | 72.09% (31/43) | 60% (6/10)    | 100% (5/5)   | 71.43% (30/42)
+serve.js    | 73.33% (44/60) | 58.33% (7/12) | 85.71% (6/7) | 73.33% (44/60)
+utils.js    | 50% (9/18)     | 0% (0/6)      | 50% (4/8)    | 50% (9/18)
+validate.js | 100% (20/20)   | 100% (17/17)  | 100% (1/1)   | 100% (20/20)
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/run.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/run.js.html
new file mode 100644
index 0000000..9c468fd
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/run.js.html
@@ -0,0 +1,359 @@
+Code coverage report for serverless-webpack/lib/run.js
+all files / serverless-webpack/lib/ run.js
+Statements: 72.09% (31/43) | Branches: 60% (6/10) | Functions: 100% (5/5) | Lines: 71.43% (30/42)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const path = require('path');
+const webpack = require('webpack');
+const utils = require('./utils');
+ 
+module.exports = {
+  loadHandler(stats, functionId, purge) {
+    const handler = this.serverless.service.functions[functionId].handler.split('.');
+    const moduleFileName = `${handler[0]}.js`;
+    const handlerFilePath = path.join(
+      path.resolve(stats.compilation.options.output.path),
+      moduleFileName
+    );
+    if (purge) {
+      utils.purgeCache(handlerFilePath);
+    }
+    const module = require(handlerFilePath);
+    const functionObjectPath = handler.slice(1);
+    let func = module;
+    for (let p of functionObjectPath) {
+      func = func[p];
+    }
+    return func;
+  },
+ 
+  getEvent() {
+    return this.options.path
+      ? this.serverless.utils.readFileSync(this.options.path)
+      : null; // TODO use get-stdin instead
+  },
+ 
+  getContext(functionName) {
+    return {
+      awsRequestId: utils.guid(),
+      invokeid: utils.guid(),
+      logGroupName: `/aws/lambda/${functionName}`,
+      logStreamName: '2016/02/14/[HEAD]13370a84ca4ed8b77c427af260',
+      functionVersion: '$LATEST',
+      isDefaultFunctionVersion: true,
+      functionName: functionName,
+      memoryLimitInMB: '1024',
+    };
+  },
+ 
+  run(stats) {
+    const functionName = this.options.function;
+ 
+    this.serverless.cli.log(`Run function ${functionName}...`);
+ 
+    const handler = this.loadHandler(stats, functionName);
+    const event = this.getEvent();
+    const context = this.getContext(functionName);
+ 
+    return new BbPromise((resolve, reject) => handler(
+      event,
+      context,
+      (err, res) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(res);
+        }
+      }
+    ));
+  },
+ 
+  watch() {
+    const functionName = this.options.function;
+    this.serverless.cli.log(`Watch function ${functionName}...`);
+ 
+    const compiler = webpack(this.webpackConfig);
+ 
+    compiler.watch({}, (err, stats) => {
+      if (err) {
+        throw err;
+      }
+      this.serverless.cli.log(`Run function ${functionName}...`);
+      const handler = this.loadHandler(stats, functionName, true);
+      const event = this.getEvent();
+      const context = this.getContext(functionName);
+      handler(
+        event,
+        context,
+        (err, res) => {
+          if (err) {
+            throw err;
+          } else {
+            this.serverless.cli.consoleLog(res);
+          }
+        }
+      )
+    });
+ 
+    return BbPromise.resolve();
+  },
+};
+ 
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/serve.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/serve.js.html
new file mode 100644
index 0000000..05fc595
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/serve.js.html
@@ -0,0 +1,440 @@
+Code coverage report for serverless-webpack/lib/serve.js
+all files / serverless-webpack/lib/ serve.js
+Statements: 73.33% (44/60) | Branches: 58.33% (7/12) | Functions: 85.71% (6/7) | Lines: 73.33% (44/60)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const webpack = require('webpack');
+const express = require('express');
+const bodyParser = require('body-parser');
+ 
+module.exports = {
+  serve() {
+    this.serverless.cli.log('Serving functions...');
+ 
+    const compiler = webpack(this.webpackConfig);
+    const funcConfs = this._getFuncConfigs();
+    const app = this._newExpressApp(funcConfs);
+    const port = this._getPort();
+ 
+    app.listen(port, () =>
+      compiler.watch({}, (err, stats) => {
+        if (err) {
+          throw err;
+        }
+        const loadedModules = [];
+        for (let funcConf of funcConfs) {
+          funcConf.handlerFunc = this.loadHandler(
+            stats,
+            funcConf.id,
+            loadedModules.indexOf(funcConf.moduleName) < 0
+          );
+          loadedModules.push(funcConf.moduleName);
+        }
+      })
+    );
+ 
+    return BbPromise.resolve();
+  },
+ 
+  _newExpressApp(funcConfs) {
+    const app = express();
+ 
+    app.use(bodyParser.json({ limit: '5mb' }));
+ 
+    app.use(function optionsHandler(req, res, next) {
+      if(req.method !== 'OPTIONS') {
+        next();
+      } else {
+        res.sendStatus(200);
+      }
+    });
+ 
+    for (let funcConf of funcConfs) {
+      for (let httpEvent of funcConf.events) {
+        const method = httpEvent.method.toLowerCase();
+        const endpoint = `/${this.options.stage}/${httpEvent.path}`;
+        const path = endpoint.replace(/\{(.+?)\}/g, ':$1');
+        let handler = this._handlerBase(funcConf);
+        Eif (httpEvent.cors) {
+          handler = this._handlerAddCors(handler);
+        }
+        app[method](
+          path,
+          handler
+        );
+        this.serverless.cli.consoleLog(`  ${method.toUpperCase()} - http://localhost:${this._getPort()}${endpoint}`);
+      }
+    }
+ 
+    return app;
+  },
+ 
+  _getFuncConfigs() {
+    const funcConfs = [];
+    const inputfuncConfs = this.serverless.service.functions;
+    for (let funcName in inputfuncConfs) {
+      const funcConf = inputfuncConfs[funcName];
+      const httpEvents = funcConf.events
+        .filter(e => e.hasOwnProperty('http'))
+        .map(e => e.http);
+      if (httpEvents.length > 0) {
+        funcConfs.push(Object.assign({}, funcConf, {
+          id: funcName,
+          events: httpEvents,
+          moduleName: funcConf.handler.split('.')[0],
+          handlerFunc: null,
+        }));
+      }
+    }
+    return funcConfs;
+  },
+ 
+  _getPort() {
+    return this.options.port || 8000;
+  },
+ 
+  _handlerAddCors(handler) {
+    return (req, res, next) => {
+      res.header('Access-Control-Allow-Origin', '*');
+      res.header('Access-Control-Allow-Methods', 'GET,PUT,HEAD,PATCH,POST,DELETE,OPTIONS');
+      res.header('Access-Control-Allow-Headers', 'Authorization,Content-Type,x-amz-date,x-amz-security-token');
+      handler(req, res, next);
+    };
+  },
+ 
+  _handlerBase(funcConf) {
+    return (req, res) => {
+      const func = funcConf.handlerFunc;
+      const event = {
+        method: req.method,
+        headers: req.headers,
+        body: req.body,
+        path: req.params,
+        query: req.query,
+        // principalId,
+        // stageVariables,
+      };
+      const context = this.getContext(funcConf.id);
+      func(event, context, (err, resp) => {
+        if (err) {
+          res.status(500).send(err);
+        } else {
+          res.status(200).send(resp);
+        }
+      });
+    }
+  },
+};
+ 
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/utils.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/utils.js.html
new file mode 100644
index 0000000..ba66582
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/utils.js.html
@@ -0,0 +1,182 @@
+Code coverage report for serverless-webpack/lib/utils.js
+all files / serverless-webpack/lib/ utils.js
+Statements: 50% (9/18) | Branches: 0% (0/6) | Functions: 50% (4/8) | Lines: 50% (9/18)
'use strict';
+ 
+function guid() {
+  function s4() {
+    return Math.floor((1 + Math.random()) * 0x10000)
+      .toString(16)
+      .substring(1);
+  }
+  return s4() + s4() + '-' + s4() + '-' + s4() + '-' + s4() + '-' + s4() + s4() + s4();
+}
+ 
+function purgeCache(moduleName) {
+  searchCache(moduleName, function (mod) {
+    delete require.cache[mod.id];
+  });
+  Object.keys(module.constructor._pathCache).forEach(function(cacheKey) {
+    if (cacheKey.indexOf(moduleName)>0) {
+     delete module.constructor._pathCache[cacheKey];
+    }
+  });
+}
+ 
+function searchCache(moduleName, callback) {
+  var mod = require.resolve(moduleName);
+  if (mod && ((mod = require.cache[mod]) !== undefined)) {
+    (function traverse(mod) {
+      mod.children.forEach(function (child) {
+        traverse(child);
+      });
+      callback(mod);
+    }(mod));
+  }
+}
+ 
+module.exports = {
+  guid,
+  purgeCache,
+  searchCache,
+};
+ 
diff --git a/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/validate.js.html b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/validate.js.html
new file mode 100644
index 0000000..76b15ad
--- /dev/null
+++ b/node_modules/serverless-webpack/coverage/lcov-report/serverless-webpack/lib/validate.js.html
@@ -0,0 +1,224 @@
+Code coverage report for serverless-webpack/lib/validate.js
+all files / serverless-webpack/lib/ validate.js
+Statements: 100% (20/20) | Branches: 100% (17/17) | Functions: 100% (1/1) | Lines: 100% (20/20)
'use strict';
+ 
+const BbPromise = require('bluebird');
+const path = require('path');
+const fse = require('fs-extra');
+ 
+module.exports = {
+  validate() {
+    this.webpackConfig = (
+      this.serverless.service.custom &&
+      this.serverless.service.custom.webpack ||
+      'webpack.config.js'
+    );
+ 
+    if (typeof this.webpackConfig === 'string') {
+      const webpackConfigFilePath = path.join(this.serverless.config.servicePath, this.webpackConfig);
+      if (!this.serverless.utils.fileExistsSync(webpackConfigFilePath)) {
+        throw new this.serverless.classes
+          .Error('The webpack plugin could not find the configuration file at: ' + webpackConfigFilePath);
+      }
+      this.webpackConfig = require(webpackConfigFilePath);
+    }
+ 
+    // Default context
+    if (!this.webpackConfig.context) {
+      this.webpackConfig.context = this.serverless.config.servicePath;
+    }
+ 
+    // Default output
+    if (!this.webpackConfig.output) {
+      const outputPath = path.join(this.serverless.config.servicePath, '.webpack');
+      const outputFilename = (
+        Array.isArray(this.webpackConfig.entry)
+        ? this.webpackConfig.entry[this.webpackConfig.entry.length - 1]
+        : this.webpackConfig.entry
+      ) || 'handler.js';
+      this.webpackConfig.output = {
+        libraryTarget: 'commonjs',
+        path: outputPath,
+        filename: outputFilename,
+      };
+    }
+ 
+    // Custom output path
+    if (this.options.out) {
+      this.webpackConfig.output.path = path.join(this.serverless.config.servicePath, this.options.out);
+    }
+ 
+    fse.removeSync(this.webpackConfig.output.path);
+ 
+    return BbPromise.resolve();
+  },
+};
+ 
+
+
+ + + + + + + diff --git a/node_modules/serverless-webpack/coverage/lcov-report/sort-arrow-sprite.png b/node_modules/serverless-webpack/coverage/lcov-report/sort-arrow-sprite.png new file mode 100644 index 0000000..03f704a Binary files /dev/null and b/node_modules/serverless-webpack/coverage/lcov-report/sort-arrow-sprite.png differ diff --git a/node_modules/serverless-webpack/coverage/lcov-report/sorter.js b/node_modules/serverless-webpack/coverage/lcov-report/sorter.js new file mode 100644 index 0000000..6c5034e --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov-report/sorter.js @@ -0,0 +1,158 @@ +var addSorting = (function () { + "use strict"; + var cols, + currentSort = { + index: 0, + desc: false + }; + + // returns the summary table element + function getTable() { return document.querySelector('.coverage-summary'); } + // returns the thead element of the summary table + function getTableHeader() { return getTable().querySelector('thead tr'); } + // returns the tbody element of the summary table + function getTableBody() { return getTable().querySelector('tbody'); } + // returns the th element for nth column + function getNthColumn(n) { return getTableHeader().querySelectorAll('th')[n]; } + + // loads all columns + function loadColumns() { + var colNodes = getTableHeader().querySelectorAll('th'), + colNode, + cols = [], + col, + i; + + for (i = 0; i < colNodes.length; i += 1) { + colNode = colNodes[i]; + col = { + key: colNode.getAttribute('data-col'), + sortable: !colNode.getAttribute('data-nosort'), + type: colNode.getAttribute('data-type') || 'string' + }; + cols.push(col); + if (col.sortable) { + col.defaultDescSort = col.type === 'number'; + colNode.innerHTML = colNode.innerHTML + ''; + } + } + return cols; + } + // attaches a data attribute to every tr element with an object + // of data values keyed by column name + function loadRowData(tableRow) { + var tableCols = tableRow.querySelectorAll('td'), + colNode, + col, + data = {}, + i, + val; + for (i = 0; i < tableCols.length; i += 1) { + colNode = tableCols[i]; + col = cols[i]; + val = colNode.getAttribute('data-value'); + if (col.type === 'number') { + val = Number(val); + } + data[col.key] = val; + } + return data; + } + // loads all row data + function loadData() { + var rows = getTableBody().querySelectorAll('tr'), + i; + + for (i = 0; i < rows.length; i += 1) { + rows[i].data = loadRowData(rows[i]); + } + } + // sorts the table using the data for the ith column + function sortByIndex(index, desc) { + var key = cols[index].key, + sorter = function (a, b) { + a = a.data[key]; + b = b.data[key]; + return a < b ? -1 : a > b ? 1 : 0; + }, + finalSorter = sorter, + tableBody = document.querySelector('.coverage-summary tbody'), + rowNodes = tableBody.querySelectorAll('tr'), + rows = [], + i; + + if (desc) { + finalSorter = function (a, b) { + return -1 * sorter(a, b); + }; + } + + for (i = 0; i < rowNodes.length; i += 1) { + rows.push(rowNodes[i]); + tableBody.removeChild(rowNodes[i]); + } + + rows.sort(finalSorter); + + for (i = 0; i < rows.length; i += 1) { + tableBody.appendChild(rows[i]); + } + } + // removes sort indicators for current column being sorted + function removeSortIndicators() { + var col = getNthColumn(currentSort.index), + cls = col.className; + + cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); + col.className = cls; + } + // adds sort indicators for current column being sorted + function addSortIndicators() { + getNthColumn(currentSort.index).className += currentSort.desc ? 
' sorted-desc' : ' sorted'; + } + // adds event listeners for all sorter widgets + function enableUI() { + var i, + el, + ithSorter = function ithSorter(i) { + var col = cols[i]; + + return function () { + var desc = col.defaultDescSort; + + if (currentSort.index === i) { + desc = !currentSort.desc; + } + sortByIndex(i, desc); + removeSortIndicators(); + currentSort.index = i; + currentSort.desc = desc; + addSortIndicators(); + }; + }; + for (i =0 ; i < cols.length; i += 1) { + if (cols[i].sortable) { + // add the click event handler on the th so users + // dont have to click on those tiny arrows + el = getNthColumn(i).querySelector('.sorter').parentElement; + if (el.addEventListener) { + el.addEventListener('click', ithSorter(i)); + } else { + el.attachEvent('onclick', ithSorter(i)); + } + } + } + } + // adds sorting functionality to the UI + return function () { + if (!getTable()) { + return; + } + cols = loadColumns(); + loadData(cols); + addSortIndicators(); + enableUI(); + }; +})(); + +window.addEventListener('load', addSorting); diff --git a/node_modules/serverless-webpack/coverage/lcov.info b/node_modules/serverless-webpack/coverage/lcov.info new file mode 100644 index 0000000..75b0b82 --- /dev/null +++ b/node_modules/serverless-webpack/coverage/lcov.info @@ -0,0 +1,244 @@ +TN: +SF:/Users/Nikso/Projects/EC/serverless-webpack/lib/validate.js +FN:8,(anonymous_1) +FNF:1 +FNH:1 +FNDA:10,(anonymous_1) +DA:3,1 +DA:4,1 +DA:5,1 +DA:7,1 +DA:9,10 +DA:15,10 +DA:16,3 +DA:17,3 +DA:18,1 +DA:21,2 +DA:25,9 +DA:26,6 +DA:30,9 +DA:31,5 +DA:32,5 +DA:37,5 +DA:45,9 +DA:46,1 +DA:49,9 +DA:51,9 +LF:20 +LH:20 +BRDA:10,1,0,10 +BRDA:10,1,1,10 +BRDA:10,1,2,1 +BRDA:15,2,0,3 +BRDA:15,2,1,7 +BRDA:17,3,0,1 +BRDA:17,3,1,2 +BRDA:25,4,0,6 +BRDA:25,4,1,3 +BRDA:30,5,0,5 +BRDA:30,5,1,4 +BRDA:32,6,0,5 +BRDA:32,6,1,1 +BRDA:33,7,0,1 +BRDA:33,7,1,4 +BRDA:45,8,0,1 +BRDA:45,8,1,8 +BRF:17 +BRH:17 +end_of_record +TN: +SF:/Users/Nikso/Projects/EC/serverless-webpack/lib/compile.js +FN:7,(anonymous_1) +FNF:1 +FNH:1 +FNDA:3,(anonymous_1) +DA:3,1 +DA:4,1 +DA:6,1 +DA:8,3 +DA:10,3 +DA:12,3 +DA:13,3 +DA:16,3 +DA:23,3 +DA:24,1 +DA:26,2 +DA:27,2 +DA:28,2 +DA:29,2 +DA:30,2 +LF:15 +LH:15 +BRDA:23,1,0,1 +BRDA:23,1,1,2 +BRF:2 +BRH:2 +end_of_record +TN: +SF:/Users/Nikso/Projects/EC/serverless-webpack/lib/run.js +FN:9,(anonymous_1) +FN:28,(anonymous_2) +FN:34,(anonymous_3) +FN:47,(anonymous_4) +FN:69,(anonymous_5) +FNF:5 +FNH:5 +FNDA:2,(anonymous_1) +FNDA:2,(anonymous_2) +FNDA:1,(anonymous_3) +FNDA:2,(anonymous_4) +FNDA:3,(anonymous_5) +DA:3,1 +DA:4,1 +DA:5,1 +DA:6,1 +DA:8,1 +DA:10,2 +DA:11,2 +DA:12,2 +DA:16,2 +DA:17,1 +DA:19,2 +DA:20,2 +DA:21,2 +DA:22,2 +DA:23,2 +DA:25,2 +DA:29,2 +DA:35,1 +DA:48,2 +DA:50,2 +DA:52,2 +DA:53,2 +DA:54,2 +DA:56,2 +DA:60,2 +DA:61,1 +DA:63,1 +DA:70,3 +DA:71,3 +DA:73,3 +DA:75,3 +DA:76,4 +DA:77,1 +DA:79,3 +DA:80,3 +DA:81,3 +DA:82,3 +DA:83,3 +DA:87,3 +DA:88,1 +DA:90,2 +DA:96,1 +LF:42 +LH:42 +BRDA:16,1,0,1 +BRDA:16,1,1,1 +BRDA:29,2,0,1 +BRDA:29,2,1,1 +BRDA:60,3,0,1 +BRDA:60,3,1,1 +BRDA:76,4,0,1 +BRDA:76,4,1,3 +BRDA:87,5,0,1 +BRDA:87,5,1,2 +BRF:10 +BRH:10 +end_of_record +TN: +SF:/Users/Nikso/Projects/EC/serverless-webpack/lib/serve.js +FN:9,(anonymous_1) +FN:37,(anonymous_2) +FN:68,(anonymous_3) +FN:88,(anonymous_4) +FN:92,(anonymous_5) +FN:101,(anonymous_6) +FN:124,(anonymous_7) +FNF:7 +FNH:7 +FNDA:5,(anonymous_1) +FNDA:8,(anonymous_2) +FNDA:5,(anonymous_3) +FNDA:12,(anonymous_4) +FNDA:4,(anonymous_5) +FNDA:6,(anonymous_6) +FNDA:1,(anonymous_7) +DA:3,1 +DA:4,1 +DA:5,1 +DA:6,1 +DA:8,1 +DA:10,5 +DA:12,5 +DA:13,5 +DA:14,5 
+DA:15,5 +DA:17,5 +DA:18,3 +DA:19,5 +DA:20,1 +DA:22,4 +DA:23,4 +DA:24,6 +DA:29,6 +DA:34,5 +DA:38,8 +DA:40,8 +DA:42,8 +DA:43,5 +DA:44,5 +DA:45,5 +DA:46,5 +DA:47,2 +DA:49,5 +DA:50,5 +DA:51,5 +DA:52,5 +DA:53,2 +DA:54,2 +DA:56,5 +DA:57,5 +DA:61,5 +DA:65,8 +DA:69,5 +DA:70,5 +DA:71,5 +DA:72,3 +DA:73,3 +DA:74,4 +DA:75,2 +DA:76,3 +DA:77,2 +DA:85,5 +DA:89,12 +DA:93,4 +DA:94,1 +DA:95,1 +DA:96,1 +DA:97,1 +DA:102,6 +DA:103,2 +DA:104,2 +DA:113,2 +DA:114,2 +DA:115,2 +DA:116,1 +DA:118,1 +DA:125,1 +LF:62 +LH:62 +BRDA:19,1,0,1 +BRDA:19,1,1,4 +BRDA:46,2,0,2 +BRDA:46,2,1,3 +BRDA:52,3,0,2 +BRDA:52,3,1,3 +BRDA:76,4,0,2 +BRDA:76,4,1,1 +BRDA:89,5,0,12 +BRDA:89,5,1,11 +BRDA:115,6,0,1 +BRDA:115,6,1,1 +BRF:12 +BRH:12 +end_of_record diff --git a/node_modules/serverless-webpack/index.js b/node_modules/serverless-webpack/index.js new file mode 100644 index 0000000..bdb70fd --- /dev/null +++ b/node_modules/serverless-webpack/index.js @@ -0,0 +1,124 @@ +'use strict'; + +const BbPromise = require('bluebird'); + +const validate = require('./lib/validate'); +const compile = require('./lib/compile'); +const cleanup = require('./lib/cleanup'); +const run = require('./lib/run'); +const serve = require('./lib/serve'); +const packExternalModules = require('./lib/packExternalModules'); + +class ServerlessWebpack { + constructor(serverless, options) { + this.serverless = serverless; + this.options = options; + + Object.assign( + this, + validate, + compile, + cleanup, + run, + serve, + packExternalModules + ); + + this.commands = { + webpack: { + usage: 'Bundle with Webpack', + lifecycleEvents: [ + 'validate', + 'compile', + ], + options: { + out: { + usage: 'Path to output directory', + shortcut: 'o', + }, + }, + commands: { + invoke: { + usage: 'Run a function locally from the webpack output bundle', + lifecycleEvents: [ + 'invoke', + ], + options: { + function: { + usage: 'Name of the function', + shortcut: 'f', + required: true, + }, + path: { + usage: 'Path to JSON file holding input data', + shortcut: 'p', + }, + }, + }, + watch: { + usage: 'Run a function from the webpack output bundle every time the source is changed', + lifecycleEvents: [ + 'watch', + ], + options: { + function: { + usage: 'Name of the function', + shortcut: 'f', + required: true, + }, + path: { + usage: 'Path to JSON file holding input data', + shortcut: 'p', + }, + }, + }, + serve: { + usage: 'Simulate the API Gateway and serves lambdas locally', + lifecycleEvents: [ + 'serve', + ], + options: { + port: { + usage: 'The local server port', + shortcut: 'p', + }, + }, + }, + }, + }, + }; + + this.hooks = { + 'before:deploy:createDeploymentArtifacts': () => BbPromise.bind(this) + .then(this.validate) + .then(this.compile) + .then(this.packExternalModules), + + 'after:deploy:createDeploymentArtifacts': () => BbPromise.bind(this) + .then(this.cleanup), + + 'webpack:validate': () => BbPromise.bind(this) + .then(this.validate), + + 'webpack:compile': () => BbPromise.bind(this) + .then(this.compile) + .then(this.packExternalModules), + + 'webpack:invoke:invoke': () => BbPromise.bind(this) + .then(this.validate) + .then(this.compile) + .then(this.run) + .then(out => this.serverless.cli.consoleLog(out)), + + 'webpack:watch:watch': () => BbPromise.bind(this) + .then(this.validate) + .then(this.watch), + + 'webpack:serve:serve': () => BbPromise.bind(this) + .then(this.validate) + .then(this.serve), + }; + } +} + +module.exports = ServerlessWebpack; diff --git a/node_modules/serverless-webpack/lib/cleanup.js b/node_modules/serverless-webpack/lib/cleanup.js new file 
mode 100644 index 0000000..6fe842f --- /dev/null +++ b/node_modules/serverless-webpack/lib/cleanup.js @@ -0,0 +1,43 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const path = require('path'); +const fse = require('fs-extra'); + +module.exports = { + cleanup() { + const webpackOutputPath = this.webpackOutputPath; + + const moveArtifact = new BbPromise((resolve, reject) => { + if (this.originalServicePath) { + this.serverless.config.servicePath = this.originalServicePath; + fse.copy( + path.join(webpackOutputPath, '.serverless'), + path.join(this.serverless.config.servicePath, '.serverless'), + (err) => { + if (err) { + reject(err); + } else { + this.serverless.service.package.artifact = path.join( + this.serverless.config.servicePath, + '.serverless', + path.basename(this.serverless.service.package.artifact) + ); + resolve(); + } + } + ); + } else { + resolve(); + } + }); + + return moveArtifact + .then(() => { + if (this.serverless.utils.dirExistsSync(webpackOutputPath)) { + fse.removeSync(webpackOutputPath); + } + }) + .then(() => BbPromise.resolve()); + }, +}; diff --git a/node_modules/serverless-webpack/lib/compile.js b/node_modules/serverless-webpack/lib/compile.js new file mode 100644 index 0000000..259c125 --- /dev/null +++ b/node_modules/serverless-webpack/lib/compile.js @@ -0,0 +1,33 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const webpack = require('webpack'); + +module.exports = { + compile() { + this.serverless.cli.log('Bundling with Webpack...'); + + const compiler = webpack(this.webpackConfig); + + return BbPromise + .fromCallback(cb => compiler.run(cb)) + .then(stats => { + + this.serverless.cli.consoleLog(stats.toString({ + colors: true, + hash: false, + version: false, + chunks: false, + children: false + })); + if (stats.compilation.errors.length) { + throw new Error('Webpack compilation error, see above'); + } + const outputPath = stats.compilation.compiler.outputPath; + this.webpackOutputPath = outputPath; + this.originalServicePath = this.serverless.config.servicePath; + this.serverless.config.servicePath = outputPath; + return stats; + }); + }, +}; diff --git a/node_modules/serverless-webpack/lib/packExternalModules.js b/node_modules/serverless-webpack/lib/packExternalModules.js new file mode 100644 index 0000000..3843051 --- /dev/null +++ b/node_modules/serverless-webpack/lib/packExternalModules.js @@ -0,0 +1,115 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const fs = require('fs'); +const path = require('path'); +const npm = require('npm-programmatic'); + +function getProdModules(externalModules, packagePath) { + + const packageJson = require(path.join(process.cwd(), packagePath)); + + const prodModules = []; + + // only process the module stated in dependencies section + if (!packageJson.dependencies) { + return [] + } + + externalModules.forEach(module => { + + const moduleVersion = packageJson.dependencies[module]; + + if (moduleVersion) { + prodModules.push(`${module}@${moduleVersion}`); + } + }); + + return prodModules; +} + +function getExternalModuleName(module) { + + const path = /^external "(.*)"$/.exec(module.identifier())[1]; + + + const pathComponents = path.split('/'); + + const main = pathComponents[0]; + + // this is a package within a namespace + if (main.charAt(0) == '@') { + return `${main}/${pathComponents[1]}` + } + + return main +} + +function isExternalModule(module) { + return module.identifier().indexOf('external ') === 0; +} + +function getExternalModules(stats) { + + const externals = new 
Set(); + + stats.compilation.chunks.forEach(function(chunk) { + // Explore each module within the chunk (built inputs): + chunk.modules.forEach(function(module) { + // Explore each source file path that was included into the module: + if (isExternalModule(module)) { + externals.add(getExternalModuleName(module)); + } + }); + }); + + return Array.from(externals); +} + +module.exports = { + packExternalModules(stats) { + + const includes = ( + this.serverless.service.custom && + this.serverless.service.custom.webpackIncludeModules + ); + + return BbPromise.resolve().then(() => { + + if (!includes) { + return; + } + + const packagePath = includes.packagePath || './package.json'; + + const externalModules = getExternalModules(stats); + + // this plugin will only install modules stated in dependencies section of package.json + const prodModules = getProdModules(externalModules, packagePath); + + if (prodModules.length === 0) { + return; + } + + this.serverless.cli.log('Packing external modules: ' + prodModules.join(", ")); + + const tmpPackageJson = path.join(this.serverless.config.servicePath, 'package.json'); + + // create a temp package.json in dist directory so that we can install the dependencies later. + fs.writeFileSync(tmpPackageJson, "{}"); + + return new BbPromise((resolve, reject) => { + npm.install(prodModules, { + cwd: this.serverless.config.servicePath, + save: true + }).then(() => { + // fs.unlink(tmpPackageJson); + resolve() + }).catch(e => { + // fs.unlink(tmpPackageJson); + reject(e); + }) + }) + }); + } +}; diff --git a/node_modules/serverless-webpack/lib/run.js b/node_modules/serverless-webpack/lib/run.js new file mode 100644 index 0000000..a417233 --- /dev/null +++ b/node_modules/serverless-webpack/lib/run.js @@ -0,0 +1,98 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const path = require('path'); +const webpack = require('webpack'); +const utils = require('./utils'); + +module.exports = { + loadHandler(stats, functionId, purge) { + const handler = this.serverless.service.functions[functionId].handler.split('.'); + const moduleFileName = `${handler[0]}.js`; + const handlerFilePath = path.join( + path.resolve(stats.compilation.options.output.path), + moduleFileName + ); + if (purge) { + utils.purgeCache(handlerFilePath); + } + const module = require(handlerFilePath); + const functionObjectPath = handler.slice(1); + let func = module; + for (let p of functionObjectPath) { + func = func[p]; + } + return func; + }, + + getEvent() { + return this.options.path + ? 
this.serverless.utils.readFileSync(this.options.path) + : null; // TODO use get-stdin instead + }, + + getContext(functionName) { + return { + awsRequestId: utils.guid(), + invokeid: utils.guid(), + logGroupName: `/aws/lambda/${functionName}`, + logStreamName: '2016/02/14/[HEAD]13370a84ca4ed8b77c427af260', + functionVersion: '$LATEST', + isDefaultFunctionVersion: true, + functionName: functionName, + memoryLimitInMB: '1024', + }; + }, + + run(stats) { + const functionName = this.options.function; + + this.serverless.cli.log(`Run function ${functionName}...`); + + const handler = this.loadHandler(stats, functionName); + const event = this.getEvent(); + const context = this.getContext(functionName); + + return new BbPromise((resolve, reject) => handler( + event, + context, + (err, res) => { + if (err) { + reject(err); + } else { + resolve(res); + } + } + )); + }, + + watch() { + const functionName = this.options.function; + this.serverless.cli.log(`Watch function ${functionName}...`); + + const compiler = webpack(this.webpackConfig); + + compiler.watch({}, (err, stats) => { + if (err) { + throw err; + } + this.serverless.cli.log(`Run function ${functionName}...`); + const handler = this.loadHandler(stats, functionName, true); + const event = this.getEvent(); + const context = this.getContext(functionName); + handler( + event, + context, + (err, res) => { + if (err) { + throw err; + } else { + this.serverless.cli.consoleLog(res); + } + } + ) + }); + + return BbPromise.resolve(); + }, +}; diff --git a/node_modules/serverless-webpack/lib/serve.js b/node_modules/serverless-webpack/lib/serve.js new file mode 100644 index 0000000..279d09f --- /dev/null +++ b/node_modules/serverless-webpack/lib/serve.js @@ -0,0 +1,127 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const webpack = require('webpack'); +const express = require('express'); +const bodyParser = require('body-parser'); + +module.exports = { + serve() { + this.serverless.cli.log('Serving functions...'); + + const compiler = webpack(this.webpackConfig); + const funcConfs = this._getFuncConfigs(); + const app = this._newExpressApp(funcConfs); + const port = this._getPort(); + + app.listen(port, () => + compiler.watch({}, (err, stats) => { + if (err) { + throw err; + } + const loadedModules = []; + for (let funcConf of funcConfs) { + funcConf.handlerFunc = this.loadHandler( + stats, + funcConf.id, + loadedModules.indexOf(funcConf.moduleName) < 0 + ); + loadedModules.push(funcConf.moduleName); + } + }) + ); + + return BbPromise.resolve(); + }, + + _newExpressApp(funcConfs) { + const app = express(); + + app.use(bodyParser.json({ limit: '5mb' })); + + for (let funcConf of funcConfs) { + for (let httpEvent of funcConf.events) { + const method = httpEvent.method.toLowerCase(); + let endpoint = `/${httpEvent.path}`; + if (this.options.stage) { + endpoint = `/${this.options.stage}${endpoint}`; + } + const path = endpoint.replace(/\{(.+?)\}/g, ':$1'); + let handler = this._handlerBase(funcConf); + let optionsHandler = this._optionsHandler; + if (httpEvent.cors) { + handler = this._handlerAddCors(handler); + optionsHandler = this._handlerAddCors(optionsHandler); + } + app.options(path, optionsHandler); + app[method]( + path, + handler + ); + this.serverless.cli.consoleLog(` ${method.toUpperCase()} - http://localhost:${this._getPort()}${endpoint}`); + } + } + + return app; + }, + + _getFuncConfigs() { + const funcConfs = []; + const inputfuncConfs = this.serverless.service.functions; + for (let funcName in inputfuncConfs) { + const 
funcConf = inputfuncConfs[funcName]; + const httpEvents = funcConf.events + .filter(e => e.hasOwnProperty('http')) + .map(e => e.http); + if (httpEvents.length > 0) { + funcConfs.push(Object.assign({}, funcConf, { + id: funcName, + events: httpEvents, + moduleName: funcConf.handler.split('.')[0], + handlerFunc: null, + })); + } + } + return funcConfs; + }, + + _getPort() { + return this.options.port || 8000; + }, + + _handlerAddCors(handler) { + return (req, res, next) => { + res.header('Access-Control-Allow-Origin', '*'); + res.header('Access-Control-Allow-Methods', 'GET,PUT,HEAD,PATCH,POST,DELETE,OPTIONS'); + res.header('Access-Control-Allow-Headers', 'Authorization,Content-Type,x-amz-date,x-amz-security-token'); + handler(req, res, next); + }; + }, + + _handlerBase(funcConf) { + return (req, res) => { + const func = funcConf.handlerFunc; + const event = { + method: req.method, + headers: req.headers, + body: req.body, + path: req.params, + query: req.query, + // principalId, + // stageVariables, + }; + const context = this.getContext(funcConf.id); + func(event, context, (err, resp) => { + if (err) { + res.status(500).send(err); + } else { + res.status(200).send(resp); + } + }); + } + }, + + _optionsHandler(req, res) { + res.sendStatus(200); + }, +}; diff --git a/node_modules/serverless-webpack/lib/utils.js b/node_modules/serverless-webpack/lib/utils.js new file mode 100644 index 0000000..38aca3d --- /dev/null +++ b/node_modules/serverless-webpack/lib/utils.js @@ -0,0 +1,39 @@ +'use strict'; + +function guid() { + function s4() { + return Math.floor((1 + Math.random()) * 0x10000) + .toString(16) + .substring(1); + } + return s4() + s4() + '-' + s4() + '-' + s4() + '-' + s4() + '-' + s4() + s4() + s4(); +} + +function purgeCache(moduleName) { + searchCache(moduleName, function (mod) { + delete require.cache[mod.id]; + }); + Object.keys(module.constructor._pathCache).forEach(function(cacheKey) { + if (cacheKey.indexOf(moduleName)>0) { + delete module.constructor._pathCache[cacheKey]; + } + }); +} + +function searchCache(moduleName, callback) { + var mod = require.resolve(moduleName); + if (mod && ((mod = require.cache[mod]) !== undefined)) { + (function traverse(mod) { + mod.children.forEach(function (child) { + traverse(child); + }); + callback(mod); + }(mod)); + } +} + +module.exports = { + guid, + purgeCache, + searchCache, +}; diff --git a/node_modules/serverless-webpack/lib/validate.js b/node_modules/serverless-webpack/lib/validate.js new file mode 100644 index 0000000..5c2e2ee --- /dev/null +++ b/node_modules/serverless-webpack/lib/validate.js @@ -0,0 +1,53 @@ +'use strict'; + +const BbPromise = require('bluebird'); +const path = require('path'); +const fse = require('fs-extra'); + +module.exports = { + validate() { + this.webpackConfig = ( + this.serverless.service.custom && + this.serverless.service.custom.webpack || + 'webpack.config.js' + ); + + if (typeof this.webpackConfig === 'string') { + const webpackConfigFilePath = path.join(this.serverless.config.servicePath, this.webpackConfig); + if (!this.serverless.utils.fileExistsSync(webpackConfigFilePath)) { + throw new this.serverless.classes + .Error('The webpack plugin could not find the configuration file at: ' + webpackConfigFilePath); + } + this.webpackConfig = require(webpackConfigFilePath); + } + + // Default context + if (!this.webpackConfig.context) { + this.webpackConfig.context = this.serverless.config.servicePath; + } + + // Default output + if (!this.webpackConfig.output) { + const outputPath = 
path.join(this.serverless.config.servicePath, '.webpack'); + const outputFilename = ( + Array.isArray(this.webpackConfig.entry) + ? this.webpackConfig.entry[this.webpackConfig.entry.length - 1] + : this.webpackConfig.entry + ) || 'handler.js'; + this.webpackConfig.output = { + libraryTarget: 'commonjs', + path: outputPath, + filename: outputFilename, + }; + } + + // Custom output path + if (this.options.out) { + this.webpackConfig.output.path = path.join(this.serverless.config.servicePath, this.options.out); + } + + fse.removeSync(this.webpackConfig.output.path); + + return BbPromise.resolve(); + }, +}; diff --git a/node_modules/serverless-webpack/node_modules/.bin/webpack b/node_modules/serverless-webpack/node_modules/.bin/webpack new file mode 120000 index 0000000..8a1900f --- /dev/null +++ b/node_modules/serverless-webpack/node_modules/.bin/webpack @@ -0,0 +1 @@ +../../../webpack/bin/webpack.js \ No newline at end of file diff --git a/node_modules/serverless-webpack/package.json b/node_modules/serverless-webpack/package.json new file mode 100644 index 0000000..873e73f --- /dev/null +++ b/node_modules/serverless-webpack/package.json @@ -0,0 +1,46 @@ +{ + "name": "serverless-webpack", + "version": "1.0.0-rc.2", + "description": "Serverless plugin to bundle your javascript with Webpack", + "main": "index.js", + "author": "Nicola Peduzzi (http://elastic-coders.com)", + "repository": { + "type": "git", + "url": "git+https://github.com/elastic-coders/serverless-webpack.git" + }, + "keywords": [ + "serverless", + "1.0", + "webpack", + "babel", + "typescript", + "es6", + "plugin", + "runtime" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/elastic-coders/serverless-webpack/issues" + }, + "homepage": "https://github.com/elastic-coders/serverless-webpack#readme", + "scripts": { + "test": "istanbul cover _mocha tests/all -- -R spec --recursive" + }, + "dependencies": { + "bluebird": "^3.4.0", + "body-parser": "^1.15.2", + "express": "^4.14.0", + "fs-extra": "^0.26.7", + "npm-programmatic": "0.0.5", + "webpack": "^1.13.1" + }, + "devDependencies": { + "chai": "^3.5.0", + "istanbul": "^0.4.4", + "mocha": "^3.0.2", + "mockery": "^1.7.0", + "serverless": "^1.0.0-beta.2", + "sinon": "^1.17.5", + "sinon-chai": "^2.8.0" + } +} diff --git a/node_modules/set-blocking/CHANGELOG.md b/node_modules/set-blocking/CHANGELOG.md new file mode 100644 index 0000000..03bf591 --- /dev/null +++ b/node_modules/set-blocking/CHANGELOG.md @@ -0,0 +1,26 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
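
Stepping back to the serverless-webpack plugin files added above: `lib/validate.js` looks for `custom.webpack` in serverless.yml (defaulting to a `webpack.config.js` in the service path) and fills in `context`, an `output.path` of `.webpack`, and `output.libraryTarget: 'commonjs'` when they are missing, while `lib/packExternalModules.js` only installs external modules when `custom.webpackIncludeModules` is set. The sketch below is a minimal, illustrative config those defaults would accept; the `handler.js` entry name is only the plugin's own fallback filename, not something this diff prescribes.

```js
// Hypothetical webpack.config.js for the vendored serverless-webpack plugin.
// context, output.path ('.webpack') and output.libraryTarget ('commonjs') would
// be supplied by lib/validate.js if omitted; they are spelled out here for clarity.
var path = require('path');

module.exports = {
  entry: './handler.js',        // assumed handler module, illustrative only
  target: 'node',               // bundle for the Node.js Lambda runtime
  externals: ['aws-sdk'],       // already available in the Lambda environment
  output: {
    libraryTarget: 'commonjs',
    path: path.join(__dirname, '.webpack'),
    filename: 'handler.js',     // keep the compiled file name aligned with the handler
  },
};
```
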
+ + +# [2.0.0](https://github.com/yargs/set-blocking/compare/v1.0.0...v2.0.0) (2016-05-17) + + +### Features + +* add an isTTY check ([#3](https://github.com/yargs/set-blocking/issues/3)) ([66ce277](https://github.com/yargs/set-blocking/commit/66ce277)) + + +### BREAKING CHANGES + +* stdio/stderr will not be set to blocking if isTTY === false + + + + +# 1.0.0 (2016-05-14) + + +### Features + +* implemented shim for stream._handle.setBlocking ([6bde0c0](https://github.com/yargs/set-blocking/commit/6bde0c0)) diff --git a/node_modules/set-blocking/LICENSE.txt b/node_modules/set-blocking/LICENSE.txt new file mode 100644 index 0000000..836440b --- /dev/null +++ b/node_modules/set-blocking/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/set-blocking/README.md b/node_modules/set-blocking/README.md new file mode 100644 index 0000000..e93b420 --- /dev/null +++ b/node_modules/set-blocking/README.md @@ -0,0 +1,31 @@ +# set-blocking + +[![Build Status](https://travis-ci.org/yargs/set-blocking.svg)](https://travis-ci.org/yargs/set-blocking) +[![NPM version](https://img.shields.io/npm/v/set-blocking.svg)](https://www.npmjs.com/package/set-blocking) +[![Coverage Status](https://coveralls.io/repos/yargs/set-blocking/badge.svg?branch=)](https://coveralls.io/r/yargs/set-blocking?branch=master) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +set blocking `stdio` and `stderr` ensuring that terminal output does not truncate. + +```js +const setBlocking = require('set-blocking') +setBlocking(true) +console.log(someLargeStringToOutput) +``` + +## Historical Context/Word of Warning + +This was created as a shim to address the bug discussed in [node #6456](https://github.com/nodejs/node/issues/6456). This bug crops up on +newer versions of Node.js (`0.12+`), truncating terminal output. + +You should be mindful of the side-effects caused by using `set-blocking`: + +* if your module sets blocking to `true`, it will effect other modules + consuming your library. In [yargs](https://github.com/yargs/yargs/blob/master/yargs.js#L653) we only call + `setBlocking(true)` once we already know we are about to call `process.exit(code)`. +* this patch will not apply to subprocesses spawned with `isTTY = true`, this is + the [default `spawn()` behavior](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). 
+ +## License + +ISC diff --git a/node_modules/set-blocking/index.js b/node_modules/set-blocking/index.js new file mode 100644 index 0000000..6f78774 --- /dev/null +++ b/node_modules/set-blocking/index.js @@ -0,0 +1,7 @@ +module.exports = function (blocking) { + [process.stdout, process.stderr].forEach(function (stream) { + if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') { + stream._handle.setBlocking(blocking) + } + }) +} diff --git a/node_modules/set-blocking/package.json b/node_modules/set-blocking/package.json new file mode 100644 index 0000000..c082db7 --- /dev/null +++ b/node_modules/set-blocking/package.json @@ -0,0 +1,42 @@ +{ + "name": "set-blocking", + "version": "2.0.0", + "description": "set blocking stdio and stderr ensuring that terminal output does not truncate", + "main": "index.js", + "scripts": { + "pretest": "standard", + "test": "nyc mocha ./test/*.js", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "version": "standard-version" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yargs/set-blocking.git" + }, + "keywords": [ + "flush", + "terminal", + "blocking", + "shim", + "stdio", + "stderr" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/yargs/set-blocking/issues" + }, + "homepage": "https://github.com/yargs/set-blocking#readme", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "mocha": "^2.4.5", + "nyc": "^6.4.4", + "standard": "^7.0.1", + "standard-version": "^2.2.1" + }, + "files": [ + "index.js", + "LICENSE.txt" + ] +} \ No newline at end of file diff --git a/node_modules/set-immediate-shim/index.js b/node_modules/set-immediate-shim/index.js new file mode 100644 index 0000000..e690485 --- /dev/null +++ b/node_modules/set-immediate-shim/index.js @@ -0,0 +1,7 @@ +'use strict'; +module.exports = typeof setImmediate === 'function' ? 
setImmediate : + function setImmediate() { + var args = [].slice.apply(arguments); + args.splice(1, 0, 0); + setTimeout.apply(null, args); + }; diff --git a/node_modules/set-immediate-shim/package.json b/node_modules/set-immediate-shim/package.json new file mode 100644 index 0000000..2e9a451 --- /dev/null +++ b/node_modules/set-immediate-shim/package.json @@ -0,0 +1,34 @@ +{ + "name": "set-immediate-shim", + "version": "1.0.1", + "description": "Simple setImmediate shim", + "license": "MIT", + "repository": "sindresorhus/set-immediate-shim", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "setImmediate", + "immediate", + "setTimeout", + "timeout", + "shim", + "polyfill", + "ponyfill" + ], + "devDependencies": { + "ava": "0.0.4", + "require-uncached": "^1.0.2" + } +} diff --git a/node_modules/set-immediate-shim/readme.md b/node_modules/set-immediate-shim/readme.md new file mode 100644 index 0000000..4ec864f --- /dev/null +++ b/node_modules/set-immediate-shim/readme.md @@ -0,0 +1,31 @@ +# set-immediate-shim [![Build Status](https://travis-ci.org/sindresorhus/set-immediate-shim.svg?branch=master)](https://travis-ci.org/sindresorhus/set-immediate-shim) + +> Simple [`setImmediate`](https://developer.mozilla.org/en-US/docs/Web/API/Window.setImmediate) shim + + +## Install + +``` +$ npm install --save set-immediate-shim +``` + + +## Usage + +```js +var setImmediateShim = require('set-immediate-shim'); + +setImmediateShim(function () { + console.log('2'); +}); + +console.log('1'); + +//=> 1 +//=> 2 +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/setprototypeof/LICENSE b/node_modules/setprototypeof/LICENSE new file mode 100644 index 0000000..61afa2f --- /dev/null +++ b/node_modules/setprototypeof/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Wes Todd + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION +OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/setprototypeof/README.md b/node_modules/setprototypeof/README.md new file mode 100644 index 0000000..01d7947 --- /dev/null +++ b/node_modules/setprototypeof/README.md @@ -0,0 +1,21 @@ +# Polyfill for `Object.setPrototypeOf` + +A simple cross platform implementation to set the prototype of an instianted object. Supports all modern browsers and at least back to IE8. 
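
One caveat that follows from the implementation (see `index.js` below): when neither `Object.setPrototypeOf` nor a mutable `__proto__` is available, the shim falls back to copying properties onto the object, so the prototype link is not live. An illustrative sketch of that difference:

```javascript
var setPrototypeOf = require('setprototypeof');

var proto = { greet: function () { return 'hi'; } };
var obj = {};
setPrototypeOf(obj, proto);

proto.later = function () { return 'added afterwards'; };
// obj.later() works where a real prototype link was set,
// but is undefined on engines that used the property-copy fallback.
obj.greet(); // 'hi' everywhere
```
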
+ +## Usage: + +``` +$ npm install --save setprototypeof +``` + +```javascript +var setPrototypeOf = require('setprototypeof'); + +var obj = {}; +setPrototypeOf(obj, { + foo: function() { + return 'bar'; + } +}); +obj.foo(); // bar +``` diff --git a/node_modules/setprototypeof/index.js b/node_modules/setprototypeof/index.js new file mode 100644 index 0000000..086a879 --- /dev/null +++ b/node_modules/setprototypeof/index.js @@ -0,0 +1,11 @@ +module.exports = Object.setPrototypeOf || ({__proto__:[]} instanceof Array ? setProtoOf : mixinProperties); + +function setProtoOf(obj, proto) { + obj.__proto__ = proto; +} + +function mixinProperties(obj, proto) { + for (var prop in proto) { + obj[prop] = proto[prop]; + } +} diff --git a/node_modules/setprototypeof/package.json b/node_modules/setprototypeof/package.json new file mode 100644 index 0000000..eae132f --- /dev/null +++ b/node_modules/setprototypeof/package.json @@ -0,0 +1,24 @@ +{ + "name": "setprototypeof", + "version": "1.0.1", + "description": "A small polyfill for Object.setprototypeof", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "https://github.com/wesleytodd/setprototypeof.git" + }, + "keywords": [ + "polyfill", + "object", + "setprototypeof" + ], + "author": "Wes Todd", + "license": "ISC", + "bugs": { + "url": "https://github.com/wesleytodd/setprototypeof/issues" + }, + "homepage": "https://github.com/wesleytodd/setprototypeof" +} diff --git a/node_modules/sha.js/.travis.yml b/node_modules/sha.js/.travis.yml new file mode 100644 index 0000000..7a589da --- /dev/null +++ b/node_modules/sha.js/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.10" + diff --git a/node_modules/sha.js/LICENSE b/node_modules/sha.js/LICENSE new file mode 100644 index 0000000..92ba9d3 --- /dev/null +++ b/node_modules/sha.js/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2013-2014 sha.js contributors + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sha.js/README.md b/node_modules/sha.js/README.md new file mode 100644 index 0000000..8b1024f --- /dev/null +++ b/node_modules/sha.js/README.md @@ -0,0 +1,47 @@ +# sha.js + +Streamable SHA1 hash in pure javascript. 
+ +[![build status](https://secure.travis-ci.org/dominictarr/sha.js.png)](http://travis-ci.org/dominictarr/sha.js) + +[![testling badge](https://ci.testling.com/dominictarr/sha.js.png)](https://ci.testling.com/dominictarr/sha.js) + +## Example + +``` js +var createHash = require('sha.js') + +var sha1 = createHash('sha1') +var sha256 = createHash('sha256') +var sha512 = createHash'sha512') + +var h = sha1.update('abc', 'utf8').digest('hex') +console.log(h) //a9993e364706816aba3e25717850c26c9cd0d89d +``` + +## supported hashes + +sha.js currently implements: + +* sha1 +* sha256 +* sha512 + +## Note + +Note, this doesn't actually implement a stream, but wrapping this in a stream is trivial. +but is does update incrementally, so you can hash things larger than ram, and also, since it reuses +the typedarrays, it uses a constant amount of memory (except when using base64 or utf8 encoding, +see code comments) + + +## Acknowledgements + +This work is derived from Paul Johnston's ["A JavaScript implementation of the Secure Hash Algorithm"] +(http://pajhome.org.uk/crypt/md5/sha1.html) + + + +## License + +MIT diff --git a/node_modules/sha.js/bin.js b/node_modules/sha.js/bin.js new file mode 100755 index 0000000..ef105f6 --- /dev/null +++ b/node_modules/sha.js/bin.js @@ -0,0 +1,37 @@ +#! /usr/bin/env node + +var createHash = require('./browserify') + +var argv = process.argv.slice(2) + +if(/--help|-h/.test(argv[0])) return usage() + +function stream (alg, s) { + var start = Date.now() + var hash = createHash(alg || 'sha1') + s + .on('data', function (data) { + hash.update(data) + }) + .on('end', function (data) { + if(process.env.DEBUG) + return console.log(hash.digest('hex'), Date.now() - start) + console.log(hash.digest('hex')) + }) +} + +if(!process.stdin.isTTY) { + stream(argv[0], process.stdin) +} else if (argv.length) { + var filename = argv.pop() + var alg = argv.pop() + stream(alg, require('fs').createReadStream(filename)) +} else { + usage() +} + +function usage () { + console.error('sha.js [algorithm=sha1] [filename] # hash filename with algorithm') + console.error('input | sha.js [algorithm=sha1] # hash stdin with algorithm') + console.error('sha.js --help # display this message') +} diff --git a/node_modules/sha.js/browserify.js b/node_modules/sha.js/browserify.js new file mode 100644 index 0000000..7ed7c93 --- /dev/null +++ b/node_modules/sha.js/browserify.js @@ -0,0 +1,12 @@ +var exports = module.exports = function (alg) { + var Alg = exports[alg.toLowerCase()] + if(!Alg) throw new Error(alg + ' is not supported (we accept pull requests)') + return new Alg() +} + +var Buffer = require('buffer/').Buffer +var Hash = require('./hash')(Buffer) + +exports.sha = +exports.sha1 = require('./sha1')(Buffer, Hash) +exports.sha256 = require('./sha256')(Buffer, Hash) diff --git a/node_modules/sha.js/hash.js b/node_modules/sha.js/hash.js new file mode 100644 index 0000000..0179592 --- /dev/null +++ b/node_modules/sha.js/hash.js @@ -0,0 +1,77 @@ +module.exports = function (Buffer) { + + //prototype class for hash functions + function Hash (blockSize, finalSize) { + this._block = new Buffer(blockSize) //new Uint32Array(blockSize/4) + this._finalSize = finalSize + this._blockSize = blockSize + this._len = 0 + this._s = 0 + } + + Hash.prototype.init = function () { + this._s = 0 + this._len = 0 + } + + Hash.prototype.update = function (data, enc) { + if ("string" === typeof data) { + enc = enc || "utf8" + data = new Buffer(data, enc) + } + + var l = this._len += data.length + var s = this._s = (this._s 
|| 0) + var f = 0 + var buffer = this._block + + while (s < l) { + var t = Math.min(data.length, f + this._blockSize - (s % this._blockSize)) + var ch = (t - f) + + for (var i = 0; i < ch; i++) { + buffer[(s % this._blockSize) + i] = data[i + f] + } + + s += ch + f += ch + + if ((s % this._blockSize) === 0) { + this._update(buffer) + } + } + this._s = s + + return this + } + + Hash.prototype.digest = function (enc) { + // Suppose the length of the message M, in bits, is l + var l = this._len * 8 + + // Append the bit 1 to the end of the message + this._block[this._len % this._blockSize] = 0x80 + + // and then k zero bits, where k is the smallest non-negative solution to the equation (l + 1 + k) === finalSize mod blockSize + this._block.fill(0, this._len % this._blockSize + 1) + + if (l % (this._blockSize * 8) >= this._finalSize * 8) { + this._update(this._block) + this._block.fill(0) + } + + // to this append the block which is equal to the number l written in binary + // TODO: handle case where l is > Math.pow(2, 29) + this._block.writeInt32BE(l, this._blockSize - 4) + + var hash = this._update(this._block) || this._hash() + + return enc ? hash.toString(enc) : hash + } + + Hash.prototype._update = function () { + throw new Error('_update must be implemented by subclass') + } + + return Hash +} diff --git a/node_modules/sha.js/hexpp.js b/node_modules/sha.js/hexpp.js new file mode 100644 index 0000000..f6e52f3 --- /dev/null +++ b/node_modules/sha.js/hexpp.js @@ -0,0 +1,38 @@ + + +function toHex (buf, group, wrap, LE) { + buf = buf.buffer || buf + var s = '' + var l = buf.byteLength || buf.length + for(var i = 0; i < l ; i++) { + var byte = (i&0xfffffffc)|(!LE ? i%4 : 3 - i%4) + s = s + ((buf[byte]>>4).toString(16)) + + ((buf[byte]&0xf).toString(16)) + + (group-1==i%group ? ' ' : '') + + (wrap-1==i%wrap ? 
'\n' : '') + } + return s +} + +function reverseByteOrder(n) { + return ( + ((n << 24) & 0xff000000) + | ((n << 8) & 0x00ff0000) + | ((n >> 8) & 0x0000ff00) + | ((n >> 24) & 0x000000ff) + ) +} + +var hexpp = module.exports = function (buffer, opts) { + opts = opts || {} + opts.groups = opts.groups || 4 + opts.wrap = opts.wrap || 16 + return toHex(buffer, opts.groups, opts.wrap, opts.bigendian, opts.ints) +} + +hexpp.defaults = function (opts) { + return function (b) { + return hexpp(b, opts) + } +} + diff --git a/node_modules/sha.js/index.js b/node_modules/sha.js/index.js new file mode 100644 index 0000000..c947f3e --- /dev/null +++ b/node_modules/sha.js/index.js @@ -0,0 +1,12 @@ +var exports = module.exports = function (alg) { + var Alg = exports[alg] + if(!Alg) throw new Error(alg + ' is not supported (we accept pull requests)') + return new Alg() +} + +var Buffer = require('buffer').Buffer +var Hash = require('./hash')(Buffer) + +exports.sha1 = require('./sha1')(Buffer, Hash) +exports.sha256 = require('./sha256')(Buffer, Hash) +exports.sha512 = require('./sha512')(Buffer, Hash) diff --git a/node_modules/sha.js/package.json b/node_modules/sha.js/package.json new file mode 100644 index 0000000..981573c --- /dev/null +++ b/node_modules/sha.js/package.json @@ -0,0 +1,38 @@ +{ + "name": "sha.js", + "description": "streaming sha1 hash in pure javascript", + "version": "2.2.6", + "homepage": "https://github.com/dominictarr/sha.js", + "repository": { + "type": "git", + "url": "git://github.com/dominictarr/sha.js.git" + }, + "dependencies": {}, + "devDependencies": { + "buffer": "~2.3.2", + "typedarray": "0.0.6", + "tape": "~2.3.2" + }, + "bin": "./bin.js", + "scripts": { + "test": "set -e; for t in test/*.js; do node $t; done;" + }, + "author": "Dominic Tarr (dominictarr.com)", + "license": "MIT", + "testling": { + "files": "test/{vectors,write,hash}.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/sha.js/sha1.js b/node_modules/sha.js/sha1.js new file mode 100644 index 0000000..af87d38 --- /dev/null +++ b/node_modules/sha.js/sha1.js @@ -0,0 +1,138 @@ +/* + * A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined + * in FIPS PUB 180-1 + * Version 2.1a Copyright Paul Johnston 2000 - 2002. + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for details. + */ + +var inherits = require('util').inherits + +module.exports = function (Buffer, Hash) { + + var A = 0|0 + var B = 4|0 + var C = 8|0 + var D = 12|0 + var E = 16|0 + + var W = new (typeof Int32Array === 'undefined' ? 
Array : Int32Array)(80) + + var POOL = [] + + function Sha1 () { + if(POOL.length) + return POOL.pop().init() + + if(!(this instanceof Sha1)) return new Sha1() + this._w = W + Hash.call(this, 16*4, 14*4) + + this._h = null + this.init() + } + + inherits(Sha1, Hash) + + Sha1.prototype.init = function () { + this._a = 0x67452301 + this._b = 0xefcdab89 + this._c = 0x98badcfe + this._d = 0x10325476 + this._e = 0xc3d2e1f0 + + Hash.prototype.init.call(this) + return this + } + + Sha1.prototype._POOL = POOL + Sha1.prototype._update = function (X) { + + var a, b, c, d, e, _a, _b, _c, _d, _e + + a = _a = this._a + b = _b = this._b + c = _c = this._c + d = _d = this._d + e = _e = this._e + + var w = this._w + + for(var j = 0; j < 80; j++) { + var W = w[j] = j < 16 ? X.readInt32BE(j*4) + : rol(w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16], 1) + + var t = add( + add(rol(a, 5), sha1_ft(j, b, c, d)), + add(add(e, W), sha1_kt(j)) + ) + + e = d + d = c + c = rol(b, 30) + b = a + a = t + } + + this._a = add(a, _a) + this._b = add(b, _b) + this._c = add(c, _c) + this._d = add(d, _d) + this._e = add(e, _e) + } + + Sha1.prototype._hash = function () { + if(POOL.length < 100) POOL.push(this) + var H = new Buffer(20) + //console.log(this._a|0, this._b|0, this._c|0, this._d|0, this._e|0) + H.writeInt32BE(this._a|0, A) + H.writeInt32BE(this._b|0, B) + H.writeInt32BE(this._c|0, C) + H.writeInt32BE(this._d|0, D) + H.writeInt32BE(this._e|0, E) + return H + } + + /* + * Perform the appropriate triplet combination function for the current + * iteration + */ + function sha1_ft(t, b, c, d) { + if(t < 20) return (b & c) | ((~b) & d); + if(t < 40) return b ^ c ^ d; + if(t < 60) return (b & c) | (b & d) | (c & d); + return b ^ c ^ d; + } + + /* + * Determine the appropriate additive constant for the current iteration + */ + function sha1_kt(t) { + return (t < 20) ? 1518500249 : (t < 40) ? 1859775393 : + (t < 60) ? -1894007588 : -899497514; + } + + /* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + * //dominictarr: this is 10 years old, so maybe this can be dropped?) + * + */ + function add(x, y) { + return (x + y ) | 0 + //lets see how this goes on testling. + // var lsw = (x & 0xFFFF) + (y & 0xFFFF); + // var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + // return (msw << 16) | (lsw & 0xFFFF); + } + + /* + * Bitwise rotate a 32-bit number to the left. + */ + function rol(num, cnt) { + return (num << cnt) | (num >>> (32 - cnt)); + } + + return Sha1 +} diff --git a/node_modules/sha.js/sha256.js b/node_modules/sha.js/sha256.js new file mode 100644 index 0000000..053355f --- /dev/null +++ b/node_modules/sha.js/sha256.js @@ -0,0 +1,147 @@ + +/** + * A JavaScript implementation of the Secure Hash Algorithm, SHA-256, as defined + * in FIPS 180-2 + * Version 2.2-beta Copyright Angel Marin, Paul Johnston 2000 - 2009. 
+ * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * + */ + +var inherits = require('util').inherits + +module.exports = function (Buffer, Hash) { + + var K = [ + 0x428A2F98, 0x71374491, 0xB5C0FBCF, 0xE9B5DBA5, + 0x3956C25B, 0x59F111F1, 0x923F82A4, 0xAB1C5ED5, + 0xD807AA98, 0x12835B01, 0x243185BE, 0x550C7DC3, + 0x72BE5D74, 0x80DEB1FE, 0x9BDC06A7, 0xC19BF174, + 0xE49B69C1, 0xEFBE4786, 0x0FC19DC6, 0x240CA1CC, + 0x2DE92C6F, 0x4A7484AA, 0x5CB0A9DC, 0x76F988DA, + 0x983E5152, 0xA831C66D, 0xB00327C8, 0xBF597FC7, + 0xC6E00BF3, 0xD5A79147, 0x06CA6351, 0x14292967, + 0x27B70A85, 0x2E1B2138, 0x4D2C6DFC, 0x53380D13, + 0x650A7354, 0x766A0ABB, 0x81C2C92E, 0x92722C85, + 0xA2BFE8A1, 0xA81A664B, 0xC24B8B70, 0xC76C51A3, + 0xD192E819, 0xD6990624, 0xF40E3585, 0x106AA070, + 0x19A4C116, 0x1E376C08, 0x2748774C, 0x34B0BCB5, + 0x391C0CB3, 0x4ED8AA4A, 0x5B9CCA4F, 0x682E6FF3, + 0x748F82EE, 0x78A5636F, 0x84C87814, 0x8CC70208, + 0x90BEFFFA, 0xA4506CEB, 0xBEF9A3F7, 0xC67178F2 + ] + + var W = new Array(64) + + function Sha256() { + this.init() + + this._w = W //new Array(64) + + Hash.call(this, 16*4, 14*4) + } + + inherits(Sha256, Hash) + + Sha256.prototype.init = function () { + + this._a = 0x6a09e667|0 + this._b = 0xbb67ae85|0 + this._c = 0x3c6ef372|0 + this._d = 0xa54ff53a|0 + this._e = 0x510e527f|0 + this._f = 0x9b05688c|0 + this._g = 0x1f83d9ab|0 + this._h = 0x5be0cd19|0 + + this._len = this._s = 0 + + return this + } + + function S (X, n) { + return (X >>> n) | (X << (32 - n)); + } + + function R (X, n) { + return (X >>> n); + } + + function Ch (x, y, z) { + return ((x & y) ^ ((~x) & z)); + } + + function Maj (x, y, z) { + return ((x & y) ^ (x & z) ^ (y & z)); + } + + function Sigma0256 (x) { + return (S(x, 2) ^ S(x, 13) ^ S(x, 22)); + } + + function Sigma1256 (x) { + return (S(x, 6) ^ S(x, 11) ^ S(x, 25)); + } + + function Gamma0256 (x) { + return (S(x, 7) ^ S(x, 18) ^ R(x, 3)); + } + + function Gamma1256 (x) { + return (S(x, 17) ^ S(x, 19) ^ R(x, 10)); + } + + Sha256.prototype._update = function(M) { + + var W = this._w + var a, b, c, d, e, f, g, h + var T1, T2 + + a = this._a | 0 + b = this._b | 0 + c = this._c | 0 + d = this._d | 0 + e = this._e | 0 + f = this._f | 0 + g = this._g | 0 + h = this._h | 0 + + for (var j = 0; j < 64; j++) { + var w = W[j] = j < 16 + ? 
M.readInt32BE(j * 4) + : Gamma1256(W[j - 2]) + W[j - 7] + Gamma0256(W[j - 15]) + W[j - 16] + + T1 = h + Sigma1256(e) + Ch(e, f, g) + K[j] + w + + T2 = Sigma0256(a) + Maj(a, b, c); + h = g; g = f; f = e; e = d + T1; d = c; c = b; b = a; a = T1 + T2; + } + + this._a = (a + this._a) | 0 + this._b = (b + this._b) | 0 + this._c = (c + this._c) | 0 + this._d = (d + this._d) | 0 + this._e = (e + this._e) | 0 + this._f = (f + this._f) | 0 + this._g = (g + this._g) | 0 + this._h = (h + this._h) | 0 + + }; + + Sha256.prototype._hash = function () { + var H = new Buffer(32) + + H.writeInt32BE(this._a, 0) + H.writeInt32BE(this._b, 4) + H.writeInt32BE(this._c, 8) + H.writeInt32BE(this._d, 12) + H.writeInt32BE(this._e, 16) + H.writeInt32BE(this._f, 20) + H.writeInt32BE(this._g, 24) + H.writeInt32BE(this._h, 28) + + return H + } + + return Sha256 + +} diff --git a/node_modules/sha.js/sha512.js b/node_modules/sha.js/sha512.js new file mode 100644 index 0000000..1fca318 --- /dev/null +++ b/node_modules/sha.js/sha512.js @@ -0,0 +1,244 @@ +var inherits = require('util').inherits + +module.exports = function (Buffer, Hash) { + var K = [ + 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, + 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 + ] + + var W = new Array(160) + + function Sha512() { + this.init() + this._w = W + + Hash.call(this, 128, 112) + } + + inherits(Sha512, Hash) + + Sha512.prototype.init = function () { + + this._a = 0x6a09e667|0 + this._b = 0xbb67ae85|0 + this._c = 0x3c6ef372|0 + this._d = 0xa54ff53a|0 + this._e = 0x510e527f|0 + this._f = 0x9b05688c|0 + this._g = 0x1f83d9ab|0 + this._h = 0x5be0cd19|0 + + this._al = 0xf3bcc908|0 + 
this._bl = 0x84caa73b|0 + this._cl = 0xfe94f82b|0 + this._dl = 0x5f1d36f1|0 + this._el = 0xade682d1|0 + this._fl = 0x2b3e6c1f|0 + this._gl = 0xfb41bd6b|0 + this._hl = 0x137e2179|0 + + this._len = this._s = 0 + + return this + } + + function S (X, Xl, n) { + return (X >>> n) | (Xl << (32 - n)) + } + + function Ch (x, y, z) { + return ((x & y) ^ ((~x) & z)); + } + + function Maj (x, y, z) { + return ((x & y) ^ (x & z) ^ (y & z)); + } + + Sha512.prototype._update = function(M) { + + var W = this._w + var a, b, c, d, e, f, g, h + var al, bl, cl, dl, el, fl, gl, hl + + a = this._a | 0 + b = this._b | 0 + c = this._c | 0 + d = this._d | 0 + e = this._e | 0 + f = this._f | 0 + g = this._g | 0 + h = this._h | 0 + + al = this._al | 0 + bl = this._bl | 0 + cl = this._cl | 0 + dl = this._dl | 0 + el = this._el | 0 + fl = this._fl | 0 + gl = this._gl | 0 + hl = this._hl | 0 + + for (var i = 0; i < 80; i++) { + var j = i * 2 + + var Wi, Wil + + if (i < 16) { + Wi = W[j] = M.readInt32BE(j * 4) + Wil = W[j + 1] = M.readInt32BE(j * 4 + 4) + + } else { + var x = W[j - 15*2] + var xl = W[j - 15*2 + 1] + var gamma0 = S(x, xl, 1) ^ S(x, xl, 8) ^ (x >>> 7) + var gamma0l = S(xl, x, 1) ^ S(xl, x, 8) ^ S(xl, x, 7) + + x = W[j - 2*2] + xl = W[j - 2*2 + 1] + var gamma1 = S(x, xl, 19) ^ S(xl, x, 29) ^ (x >>> 6) + var gamma1l = S(xl, x, 19) ^ S(x, xl, 29) ^ S(xl, x, 6) + + // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16] + var Wi7 = W[j - 7*2] + var Wi7l = W[j - 7*2 + 1] + + var Wi16 = W[j - 16*2] + var Wi16l = W[j - 16*2 + 1] + + Wil = gamma0l + Wi7l + Wi = gamma0 + Wi7 + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0) + Wil = Wil + gamma1l + Wi = Wi + gamma1 + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0) + Wil = Wil + Wi16l + Wi = Wi + Wi16 + ((Wil >>> 0) < (Wi16l >>> 0) ? 1 : 0) + + W[j] = Wi + W[j + 1] = Wil + } + + var maj = Maj(a, b, c) + var majl = Maj(al, bl, cl) + + var sigma0h = S(a, al, 28) ^ S(al, a, 2) ^ S(al, a, 7) + var sigma0l = S(al, a, 28) ^ S(a, al, 2) ^ S(a, al, 7) + var sigma1h = S(e, el, 14) ^ S(e, el, 18) ^ S(el, e, 9) + var sigma1l = S(el, e, 14) ^ S(el, e, 18) ^ S(e, el, 9) + + // t1 = h + sigma1 + ch + K[i] + W[i] + var Ki = K[j] + var Kil = K[j + 1] + + var ch = Ch(e, f, g) + var chl = Ch(el, fl, gl) + + var t1l = hl + sigma1l + var t1 = h + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 1 : 0) + t1l = t1l + chl + t1 = t1 + ch + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0) + t1l = t1l + Kil + t1 = t1 + Ki + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0) + t1l = t1l + Wil + t1 = t1 + Wi + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0) + + // t2 = sigma0 + maj + var t2l = sigma0l + majl + var t2 = sigma0h + maj + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0) + + h = g + hl = gl + g = f + gl = fl + f = e + fl = el + el = (dl + t1l) | 0 + e = (d + t1 + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0 + d = c + dl = cl + c = b + cl = bl + b = a + bl = al + al = (t1l + t2l) | 0 + a = (t1 + t2 + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0 + } + + this._al = (this._al + al) | 0 + this._bl = (this._bl + bl) | 0 + this._cl = (this._cl + cl) | 0 + this._dl = (this._dl + dl) | 0 + this._el = (this._el + el) | 0 + this._fl = (this._fl + fl) | 0 + this._gl = (this._gl + gl) | 0 + this._hl = (this._hl + hl) | 0 + + this._a = (this._a + a + ((this._al >>> 0) < (al >>> 0) ? 1 : 0)) | 0 + this._b = (this._b + b + ((this._bl >>> 0) < (bl >>> 0) ? 1 : 0)) | 0 + this._c = (this._c + c + ((this._cl >>> 0) < (cl >>> 0) ? 1 : 0)) | 0 + this._d = (this._d + d + ((this._dl >>> 0) < (dl >>> 0) ? 1 : 0)) | 0 + this._e = (this._e + e + ((this._el >>> 0) < (el >>> 0) ? 
1 : 0)) | 0 + this._f = (this._f + f + ((this._fl >>> 0) < (fl >>> 0) ? 1 : 0)) | 0 + this._g = (this._g + g + ((this._gl >>> 0) < (gl >>> 0) ? 1 : 0)) | 0 + this._h = (this._h + h + ((this._hl >>> 0) < (hl >>> 0) ? 1 : 0)) | 0 + } + + Sha512.prototype._hash = function () { + var H = new Buffer(64) + + function writeInt64BE(h, l, offset) { + H.writeInt32BE(h, offset) + H.writeInt32BE(l, offset + 4) + } + + writeInt64BE(this._a, this._al, 0) + writeInt64BE(this._b, this._bl, 8) + writeInt64BE(this._c, this._cl, 16) + writeInt64BE(this._d, this._dl, 24) + writeInt64BE(this._e, this._el, 32) + writeInt64BE(this._f, this._fl, 40) + writeInt64BE(this._g, this._gl, 48) + writeInt64BE(this._h, this._hl, 56) + + return H + } + + return Sha512 + +} diff --git a/node_modules/sha.js/test/hash.js b/node_modules/sha.js/test/hash.js new file mode 100644 index 0000000..801ab87 --- /dev/null +++ b/node_modules/sha.js/test/hash.js @@ -0,0 +1,96 @@ +var hexpp = require('../hexpp').defaults({bigendian: false}) +var tape = require('tape') +var Buffer = require('buffer/').Buffer +var Hash = require('../hash')(Buffer) + +var hex = '0A1B2C3D4E5F6G7H', hexbuf +function equal(t, a, b) { + t.equal(a.length, b.length) + for(var i = 0; i < a.length; i++) + t.equal(a[i], b[i]) +} + +var count16 = { + strings: ['0A1B2C3D4E5F6G7H'], + buffers: [ + hexbuf = new Buffer([ + 48, 65, 49, 66, 50, 67, 51, 68, + 52, 69, 53, 70, 54, 71, 55, 72 + ]), + new Buffer([ + 128, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 128 + ]) + ] + } +var empty = { + strings: [''], + buffers: [ + new Buffer([ + 128, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0 + ]) + ] + } +var hh = 'abcdefhijklmnopq' + +var multi = { + strings: ['abcd', 'efhijk', 'lmnopq'], + buffers: [ + new Buffer('abcdefhijklmnopq', 'ascii'), + new Buffer([ + 128, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 128 + ]) + ] + } + +var long = { + strings: [hex+hex], + buffers: [ + hexbuf, + hexbuf, + new Buffer([ + 128, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 0 + ]) + ] + } + +function makeTest(name, data) { + tape(name, function (t) { + + var h = new Hash(16, 8) + var hash = new Buffer(20) + var n = 2 + var expected = data.buffers.slice() + //t.plan(expected.length + 1) + h._update = function (block) { + var e = expected.shift() + console.log('---block---') + console.log(hexpp(block), block.length) + console.log('---e---') + console.log(hexpp(e), block.length) + console.log(block) + equal(t, block, e) + if(n < 0) + throw new Error('expecting only 2 calls to _update') + + return hash + } + + data.strings.forEach(function (string) { + h.update(string, 'ascii') + }) + + equal(t, h.digest(), hash) + t.end() + + }) +} + +makeTest('Hash#update 1 in 1', count16) +makeTest('empty Hash#update', empty) +makeTest('Hash#update 1 in 3', multi) +makeTest('Hash#update 2 in 1', long) + diff --git a/node_modules/sha.js/test/nist-vectors.json b/node_modules/sha.js/test/nist-vectors.json new file mode 100644 index 0000000..c5fa8b2 --- /dev/null +++ b/node_modules/sha.js/test/nist-vectors.json @@ -0,0 +1,1178 @@ +[ + { + "input": "", + "sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709", + "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "sha512": "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" + }, + { + "input": "JA==", + "sha1": "3cdf2936da2fc556bfa533ab1eb59ce710ac80e5", + "sha256": "09fc96082d34c2dfc1295d92073b5ea1dc8ef8da95f14dfded011ffb96d3e54b", + "sha512": 
"840cfc6285878464c36c9aa819d8373729eda14c3e701fd37afec1d5baa2893944c696fc4017a520abfbb1347b62e6b858211d3ea7c7dd26319601fde119c3b4" + }, + { + "input": "cPA=", + "sha1": "19c1e2048fa7393cfbf2d310ad8209ec11d996e5", + "sha256": "33a633841666a5c291a82bfae65deac5c537d05f9fe926cbb5b7281bf90ad150", + "sha512": "94d5407efa06aa877c84b998abde44caf6948cf6b6475e519baadb41790e3fbbbecf518288d57f5a84a54892482aa76f5d17adc5be176a476b712e9b5d30bb2c" + }, + { + "input": "Dh7w", + "sha1": "ca775d8c80faa6f87fa62beca6ca6089d63b56e5", + "sha256": "8e2cc699f7e677265069f172d4cba15c146e954d7e4f2a8c176576035275b7a0", + "sha512": "41e7f289effb46f2ca50b65f9702acc92a4c238a55ce2451a8fa6ccd46e589323ae9ff6dcec415aa42f238749ce9c43f3a7e6d1eaebc27fc7715895afcb16d8f" + }, + { + "input": "CDh4jw==", + "sha1": "71ac973d0e4b50ae9e5043ff4d615381120a25a0", + "sha256": "8096d72b968a2dbb7ceee163c1981f7f1ec11ee10051b2dc2a8d7601d4e56971", + "sha512": "9115ace4a1fb9ad2e367f24cd73a68347db6103e4429f3ef989643563f1eeb74cb279561c461eb67cf47bdc764c107e4371415dab3f089f0f189edf9cc09a84a" + }, + { + "input": "ED4I/A8=", + "sha1": "a6b5b9f854cfb76701c3bddbf374b3094ea49cba", + "sha256": "2cc06402328f034d1909fa7b95f34cdb5585ce7f9096bc4082c97904921f6304", + "sha512": "f4b4cf07e964c53fcefb0f7d48596beab1722e1d9a70ab74ec73140f0f4b7cd9bce6d7ed8e873639f2c32f2e4828294c6af04267d68c3f27ae065a781d66d9fe" + }, + { + "input": "58ceB+8D", + "sha1": "d87a0ee74e4b9ad72e6847c87bdeeb3d07844380", + "sha256": "99a8d6823b803a8d41ed7c26322b4ac8fdc86ce4457ffaaf8600e99595f1577a", + "sha512": "eb2276d498230c11404628a8dab7a851b1e9b1e529dc92241e8b99b2d11cea4668be552d40af999af815c9c3994da790de94386b955fa762e9913d288ca33937" + }, + { + "input": "4Ptx+PnB/A==", + "sha1": "1976b8dd509fe66bf09c9a8d33534d4ef4f63bfd", + "sha256": "5d73704556bd458af1b90383d98591c1d01894d99b394fb7647d3d0cbd45f9a0", + "sha512": "0d1c37c810abe448b113e196e2afad16dd3dc095a8c28d0c47c023fbd2c5cb032b8b1116efebad9a60a209fc099b8a03c0fe6eb1d7444c2cf01fce4da9ecefad" + }, + { + "input": "/3xgPB+A4gc=", + "sha1": "5a78f439b6db845bb8a558e4ceb106cd7b7ff783", + "sha256": "3b9606c772ea20bf2889732b034f9fd476ceddefe8ec4e3704c5993e38dace1b", + "sha512": "7f3acc0c6821e17e47346b363aec4f45be96963a45253fd6972b7736c45ad8d6bcbe6ffac0a6b919dc3136895c5365e1467add1fee9531f641ec60d5da96489e" + }, + { + "input": "8D/IYIH+Afh/", + "sha1": "f871bce62436c1e280357416695ee2ef9b83695c", + "sha256": "b858d7c61b67e1688c267ca83b57dd0947c4e5acc4eb3d130fbd92222b66a9ab", + "sha512": "64e87ebf5229971b7ba773eb8f9d221a3ade057a293afd1e4ddb20735ea4e5fc908b71e28400afacf3aa58c70f6f72f81173f3990f3f97c9e6f469910fe48ea9" + }, + { + "input": "n8f4H8Hjx8c/AA==", + "sha1": "62b243d1b780e1d31cf1ba2de3f01c72aeea0e47", + "sha256": "87574390294ef6d212b6d8c44ebd5c88e932036dadd0b827c6cb25cd120bfdf6", + "sha512": "ddd79f12cecc64f1936c92a7ab03db23d2c9edf0109187c5687918da836379af37268321c488b0912f734303c77c05296fceb1b35abf2ad651502393211f3ea3" + }, + { + "input": "AH+/38D+An4A+H8=", + "sha1": "1698994a273404848e56e7fda4457b5900de1342", + "sha256": "afe90242f095e967523c12333b0093d4e532a0db0f27dcc25d44d23ffb62094b", + "sha512": "f780834bda7d752773f106df8f99464ffd55dfed8e4df11247e9af8661e62724966a7077af60c1d19d1777e2e83d22ebf357dbafd40b4376dc267a04413c2a35" + }, + { + "input": "AQHAHgP4MAgP8/n/", + "sha1": "056f4cdc02791da7ed1eb2303314f7667518deef", + "sha256": "55731252db2418c49f15d7f0f146c6506589f016a82c72cf8a6276ac6bd123c7", + "sha512": 
"5a3a2555f8affd5dad71dbae0571f47cda35089ff232f43f6f7615bd20353203793177d8bbfdc9ef336406811680a88dd6d46f44a1ff46e99a5631bc34eb5e15" + }, + { + "input": "xAP8D/gBwA/wBhBB/w==", + "sha1": "9fe2da967bd8441eea1c32df68ddaa9dc1fc8e4b", + "sha256": "c2e0ed603bfa67292b78a29264e409a2e0c98482cdb59cf4fe7cafff69853d11", + "sha512": "5737cddc5cd9cb9aca517a4efea88ed34206a1427b1a15323d79d2a731d1981d6eee0d02e9f22dcf44e2ca9c6aab1942d07bcb3a9ca03af388e422175ee476f6" + }, + { + "input": "/weAR/wfwGAwH+A8A/8=", + "sha1": "73a31777b4ace9384efa8bbead45c51a71aba6dd", + "sha256": "517f007a8b65d4197411c35b14edb1340490a9be7a6c66b8c827b1e101a05b5c", + "sha512": "046e506b845967cbdead62c325be7dbb01f69057c65670a31f07ca520bdf12a44696bfa85af1614b41a2c60fb5182518ae4bab01fd459f343bed8223b3999415" + }, + { + "input": "gD+EPv/APw8Af8Af5/wA", + "sha1": "3f9d7c4e2384eddabff5dd8a31e23de3d03f42ac", + "sha256": "76c977fdc97261cd0956ce1319476d314bc57d8691c7884cd0a7ff3cf825c31f", + "sha512": "0f950ae30c7601a170ba2d86025d0f89d9f24ab8ae13db9c1d3345142c7427eda8a997f66533b44176b431e43164b05338b0fe9568a69b85dfde21a28b3c6e6e" + }, + { + "input": "/+B/AYGB/4H/AD4AIH+ADw==", + "sha1": "4814908f72b93ffd011135bee347de9a08da838f", + "sha256": "f10c26637ea8ca2d0898fa661f087f13f174fe0ca1c91862ce3b6127c3430f51", + "sha512": "1d13256c10f5ba75b3ed783c023071805c5cacb622807143d03c41621d883e29573538cabea4c83a277a3a8ff552390ff0f44ffffa6d8a2a51179b03671e7b81" + }, + { + "input": "AD4AcB/gD/r/yD/z/gD/gP8=", + "sha1": "0978374b67a412a3102c5aa0b10e1a6596fc68eb", + "sha256": "679b95521601c0ba63ce882046abd7a8cdb8e78d5c0ee9f38c21dc47ca846195", + "sha512": "799809967e08bbc8ab729a12558d8847f6903bcee6f7689229c7080a8c8088d5b5785ecb14e50487068e7bc21bab9575651ea29b614b51cda348129e106e5be5" + }, + { + "input": "f+/AHnz/4B/+AB/wCP/Af/AA", + "sha1": "44ad6cb618bd935460d46d3f921d87b99ab91c1e", + "sha256": "af369f2162152e43847b4d0c595dcf2d27059563909425f37928bc01090f2f34", + "sha512": "700e168b4a5f1d88788078791580cab12177c12b3d74aabeca82c61c0915cd3396628291bed600c364b41b20350118520c971318b089e26b41f70e1b4db15fc5" + }, + { + "input": "4A+ABwwB/+AD8C/4P+8AeAH+AA==", + "sha1": "02dc989af265b09cf8485640842128dcf95e9f39", + "sha256": "7e1f6f080a60c402bb9c39578f75afc148a0746c656ca243f75038b82304bdf5", + "sha512": "5c85cf188894d27f376dbcce5d8479c79d77d0bf4bc1c591dbc19bf53edf60f22d2999a537c5322430f85dab971f9ddd19f53a32e56199c56f66b427ffa2cf29" + }, + { + "input": "5wAQAPgYD/D/AP+AP8P+8A/8Af8=", + "sha1": "67507b8d497b35d6e99fc01976d73f54aeca75cf", + "sha256": "8d6df02738597d95e2eb9e870d4177339728d9ab8b8d61aa96f0b6d1b5ad6efd", + "sha512": "191f9b5cfed52a1dfe219be6c8c25458570e79927cfbf1940855204ec19da84f0df92219c4ecbce93fea970fb6cf74e091265ae7e5ad69ba62331cdb4388d7e3" + }, + { + "input": "AB/4D/wA/AD/h8APgHv/AA8CAf/A", + "sha1": "1eae0373c1317cb60c36a42a867b716039d441f5", + "sha256": "d0a9699291dead3f6fba3b648c28537a04caea4b96b145802c06125a17c3faba", + "sha512": "9fe65bd1a9b8776c4f99059864f73b6f1c9e54904bf9118cba1d06bb40a961de59eaa8294a67406d5310c40b8ca7285a1d726f24f80df7dcef48e5ba32d9a80f" + }, + { + "input": "AA/wA8f4PgP/gAP/gAf/D/8fg/+AHw==", + "sha1": "9c3834589e5bffac9f50950e0199b3ec2620bec8", + "sha256": "d504026213b322cbcb0dbadd6a1fc6c708825019da9bac7aec973f750cbf2d3d", + "sha512": "0ecef99b7db364703f3a13f19861c0b211b2c450d6ac290136c1a4c2805cf51bd328e456342f44d7be2dc8f2eca657ade47c180d11543b33e615057d12d7b083" + }, + { + "input": "/8AfgD+f+Hg+f/gAPiAEP4B//AAf/gA=", + "sha1": "209f7abc7f3b878ee46cdf3a1fbb9c21c3474f32", + "sha256": 
"66c1a2578b41c3a200296e85d4d30a1876f8ca5cb941ffb1420e04d8e37149a5", + "sha512": "9faa741aaadd8800590fb24964a541da6913de2e7b9b86c27380b21fb2bec8462b5d4f01df914a646549adface74e979482c41dc43868ceb3ab038fb06331faf" + }, + { + "input": "PweA4AfgAPx/wMAPj/CADg4D/7/8Af/g", + "sha1": "05fc054b00d97753a9b3e2da8fbba3ee808cef22", + "sha256": "b8a87b047350912e4861e4aab7d1046d5372797ecea81d187f8e2c117db535dd", + "sha512": "25401e50121bec22ecd77147c28afeb3aed0012eac5ce5319cdedafe50552c3dc41fb1375eb1d26d0c7219ba53e8bff8df7bac099c50e19220f9b13c04e9c139" + }, + { + "input": "//wR/OAOH/+HgB/g//3/wAP/wA8AB/AB/w==", + "sha1": "0c4980ea3a46c757dfbfc5baa38ac6c8e72ddce7", + "sha256": "8d2b52d4d4074d471d037cdf9eeb13c18ef9ce4949fce00d106ef0880f2db5ed", + "sha512": "445cf90cc104716f55a8e9af95b9cc4645978968bccb5509606098ae0b913eb7551d7a87c1acf85a0735b21abf928e3abfb709d7770e2299d255ac7a7011d1ff" + }, + { + "input": "8AfB/gD4AeeA/4A/H3+MABwAD/gH/AD//AA=", + "sha1": "96a460d2972d276928b69864445bea353bdcffd2", + "sha256": "3d182e9928b2433c94255452170e59e3f4cca3dd29ea2e9b01e94e89da595393", + "sha512": "b617e5d54371bb5ec3ae745026c08b4250efc36e74e6a7d60892dee3e605189998d1dc49ed3cc1397beefa04219f035bf28ecdeab7dbeb59c5fe7b32b18e2a59" + }, + { + "input": "AA/4P8BgAH/4/wAD8DwHwH/gP/gBAH4D/8AA", + "sha1": "f3ef04d8fa8c6fa9850f394a4554c080956fa64b", + "sha256": "17c3f5d88ed7f3f62be0e28913357d65916389c1633db8fb62b92e14230d3611", + "sha512": "63a3c524ff190df114dbb6f8483048dd4cf6320a2d4c2928f5b97aeaf9583112d12e3fa6fb76a8994ddc06cb7a349f2d8f9b0b30e6bb72f338d7c6ef3988e3e9" + }, + { + "input": "AA/4AwAf/wAP/gA/AAP/4AfA/wA8f/AB//g//w==", + "sha1": "f2a31d875d1d7b30874d416c4d2ea6baf0ffbafe", + "sha256": "93788128441c894247bd9ccd6fc8af146c0ee76cdbe4e1c5a8dfa81dd0c338b4", + "sha512": "fd256d33a09fca956b374c996c0a22ffbfb47ca3a88723c0274186b095da01c0ba92e89c589fa2fc88b3e614b7928a1f0f101cd5de5425baf4b73b28e305d8cd" + }, + { + "input": "AAHg4B/+AAP8AA//4A//AA4Af/wP/gB4AD//AP8=", + "sha1": "f4942d3b9e9588dcfdc6312a84df75d05f111c20", + "sha256": "c7855ac54d2c5767273eec327efe39fdb3bad74121bdd8d2065484268727539e", + "sha512": "cf7789daccf0b20ced8a6c39ddfd0b53d1806e1447fed350d28212ead062b0d9aba6680c5ab0d85a905dadb5692728b5e4e96f82807265c1a5f908c10dc36b0f" + }, + { + "input": "gEH/w/4AHgAP/+D/gA/gAH/3/wH+Af/f/wAB/+AA", + "sha1": "310207df35b014e4676d30806fa34424813734dd", + "sha256": "d1e1f2aca9cfe8c6460f576661190a8008705ef13207c4c7200a2d6b0605f519", + "sha512": "6e838146cde732b5d72f1709554a578d8ade9182e54827426818cf4ed6a3d9ef1d98f7e23d0365b34c99c0cefb9dd723716ebacd2eab77ba8e9501d12311e910" + }, + { + "input": "+AcA/8B/vgAP/wAD4/D/8AAfgf+AD/+AIAPwA4D//A==", + "sha1": "4da1955b2fa7c7e74e3f47d7360ce530bbf57ca3", + "sha256": "b1843454b0258016558abdbd899319c1fd12d03e0c3d9e882da03de9cb981777", + "sha512": "7181c16eb19dd6c82a178d1d9c9b79a7a21f2b7185787fbd31ff781a178870fd9ef6434b5814edf2b55a3c0d51f799e2f319beac54bfbb0153130898370aa64e" + }, + { + "input": "ADggAH/wAf/+z/4H/8AAf/gfAADAAMAP/z4PwA//gAA=", + "sha1": "74c4bc5b26fb4a08602d40ccec6c6161b6c11478", + "sha256": "2834dcb6957b97fdde61b532d151ee4482bface8714fe36cd072b4783765901d", + "sha512": "2fcb3ddc573861b64f05ea82a1c1abe7ada1d16bf18fa7760d7893d029dd0b60dd181dd052e7b9882c4e5699f72d2a06a5d83b26960123faae6bbb8d75cdf93c" + }, + { + "input": "H/gH//gD4AH//D/4ADgfAD/cAcAE//8AD/wIAgAB8D//", + "sha1": "0b103ce297338dfc7395f7715ee47539b556ddb6", + "sha256": "47d61de59879013f64eb78fa8f6c8b906f7d25b8e1c3ca888f32421749c0d042", + "sha512": 
"e181d464243b380c604e97793333ca188d193357c743d0402990d796f2028f28838a93d717e5e07119e06dd2ba889b2ba775989ed77fa6fd959d565c1f179401" + }, + { + "input": "gAeGAAP/4AA/+AAPgA/4D//gAB+AAH/4wA//8HwEB/8AAA==", + "sha1": "efc72d99e3d2311ce14190c0b726bdc68f4b0821", + "sha256": "855fc59aa873328501ab0b1ce9c60a7d5582662c725605ebe02b64a13e34b3bc", + "sha512": "4c57d718a80f7e0d59285733fe06e763861cfefab394fc2fef53cf7739514e03b0c334c4420720f1bd4ec219da00e3dfc6272eeb4890ab4fb0d405ab26d9dcd0" + }, + { + "input": "Af8AGD4PAAf/wADwH/4HgGAP+AA//jgfwAA/gf/8H+AAP/8=", + "sha1": "660edac0a8f4ce33da0d8dbae597650e97687250", + "sha256": "e05939e8ff4ed77a11522dde249a74841c54970d984e0bb6f77ac64f1fa313b0", + "sha512": "28e62841b7553adf1a3fc1e5e24c0f81df969fd315dbc616a75adde00093472f401aa4840a23afa128b19d345756c74ee4ff2dd79879841258508390233f159c" + }, + { + "input": "8D//wAB/8AA//w/gBw/8fgP/8PwPn8A//8//AAD/wADnAf/4", + "sha1": "fe0a55a988b3b93946a63eb36b23785a5e6efc3e", + "sha256": "078778fed0e382da5d7dd36f585e1f1aa9b92d4caf20b85c0f6dd346de8d3998", + "sha512": "515d0e71faca3297dc1a88774bcf181faa928a4a76481e0adbf0ac2833f6f10c1e55780306c499aa89b4647be727be083ce5f38971e218edbb76d7ee63591bb7" + }, + { + "input": "AAH/gCAAf+AAfgf/+Mf4//AP/gAA4A/gAB//h/8AAfAAf8H//w==", + "sha1": "0cbdf2a5781c59f907513147a0de3cc774b54bf3", + "sha256": "263140810ae46430e19ac1a4a98b6204b63031b282ecb28594bd837268104308", + "sha512": "7113009ad761f953e532a90bd48fa720127539d9b08d1383c09186c23288885c7f841a70ede2485c1d333893347c2f5bf239605b338e57974105427a4235a7b0" + }, + { + "input": "AAB//8AB/n4B//7/8H//z/gH/gAP/8AH//wAPgAH/AB/wAeAD/8=", + "sha1": "663e40fee5a44bfcb1c99ea5935a6b5bc9f583b0", + "sha256": "ed39f65ea0e6cd8fb39bc5d94a1554dfd0002733e01618161d58a7b7dc8be834", + "sha512": "4511276b41b462f8cbf45da6f699d00217a8ee2bb8594c41f33ca39febcf1ab52725c15bad08c6dc1aa5f85e511f218898a574baf7b8e910d6e44158ddfc381d" + }, + { + "input": "//8D/wf4//+AAH/+//4AA//4H/8/+B//AB//D8B/8AH/4AAf/wAA", + "sha1": "00162134256952dd9ae6b51efb159b35c3c138c7", + "sha256": "89830272d1fd54040f9329a39c7f491f15eea851095e0bd2d0bb412baeda7445", + "sha512": "1eb56e081e4b413df323fb7b9b0b31d9c71e438ea8d80d8d0374c2acfec3825492220e9eecdcbedb48c4017dc28bab48dcd6af5a240a865929641b7cee0e67e9" + }, + { + "input": "//8AAP/8AAMP//AB+AAP4f//A+A/H/+AAHwAAf/AAX/+AA4H/+D//w==", + "sha1": "ceb88e4736e354416e2010fc1061b3b53b81664b", + "sha256": "c9b63e8ac2e87494f98c9ece5d74f4540090c286166efdaedc6d59a0f623e5f8", + "sha512": "7f1fd27e9d2aff5bb3b0f5e3e2868b7718f8f27fc4e7e71e71283e1d0563346632bfb36e27710d2129d858db958c34ae51c26cd23cdc5186d9517c9eb5556c17" + }, + { + "input": "wAA//gP8DAAEAf/h4AP/4DAB/wAAPB4BgAH/AEA//gA//4B8Af+AAH8=", + "sha1": "a6a2c4b6bcc41ddc67278f3df4d8d0b9dd7784ef", + "sha256": "509e6b7dacb70bfa62f3964eba990b9c576fd2485c1040fefc8eab5f07269f4d", + "sha512": "152802b0fcc383bf8e9df9ac1ede5bac84475aceb03a3599e70e1588e7b4b2c0b06fd3d299e72c284bf0d8d8d9f6cf80c8eab409567f9b97554c3d9475573977" + }, + { + "input": "P6AAD/+B/8AP8H/4AA/AAH/gAeAABP8AH/4AAf+AB//+AD//wAP/gAA/", + "sha1": "c23d083cd8820b57800a869f5f261d45e02dc55d", + "sha256": "8bb7546f64ce1cab770407de791ab25f7bfbac3e071810310ee674c2824e59ac", + "sha512": "0483e42546dcdd8c225a87308a3b42b4a74a1d0d0c1d5771debea9465483bc04ca267c9203fbcc5ec668ac1de37e77ecc880a78e79f2a618b877f39d42161fa2" + }, + { + "input": "8B//Af+A/8CAB/AAA/+AABgB//wA//wD//8Af8AD/8f/wAPw/4AAP/4AAA==", + "sha1": "e8ac31927b78ddec41a31ca7a44eb7177165e7ab", + "sha256": "ac5bf3cad821e7ad8b2ed7bbfbaaa5e7abb30606afd8ed5d6a18a0d0eba343d3", + "sha512": 
"be54942c8080777f4e33c0688c587d4178714357d67f0e285411a931d32fff84944d86c7d0a776034f3a5214e1471022e68fa2082c4c28110792c5d15fa5d998" + }, + { + "input": "B/G//+AAeAAH4ACAA/A/9wAAOAD+APgP/gAAgD//wf/8AP//j/AAH//wD/8=", + "sha1": "e864ec5dbab0f9ff6984ab6ad43a8c9b81cc9f9c", + "sha256": "f7995f433d17bee25f44c918de82eb147b3bfef24045ff8fb17ffedf559e06b7", + "sha512": "a0e78554a722cca6d512903028d8aa121cd77df3e0779cd0941ade5cbc6080d7e69d0a78e266c7e30505397fa06e7c5b58addcefe12be21fdc8c29075dd3e85c" + }, + { + "input": "ABwAB//8AF4//wAAPP//wD//geBwAB/8AAP/AAB//8AfjA//8P+AB+AQAf//", + "sha1": "cfed6269069417a84d6de2347220f4b858bcd530", + "sha256": "2c465ddc53e88894a2279e30b9e6feb064c66b15dcf5a38722f5c92d65a84bab", + "sha512": "cb370c694eac9caa723acc2f76a91db5f2b25a2d4d420cb36ab7cb39a82e63e474073c84de3789d4342383f85acc4444c0e971c47a4224751a39b5a5aa110eaf" + }, + { + "input": "wAAH/4B//4ABgD//z8D+AP/AH/wB//gA//4P//AGAADAP/+AeP/8AA//8AAP/w==", + "sha1": "d9217bfb46c96348722c3783d29d4b1a3feda38c", + "sha256": "b0ced82dc52c4f9b1dec098a80a23a4a711f3a8c9c3684f0761b0e8a29ba560a", + "sha512": "b1be6bc2581e06b40dd711119535f92f5a1f7b4b5752b519203859f3c15817c91b44a2c99949f22f026c2d34463e794ad556bc59593814289c5651a61613847d" + }, + { + "input": "/+AH//gAf/Af/4AB//gf+AED/+AAA+B4D/8AD/wf+AAP/+AfAAf//AAfA//3//8=", + "sha1": "dec24e5554f79697218d317315fa986229ce3350", + "sha256": "f8dce75572495bc241288c07246acf7a157d462a9c01d1491618f073e57f47e2", + "sha512": "688c2c2e28b3eef624ad1f6d47e13d9a73f9a59b5de509a6f56d2c2575fa80919d51fd9e5832ebc3ef680b7e3573c88e1f95cbca73afbfe4f2004a7887e710e0" + }, + { + "input": "wPgAA/4AP//wAAP8D/+AAOP/+D/+AABz4P/8B//D//4DAABwAAP/+A//4AAf//gA", + "sha1": "83a099df7071437ba5495a5b0bfbfefe1c0ef7f3", + "sha256": "46d89d780f178334d19e02c41d5af2e265e2190896dce94822b99c19adc3ff6f", + "sha512": "1bed6b7e19942b514d17b1c5a112a23f923e458a01bb58a6291664353df37f734accd54477c68311a86e7f1dcd98df4f1fd05271d2880203932ce7cfddff71fa" + }, + { + "input": "//APx//8AD/+AAA//4A/gAA//8AAcAH/wYAD//+AAGH//gP9gD//4AHB//+AAA/+AA==", + "sha1": "aa3198e30891a83e33ce3bfa0587d86a197d4f80", + "sha256": "968954bafff8e2a118d3aedffc6283b30efddbac0af2245195c2a32a665a5d54", + "sha512": "3d8a970a249b71233b9a25f7109d5a99f62c4bcb11c47bc72d26dd46ad853ff682b8e1abb7053b4401461681a86d6f22bca4ce9420cab97c77f601f2463de7af" + }, + { + "input": "//wAA//wD/gAB9+P//gAAf/+AIAA/4Af8AABHAAAP/gAP//v//4Bw4CAAf//wAAH//8=", + "sha1": "9b6acbeb4989cbee7015c7d515a75672ffde3442", + "sha256": "33f78a8a6adc466fed41020fa799aa537cc1c1bb4e938c06a1baec97f7b3c26c", + "sha512": "c618b2550d72c4ba8940c0c38f69d385f1439fd99a5f4b811a3939fdfeaff80490576f1557dc0ec354a2788ecc84220f56dc59ce8b3089312ea180a0d5d26d92" + }, + { + "input": "///AAf/B//+H//8APwAAH/wAAf+AH8Af/wAA/4Af//h/+D//wf//4AHAP/f//vwAAD//", + "sha1": "b021eb08a436b02658eaa7ba3c88d49f1219c035", + "sha256": "5f7f49d1c307363de95d450b558275f8d5a6780ee47246268e6729f7733e535a", + "sha512": "6b8de48cc9108209f15be365ade22d3d2172aa2c742b255500153e5d8016af0ba64b403368fe3be84c39d9e1cb366d96bbccd0ebc91ba26baed8618c2d7afe51" + }, + { + "input": "AP+B/+AD+A4AAP/4H//+AAD/gAAH//gB/+AAD/AB/gA/8H/gAH//4B///AH/4AGAAAf//w==", + "sha1": "cae36dab8aea29f62e0855d9cb3cd8e7d39094b1", + "sha256": "9f126c6e07ed2744cef5de1f468b2ed1c51a13ec3c8351935b9656458a3dc40f", + "sha512": "2fab3a562074e25cc68d89fd2e4fb2dff240c734391d244eb87e59f256318ebbb77ba1dd7de7a981ba43beefc37411c98e8e523ec3d4995a71cd4718c96a2dfc" + }, + { + "input": "AA//8AAA4A/4AAD//4AD/+H//z/4D//H4AAf/wA//g//8AMAwAAf//w//+A///gf8AAf/8A=", + "sha1": 
"02de8ba699f3c1b0cb5ad89a01f2346e630459d7", + "sha256": "b8e3d23621cb02bcae060bdf5b6b7db1f024651f98ec63766c20b7883bc033d2", + "sha512": "95fcadd861b5b2cfd5a7af2696018a9f06090f40b1b1f5a27da12d6463aaf93461222d748412ae3ffed6a2dba87e5cd11acb0195d42e955c86a00ca87fc4c693" + }, + { + "input": "AYAAHwH//4MAAfwAf+AOf/4AADgA/wAAP/+Dg//AAH//gB//8B///AADf/+BwAAH/4P//wAA", + "sha1": "88021458847dd39b4495368f7254941859fad44b", + "sha256": "689c608602d5e5d37a0285eeb5006d97addd7c2b8e006770fad588eff621c971", + "sha512": "159fd2f979439b91980e9e00cfd86c1f72ab02d7bcc9ca9766cae9448dc4d8d229004936b43fdb066449b5ffa2faf327cac122cb93a8880ed2ee64906954615d" + }, + { + "input": "/4AN/+AD//AA//wA8AH4B//4D4AP//8A//+H/+H//GeMf/4AA/8//AcB///gAAH//8AMQA///w==", + "sha1": "91a165295c666fe85c2adbc5a10329daf0cb81a0", + "sha256": "17c6f0073c4f92d5eedda57ca2506aa6002695c6b7bf12e4dbf4dd1a7fbceb08", + "sha512": "e4e68d6dd664a69d5b05e1d2d110a047c34a26417d74aaf26c6e5fb9501a2bf05787dc615c91bfaa224ffd295c80a8694553639f4e82dbd61417e62c269a750e" + }, + { + "input": "AAAf//4AHwAAH///B///wAf/4AACAAD/AHgAAOAACAAf//8AA/gfAAAP/8AAAf//4fgAAD+AD/8=", + "sha1": "4b31312eaf8b506811151a9dbd162961f7548c4b", + "sha256": "504472bf96d0a3da1098dacacac48886d1ab92929187de95c7f42eab9907801d", + "sha512": "765e9cd6478a6da8c8887a5b76c30a277fd5de87a34fe401286163fc0ae773835f8fe4e81650a6a7a7d32ce43af44774989f29e5295f7d0d1642bc72f5797131" + }, + { + "input": "AA/4APwAA///AAA/8AH//+B/+AD4D///gAAP//wP/+AAAP/D//AH//8AOPgAIB/+P/4A/gB//8AA", + "sha1": "3fe70971b20558f7e9bac303ed2bc14bde659a62", + "sha256": "ad3a49ab7ad5b69182301d9ef971feab72f770f4d9f60f6db308ffea746db005", + "sha512": "6dc712927b0fc4528db9b1412b989f307c98f349e9ae36404b7cba1c5e49ab59a732fc446556b7f80cad3aa04ca92911c8f72a8d663966a442e987f00a08bc3b" + }, + { + "input": "AD8A4AAP//x///wAAH4AAP/+H/AAH/AAH/+H8AA/wA//h/8AP4H///f/4P/gP5//AAcAf/wD//AAAA==", + "sha1": "93fb769d5bf49d6c563685954e2aecc024dc02d6", + "sha256": "5cdcb342f26857e8db5ac97a89da6197759adf384ab241a8112795241983238f", + "sha512": "7465eca59636aa00877954200b7a25f03cdf41541ca4e732aba4945ab4dc2ee41ba1438485882e84d997b64af4f31f7f67ae16ea6b94e384e2689a12413aa143" + }, + { + "input": "4D//8P+APgAD/+AAD/wAB//4AAB/gAAP+AH/f//wAD///n//4AD/w///AADwAAB//wA///AAAcAD//8=", + "sha1": "bc8827c3e614d515e83dea503989dea4fda6ea13", + "sha256": "4ec9883c8ad72131c79f14e4f1e75042a61100a5bc290fc344ee3c2adc99c143", + "sha512": "3a490375e04aa9723b0fae4e8718f2409c57ec7650709551e65899ce2a6fca62f913f3303b4b073af50cf8c0c2b82bc11ea4ad2f5d56f17af2ae8764f7dda99d" + }, + { + "input": "AAPAAf/f/f+f/h///wA///4AAH/P//Af//4H8AD//+AAAQAH/4Af4AAA//4D//+AA/AP//4AAB//+AAA", + "sha1": "e83868dbe4a389ab48e61cfc4ed894f32ae112ac", + "sha256": "375c64eb3361f34b4d89078fa95d082c74bced92436aa3d50031839375d6473e", + "sha512": "10a72fadb32da865d39d36445c053776af1f19b384d6cbb45fee9d93399e22250ffdf16b2f867a5adf87d2215cd2685587db3ea50920a36354b22d17b4897e63" + }, + { + "input": "AB//+//+AAf/8AAA//8AAA/z//4AeAAAPgAAP//4AB///4AAA///AAf/7gAf/AB4AAAf/wf//gP//+AAAA==", + "sha1": "55c95459cde4b33791b4b2bcaaf840930af3f3bd", + "sha256": "8a0a36538da941bea6c614b2c038424588d8d2505039f70cbff291d4f0f9f6a1", + "sha512": "e1ecf3f08e315a415e0043b21e6b30b18d468c41bce2f58db6dcb59a4147db9a2919512eaefd71b6ed3ae4561f6d9eda2f7519c50a069e04a43db55c626817f9" + }, + { + "input": "AH///gAAP/wD//wf//B/2APwAP38OAAIABAA4AYAf/4AAA//gAA/A//+///5//gAB//8Af/AAAP//+AD//8=", + "sha1": "36bb0e2ba438a3e03214d9ed2b28a4d5c578fcaa", + "sha256": "a4dd6338174ebeda6a25b88d754fa5b95cad27902eb0bc8321b76db62efb1abc", + "sha512": 
"96a0d87a5e1da55518c46b94c86d3a3442fbf077bfc92dfd4de8c8135a957b6ec082abc4b16155a423848cd6f221677a764f24e603cfcfd5582cdcdbece50070" + }, + { + "input": "//AP//8ABgD///AH/+AEAAMAAAPw//8AA//7/8P/8Af//8cAf4AAA//4AB/h//hj/AA/wJ//+AAAf/8f//wA", + "sha1": "3acbf874199763eba20f3789dfc59572aca4cf33", + "sha256": "a644092a1de8e05e17908ce565d55fcf39e30585565d96bf1c13eeb9f3401803", + "sha512": "92b2850798b270bfd56c21fd8e81e28cb3e353a4b681aac2d0bb6572c6bbd7cff8f1d84d634f1b9c4abd3d1700bd07b4c83fbaae88adf9ec275f0e5eac3ab9c3" + }, + { + "input": "AD///AAPx4AAAgAeAABgfwP+AAAf/4Af+AAA//+AAAP/wAB//8B/4AP8AP/3//8AAB/wAAP//+H//4AP+AAAHw==", + "sha1": "86be037c4d509c9202020767d860dab039cadace", + "sha256": "7697b1435a5bdc094038469fc5268615cbe94641b2165bee62466426ab414c97", + "sha512": "9cec66b2a39a697b2a3fbd582ab032ee12f7ee3fdbb58236c87723dffbc541f45b04d865c9da0588917cccd960f4daf725059dd788e39899694a235322bfa9d0" + }, + { + "input": "AAH+AAOD8///gAf//D///AP/gAAGAAB4AAf//4AH/AH4AAf//8AAOAAH//4///g//88//AB//wAf/4AAMAP//wA=", + "sha1": "51b57d7080a87394eec3eb2e0b242e553f2827c9", + "sha256": "62f249a85b14b477e764e63e9821d3f44dd2c745293f3586eff976266311a39f", + "sha512": "ad3922a3daaca8508e73bcfd2a605c1a7efff788de1ddea32eec897d9cafc137187c0c766b60f642d3697ab6f99ab2ab3d79e0361069b9daf6ad38479757cec7" + }, + { + "input": "+AA4AAA+PwAAP//wAgAAD///gIAD/8AABAAPwD///gAAP//+AD//+AAwAHv/AAAD//w/4f+AAHAf/8AH/AAf//AA", + "sha1": "1efbfa78866315ce6a71e457f3a750a38facab41", + "sha256": "eb2c75aa7330a6589d09f58231d1218e4124ba49b7b0c5245a76a5101d136449", + "sha512": "057cddff81c1b44002cf70049822ef9aa3de2867cdd84b185b5337d7795089de5095f915963dd9569196ab2fc916457c3886a2721a0daf47e79896cbc72e35c8" + }, + { + "input": "AAP4GAAAcD//+AAA/8///8AD//4AEAAA/gP/+AAAfgAAf4//wAAAf//gADwHwAAAf/8B//gB/4AAD//54AA//+AAAA==", + "sha1": "57d6cb41aeec20236f365b3a490c61d0cfa39611", + "sha256": "90c096f9852990cf0fcfbd36ffeb577b4d106d66e9c7a18abdc6f7a3b1ddbab1", + "sha512": "80409076e286025cef2ee16f55e02c8b96a2eb7b8da31218adda76df57b6ec12303ec1eccab269a508ad7547472ffecd569c56773a301655bb32580b479df093" + }, + { + "input": "//4AP8Af//B/+AAB//gf//4AAP//+AB//4A///8Af//4AAwAAA/+fgA/4Bh//gAAOAAAP//+AAAD/P/h/h///gAAB/8=", + "sha1": "c532cb64b4ba826372bccf2b4b5793d5b88bb715", + "sha256": "327b0e47ba3bc200579ac67ac38968e0df655e2d22ffe3adf238f7ac9029a1de", + "sha512": "2d2b0df226f6b5d1bf32d9768e976c58c9e523fdb908b65ba43b10efd3f0b44b2bd752e508b1603c3ad86d28d0e0659ca7fa01002d9a7a17822d360575e799d6" + }, + { + "input": "AAAH//4AAAf+AAA/4AB//8AAAH///AD+AAP/4AAfD/wAH/+AAAf///AA///wAAAf//gB/+Af//8AH4AH8AAB//gAAf//", + "sha1": "15833b5631032663e783686a209c6a2b47a1080e", + "sha256": "bdf4ef8fcafbe13b772ca217eef56a316210e71f69cd943433087c68d9a67bb9", + "sha512": "aab28280cbd78a6d4efdcd7c39ef64badfe6e781db9c90d25bbe9192d8d609e89e5e512b2072b462985642345643f29a4b3198c81a41f2b99967bcb6b299e227" + }, + { + "input": "AAA///8D/gAAB8AAAH/8D/AAAB///gAAB8AAAP/+AAA///wB/3/8AB/4AB//B///4AB///wB///wAAH/+AAeAAB//AA//w==", + "sha1": "d04f2043c96e10cd83b574b1e1c217052cd4a6b2", + "sha256": "72c955a5adaf9e49d565342b41b36ee5ab9b5a394d003b804e4e361a46bda571", + "sha512": "d6c146d5f60a8354f2ba75c199742ca029ea85cb28f136420e4f80298f231173d4b554ab960b7a4adf2710bd77a6b8e64262f3235e7ab1af631a1f0967d81048" + }, + { + "input": "/j//g//+AAf///AAPgAA///8AEA//gAAA/AAAHA/+A///+AfgAADw///8AAB//APgAAP4P///vAAAf/AAAB/8AAAf/7gAAA=", + "sha1": "e8882627c64db743f7db8b4413dd033fc63beb20", + "sha256": "cbd287d6a6707e2cdc8e63a29f758facbdab375bb252183d3af877dea8d25260", + "sha512": 
"d278d3750bb1e2bb657f48f47d92d468e82a1f15b589defca3d54a328e9a143c69c810d2a4d5dc4c40f544ae7134fec00471f4a9fd5fb8ce9fd8cc292be1a5d6" + }, + { + "input": "AAAD//AB/AAA//8AAH///4AH/4//gAAP//AAADwAA8D///4B//+ADH//+AAAH/AAAH+AAACAAAD///Af/+AA///+H/8fwAAA", + "sha1": "cd2d32286b8867bc124a0af2236fc74be3622199", + "sha256": "7aa856fd19741a16ec634b1f653cfd5ac224278652e0b0a2903e274be20a048f", + "sha512": "a13267c4d3e417c839e85649445039b75c5f63a24bcdbabf645cd98da1a4c9d86c3e8765b2d969c562f938f83101fdf4a6dc23dd346c3d4a7686c6e5c93bfe32" + }, + { + "input": "///+B//AAAY/n/AH//A//h///4H//8AAAgD+AAQABwAAAf///gAAB//+AB/+AAD//+AH+AD///wAP/P//8AAf//gAA///Af//w==", + "sha1": "019b70d745375091ed5c7b218445ec986d0f5a82", + "sha256": "8410cdb01c659f05741fd29469d0dbb0251b4fd8e708abeec4a879047fba7c37", + "sha512": "94e96bdf3f3c8f4c5eca524a5e22cdcd779c964be49c958ed7810dedfca6fa93025784d5b1c1a181092b99c4b590a3f8b08efe4b6266a1326e322d03c67ed6a9" + }, + { + "input": "//AAAH4AHgP//wAAc//wAAAP/9///9/8AAf+B//+AAAf3+//8D///AAAB///8AAAf+AH/4AAAH/gA///+f/gAAA/4////AAAA/8=", + "sha1": "e5ff5fec1dadbaed02bf2dad4026be6a96b3f2af", + "sha256": "b5f811baf9c441d04f010f76bcd7eae80c5bb249a40ce37436f0a0296849b8ab", + "sha512": "c446aae62b6dce4d53665d0598c2e980280edee0c7cac0a033157cca9a9fe6e70e31f334c42ed17de21fce51599cb0392cef59d1c6fade17928892a531f4937e" + }, + { + "input": "AAP/AAA//4AB8AAP/gAABgAD///8A///94AAAH/AD//j/g8AAH//AH/4AAD//+4AfgHAAB/gAAf///gAAOH//D/n///4P//8AB//", + "sha1": "6f4e23b3f2e2c068d13921fe4e5e053ffed4e146", + "sha256": "9be38d9ac8a9c30e8a5e86e3ede291b23bb381ee41dc662421e394f6b8b9881e", + "sha512": "cfefe5a6d4a13bb1d4f8d728ef310efc0bd4fb22c01ef685bc6e9b8459591391e871525448484c46ff6872f4b7d60af3c2530ba3d474ba6cf9ab0ec9cd9a4c90" + }, + { + "input": "AAAP//gAAP///AAf4Af//wAB/9//gAA///wAAA/8B/8AAP+AAAP///AAB///8AD//h///+A///4AAGAAAP//f//wAAP//8AHAAH//w==", + "sha1": "25e179602a575c915067566fba6da930e97f8678", + "sha256": "ef45cac2d6f325a523c40a989f5554e152f8d65cbd22d35824d1f28378658432", + "sha512": "b82c05e20de02ae1ee1e68f0c0467b40ccf82e201f96176f940f39117df5d8ad76c351b934cc6aab9d6c857d8d90af4ba4881d11cb8c83bd924aba69d6eafc20" + }, + { + "input": "AAAgf/4Pg///gAP/AAAA///gAB///+AAP/5///AAH///4AAA//+H/8AAF/3/n//7///gAAPgAAf/n///gAB///8AAf//wP//wBAAAB8=", + "sha1": "67ded0e68e235c8a523e051e86108eeb757efbfd", + "sha256": "8e3d126f3a316e0ec49741a3ae6215e29c4acaee364272b7087d9b766579e00c", + "sha512": "c4b58481ad9de99212d85c7382dabf79b3e59307152163d748573a56fa844718ded648b77b31dcf1456b269fbf3e4e27c7edb89a10c4a0bbb5f014bbb3962293" + }, + { + "input": "AAAH/8AA/+AAB/+AA4AAD/gAAH///gAAGAD/8CAB//4AAGAP8OAD//4APh///AAD/4AAAP/4AAEAAA/z//wAA///4f//wfAAAP///wAA", + "sha1": "af78536ea83c822796745556d62a3ee82c7be098", + "sha256": "a43ee360b1dc90c573bef4145e1d4557166d7cce6ea1ed33e0cbd909643c3621", + "sha512": "c5706521041bc9554215370dfc9bc0c480fa995d46ed5698a63270dba26a901ae059232d9377c150a7d254d23197e56d05ab1e195b8e25dddd9282eb253cc1cb" + }, + { + "input": "///wAAAH//wAf4f//wAAAH//wH//gAAD8P8//4AwB///H44Af///wAH//Af4AAB///wAP/AA+AAAB/8AAA4AD/+AAH/AAf+P+AAHAf///w==", + "sha1": "64d7ac52e47834be72455f6c64325f9c358b610d", + "sha256": "5396745f9645dad55b732efde57de49c2ae40624fee192579014dc2b79d814d5", + "sha512": "7d8a31bde32e9ca2d71e7088c919e0b3640b44bb861071b67f862d748839185fa9ced67edfa8aa6761bc82163efbf343c5ec6b5f26970026361672b686f4251a" + }, + { + "input": "/4A//z/+AAD///+f//g///gAAA/4AAAD/gB///8AD/8B//AP/+Agf//8/wH4AAf/4AB/+AAP/4AAAH/gAD/4Af4AB//wAAB////AAAH///8=", + "sha1": "9d4866baa3639c13e541f250ffa3d8bc157a491f", + "sha256": 
"b617be050dedb47be64d82dc19e3d84b6799b5bada18944df5417759a85e445e", + "sha512": "b78eb89bbe88846d006364775b54beb32e9308fd578c810fcec790d053de4abd7554831a67c63c9e9cfaa39bb327de60097d90b4728034410ba20b4398bf59c3" + }, + { + "input": "AH//4AAB///4AAA///wAf/4AAAP///AD/+AAf4AAD/8/+AAAf///AAeAH4P4AAAP/j//wD///h/gAAfAA//wD8AAA///gAAAf4AAAP//gAAA", + "sha1": "2e258811961d3eb876f30e7019241a01f9517bec", + "sha256": "17d5520a82dd7c945de6a92200d036cd95bb16330f0f95df802d23e90c8e5c2c", + "sha512": "37956b43f14a97e2ba3c4a58f13599b8ed7aa419d199939d25b069c00c43d71731d6e9be6d5086a91d5b65aae51581f1d71c481783468c7872dabd068aa71bf6" + }, + { + "input": "/gAAIAAEAA///8AB//g/wAAA///AAP///4AAP/gAf//+f/gAf/+AB//AAA//+AB//8AA///AP///4A///+DgH/+AAAB//8Bx///8AAH///gAAA==", + "sha1": "8e0ebc487146f83bc9077a1630e0fb3ab3c89e63", + "sha256": "b71e5a677801057ec719ae2655732720644bc8f999a8698876c92e4323d4ae0f", + "sha512": "bb5802eb5e8011a5b87c81cb4b8562ea03ab2e82168963cfe2c88999d8b341e63b1215a356b328b4cf515b0975da9a5ef51bac9cd13ea8f938e24dcd1a52b9fa" + }, + { + "input": "///gAA//8AAAP///wAD//wAAD///4AAB/wAAH//gP/wAA+Af8B/4AAA////AD/4AACAAAP/8AA///j///wD/8AAAgAAfA+AB//oAP+AAAHAAAA8=", + "sha1": "ce8953741fff3425d2311fbbf4ab481b669def70", + "sha256": "80c6a41efdfe452d1ad6f3b0d5eb31b962c332a9bb7e4f7ee6f4aaa18a3b81d4", + "sha512": "493f75fc17c2a89a35ca0be8995dc126a2d205897c91b5b120751218b987e371299bbb1f22ad39df68c3d4366790f971fc29fddef5758d78759777b73b9b619d" + }, + { + "input": "/f/AACAB/gA/+AAD/wAAA/j/z8P///wAA////AAAeD//8AH/4A///wAAB////P//+AAB/4AAB////AAAHAAB//8H+AAAH///8AAB/gB///Af/AAA", + "sha1": "789d1d2dab52086bd90c0e137e2515ed9c6b59b5", + "sha256": "32ae8512b486d4523ca7a630556758655a5cff12aa5cfcb8dc5e65b21a257f4a", + "sha512": "f5af63acbedce005bcb250c44ebee7e9b63200e2d515c6ec74d072ed66adb79f52f44000de5dad0961c8b058f59ebaf9db40806aaab9b71e72d50c7cb2a97576" + }, + { + "input": "AAAAP/8AAAf///wAAAHgD/+D/AH///AP/wAAAD///8Af///gACAAAD///wAA//wD///wAB///AAAA///wH+AAP///wAPD///4AAA//gAAP/+AAAP/w==", + "sha1": "b76ce7472700dd68d6328b7aa8437fb051d15745", + "sha256": "0f14c68ffe8c26e9d2ecdd5ea8027b6549b3e8742023ffbdc7547227cc27ec2c", + "sha512": "781dc172a8c8757c56daf68cee5ce6a19b7180334fb33277c7e31031f9e68214b6356fec09725b2e9f7a1f9ffbf3a1371e3c0001c5b5d11a626b797fc649d49b" + }, + { + "input": "//4DgAAD///AP///AAP///gAAAH//8/8AADg7/gAD//j+AA///+AP7/+AAD//AAAAf8AAM/AAfwAAH///8AAEH///P4AAAfA////j/8AAB+eAAAB//8=", + "sha1": "f218669b596c5ffb0b1c14bd03c467fc873230a0", + "sha256": "85dff510ebd3f1fa617a2273ed67ef5abe4774cfe95657fe380e75b25090664d", + "sha512": "6a295f58b75db5d93485951c5b411adce64309498ce0a189974145b70ecb145b2aba84ee62e72a90a5324aa7fb3b527e9e842bb2c54db9ce58d9bc4550f5f7d9" + }, + { + "input": "AH//4AAB///D/4ABA/wAAAD8Af//+H/n8AAAf8A////AAAAf/4AAAf/gAABwAAAcf//4H/wAAAfv4P//wf/8AAAB////oAf/AB4AH/wAADgAGMAAAH//", + "sha1": "1ff3bdbe0d504cb0cdfab17e6c37aba6b3cffded", + "sha256": "7a852eb3b59ad350c9d47adf1ce0812d06866cca8e1f2c65c893e7952a62eea4", + "sha512": "82976c7f43220e623b217ac30133205c212d56ab94f2c5f132bf318454ae1e485aeac0941b28008dbc87fcd20d635c3ac5fdf5260421fff25ee90f70a521fe27" + }, + { + "input": "AA//+AAAB/8A/AAAA//8AAf//+AAAP/8D/8AAA///g+AB/8D///5//4AAAP4AAAH4AAAwAAf//B////AB////AAAP///4AAAH//4H/4AAD///+A/BgAAAA==", + "sha1": "2f3cbacbb14405a4652ed52793c1814fd8c4fce0", + "sha256": "49cfe8b6302a2d45b866a26c4940d777df4f588ebe1bafeb275a8a03a1eeb0aa", + "sha512": "8ed7db5fb6d0aa5bde561900450c8f49799b19be158a5d08208c5124dddad1befcda048e4d578827213a55cdd613e5ac667ab8b149ece286af5dec91af98f1ef" + }, + { + "input": 
"//AACAAP7////AAAf//wAH//+AD//4H//+D///8AAACAAAP/gD///AAAH//AD//+AAAAc/Af/gD/wD//AD//g//+Af//9///gAAAPwAAH+P///AAD//wAAA=", + "sha1": "982c8ab6ce164f481915af59aaed9fff2a391752", + "sha256": "e6e49ebcd83acd3a9ec0b100e26c4d82388eb9378ecfcbf967a31c4951ad0c01", + "sha512": "03a2d1acb8111cade71d6eec11811743335b4bf3184bbf0848f393824d840e6ed8e6346daae20e546ba759010334a2b86ac4b06af6a672ea47705de21f9b1665" + }, + { + "input": "AAB//AB/4AAP/+AB+AA//wAAeAB/4AAAHwAH///4+fAB//gHwA//+AAH+H/+AAAP/+PwAAf///wDHAAAf+AA///8AAAP8//gAAAP//kAABAAP//8+H//AAAA", + "sha1": "5cd92012d488a07ece0e47901d0e083b6bd93e3f", + "sha256": "9edf4d38cdd6e73e857f1ec91132499e7f930d2cdee6b3583a8f062ff7e9d848", + "sha512": "06c1accab6d42ea7914ce698993c7a3cb67c1e08e38ba5f7fcb4c67229f5c5f16c4034c04d4fb0bdf51e34c9ee50b87739c77535b3b1b892425b313a5aeb4a6c" + }, + { + "input": "AAP//8B////AAAP///8AAA//8B//8AAH///v/4H3//4AB//wAAAf/8APgAAP//wAAP///8AD/+P///4AH///AAD///8P//H4AAAB////gB///gAIAAB///+AAA==", + "sha1": "69603fec02920851d4b3b8782e07b92bb2963009", + "sha256": "768dbebcd6aa66337810b7457964c63322904e9242229e5d98b808799f7f4cd4", + "sha512": "5b695679de2856bf0b7a95680807a2dd147519411ea07234771ebec48f613d865af8516d74eb27e2878a9379d9067fa5f5f2e7c09de452af8060f554e4b09b64" + }, + { + "input": "H+AAfB/AB//AB////gA8AAAA//+AAAf//wAf+P/AAP+B/wH//gB4f//wAAGAAAAf/wAAH//wAB///+AAPAAAH///gAP/4AH///n/+AAAfAAA/gAA////AAAP//8=", + "sha1": "3e90f76437b1ea44cf98a08d83ea24cecf6e6191", + "sha256": "867a5ab42d15f9843d67438db495a8a581eddd39c3753f3d203225b60eaa9a3e", + "sha512": "3ffb6486fb6a3b7931673c5e754b0f44c193a00ed86b0a7bdd0e20f6afa14a870c84fc1d24db19fc2f73680e9719e41679a0ca5dc49e8ddd41496b8d46c49594" + }, + { + "input": "/AAB/wAADAD//+P///CADg4AAA/+AAP/wAAAf///4MAAAAfg//8Dn///wcAAA///w////P//wAAB/AAP/AAAAH///wP///wP//4AAAOAP///AAD///gAA///gAAA", + "sha1": "34c09f107c42d990eb4881d4bf2dddcab01563ae", + "sha256": "d7acd8d042b8c6802f6d9262055a6e296f3254674745f18cd1b21244e1acb9f0", + "sha512": "0e9edad47cce524e8030cdf77a9bed351e285b8f41aa9144e7d76de2dcea327ebda0d707d56304c9111d9edf933cc157f771cae51ab17814c24df1a1c343e748" + }, + { + "input": "//+A///4AAD8AAP/+AAP//8AAwAAAH//8AA///AAAfwBAAP/gB//4///+AAf///4Af/f//v/wAAAP//4AACAx///+A//AGAf/+AAAf///g////wAAADwBgP///4AAA==", + "sha1": "474be0e5892eb2382109bfc5e3c8249a9283b03d", + "sha256": "fe781c4d49e73ca9f82389b6d58f3def857cffe624acfb6a2a5a8e9559623f37", + "sha512": "ee2ef4b0866deb7e6150e1883613c4a29ec9c62d87fce6d264bd747f4570af190a1d0bb27876376aa1495f8800f7c9c75f1d53a3cc520019a1cbba4fd038fa5e" + }, + { + "input": "/wAP//wAD////Af//AAA8AAAgAB//gAAD////D8A////AB////AAH///4AAf///D/wAAAf//8AAAD//gB/wAAAD+AAf///gAAD8AAA+AAD//wAAR/+8ABwB///wAAAA=", + "sha1": "a04b4f75051786682483252438f6a75bf4705ec6", + "sha256": "96fb72ddb440bb1f00dbafc97768f9890effcc172fcc395de4f2a19318c46c86", + "sha512": "036d8aae871ff4db9549a05157a93ba9e925867543e267cffdb5d2d07584eacd70a57f8029a3125fe880ea5c6613f67e70fe79196a66f9429e83bd9b8f747ec7" + }, + { + "input": "/gAAf/f//wAAD///4AH/4AAAA//gAP/+AAH/9//4AA//AAAAOAAH//gH//wAH///D8H//8AA//8P//AB//gAAf//gAAAD/gAP//+P///8AAAOA/D////H//AP///4AAA", + "sha1": "be88a6716083eb50ed9416719d6a247661299383", + "sha256": "b63555a77fefcadecfa88a770e70f1d51d46ae68fc672ad4770804495eb1b867", + "sha512": "6794d279562e7eb852be3f8a11c23d70c4dae96a3d171d3c017ea94cfd0e30fd462598a977623431c557113b809cbb69c7fa6e6633213b64b74b3ab753d89b3a" + }, + { + "input": "///wAH/AB///gf/AAAH////AAAAf//4DwAAP///8AAP///+D/8AAB/AAAB+AAAA////n/wAH///8AAAf//gAA/AA//wAH///4AP/8AAB////gAAAD///+BAAHgP///+AAA==", + "sha1": 
"c67e38717fee1a5f65ec6c7c7c42afc00cd37f04", + "sha256": "161d1a609fdeb2fa425761bf0b751dfd25e7a0a02995920921f99f63331b76d9", + "sha512": "c89fe1bc437054e44d4992ead0e5eb9181f28c8e08ff2d92ac879fd0cf6bd2e20ff465cda6be8db631d256135b9513cf299aebd13c75512f5dca3bac62a7f54a" + }, + { + "input": "AAH//gAAAH////AAAB//gAf///+AAAH//AP//5///gAIAAAGAAAAgf//wAAH+AD///8AAAD///gAP///+D/4AAAAf//Af///wP///gAAAf/4AAAD/8H/8AAAAwf4Af/+AAA=", + "sha1": "959ac4082388e19e9be5de571c047ef10c174a8d", + "sha256": "c80931a1263d7f192937eea3e453006b19525ed981314ec3fd561d256e8e135d", + "sha512": "03d6d82208d4c26503c1bd34a8eda172f485f14f010d266deb828ec950172b876db8c9a65f90a29b58cb100e7922f4d64edc88ac0af7e20be02c6f5e29a1eff4" + }, + { + "input": "+AAAAf//gAAP//+P/wAf/w//wAAA///8AA//8ABwAAA//8AAAHwAAH4AD/wAAAD//4AAAA////w4AAAD8AAx8B///8AH///gH///8/4AAAD//8AAH+f/4f//3wAAAB//AAAA", + "sha1": "baa7aa7b7753fa0abdc4a541842b5d238d949f0a", + "sha256": "a3b6ba9a5cba6071a99b1a43454053bfc3e6d1338ccf0063d5d71247a6b57566", + "sha512": "b8e94b6c6bb9bb37c87c7a9adb138bf7af5a7758ef84b5fc7b5a7aab1a8bdce779eb22b3654f0286481805ac90b32c3260145143211668ca881523079344c81c" + }, + { + "input": "P///+AAAAGAAD///4Af///8AAD////Af//+AAHAAAAEAAAA///4AAAAf+PzAD//4AD//wP//gAAD///4AD///AAAD4H/wAP/wD///4AD//4A///+AAAcAAAAP///+AB//8AAAA==", + "sha1": "351394dcebc08155d100fcd488578e6ae71d0e9c", + "sha256": "7d057dc07ed5a7c11590262a0a18c8cd614a029ca12fe08bfedc87307b5f65b9", + "sha512": "2f79327fd6c2a23fbbcc197ba3ba2b07a45672ef75619f6ce3eb34343edbffe4787d26a86c28525e1299182076a4ae35ccb0c0c80fa9fae8effe94c336446f1e" + }, + { + "input": "AAAAD//+D///h////wCAAA//wAAD8B/34AAAcAAB////gAH+B/AAAf/8AAAEAAH//gf///4AB8AAAP///4fwA//8AAAf+AAB///AAAA//8AAAH+P//gAAAB//+AGDgAAD///gAA=", + "sha1": "ab8be94c5af60d9477ef1252d604e58e27b2a9ee", + "sha256": "0b7744d3394c04618e6376cd450cc3fc81586493ab5081a9b3b155938d98563c", + "sha512": "28fc3b506c5c6ce09784cafad1c08b99c88efc9d46e771ddcfef25e1afc0524453d8c36ceaa9f40dba6689b70d1fcb7febecb0ab302c9e1b2905589d4785808c" + }, + { + "input": "A////eAAAB//+AH///v//+AB8PAAAP///4AAAH///+Af///wAYD////gAAB///D//8AAAAf+AAAAHgAf///gH//gAf/AAD//4AAAD/AA//9/wB/4P///wAAAf//g//wAAP8P////", + "sha1": "3429ec74a695fdd3228f152564952308afe0680a", + "sha256": "e8d1ea7154ec53c175761311295f69037865db32ec22976b6ddb981d226760ad", + "sha512": "d874ce9b2ac8b10d85a6a7c9935c76568559b3eef248161afcecb78585d05d476df2005ab43baedffd2acfc78cffbde018d522a3d1ed34566b56e1b209cb624e" + }, + { + "input": "B//8A////9///4f/GAADgAH/AAAAH/8AAD///8Af4D////wAAAH/+AAAP///+AAAB+AAB//5/+AAP+AAf+/wAAeB//wAAAD/4AAwAAAA///wAAAD//x/B/gD////AD/8AAAB/8AAAA==", + "sha1": "907fa46c029bc67eaa8e4f46e3c2a232f85bd122", + "sha256": "40aa287bdf661317439fa5ffa77cc9fa9ca3df504aae74b0ba83b2fbebbaac83", + "sha512": "1e89f0c16069768b010807a5b504a49a5eb506a0341e25971f87d241a161a4ad743da93dfe8a28ff18b025f27a565bddb57d58a9c44ef8c4ccd1bc4769dae6b1" + }, + { + "input": "AH/8AAAD//gAAGH+f//+AAAf//w//4AB////4AD///+AH/gAf///+AAAB///4AAAB////4AA/4AP///8AAB///4AAAAwAAB/gAAH///wAAAD/8AP//+AP/+AA////gP///9//B/wAAA=", + "sha1": "2644c87d1fbbbc0fc8d65f64bca2492da15baae4", + "sha256": "ee2e8fb7206e2e8fdae91afcc3e903d534b304069f232c68f53407cfc6d0bbae", + "sha512": "47f6def39d031dc2353714e174a4b0574eb8c1d6f6001f59d768591a8aa6273c00446a13a6aaf5918a2772de94563e6b8a42448dc9fc0041f5f9290cf385307b" + }, + { + "input": "H/AAAH///gIAAAP////YB///4AH//4AAAAfAAA//wH/wAAf//4AAB/AAAH/8A////8AAAf//+f/+AAAf/8AAAAP+P///AAf+AAADwAA/+AAQA/wAD//AAH//4AD/8AAAf//gAAAP////", + "sha1": "110a3eeb408756e2e81abaf4c5dcd4d4c6afcf6d", + 
"sha256": "b940c011eaef2b772ba03659581d525e0b6148f9c59cb7120db55ce18bf6d695", + "sha512": "fbb0bcd5e88a68f5c1644b87db67574716d325fa7dd211e228020f122edc23966a761663231b473fed3626dca83e4e0f3c009fb905a12c91927b279c94e026f1" + }, + { + "input": "////+AAAYAAAAP///AP//AAAPAA/4H/4AAf/+A/4AAB////8AH/CAAP///4AAf////AD///wGAfAAA//wAAAf///h+AAAAcAH4AEB//gAAAf/4H/gAAD//wAAAf////AAAAfgAH//wAAAA==", + "sha1": "cd4fdc35fac7e1adb5de40f47f256ef74d584959", + "sha256": "9574545ba02bd75bb1dcf038884bf9d7892bc017215308f01ebba7932c014a62", + "sha512": "fb7fb28ee316d6d17f7f0ddd292fa56e0802269a255380e737e8cb255ca93def16be30379495e36d8e46decb16b93edea0d214e78484c5efac10e7f738259816" + }, + { + "input": "AA/8H/gAD///+Af/8fwAH///8AAP/9////8AAAP///8AAf///8AQD/AAAAD8AB8ABwAB8AAAH+AAADB8P//gAP/8B//8AD////j//8H8H///+AAB//wAAA////8AAP/4DAAAB///AAAAf/8=", + "sha1": "8e6e273208ac256f9eccf296f3f5a37bc8a0f9f7", + "sha256": "da685c53ddf810225507141759e3c74ffeaa1c5eecbe150386a83027e7014077", + "sha512": "9272aa50c3f160a107804b19058950dda018a10293cb811af37c5f984e8e0714b0d3e79635484c518877b928c9fba60c088c31776421f414833758dc4e0e4a8f" + }, + { + "input": "//+A//8AAAB///8AH/wABgAP+AAAAYAAAH///+A////8AGAAAAD+AAAH///wf///+AAAgAAAD////7///8AH//4AABwAH/wHAAH//wAAAIAAH/8DgAAAP///+AAH////gAAf///gH///wAAA", + "sha1": "fe0606100bdbc268db39b503e0fdfe3766185828", + "sha256": "5c0769369e4fb9f9d9e599612923554fb2f1e6d87eaeed283f6106845b66b532", + "sha512": "593c8043119e06289fda10840f0b4cdd651b1a49256ac4aed3fadfacdd52e2c4c09c03e02f16930e9ea8d99f864830e23c35a2796f9bddc96e0db020c18f95ce" + }, + { + "input": "////+AAAAH4AAAAf/4AH////gAAAD///w//wAAAEf8B/8AA//4AAf+AAA//AAAf/AAAP/4AAAAeAAAAP////Af///8ADwAAAA///4AAP///AAAP//gAD//gAAA///8AB/+AAAP///AAAH////w==", + "sha1": "6c63c3e58047bcdb35a17f74eeba4e9b14420809", + "sha256": "19056a3d33ebe1b84a100c27fc72d0265ceeb9c573d7942a4d44983238d34ea7", + "sha512": "d3ec082c6f34441b9edfb6902e73a1493e15990aa5d230b288bc27f14072ed6265149225d0707a9049c803844ff444559df87be7ee0a1c592468ed1c8c182efb" + }, + { + "input": "//AAP/wAAP///+Afw/4AB//4AA/wAf//8AAA/8AP//+AAAD///P/gAAAgAg4AAAP//AAH////AAPgABwAAAx///+P//4AAAAPD/wD///AAP/+////wAP///+AAAA8AAAAP///AB///AAAf///gA=", + "sha1": "bcc2bd305f0bcda8cf2d478ef9fe080486cb265f", + "sha256": "8a5e6e6cae30d4283fd70af96d9c53d8ea45ca48892d313981fe208b1384f0dc", + "sha512": "0edd3f9f6adf8642d20edad7a3d23b84990de8152534ffc375b4c7cf127b78fe5a7f578ec4f00c623b1aebb9255d6dff4f8df3b07b5c930de5319275a2e1c0e5" + }, + { + "input": "///wD/g////gAAP//gAAP/+A////AAAA///4///wAA/x//8AAAAP//wAAAAf8AAAH/AD////4f/gAAAf/8H+AAAH//wAAAAf/j/AAAAB//wAAB///w/gAAH8AP4AAAB/4AAf///gfwAP8AD//gAA", + "sha1": "ce5223fd3dd920a3b666481d5625b16457dcb5e8", + "sha256": "880992dadfeccb31f289522214209eb87f41fea5fd3155ab274e9a6fdc6f9f64", + "sha512": "ff11746432697d38dbe59a2ae68a96ff29021d011cf9b4e8ca01313dfabf0b76169ff534dafafdd4f7c618988b54bd3b9eb427df15d82853aa37b44ccaa36b4d" + }, + { + "input": "AH//wAf///+AB////gAAAH4AAAAP/4Af//4H///wA8f///4AAAB//gAAH//+AAAA//wAH//AAAA//wAeAAAD////gAAAf//4A//8AAH///4AD/8CAAf///4AAAD+Af//9///Gf//AAAH/8H/AAAH/w==", + "sha1": "948886776e42e4f5fae1b2d0c906ac3759e3f8b0", + "sha256": "ea7a2b0e780fc6dc8843643a2bc18a17226a1bb3d9e1467cf0be2201decce2c7", + "sha512": "e91363e3e5eaa25ffd8ea3ccf744bb9165ea22339995dd9e645eb1a12b10b6942262e0cea8df8eac080c7b334821780de2ace12b28174f28d7034ef2e826d2ef" + }, + { + "input": "AAAAQAAAB///+B///8AAPv//8AAAAH///gAAAP+AAAA/8f///+AAAAH///8AAB//+AAH///4AB//wf///+AB////4AAAB////8//4AA/4P//wAAH///gAf/8PwAB///+AAH/D////AAAAf//gAAAD/8=", + "sha1": 
"4c12a51fcfe242f832e3d7329304b11b75161efb", + "sha256": "2077395cd7562dd5e9965ea620cedf32c805f50f748c4ee6e82af960c5ce2d66", + "sha512": "5fc80cf23b3ebac3a5d71f79c590fb62b0d8d09d6a87b43c6588cc1266f8895e4927422ae7561c5a9dabfa3bc7764d6d7f15ce192397138a00f22e46a50b56c8" + }, + { + "input": "///w////wAOAAAEAAAP////x////4Af8AAAD//AAAAB+AAAABwA///wAD8f//wAAB///wAAAA//8AAD/4AAAAH/wAAD////9/wD/4P//4Af///h///4YAAAB8AAf//4BwAH////4AAH+////gAAAB///", + "sha1": "c54bdd2050504d92f551d378ad5fc72c9ed03932", + "sha256": "2dbdc632baa5d0831808518beb80e3737de5bbec3dd0438e75dd30b2ea7fbb90", + "sha512": "a4b851e28dac17e2a71261e8a0c80325fab569746b542bf613351a74a68ed47c4bcd1c2170f2cebb0e99ef6d626dd33fb46cc61f10256fe202b6aefa8c90ebeb" + }, + { + "input": "///wAB///wAAP//wAP//AAf///gD///n////gQAAAf8AAD////gAAADwB//AD/AAP//AAH/4AP/8AAAf/4Af/AAAAf//AAB////+AAAP/4AAAA////wAAAD///wAD/+AAD///gAAP+AP//8AAAH/8AAAAA==", + "sha1": "8f53e8fa79ea09fd1b682af5ed1515eca965604c", + "sha256": "ced4cf34982e0abbef40e876659544c4ed01f1975351490984aaa429fef321b0", + "sha512": "78d1de348e5b71f0be5bfaf81c01f48988a340c6b1392401887e097627d0aa9117335202f0109cc34e5b7fd9b539e1d7dcc7f410a71b381a374a23380dff58a9" + }, + { + "input": "/8AAAAP///wA///8AAAAf///AAAAH+AAD//A8AAAf///4AAgH////wAAAB+AAAAH//H////AAAAf///wAD//+AA////+Af///n+eAB///AB/4H///+AAf//+AAAB////+AH/wAMAD//4AAAP8A//AAAAD/8=", + "sha1": "2d7e17f6294524ce78b33eab72cdd08e5ff6e313", + "sha256": "69339b4534eecca329ef2af397ede2a882d7e315a871dd2b781b8e0f4277ee66", + "sha512": "bfb4090a392960e478cc33a9ee3af0d9f743d48621219af21920fb1c989bf6a8ea040ce156a0ba4b42bd2d8a3d3fe3a178b150c9b76452285ebf379e4c8a0713" + }, + { + "input": "AAP////P//h/j////AH///wAAB////+AAAAB///h//AAAAD////4A4AAP4AAD////8AAAAJ///gD/8AAAD//gAAAAf//4AAAA4AAAP/gf////AAAAf///AAAAP//gAAH/gAAB//wAAAfgAAAPh////+f////", + "sha1": "64582b4b57f782c9302bfe7d07f74aa176627a3a", + "sha256": "79cfbb9b52e573e22cd3427ec258d69e2d19fd27de15df96ca9006ccebe7b58e", + "sha512": "6019a0c6bc28bc3c3bdb637f24151b2556b34115dc7f1a20c3f3837104f186ca0ba29d5824c55e31096a39b2f01aa303a34a6ad144e77ce5bb2be42403f96466" + }, + { + "input": "///+AAP///+AAf///6A///gAfwP//8AAD//D//gAA////8B/8B/gD///wAAf/g///+AAB////AAA/AA////wAAAAP/wAAAAP/8QAAP/AAAP///+AAAP8D///8AAAA///wAf///gMP//wAB//gAAAAf/+AAA//w==", + "sha1": "6d88795b71d3e386bbd1eb830fb9f161ba98869f", + "sha256": "1203d54626871bccfa8abb8bbd740b9af3c7266bc8490a210074d7f2b0806ae8", + "sha512": "d7f25152eb94d3fb0dee398ad8b38e9320cb484a51b7856f142462d2450e824c3f00ff18017aa86c876e0dfdea2d878f23fef17d4f84bcfc724bfbdfbdc5e480" + }, + { + "input": "AAAAH//D//+AAAA////+AAAAD+AAf//gD/4A////4AP/+AAAAD/gAAAB///n////4AAAB///wAAf4AAAAf//AAAD////4AAAH+AD//8AAAAH//A/gAAAD////gAA////AAA///+AD//+Af////g////AAAD///8=", + "sha1": "86ad34a6463f12cee6de9596aba72f0df1397fd1", + "sha256": "0c15140d3b5e4b180b0b1517a51fa08f82458c02185ef2bc59fae37543ef9011", + "sha512": "e000dadfc0fe9c13ba1cdebeca0a2a4e24f62f317d11a791153f7c5f59bf8f41d73e06db0d1b57c7938563116d74daf25a8b6a3215b51ead5da81e5d2005cf76" + }, + { + "input": "////gAD////wAAAfAD///g/wHAD//+AAD8Af/8AAAAH/gPf/+AAAP//AAAAB//AAA+P8AAf4AAA////+AAAfAAAAGAA////4D+AAAABgAAf///4AYP////gAAD//gAAAP///8AAAD/AA///x/wA/////Af/gAAAA", + "sha1": "7eb46685a57c0d466152dc339c8122548c757ed1", + "sha256": "ab71b18daceecc7c8fde7cf5f77eacf118262d760bcd383dd7bfa2170895d518", + "sha512": "29c97533a23510ad29db92d4db332f35025dbd169ada5fd1686f1e9a538c17ea24389df08b59c060d458878ab2cbc0ce28d8f2572b8ca02efa9d7675c7e253a7" + }, + { + "input": 
"/////AAAAP//8AH///gAAP///+f/+AH4f///gAA///wAAAH//+AAP8AAAH///wAAD////8AAD//x////+AAP///8AAAH////gP//+AfwAAAAH////8AA////wAAO////wAAAA//4AAAD/4AAAADgAAAP+AAAP////w==", + "sha1": "e7a98fb0692684054407cc221abc60c199d6f52a", + "sha256": "fa3174d3432fe38241a34a8387811b54c3d0f183468cef5cd6d3fb325b270b66", + "sha512": "8489ab4c31995c0be48275d6d658fcdee8b57b9b03b801c3c0f02626b0999972001ab4dd1dbb3f633a2e59269ba5ed9e4aa0e9f9a87f56f082a300389d6833b6" + }, + { + "input": "PwADwB8AAAAP8AAH///vAP4Af/4AAD/wAD/AAAAH/AAAA////w/////AB/gAAPgAAD+f///8AAA//8AAA///wAAA//wAH//gAAAH/+AHAAAAf//+AAAA///+AAAP/AAH//AAAAB/gAPP/4AAAf//4DwAAD////+AAAA=", + "sha1": "34df1306662206fd0a5fc2969a4beec4eb0197f7", + "sha256": "c13fd9ed22d33aa45f73748782e4dbb835d180dc0662e160c0a6c445c76f0c72", + "sha512": "7d6da87c8b9be034e95f641368cc4e8a1e01710d5728d7b0064b733c30d40834a7e35918ee2f3c4b4b3f1a88afa4abfb7325489d6ad3fcf2291f5cc38977fad2" + }, + { + "input": "MAAAAP/4AAD//AA///+AAAAPz//P///A////gAAAAf////AAAAAf/wAD//wAAAAH/4AAf////g//wAAAAH/wv///4AAA//gAAAH///gAAD4AAAf/4AAAAIAAA4AB////+GAAAAD/8AAf///H//BA///+AAAH/9//gAAA", + "sha1": "56cf7ebf08d10f0cb9fe7ee3b63a5c3a02bcb450", + "sha256": "b88a842dc14c41c2b5bd74e48fdd2bd0d43cfeea1eb9b154bebfc4f03d8a102d", + "sha512": "fb619c19b30935c87e450e40e515bd41c0f6e755309978679bfde5b8ae1701883a157d39275ac69baf77d5e3874d706509d9946b709dead3701515c28c62b388" + }, + { + "input": "////g//4H/8f//+AD///4AAAAD///8D4AAB4ABwAAAAf4AAAAP//4D////4AAAP///4AAAAf//wAf/8AAAAf//8AAgAAP//8AAAA//Af/v//wAH////wAAAA///+AAAAD//AAAB/f8AAAAH///4P////wAAPgAAAP////w==", + "sha1": "3bae5cb8226642088da760a6f78b0cf8eddea9f1", + "sha256": "45ba1056e49828a0385b0b5f9e4933905973f15b2713fee1c1755e2a7a3e8d79", + "sha512": "0fee2eac82a5539f49655d2ee5a8428f5f6158cecd3bb97a84d8cbf83bf3b57284d1ab4439183779641c2b50f4a1ff3c32444ceda889263b3b7b577a8d51cff9" + }, + { + "input": "/////gAAAP/4D///8AAAB///+//AAAf8B+AAAAH//+AAf///+AAAP//4AAAAD///AAAPwAAAP//wAAAB///+H//4AAAgAAAAP/+//5////w////wAAAHgAAA///n////8AAAP////wAAB///4P///+AB////+AAAAH////8=", + "sha1": "6475df681e061fa506672c27cbabfa9aa6ddff62", + "sha256": "a0d7d4fda9435ef292b761aed2c9fea576519437a824e96150a4324dcc757605", + "sha512": "c88ebff861e4fe68f92b17536e70f4b22fe91b6730e7468d57f40f1d1c02f9e474da1027958b715acb6de5fc4da2453f40e93b40f97d137b9dfe39436d05cb20" + }, + { + "input": "AAAAH//AAA//wAAAH//AAB/AAAAD/4AAAAf////AAAAAgAA//ADAAA///wAABgA//B4AH///8AA+D///8AAAP//wAAAAH//AAAD////4AOAAAP////4AAA////4////8B/4AAADAAH////4A/////Af//+AAP+P//8AAAD//", + "sha1": "79d81991fa4e4957c8062753439dbfd47bbb277d", + "sha256": "7906439843a1c1758c232182eaa66d5e6bd5ad2fbc0157fdc5438e1038966dc4", + "sha512": "944d6522c69ffaee3033e3a933b2dcdfb05a3b5cc3bcf3b9d35cce08c496fedb6879a0a277c59a84e5b6d523d9f202e07a09e32a08e63f30269a060d4df81d01" + }, + { + "input": "AAAAD//AAA/4AAAA//wAAA////4AAAAD/wD//Af/8B///g////3////wH//wAAf4//gAAAD//8A/////gAAAD////wAAA///8AAH//8AAD//8AH//8AB////AD//+B////4f////wAAAf+AAB////gAAAAPgB//AA/wAB////w==", + "sha1": "bae224477b20302e881f5249f52ec6c34da8ecef", + "sha256": "35e5a6c17906646cf15c2bed4884129b5134eb2b411400e4d8797126f51a4cb7", + "sha512": "c6a5f4bbdb075c17ebcf4131de0fe33d3e2bb6edb5af7c277b472ea7847b11d2aa2598cb7ca75e4fe94c264bd2942bd82fc60b5045bd7c5cbc31325954713dfc" + }, + { + "input": "////AAADwAAAAf//+AAAAA////8AAA8AAP//+IAA+AAPwAAAAOAAAAD////4D////gAAGAAAf////wAAA////wB////8AAPAAAAP////8AAH//+AAf///+AAD//+B///+AD////AAAAD4AAH//AP//AAAP//+H/AA8A//+AAAAA=", + "sha1": "ede4deb4293cfe4138c2c056b7c46ff821cc0acc", + "sha256": 
"d19ddbd98476519a07cd8917b95eb609e5b50e8088ad68cd7426e8139d5bffc2", + "sha512": "cc2a04b943fb4625e799ca7f7566110fd200040e8bc5e76b167fe95bf758dbe68a9eaa0a078445141cf547c417f6953872de8962f9ca06f5be1d777ba6f5d936" + }, + { + "input": "+P/gAD/8AAAD////+AAACB////5/gAAAAPwH////4P//4AAAP/wP+AAAAA///gAA////AAAAf//+B/+AAHAf8Af/AAAAB4AMH/8AAAAH/gAf////gAAAAH//4AAAAf//gAAADgAAAAeH///+B//AAAAAf/z/4D///8P//gAAAA//", + "sha1": "a771fa5c812bd0c9596d869ec99e4f4ac988b13f", + "sha256": "cafea6a1183ca8934867692684194ce9903b375a8036c4c5deb8fb379c3423cb", + "sha512": "7fdcd5fea3209c23797da291503bd224e948e48111e895cf2e033a436309373934cfc88d3c35d3fd31d0a917d6f9303adba0263ceb58fcddcb83aad6a606e479" + }, + { + "input": "AAAH//8AAAf///AB//////+AAAAABgAPf////8AAAP//AAAAH/D/////gABwAAAAP/+AD/////AAf//AAAAAAP////8Af/////4f/////gAD/////AAAAH//8AAAAAAD//8D/////8AAAP/AAAAB/+AAAA//////8AAf////AAAB////4AAAAHgH/wAAAP+AH////8AAAAP//////4AAAP//AAAAB/////wf/////+AAAAcAAB/+AAAAAB///////B/////8AAAAAB8AA/+AAAAAAAP/////gAB/4B/////h//wAAAAAf///5//////wAAA//////gAAB////+AAAAA=", + "sha1": "e99d566212bbbceee903946f6100c9c96039a8f4", + "sha256": "7e4734ce7e22a515bfe60e296640dec121a089f75034240408fe7be2ed9e5c87", + "sha512": "8a006a0a1e2dd36ba57aa0325b2c9532db7649a4c3c6214ee0f004ecabcf1eef89a91b225ffc52a4f811791d20f6faddd900b863386da65daec18e00c48412d6" + }, + { + "input": "///AAP///////+fgAAAAH////////AH////+AAAAAAAAf//////4AAAAAAAf///////8A/+AAAAAAAAD///////gAAAAAAAAP//ADgAAAAAAAB/gAAH//+AAAAAAAH///////gAP////8///////8AAAAAAAA///////gAAAP////////wB/////AAAABgAAAAAAA//4AAAAP///wAAAAAAB///////gAAf/////wAD////wAAAAAB/////+AAAAAAAf///////AAD////////AD///////AAAA////////+AAAAAD///wAAAAAA///wAB////////wAAA///AAAD//////8D////////8AAAAAAAB//////gAAD+AA/////AAD+AAAAAAAAH//gAAB//AAAAAAP4AP//////8AAAAAf8AAAB//AAAAAAf//////gAH///////AAAAAAAAB////+AAAAAAfAAAAfAAAAAB//////gAAB////4AAAAAAAAA==", + "sha1": "b48ce6b1d13903e3925ae0c88cb931388c013f9c", + "sha256": "b09436c29cc3823c434a4689bb67a73a49164bd23ab40c4e04ed99320fd138d2", + "sha512": "d0b336a283f47f0f6fb19e98df49730d802321f1ba993c760687a0469b72015d98dcf86db7b2b628ff03d690300ec827851f40e621cf6cf98a4b378d901735db" + }, + { + "input": "AAAAAAAAAD/////4AAAAAAD////wf///+AAAAAAAAAAP8AD///+AP///////gAAAAAAAA////+AAAAAAAAAAfwAAAAAGP///////wf/gAAAAAAAAP////////8AAAAP/////gAf/////////8D/////////wAAAB////wAAAAAAAP//////8AAAAAH/+AAD///wAAAAAAAAD/wCAAH//AAAAAAA//////////4AB////////wAAAAAAAAB///AAAAAAB/AP////////8AAAAAAAH/////gAAAAAAAB///4AAAAAAAH/////////4AAAAAAD/v////////D///gAAAAAAAP/////////4AAH///////+AAAD/+AAAAAAAH/////////+AAAD///////gAAAAAAAAAH/AAAAAAAAAP//wAAAf////wD////8AAAAf/////gAP/////8P/////////AAAAAAAAB///////8AAAD//8AAAAAAH/////////gAAP/wAAP////////AAAAAAA////+AAAAAAAf//////8AAAA//////////4AAAAAf////4AAAAAAP//////////AAH///+AAAAAAAAA///n///////8D////8AAAAAAAAH/8AAAAAAAAD/8AAAAAAAAH/////8AD/////+AAAAAAA", + "sha1": "e647d5baf670d4bf3afc0a6b72a2424b0c64f194", + "sha256": "d6752f5e2738cbaa2e154b749216babc990297af411dfa2b66c9f942480ff4b9", + "sha512": "525213a2b56a346a3e641b3f951b60df15dc35e56befc197fd7671d3fb955f27c4410f3aa07e4bf2dae05d2419e355a05cdecaf282d44471467c140610d4b495" + }, + { + "input": 
"AAAAAAAAAA8AAAAAAAAf///////AAAAAAAAAD//////AAB//////////wAAAAB///8AAAAP//////4AH///////////AAAAAAAAAAAD/////gAA//AAAAAAAAP//wAAP/////wAAAAAABgAAAAAAAAAAPAAAAAAAH//+H/////AAAAAAP////gAAAAAAD/AAAD/////////wAAAAAB///////////8AAAAAA/////wAAAAAAAAf/////AAAf////////AAAAAf////AAAAAAAAH/////w//////////4AAH/////AAAAP///////gAAAAAAAAAAB/wAAAAAAAAAA//////////wAAAAAAAAAB////+AAIAAAAAP////AAD//////wAAD//AAAAAAAAP////+AAA//////wAAAAP///+AAAAAAAAA/8AAP///////AAAAAAAB/AAAAAAAAAf/gAAAAAAAA////wAAAAAAAAD/////gAAAA/////////gAAAP//gAAAAAAAf//////wAAB///////AAA////AAAAAAAAAAH////AAAAAAAAAAAP///////AAAAAAP////+AAAAAD/wAAP/gAAAD4A/////////8AAAAAH///////////gAAAAAf/////gAAAAAD///////5/4AAAAAAAAAAH///////wf//gAAD///////AAAAAAAAAAAB////////wAAAAAAAABwAAA/gAAAAAAAAH////////8AAAAP///4AB//////4AAf///gAAAAAAAf/////wAAAAAAAAAAP///////8AAAD///////gAAf/////8AAAAAAAAH////////8=", + "sha1": "65c1cd932a06b05cd0b43afb3bc7891f6bcef45c", + "sha256": "0f0cc4994a2a88f58cc38afdf61ff43952473c437d235cec426139c8f43ffb5a", + "sha512": "546e908100c06a32a98056236acab22e0ca59721f59271d783c8c9f64e39ba5b1de316a0433fe557f68238378cf340ab9f5ad3ebcccfc6eea518b71048eee75c" + }, + { + "input": "//4AAAAAAAAAB/////8AAAAH//AAAAAAD//+AAAAAAAAAAAAH//////////8PgAAAAAAAAAAA///////8AAAAAAAAAB////gH////////////wAAAAAAAIAAAP/wAAAAAAAAAP//////AAAA///////wAAAAB////8AAAAAAD///////////+AAAAAAAAAAB////////AAAAAAAAAAf//4AAAAEAAAAAAAAD////////wAAAAAAP/////+AAAAAAAAAAH///wAAAAAAAAAAf/////wAAAP//8AH///+AAAAAAAAAAB///4AAAAAAAAAAAAP///AAAAAAAAAH8AAAAAA/////////4AAAAA//////////gAAAAAAAAAAAP4AAAAAAAAAAQAAAAAAAH/////wAAAAAAAAAAAA//wAAAB///wAAAAAAB///////////gAAAAAH/////////gAAAAAAAAAAAP/4AAAAAAAAAAAH///////wAAAAAAAAAAAH/////AP//////////wAAAf/////4f////////4AAAAAAAAAAAAP///////gB///////////8AB///gAAAAP//////gf///////8f//gAA//////////gAAAAAAA//wAAAAAAAP////////////AAAAAAAD////wAAAAA///////v////////8P///4D//////////4AAAAAAIAAAAAf/////wAAAf/////////AAAAAAAAAB////wAAAD/+AAP/8AAAAAAAAAAAAf/////////gAEAAAAP/////////gAAAH//////////wAAAAAAAAAH//gAAA//+AAAAH4AADAAAAAB////////AAAAA////+AAAAAAABgAAAAAAAAAD///////////+AAAAAAAAACP////////////AAAAAAB/8AAB//+AAAAAAA/////////AAAAAAAAAAH//////////8AAAAAAAAeAP////////////wAAAAAAAAAP//////////w==", + "sha1": "70ffae353a5cd0f8a65a8b2746d0f16281b25ec7", + "sha256": "eb9dd875ecf9ec930b1482b8a50c337d48b31590f99cdfb80bbdd160ad4fb49b", + "sha512": "ea48c9e9245bf42c820385a6167afd3a8bb473645016ff4dba198d184ee0534bbc2d0ec07328be7b58c9a8e9d10c2379d69414ab2bfdddf99dafa8f92daf21af" + }, + { + "input": 
"AAA///gAAAAAAAAAAAAAf//AA/8AAAAAAAAAf//gAAAAAAAAAAAA////////4AAAAAAAAf/+AAAAAAP///////////8AAAAAAAB////////4AAAAAAAAAA//////wAAAAAAAAAAAMAAf/////////+AAAAAAAAAAAAAD//////////AAAAAAAAf////////4AP///8AAAAAAAAAAA/////////8AAAAAAAAAAAAAH/////AAAB////////gAAAAAAAAAD///////////4AAAAAAAAP///+AAAAAAAAAAAAAB///4AAAAAA//////+AAAAAAAAH/////////gA//////////8B/////////+AAAAAAAP///////////AAAD4AAAAAAAAAAAD//////AAAAA/////AAAAAAAA///4AAAAAAAAAAAA/////////////wAAAAAA///4B////////8Af///4AAAAH/////gAAAAAAAAAAAAD///////////4AAAAAAAAAAAP///////+AAAD////////////gAAAAD//////4AAAP////8H+AAAAAAAAAAB//////4AAAAAAAAAAAH//////gAB//////////+AAAAAAAP/8AAAAAAAD/////wAAB//////8AAAAcAAAAAAAAB////wAAAAAH///wAAAAAAf//////////D/////////gAAAAAAAD////////////AAAD//+AAAAAAAAAAAAP//////wAA///gAAAAAAB//4Af///////gAAH//8AAAAAB//////8AA/////////4A//+AAAAAAAAA//////wf/////////gP///////AAAP//////4AAAAAH//////AAAAAAAAD///AD//////////AAAAAAAH///wAAAAAAA/gAAAAA////////////4AAAAP///////////AAAAAB/////+AAAAAH///8AD//gAAAAAAAAAAAAAf///4AfgAAAAAAAAAAA////+AAAAAAAAAAAAA/////4AAAAB////AAAAAAAAAB////////////4AAAAAAAAAAAAAf/////////+AAAAAAAAAH8A///gAAAAAA////////////z////////4AAAAAAAAAAP///3////////8AAAAAAAAf/////gAAD////////////+A", + "sha1": "cc8221f2b829b8cf39646bf46888317c3eb378ea", + "sha256": "ec7435c1c8e3eb1de2871cdc797bff6969b863e5b9fb005b3a7af4ff96680c63", + "sha512": "5b819d5210a41d738341328814cd091220e1fc61333d13fcaa1a0447bed04d48cce66de97573d3e724be1ab18bf0757a4a1fd0b2743fccd74b5cc6d4a7040861" + }, + { + "input": "//////////AAAAAAAAAAA///////////A4A////////////gAfwAAAAAA///////////8AAD///////////4AAAAAAAAAAAf//////////AAAAAAAAH/+AAf///8AAAAAAAA/4AAAAAAAAAAAAAA/////////////4AAAAAAAB///4AAD//////wAAAAAH//gAAAAAAAAAAAAP/wAAAAA//////gAAAAAAAAAAAB/////////////4Af/////////wH/////////////AAAAAAAAf//////////wAAAD///////gAAAAAAAAAAAAB////////gAAAAAAAAAAAAD//////////gAAAAAAD///////////gAAAAP////4AAAAAAAAAAAAf///////8Af/////////AAAAAD////////gAAAAAAAAf////+AAAAAAAAAAAAAD/wAAAAB////////wAAAAAAAAAAAAAD/////////////gAAAAAAAAAAAAB///////////wAAP8AAAP//////////AAAf/////////////4AAAAAAAAAB/////////4AAAAA//////gAA/////8H////wAAAAAAAAAAAAAf//////gAAAAAAAAAAAB////////gAAAAAAAAf//////////gAAAAAD/v/////wAAAAAAH//wAAAAAAAAAAfgAAAAA/////////wAAAAAAAAAAAP///////////gAAAAAAAAAAAAA//////////////8AIB//////////////wAAA/AAAH////////////4AAAD//////////////4AAAAAAD//////////AAAAAAD////////+AAAAAAAAAAAAAA/////gAAAAAAAAAAAAAA////////////wAAAAAAAAAB///////4AAAAf///gAAAH////8AAH8A////4AAD/8AAAAAAAAAAAAAB//8AAA////4AAAAAAAAAAAAP/P////////wAAAAAAAAAAAAD//4AAAAAAAAf///////////4AAAAAAAAAAAAf//////////8AAAAAAAAAAAAD/4AAB/////////////h////8AAAD/////////////+AAAAAAAAAAAAP//wAAAAAB///8AAAAAAAAAAAP/////////////gAAAA/////////4AAAAAAAAAAD///////////8AAAAAAAAAAAP////+H//AAAAAAAD//////8///////////4AD////////4AAAAA///////////4AAAAAAAAAAf////8AAAAAAAAAAP/8AAAAAAP//////////+AAAAAAAAAD////////////8AAAAAAAAA=", + "sha1": "26accc2d6d51ff7bf3e5895588907765111bb69b", + "sha256": "e5afd502015d80bb43ab7f92f138b35ce5fafa980c5fba78412ffcfa281f9d8d", + "sha512": "c84d01e7de9caed0887563a40277c89b1c5544d821f3dfb42de262333465a5ab2d11012ace1c7eaf33cdd3a7cdd6710e7c7c645683495c71a0d1588af536b245" + }, + { + "input": 
"///////////wAAAAAAH///////////////wAAAAAAAAf/////4AAAAAAAAAf////////wAAAA///wAA///////////gAAAAAAAf//AAAAAAAAAAAAAAP////8AAAAAAA///////////wAAAAAAAAAAH////gAB//gAAAAAAAAAAAAP///4AA////////////AAAAAAAAAAAf///////4AAAAAAAAAAAAB//+AAAAB/4AAAAAAAAAAAAgAAAAAAAAAf//////8AAAAAAAAAAAAAAf////////////gAAAB///////////////8AAAAAAAB//////////wAAAAAAAAAAA/////////+AAAAfgAAAAAAAD///////////+AAAAAAAAAB//////////8AAD////AP/////////////+D/////////wAAAAH///8AAAAAAAAAf/////+AAAAAAAH////////////4AAAAAAAAA/////8AAAAAAAAAAAAD/////////+AAAAAB//////////+AAAAAAAAAP/////8AAAAAAAAAAAAA///8AAAAA///////////wAAAAAAAAAAAf////gAAAAAAAAAAAH/////8AAAAAAAAAAAAAf//////////gAAAAAAAAAAAA///////////AAAAAAAAAYAAAAAAAA////AAAAAAAAAAAAAAAH/////////8AAAAAAP//////////8AAAAAAAf/////////wAP/////////+AAAB//////////////8AAAAAAAAAH////////gAAAAAAAAAAAf//////wAAD///AAAAAAAAAAAH/////4AAAAAAf/////////////+AAAAAAAH/AAAAAAAAAAP////////4AAAAAAAAAAAAP/////////////wAAD//8D////////////4AA////////8AAAAAAf///////////8f///4f/////////////wAAf/////////4AAAAP/gAAAAAAAAAAAAAAH///////gAAAAAAAAB////8AAAAAAAAAH////gAAAAAAf////////////4AAAAAAAAAAAP///////////////z///////+AAAAAAAAAAAAAA//////gAAAAAAAAADwAAAAAAAAAH/+f////////AAAAAAAAAAAAAAH///8AAAAAAAAAAAAAA/AAAAAAAAAAAAH////////////gAAAAAAAAAAAAAAD//////4AAAAAA///////////////4AAAAAAAAAAAAAAB//////+AAAAAAAAAf//////AAAAAAAAAAH///+AAAAH//4AAAAAAAAAAAAP/gAAAAAAAH////////////4AAAAAAAAAAAACAB/+AAAAAAAAAAAAAAA////+AA/////////8H////////+AAAAAAAAAAAAAAAf//////////gAAAAAAAAA8AAAAAAf///////AAAAAAAAAAAAAAAB8H/////////+AAAAAP/4AAAAAAAAAAAAB////w==", + "sha1": "01072915b8e868d9b28e759cf2bc1aea4bb92165", + "sha256": "14e7975021e84497b4daf367f6861c79820308883c4e1254c038a7458a3f2913", + "sha512": "2d1cb26c944b0560e923417441191c87afbb204702c132e100973cf555fc532a6531bbd4addaeeaff13afa629fd30126643985eec68423bf74a643a592c11565" + }, + { + "input": "/////+AAAAH/wH//+A//+OAAAAB//////////////8AB//////////8AAAAAAAAA///////////AAAAAAAAAAH///gAAAAAAAAAAAAB//4AAAAD////////+AAAAAAAAAf////////////AAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAf////////////////AAf+AAAAAAAAAAAAAA////////////////+AAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAA////////////////wAAAAAAAAAAAAAAH/////AAP//8AAAAAAAD/////Af/////////+AAAAD//////AAAAAB////D///////////////4AAAP/////f/////////////wAAAAAAAAAAAAB///8AAAAB///////////////+AAAAAAAAAAAD/////////////8AAAAAAAA//4AAAAAAAAAAAAD///////4AAAD////////gAAAAAAD/////4AAAAAAAAAAH//8AAAAAAAAAB/////////+AP////4AAAAAAAAAAAAAAA//////////8AAAAAAAAAAAP////wAAAAAAP/////////////8AAAAAAAAEAAAAAAAAAAAH///////gAAAAAAf//+AAAAAAf///////////////gB////////////////wAAAAA////////////8AAAA/////////gAAAAAAAAAAAP/////////+AAAAAAAAAAP///////////////4AAAP//////////////8AAAAAAAAAAAAAf/////////+AAAAAAAEAAAAD////+AAAAAD//////////////AAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAH//////////////AAD/////+AAAAAAAAAAAP/////////////wAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAADgAAAAAAA////////8A///////////+AAAAAAAAAAAAAAP////////+AAAAD////////////wAAAAAAH//4AAAAAAA///////8AAf//////////AA///////wAAAAAAP//////////wAAAAAAAAAP/+AAAAAAAAAAAf//wAAAAAAAAA//////4AAAAAAAAAB////////////4A///////////////wAAAAAAAAAAAAAAAAf////////8AAAAA8AAAAAP/+AAAAAAD//////////AAAAAP//////////////gAAAAAAAAAAA///////gAAAAA/////gAAAAAA///4AAP/////////////AAAAAH///////////////4AAAA//////////wAAAAf////////////+AAAAAAAAAP///////AAAAAAAAAAP/+AAAB////////////////wAAAAAH/////////////wAAAAAA////////8AAAAAAAP////////8AAAAAAAAAAAAA////////////////x///////////////AAAAAAAAAAAAPwAAAB/AAAAAAAAAD///////////////wA//////+AAAAAAAAAAAAA////////////////gAAAAAAAAAAAAAf////////////AAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAf/////", + 
"sha1": "3016115711d74236adf0c371e47992f87a428598", + "sha256": "bcefd79629a6d7a8afbf0c8ccfc2c5889f627989e71c3a212d900e3015e460fa", + "sha512": "7122401afdd5b9c033e721b0c6f321742ced26b680d80f55f1dfc6d0916e9fd96fab18752fa60a4f20b570ed8ede6688f151fd9bc18828ac99cb7afed54c787d" + }, + { + "input": "////wAAAAAAAAAAAAAAAAf//AAAAAAAAAAAAAAAf//////8AAAAAAAAAAAAAAAB/////////////gAAAAAAAAA/////////////////AAAAA///////////w//////////////gDwAAAAAB//4AAH/////////////8AAAAAAAAAAAP/gAAAAAAAAAAAAAD//////////AAAAAAAAAAAA/////////////AAAAAAOAAAAAAD////////4AAAAAAAAAAAAAf////////////+AAAAAD//8AB///////////////4AAAAAAAAAAf///////+AAAAAAP////////////gAAAAAAAAA////wB//////////8AAAAAAAP//////////AAAAAAAAAAAAAAAAP////8AAAAH/////////////+AAAAAAAAAAAAA//8AAAAAAAAAAf////////gAAAAAAAAAAAB/////////gAAAAAAAAAAAAAAAH//gAAAAAAAAAAAAAAAH////////////4AAAf///////////gAAAAD////////////////+AAAAAAAAAAAAAAAAAf///////+AAAAAAAAAAAAAB/+AAAAAAAAAAD//gAAAAAAAAAAAAAP////////////////gAAAAAAAAAAAAAAAP/////////////wAAAAH////////AAAAP////////////////AAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAP/5////////////+A////AAAAAAAP/////////////wAAAAAAAAAAAAAAAAH////Af///+AAH/////AAAAAAAAAAAAAAAAH///8AAAAAAAAAAAAAAAAB////////+AAf//////gAAAAAAAAAH//////////////AAAAAAAAAAAAAAAP///+AAAH//////4P////////AAAAAAAAAAAAAAAAD/////////////8AAAAAAAAAAAB///////////+AAAAAAAAAAP///wAAAAAAAP////////8AAAAAAAAP///g//+AAAAAAAAAAAAH////9///////////////8AAAAAAAAAP//////4AAAAAAAAAH///////+AAAAAAAAAAf////gAAA/////+AAAAAAAAAAAAA///wAAAAAcAAAAAAAAAAAD/////AAAAAAAH////////AAAAAAAAAAAAAAH//////wAAAAAAAAAAAAAP///+AAAAAAAAAAAAA////////////+AAAAAAAAAAAH////////////////4AA/////////AAH////////////////gH////////gAAAAAAAAAAD/////////////////j////8AAAAAAAAAAAAAD//////8AAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAA///////////4AAAAAAAAAAAB///+AAAAAP//////wAAAAAAAAAAAAAAAHgAAAAAH////////////////AAAAAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAPwAAAAAAAAAAAAAAAAA////////////wAAAAAAAAAAAAAAAA/////////AAAAAAAH///AAAAAAAAAB/////////gAAAAAP+AAAAAAAAf////4AAAAAAAAAAAA/////////////gAAAAP////////////gD/////////+/wAAAAAAAAAAD8AAAAAAAAAD/////////////gP////////////////AAAAAAAAAAAAAAAAH/+AAAAAAAAAA////////AAAAf///////////wAAAD///////////wAAAAAAAAAAAP///////////////AAAAAA=", + "sha1": "bf30417999c1368f008c1f19feca4d18a5e1c3c9", + "sha256": "74661206cb19ec00619e1fcdcae443486adfa69a564672c9ee9f48f8ea35d5b2", + "sha512": "9875b0bad0a3a1d841e6294559109d5717742a6566717a6f04ee18a34fa66100143695db036a2e1ef22f370487c6c23b6386c0f1eba1e1d1c348564dfffa7598" + }, + { + "input": 
"AB////////////////gAAAAAAAAAAAAAAAAAH//////////////gAAAAA//////////////////wAAAAAf/////////+AAAAAAAA//////////////8AAAAAAAAAAAAAAAf///+AAAAAAAAAAAAAA/AAAAAAAAAAAAA//////////+AAAAAAAAAAAAAAB////////4AAAH///////////////8AAAAAAAAf/////8AAAAAAAAf/////////AAABH///////8AAAAAf/AAAAAAAAAAAAA////4AAAAAP////////4AAAAAAAAAAfAAAAAAAAAAAAAAAAA8AAAAAAAAAAAP///4AAAAAAAAB/////////4AAAAAB//////////+AD////+AAAAAAAAAAAAAAAB//////////+AAAAAAAAAAAAAAAAAf/////////AAAAAAB//////////////gAAAAAAAAB////8AAAAAAAAAAAAB//////8AAAAAAP///////////wAAAAAAAB/+AAAAAAAAAAAAAAAf////+AAAAAAAAH//////+AAAAAAAAAAAP////////////////8AAAf///////////////wAAAAAAAAAAAAAAf///////////////gAAAAAAAf//8P/////////4AAAAAAf///+AAAAAAAAAAA/////8AAAH/////////////8AAAAAAA///8AAAAAAAAAA////////8AAAAAf/////////////gAAA/////AAAAAAAAAAAAf///////////4f////////////wAP////////////+AAAAAAAD/////////////AAAAAAAAB///////8AAAAAAAAAAAAAAH//////gAAP////////////8AAAAAAAAAA/////8AAAAAAAAAB////8AAAAAAH///////////////wAAAAAAEAAAAAAAAAAAP/////////+AAAAAAAAA////////+AAB///AAf///////gB///AAAAAAAAAAAAAAB////////D////////wAAAAAAAAf////////4AAAAAAAAAAAAf+Af////////////////4AAAAAAAAAAAAP/+AAAAH/////+AAAAAAAAAAAAP/////////////////4//////////////gAAAAAAAAAAAAAAf4AAAAAAf4AAAAAAAAAAAAAAA///////////////AAAAAAAf/////8AA//////////AAB/////+AAAAAAAAAAAAB/////////gAAAAAAAAAAAAAAAAD/////////////AAAAAB////////////wAAAAAP/////////AAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAD////////////////AAAAAAAH///////////+AA///////////AAAAAAAAAAAAAAAB///////////+AAAAAAAAAAAD///////////////4AAAAAf/4AAAAAAAAAAAAAAAB///////////+AAAAAP/////8AAAAAB////////4H/+AAAAAAAAAAAAAAAAAD/////////4////+AAAAAAAAAA///////////gAAAAAAAAAAAAAAAH//////////////+D///////////////gAA////////////////wAAAAAP/////////////+AAAAAAD////////////gAAAAAAAAAAA/////////4AAAAAAAAAAAAAAf//AAAAf//////////////wAAAAAAAAAAAAAOAAAAAAAAAH//+AAAAAAAAAAAAAAAf//////////8AAAAAAAAf///////4AAAAAH////8AA////gAAf////gAAQAAAAAAf////////////////4AAAAH/////////////gAAAAAAAAA//////////////4AAAAAAAAAAAf////+AAAAAAAAAAAAAAAB////4AAAAAAAP/////////8AAAAAAAAAAAAAf////////////////wAAAAAAAA==", + "sha1": "62ba49087185f2742c26e1c1f4844112178bf673", + "sha256": "341bf4dff841088f3b902c2471b67b49498bda5c045e9befd58af93ade0a8df8", + "sha512": "3bfeab5bee92507591f21f757765c8c09e1241812f8e51c200f6bc5703f7e15f057d9378f59fd4d50410e68b2451681df70c9c3bcb2be17fa80f2362284e31ff" + }, + { + "input": 
"//////////4AAAAAAAAAAAAAAH/////////////////4AAAAAAAAAAAAAAAAAH////////////////4AAAAAAAAAAf////////////+AAAAAAAAAAAAAAHgAAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAAAH///v/////////////////wAAAAAAf////+AAAAAAAAAAAAAAA/////4AAB////AAAA////wD/////////////4AP////4D/////////////+AAAAAAAAAAAAAAAAAA/////////////////8P////////////////+AAAAAAAAAAAAAA///////////wAAAA/////4AAAAAAAAAAAAAAAA////////+Af/+AB/8AAAAAAAAAAAA//+AAAAAAAAAAAAAD//////////8AAAAAAAAAAAP//AAAAA/4AAAAAAAAAAD///////////AAAAAD///8AAAAAAAAAAAAAf///////AAAAAAAAAAAAAAAP///////////wAAAAAAAf/////wAAAAB//////gAAAAAAAAAAAAAAH//wD////wAAAAAAAAAAAAAA///8AAAAAAAAAA///////////4AAAAB//////////////8AAAAAAAAAAAAcAAAAAAAAAAAD///////////////8AAAAAAAAAAAAAB//AAAAAAAAAAAAAAAAAH///4AAAAAB//////AAAAAAAAAAAAAAAD//////////////////+AAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAAD//////////////////8AAAD///////////////gAAAAAAAAAAAB/////////////////AAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAH/AAAAAAAf+AAAAAAAAAD///////////////4AAAAAAAf////8AAAAAAAAAAAAAAAf/8AAAA///////////4AAAAAABgAAAAAAAAAAAAAH/////////////////wAAAAAAAAH///////AAAAAAAAAAf//////////AAAAAAAAAAAAAAAP//////////wB/////////AAAAAAH///////////gAAAAAAAAAAAAA////+AAAAAAAAAAAAAAAAf//////////wAH//////////////////AAH//+AAAAA//////////wDAAAAAAAAH//+AAA/////4AAAAAAB//gAAAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAAAAAAAAAB////////////////AD//////4AAAAf////////AAAAAAAAH///+AAAAAAAAAP/+AAAAAAf///////////////4AAAAAAAAD/////////////////wAAA/////////wAAAAH///////+AAAAAAAAAAAAAAAD/////////////////4AAAAAAAAAAAAAAAAAB/////////////+AAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAf///////////////gAAAAAAAAAAAP////////////////AAAAf/////gAAAAAAAAP//////////////AAAAAAAAAAAB//////////wAAAAAAAAAAAAAAAP////wAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAH///gP////wAAAf///+AAAAAAAAAD//////////////4B////////wAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAA/////8AAD//////+AAAAAAAAAAD////////////////AAAAAAAAAAAAAAAAAP/////////////////gAAAAAAAP4AAAAAAAAAAAAD///////////4B/////////wAAB////8AAAAAAAAAAAAAwAAAAAAAAAAAD////////////8AAAAAAA/////////4A//////////gAAB//AAAAAAAAAAAP////////AAAAAAAAAAAAAAAf///////////4AAAAAAAAAAAAAAAB///////AAAAA/////////8AAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAD////////////wAAAAAAP///////////////+AAAAAAAAAA", + "sha1": "e1f6b9536f384dd3098285bbfd495a474140dc5a", + "sha256": "e5239ecec9ea7737f614ebab502df1248c0a9a0183fc70441fd9ac88040846ce", + "sha512": "7d701e5945751e112f57304fecd6be22e60b85367b14df5b79514ea731d5183f563212fa4ba3ac148b0e96617ab2fc2d9d2b32ff675b8a8344f9c8c102c80896" + }, + { + "input": 
"AAAAH///////////////////AAAAAAAH///////////////////wAAAAAAAAAAAADAAAAAAAAAB////////4B////////////4AAB///////////////4AAAAAAAAAAAAAAAAH///////////AAAAAAAAAAAAAAAA///////AAAAAAAD/////////////////wAAAAAAAAAAH/AAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAAAAA///////////4AAAAAf////wAB//////////////AAD////4AAAAAAAAAAAAAAAAf////////////+AAAAAA//////////wAAAAAAAAAAD///////////////4AAAAAAAAAAAAAAAAAH//////+AAAD//////////////////8AAAAAP/////wAAAAAAAAD///////////////gA/////AAAAAAAAAAAAAAAA//AAAAAf//////////wAD//////////////////8AA///////////AAAAAAAAAAAD////8AAAAAAAAAAAAAf//////+AAAAAAAAD///////////////////4AAAAAAAAAAAAAD8AAAAAAAAAAAAAAAAA///////+AAAAAAAAAAAAAAAf///////////gAAAAAAAAAAB8AAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAAA///////////wAAAAAAAAAAAAAAAAB/////wAAAAAAAAAAAAAAAH/8AAAAAAAAAAAAAB/////////////////4AB//////////AAAAB///4AAAAAAAAAAAAAB///////wAAAAAAAAB///////////wAAAAAAAAAAAAAAH/////+AAAAAAf///+AAAAAAAAAAB//AAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAA4AA+AAAAAAAAAAAAAAAAAAD//////AAAB//////////g//////////+AAAAAAAAAAAAAAAH/////////////////wAAAAAAAAAAAH////////////////+AAAAAA///////+AAAAAAAAAAAAAAAAB///////////////////x////////////+AAAAAD+AAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAA///////4AAAAAAAAf///+AAAAAAAH//////8AAAAAB/////gAAAAAAAA///gAAAAAAAAP////gAAAAAAA////////////////gAAAAAAAAAAAB///////////////////AAAAAAAAAAAAAB///////////8AH///////AAAAAAAAAAP///////////4AAAAAAAAAAAAAAAA///////////////////z///8AAAAAAAAAAAAAAAAAAA/gAAAB//////8////+AAAAAA////////8AAAHwAAAAAAAAAAAAAAAAAAA/////////////gAAAAP////////+AAAAAAAAAAAAAAD/////////+AAAB////////////////8AAAAAAAAAAAAD////////AAAAAAAAAAAAAAAH///////////8AAAAAAH8AAAAAf//////8AAAAAAAH///7///////8AAAAAAAAAAAB/////+P///4AAAAAAAAAAAAAA///////8AAAAP/////////////////wAAAAAAAAP//////////8AAAAAAAAH//////////8AAAAAAAAAAAAAAAAAAAH////gAAAAAf//////////////////+AAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAB////wAAAAAAAAAAAAAAAAAA8AAAAAAAAAAAA//////////////v/////////+AAAAAAP///////////////4AAAAAAAAAAAP///////////8AAAAAAAAAAAAf///////////+AH///////8AD////////////+AD////////AAAAAAAAAAAAAAAAAf//////////////////+AAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAD/////+AAAAP/wAAAAAAAAAAAAAAA//////////AAAAD////////gAAAAAAA/////////////8AAAAAAAAAAAAAAAAAA//+AAP////////////////+AAAD////////wAAAAAAH/4AAA///8AAAAAAAAAAAAAf//////AA////////////wAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAAP//////////////////+AAAAAAAAAAAAAAAAf//////////w///////////4AAAAAAAAAAAAAA=", + "sha1": "b522dae1d67726eba7c4136d4e2f6d6d645ac43e", + "sha256": "58e09b4047770bc998b86a4191b7a11eec6fc65bd5e5d0fb6f30d4b7ee0cd683", + "sha512": "bd274fcdd48554e5406076d795e5029945d23894ffa4da150a203b8caaccf3fc8dabf8177995c7965822f920c62574b4f65216f3d807b5bb766843ced496343d" + }, + { + "input": 
"AAAAAAAAAAAAD/////////////gAAAH////////////////wAH//////////+AAAAAAAAAAf////////////////+AAAAAAAAAAAAAAP//4AAAAAAAAAAAAAAAB/////gAAAAAAAAAAAAD//////////gAAAAAAAAAAAAP//////////////+AAAAA////////////////////gAAAAAAAAAAAAAAAAAAAD////////////gAAAAAAAB/8AAAAAAAAAAD//////4AAAAAAAAH//////////////AAAAAAPgAAAAAAAAAAAAAAAAH/////////+AAAAAAAAP/////8AAAAAAAAB////8AAAAB///////////////////+AAAAAAAf/+AAAAAAAB///////////////4AAAAAAAf////////////////8AAAAAAAAAAAAAAB//////////+AAAAAAAAAAAAB////wAAAAAAH4AAAAAAAAAAYAAAAAAAAD////////////+AAAAAAAAAAAAP/////////////wAAAAAAAAAAAAAAAAAAf//+H//////////////////wAAAAAAAAAAAAAAAAAH////8AAAAAf//AAAAAAAAAAAAAAAA/+AAAAD//////////gAAAAAAAAAAAAAAAH////////////////8AAAAAAAAAAAAAAAAAH//////////////////+AAAAAH/+AP///////////////gAAAAAH//4AAAAAAAA////////////4AAAAA//////////////////gAAAAAAAAAAAAAAAAAAB/////////AAAAAAAAAAA8AAAAAAAAAAAAP4AAAAAAAAAAAA///////////8AAAAAAAAAAAAAP///////////8AAAEAAAA///////////4AAAAAAf/////////////////+AAAAAAAAAAAAAAAAP///////gAAAAAAAAAA/////gAAAAAAAAAAB///////////////////8AAAAAAAAP////////////////4AAAAAAAAAAAAAAf/////////////wAAAAAAAAAAAAAD////wAA//AAAAAAAAAAAAD///+AAAH////////////4AAAAAAAAAAf//8AAAAAAAAAAAAAA////4AAAAAAAAAD////////////4AAAAAAAAAAAH/////////////////5//+AAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAAAAAAH////////4AAf/////////////wAAAAAAAAAAAMAAAAAAAAAAAB////gAAAAAAAAAAAAAB//////////+AAAAAAAAAB/////////////gAAAAA////AAAAAAAAAAAAAAAAAAAH//////////AAAA///////wAAAAAAAAAAH////////gAAAAAAAAAAAAAAAAAAAH///////////////////wAAAAB///////////+AAH////////////////AAAAAAAAAAAAAAAAP//////////+AAAAAAAAB///AAAAAAA////8AAAAAAAAH//////////////////4AAAAB///////////////////wAAAAAAAAAAAAB////////////////4AAAAAAAAA////////gAAAAAAAAAAAAAAAAAAAD/////////AAAH//////////+AAAAAAAAAAAAAf////////AAAAAAAAAAAAAAABAAAAAAAH///////////////wAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAgAAAD///8AAAAAAAAAAAAAAAAf//////////////////4AAf//////////3//////////////////wAAAAAAAAAAAAAAH/////////////4AAAD//////wAAAH///////////4////8AAAAAAAAAAAAAAD//////////////gAAAAAAAAAAAAAAAAAB/////////////gAB///////////////////wAAAAAAB////////////8AAAAAD//////////////8AAAAAAAAAAAAAAAAAf/////////AAAAAAAAAAAAAAD////////////////////AAAAAAAAAAD/////////////////8AAAAAAAAAAAAAAAAAD///////////4AAAAAAAAAAAAAAAB//+AAAAAAAAf//////////////////wAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAf/////A///////8AAAAAAAAB///////8AAAAAAAAAAAAAAAADwAAAD//////////////gAAAAAAAAAAAAAAAAAD///////////////////8AAAAAAAAAAAAAf//////////////////D/////////gAAAAAAAAAAAAAAAADgAAAAAAAAAAAAAf//////////////+AAAAAAAAAAAAAAAAAAA//////////gAAAAAAAAAAAA==", + "sha1": "e9a021c3eb0b9f2c710554d4bf21b19f78e09478", + "sha256": "9e01666f3284aec585338d0b452aa1712b9d1392c4a265a2ecfc9dc4cadd002b", + "sha512": "f6aeb80185c567f9c51ae74741c800dda109d0d64e0f8b0dbad115d9513564e61e8520baec3a04c98546443c60ee79d1d0d5596d4aac33384a530d7d37518752" + }, + { + "input": 
"///////////////gAAAAAAA////////4A/////////4A/////////gAAAAAAAAAAAAAAB//////////AAAAAAAAAAf//////////4AAAAAAAf/////////////////A/////+AAAAAAAAAAAAAP//////////////////wAAAAAAAAAAAAP///////////////wAAAAAAAAAAAD//AAAAAAAAAAAAAAB////////////AAAAAAAAAAAf//////////wAAAAAAAAAAB//////////////////8AAAAAAAAAAAAAAAAAH/wAAAAAAAAAAAAAAAD//////////////////4AAAAAAAAAAAAAAAAAD///AAAAAAAAAAAAAAAP//gAAAAAAAAAAP///////////AAAAAAAAAAAAAAD/////////gB////////////////////4A//////////AAAAAAAAAAAAAAAAAAAAP//////////////+AAAAAAAAAAAAAAA///////4D//gAAAf///////8H///////////////+AAAAAAAAAAAAAAAAAAA////AAAAAAAAf//////////////gAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAA/////////////////////wAAAAAAAAAAAAAAA//////////8AAAAAAAAAAAAA//AAAAAAAf//////////////wAAAAAAA//////////////////4AAAAAAAAAP/////////AAAAAAAAAAAAB////+AAf/////////////+AAAAAAAAAB////////////////////wAAAAAAAAAAD//////4AAAAAAB///////////gAAAAAB/////////////////gAAAAAAAAAAAAAAAAAAB//////8AAAAAAB///////////////wAAAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAAf/+AAAAAAAAAAAAAAAAAAAAA////wAAAAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAf////////////wAAAAAAAAAAAf///gAAAAAAAf//8AAAB///////////////////4AAAAAAf////////////////wAAAAAAAH//////////////8AAAAAAA///////////gAAAAAAAAAA////gAAAAAAAAAAAAAAAAAAA//////////////////gAAf///////gAAA///////////4AAAAAAAAAAAAAAAAAAf/////4AAAAAAAAAD//////////wAAAAAAAAAAAAAB/////////////+AAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAAAAAAAAAAP///////////////////8AAAAAAAAAAAAAAA///9/gAAAAAAAAAAAAA/////8AAAAAAAAAAAAAAAAP////8AAAAAAAAAA//////+AAAAAAAH//////////////////gAAAAB////////////////+AAAAAAAAAAAAH////////+AAAAAAPAAAAAAAAAAAAAAAAAAAAAf////gAP//4AAAAAAAAAAAAAAAAAB////////////////////4D//wAAAAAAAAAAAAAAAAAAAAD///////////////4f///////////////////+AD/////////////////AAAAAAAAAAAAAAAAAAAP////////////////////wAAAAAAAH///////////////x/////////AAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAD////////////////4f///////////////////4AAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAP//wAAAAAAP//8AAAAAAAAAAAAAAf/////////////AAAAAB////////////wAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAf///////////AAAAD////////////////4A/////////wAAAAA/////////////////4AAAAAAAAAAAAAAAP///////////gAAA////////////gAAAAAAAAAAA////wAAAAAAAAB//wAAAf///////////+AAAAAAAAA////////////////4AAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAA///////////////////4AAAAAAAAAAB///////AAAAAAAAAAAAAAD//////////AAAAAAP/////////8AAAAAAYAAAAAAAAAAAAAP/////wAB////gAAAAAAAAAf////////////////////4AAAAAAAAAAAAAAAH///////////8AAAAAAAAAAAA/9/+AAAAAAB///5////////////8AAAAAf////8AAAAA/////////4AD//AAAAAAAAAAAAAD/////////gAf/////8f////////+AAAAAAAAAAAAAf/4AAAAAAAAAAAAAA//////////////gAAAAAAH///////////AAAAAAAAAAAAAAAAAAf///////////////////", + "sha1": "df13573188f3bf705e697a3e1f580145f2183377", + "sha256": "33aa52b6be6991965ae18124232f108ec7b400528e848e5d8a8d7cf75783ee19", + "sha512": "bd17c744c4f7cdc4b44cd83262754196d8f11750136ca16f93661f8381df351eeff4829e52c4eca7ea8215c0ee54341663633f0c2aa5d820ed3ca0f2cdcd1f6a" + }, + { + "input": 
"AAAAAAAAAAAAAAAAA/////////gAAAAAAAA//////+AAAAAAAAAB//////////////4AAAD//////////+AAAAAAAAAH////////////gAAAAAAH///gAAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAAAf/////+AAAAAAAAAAAAAAAD///////////////8AAAAAAAAAAAAAf//////////////gAAAAAAAAAAAAAP////AAAAAAAAAAAAB//+AD/////+AAAAAAAAAAAB///////////////wAAAAAAAAAP/////////////////8AAAAAAAAAAAAAAAAAAAAP///////////////////gAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAAAAA//////gAAAAAAAAAAAAAAAAAP////////////////////wD///////////+AAAAAAAAAAAP///////////wAAAAAAAAAAAAD//4AAAAAAAAAAAAAPgAAAAAAAAAAAB///gAAAAH//ACAAAAAAAAAAAAAAAAA///AAAD///////////////////AAAAAAAAAAAAAAAAP+AAAAAAAAP////////////////8AAAAAAAAAAAAAAAAD///////AAAAAH//AA////AAAAAf//////////////fgAAAB////////////////8A//////////8AAAAAAAAAAAD/////////////////wAAAAPgAAAAAAAB//////////////8AAAAAAAAAAAAAAAAAAf/////8AAAAAAAAAAAAAAAAAD//////////wAAAAAAAAAAAAAH//wAAAAAAAAB////////////////////4AAAf////////////////8AAAAAAAAAAAAD//////////////////+P//////AAAAAAAAAAAAAAAAAA///////////AAAAAAAAAAAAAAAAAA////////////+AAAAAAAAAAAAAAAAAAAAAf////////////////wAAAAAAD/////4AAAAAB//AAAAAAAAAAAAAAAB/4AAAAAAAAAAAAAAAAAA//////////4AAAAAAAAAAAAAAAB///////////////////wAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAA///////////4AAAAAAAAAAAAAAAH///////////gAAAAAAAAAAB/////////8AAAAAAf/wAAAAAAAAAAAAAAAH/////AAAAAAAAAAAAAAAAAADAAAAAAAAAAAAB/AAAAAAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAA////+AAAAAAAAAAB///////////AAAAAAAAf////////////wAAH////////////////+AAAAAAAAAAAAAAAAAAA//wAAAAAAAAAMAAAAAAAAB//////4AAAAAAAAAAAP////+AAAAH//////////////////4AAAAAAAA///////////////gAA///////8AAAAAAAf////////////////////4AAAAAAAAA//////////////4AAAAAAAAAB////AAAAAAAAAAAAAAAAAAAf/////4AAAAAAAB/////////AAAAAAAAAAAAAAA////////////wD////////gAAAAAAAAAAAAAAAAf////////////wAAAAAAH///////////////gAAAAAf///////////////////4AAAAAAAAAAAAAAAf/gAAAAAA//////AAAAf////////////////4AAAAAAAAAAAAAAD///8B////////////////wAAAAAAAAAAAAD////8AAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAH///////////////////+AAAAAAAAAAAAAAAAAD//////////////////8AAAAf///+AAAAAAAAAAAAA//////+AB/////8AAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAD///////////wAAA/////////////gAAAAAAAAAAwAAAAAAAAAAAAAA////4AAP/////////gAD///4AAAAAAB/////////wf//////////A////wAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAAAAAH///////////////AAAAAAAAAAAAAA////////////////8AAAAAAAf//+AAAAAAAAAAAAAAAAAAAAAP//////////8AAAAAD//////////////////gAAAAAAAAAAAH///////////////////gA////////wAAAAAAP///////////////////wAAAAAAAf//8A//////////////wAAAAAAAAAAAAAAAAAA////wAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAA///////4AAAAAAAAAAAAAAA///////+Af/////4AAAAAAAAAP////////AAAAAAAAAP////////////////////AAAP/////////////AAA///////////////////4AAAH//4AAAAAAAf////////////////+AAA///////////////////4AAAB//////8AAAAAAAH///////AAAAAAAAAAAAAAAAAAAAAD+AAIfAAAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAH///////+AAAAAAAf/AAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAf/////////////////gAAAAAAf//////////////////+AAAAAAAAAAAAAAAAAAAAA=", + "sha1": "188835cfe52ecfa0c4135c2825f245dc29973970", + "sha256": "f854ce37a0821dee06b616d2e86383271c91e09328f884dfef2107712d267601", + "sha512": "60f8f46caeed82fa1afe2e1a8c27d490f3cdcfc6e0d29010813e36a1acf6c0aa64f9f03823bc60cd48162db49adf4ef8d42be2fd5ee6948ef6e9ec83d8784c51" + }, + { + "input": 
"AP/////////4AAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAD///AAAAAAAAAAAAAAH////////////////////AAAAAAAAAAAAAAAAAAAAAAP/////////////AAADAAAAAAAAAAAAAAAAAAP///////////////////AAAAAH/////////////+D+D///////////////wAAAAAAAAD//gAAAAAAAAAf///////////////gAAAAAAAAf/wAAA/////////gAAAAAAAAAAAAAAADwAAAH/////////////////AAB//////AAAAAAAAAAAH/gAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAf/wB//////////////////AAAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAf///////+AAAAAAAAAAB///////////////////wAAAAAAAAAAAAAAAA//////////////////////4D////////////gAAA+AAAAAAAAAAAAAP/////////////////8AAAAB///////////////////4AAAAIAAAAAAAAAB/8AAAAAAH//////////////gAAAAAAAAAAAAAAAAAP///////////////////gAAAAAAAAAAAAAAAAf/////x////+AAAAAAAAAAAAAAAAAAAAP///////gAAAAP////////8AAAAAAAAAAAAAAAAAAAAAA////+AAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAH////////////////AAAH////////+AAAAAAAAAAAAD//////////////+AAAD/////+AAAAAAAAAAAAAAAAAAAAAAP////////////+AAAAAAAf///////8AAAAAAAAAAB///////////////8AAAAAAAAf/////////////////////wAAAAP////////+AAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAD///////8AAAAAAAAAAAAAAAAP4AAAAAAAAAAAAAAAAAAAAAH///////////////8AAAAAAAAAAAAAAAAAP//////////+AAAAAAAAAAAAAAAPAAAAAAAAAAAAAAAB////////////////////8AAAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAAAAAAAD/////////////gAAAAAAAAD/////////////////////+AAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAH//////////////////wAAAAAAAAAAAAB/////////////////////AAB///////wAAAAAAAAAAAAAAAAH////+AAAAAAAAAAAAAAAAAAAA/////////////////////4AAAAAAAAAH//////8AAAAAAAAAAH////////////////////8AAAAD/////8AAAAAAAAAAAAAAAAAAAAAD//////////////////AAAAAAAAD////////4AAAAAP///////////gAf///////////+AAAH///////////4AAAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAP////////////AAAAP//+AAAAAAAAAAAA//////////4AAAAAAAAAA////////////////gAAA/////////8AAAAAAAAAAAAAAAAAAB/////////////////////+AAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAA/////gAAAAA///AAAB////////+AAAAB////////8AAAAAAAAAAAAAAAAAAAP/////4AAAAAP/////////////wAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAADgAAAAAAA///AAAAAAAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAP////////+AAAAAAAAAAP////////////AAAAAAAf///////+AAAAAAAP///////AAAAAAAAf//////////AA/8AAAAAAD//////////8AAAAAAAAAD///////////////////gAAAAB///AAAAAAAAAAAAAAAPgAAAB////////////4A/////////////////////AAAAAAAAAAAAAAAAB////////////////////8AAAAA//////wAAAAA//4D//////////AAAAAP//+P//////4AAAAAAAAAAA/////gAAAAAAAAAAAAAAAAAP///////////////////+AAAAAAAAP//////////////////8AAAAAAAAAAAAAAAAAAAAAAf////////////////////+AAAAAAAAAAAAAAAAAAAAAP/////////////////////AAAAAAAAAAH/+AAAAAAAAAP////////////8AAAAAAAAAA//4AAAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAAAAAAAf/////////////AAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAP////////////////////AAAAAAAD////////////////AAAAAAAAAAAAAAAAAH//////////////8D8AAAAfwAAAAAAAAA/////////////////////wAAAAAAAAAAAAP4AAAAAAAAAAAAD//////////gAAAAAAAAAAAAAf////////////AAAAAAAAAAAAP////////////////////8AAAAAAAAAAH/////////8AAAAAAAAAAAAAAAH///////////////////8AAH///4AAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAAA////////////////////w==", + "sha1": "41b615a34ee2cec9d84a91b141cfab115821950b", + "sha256": "a58035c4921e7114b97fde8d4cf04224971d49fc6b23ed5d61a29e133684c809", + "sha512": "9dfd23daa0e8c6660c0f3bbd1187451b862fb3476ff7725d4f502150bf0a827deb1cbfcd055129f1c0a493d6f71c87380776f2d260cbe39d9bdb4259202f55d9" + }, + { + "input": 
"AAAAAAAP///////////gAAAAAAP//8AAAAAAAAAAAAAAAAAAAAD/////////wAAAAAAAAf///////AAAAAAAAAAAAAAAAP//wAAH///////////////////wAAAAAAAAAAAAAAB////8AAAAAAAAAAAP/////+AAAAAAAAAAAAAAAH//////////////////gAAAAAAH//////////////////8AAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAf+AAAAAAAAAAA////+AAAAAAAAAAAAAAAAB//////////////////8AAAAAAAAAAA/////////////wAAH////////////f//////////////wAAAAAAAAAAAAAAAAAAP/////////////+AAAAAAAAAAAAAfwAAAAAA//////gAAAAAAAAAAAAAH///////////////////4AAf////////////////4AAAAAAAAAAAAAAAAAAAAAAf//////////////////8AAAAAAAAAAAAAP//////////////////////wAAAAD//////////4AAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAAAAAAAAAf///////////////////wD/8AAAAAAAP////////////////////wAAAD///////////////////4AAAAAAAAAAAD/////////////////////wAAAAAAAAAP////////////AAAAAAAAAAAAAAAAAAB////////////////4AB//////////////////////AAAAP//////////////////8AAAAAP///////////////////wAAAAAAAAAAAAD///////////////4AAAAAAAAAAAAAH////////wAB///+AAAAAAAf///////8P/AAAB////////////8AAAD////////////////gAAAAAAAAAAf/////wAAAAAAAAP/////////+AAAAAAB//////////////////////4AAA/////8AAAAAAAAAAAAPgAA/////////////////////wAAAAAAAAAAAAAAAAQAAAAAAAAAf/////+AAAAAAAAAAAAAAAAAAD//////////////////wAAAAD///////////////+AAAAA///////////////4AAAAAAAAAAAAAAAAA/////////////gAAAAAAAAAAAAAAP//wAAAAABAAAAAAAAAAAAAAAAAf/////////////////////gAAAAAAAD///////////AAAAAAAAAAAAAAAAAAAAD//////////////////gAP///////////////4AAAAAAD/////////////////////AAAAAAAAAAAH/////////////4AAAAAAAAAAAAAD///////////8AAf/gAAAH//////////////AAD//wAAAAAAAAAAAAAAD////////////////4AD////////////4f//////////////////+AAAAAAAAAAAAAAD////////////////+AAAAAAAAAf//////gAB/////////+AAAAAAAf/////8AAAAAAAAAAAAAAAAAAAAD///////wAAP/////////////gAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAA4AAAAAAAAAAAAAf////////////////+AAAAAAAAAAAAAAAAAAA///////////////geAAAAAAAAB///////////////////AAH///////////////////4AAAAAAAAAAAAAAAAAD//////////////////4AAAAAAAH//////wB///////////////8AAA//////////wAAAAAAAAAAH/////////////////AAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAAAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAf//////+AAAAAAAAAAAAAAAAAAAAAAB///////gAAAAAAAAAAAAA//////////gAAAAAAAAAAAAAAAAAAAB/////////////////////8AAAAAH/////////////////wP/////////////////4AAAAAAAAAAAAA///////////////////AAAAAAAAAAAf//////AAAAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAA////////////wAAAAAAAAAAAH/////gAf/4AAAAAAAAAAA////////8AH///////////////+AAAAAAAD////////8AAAAAAAAAAAAAH/////////////AgAD//////////////wAAAAAf//////////////////////wAAAAAAAAAAAAD/////////////4AAAAAAAD//////////////4AAAA///////+AAAAAAAAAAAAAAAAAAAAAH//////////////+AAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAA//////////////////8AAAAAAAAAAAAP///////4AAAAAAAAAAAAAAAAAD///////+AAAAAAAAAP//////////////+AAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAf////////+AAAAAAAAAAAAAAAA///////////////////////gAAAAAf/////////////////////AAAAAAAAAAAAAAAAAAH/////////////////////8AAAAAAAAAP////+Af/////////////+AAAAAAAAA//////wAAAAAAAAAAAAAAP///////AAAAA///////////////////8AAAAAAAAAf////AAAAAAAAAAAAAD////////////AAAAAAB///////////////////wAAAAAD///////wAAAAAAAAAAAAAAA///////////////wf////////////////8AAf//////////////////8AAAAAAAAAAAP////////////////4AAAAAAAAAAB/////////+AAAAAAAAAAAAAAAAAAAB///////+AAAP/////////////////8AAAAAAAAAAAAAB//////4AAAAAAAAAAAAA///////////", + "sha1": "ab3dd6221d2afe6613b815da1c389eec74aa0337", + "sha256": "4d8963b832c44bab059a206f162890fff4eafd71e535a03609a67fe3c31de9e3", + "sha512": "1b627ee9d88d2c8ef7f883f7ca84d888e7fedfda36c32b2e2e2f066e29bb0b42938f4ae7e12b18840050a8f7494caa1699836ba921bc17d6708d503b893281f0" + }, + { + "input": 
"AAAAAAAD//////////////////4AAAAAAAAf/////////////////////4AAAAAAAAAAAAD////////////AAAAAAAAAAD/////////gAAf///AH////////gH//gAAAAAAAAAAAAAAAAP////////+AAAAAAAAAAAAAAAAAAAAAAB/////////8AAAAAAAAAH//////////wAAAAAAAAAAAAAADAf////////////////////+AAAAAAAAAAAAAAAf4AAAAAf/////+AAAAAAAAAAAAAAf/////////////////////AAAAAAAAAAAAAAAAAAAAAAAP////////////////AAAAAAAAAAAAAAAAAADwAAAAAAIAAAAAAAAAAAAAAAAAB/////////////////+AAAAAAA//////gAD//////////+AAA/////+AAAAAAAAAAAAAAAH/////////8AAAAAAAAAAAAAAAAAAAAAP///////////////8AAAAAH//////////////gAAAAB//////////8AAAAAAAAAAAAAAAAA/////////wAAAAAAAAAAAAAAAAAAAAP///////////////////AAAAAAAAP/////////////+AH/////////////AAAAAAAAAAAAAAAAAAf//////////////gAAAAAAAAAAAAAH///////////////////gAAAAH4AAAAAAAAAAAAAAAAAAH//////////////gAAAAAD//////AAAAAAH/////////+P/wAAAAAAAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAB/////////////wAAAAAAAAAAAAAAAAAAH/////////////////gAAAAAAAAAAAAAAAAAAAAAAH//////////////////gA////////////////////4AAAAAAP////////////////4AAAAAAA///////////////+AAAAAAAAAAAAAB///////////8AAAAAAAAAAAAAAAAAH/8AAAA//////////////////wAAAAAAAAAAAAAAAP/////////8AAP/////////////////4AAAAAAAAAAAAAH/////////wAAAAAAAAf/////4AAAA/wAAAA//gAAAAH//////////////////////wAAf//////////////////////gAAAAAA////////////////////wAAAAAP///////////////+AAAAAAAAAAD////////////////gAAAAAAAAAAAAA//////AAAAAAAAAAAAAAAAAAD////4AAAAAAAD///////////wAAAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAAAAAAAAAcAAD//4AAAAAAAAAAAAAAAAAAAAH///////////////////////gAH/////8AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAAAf//////gAP//8AAAAAAAAAAAAAH//////4AAAAAAAAAAAAAAAAAAAAAAD/////////////4AAAAAAAAH///////8AAAAAAAAAP//////wH//////////////////////wAAAAf///////gAB////////////////////AAAAAAAAAAAAAAAAAAAAAf////+AAAAAAAAH//////////////////////+AAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAf/////////////AAAA///////////4AAAAAAAAH/////////8AAAAAAAAAAAAAAAAAAP//gf///////////////AAAAAAAAAAAAAAAAf///gAAAAAAAAD//////////////AAAAAAAAAAAAAAAAAAAAD/////////4AAAAAAAAAAAP///////////////////gAAAAAAAAAAIAAAAAAAAAAAAAAAAAD////////8AAAAAAAAAAA////////gAAAAAAAAD/////////////////////8AAAAA+AAAAAAAAAAAD//////////////////4AAAAAAf////////////4AAAAAAAAAAAAAAAAB////////////wAAAAAAAAAAAAAAAAA//////////////////////gAAAAP////////////////////wAAAAAAAAAAAAAAAAAAAAA/////////////////wAAAAAAAAAD/gAAAAAD///////AAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAAAAB//////////////////////wAAAAAAAAAAAAAAAP/////////////////////wAAH//////wAAAAAAAAAAAAAAAAAAAH////////////////////4AAAAAAAAAAAAAAAAAAAAf////////////APAAAAAAAAAA/////////////8D////////////////4AAAAAAAAAAAAAAAAAAAAf//////////////AAAAAAAAH////////////////////AAAAAAAA//////8AAAAAAAAAAAAAB////////////8AAAf/////////////////+AH///////////AAAAAAAAAAA///////////////////gAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAP////wAAAAAAAAAAAAAP//////////gAAAAAAAAAAAAH////////////////8AAAAA/////////////////////+AAAAAAAAAAAAAAAAAAAAAAf///////////wAAAAAAAf+AAAAAAAAAAP/////+AAAAAAAAf//////////////8AAAAAB//////////////////////wAAAAAAAAAAAAAAAAAAP//////////+AAAAAAAB/////4AAAAAAAAAf///AAAAH/////////////////////8AAAAAB/wAAAAAA////////////////wAAAAAAf//////////////////wAAAAAAAAAAAAAAAB///////////4AAAAAAAAAAAAAP////gAAAAAAf//////////wAAAAAAAAAAAAAAAAAAAAAH////////8AAAAAAAAAAAAB/////////////gAAAAAAAAAAAAAAP///////////////////8ADAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAA=", + "sha1": "0706d414b4aa7fb4a9051aa70d6856a7264054fb", + "sha256": "6ebf98b52fc3c4e77257d176b47d10729baeec4066a9bc78a89d7d02af7ab2cf", 
+ "sha512": "29bcfa8101e8288470c5bd0cf671f52dfc7e8a4fe0026d6d3b6bb86a84d995d9f684dcf6e5a6870cec74f2fa252fa99f5e38144033f105c723d6256199e143f9" + }, + { + "input": "/////4AD////////////////gAAAAAAAAAAAAAAAAAAAAAAQAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAB//////////////wAAAAAAAAAAAAAAAAAAf///////////////8AD//////////////////////AAAAAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAD4AAAAAAAAD////////////gAH/////////////////+AAAAAAAAAAAAAAAAAB//////+AAAAAAAAAAH/////////////////////gAAAAAAAAAAAAP//////////////////////wAAAAAAAAAD//////////wAAAAAAAAAAB/////////wAf///////////////////8AAAB/////////8AAP/////////////////+AAAAAAAAAAAAAAAH///4AAAAAAAAAAP////wAAAAAAAB///////////////////8AAAAAAAD/////////////////4AAAAAAAAAAAAAAB/////////////+AAAAAAAAAAAP////AAAAAAAAAAAAAAAAAAAAAAD///////////////////////4AAAAAAAAH//+B///////////4AAAAAAAP///////////////////////AAAAAAAAAADwAAAAAAAAAAf/////////AAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAA////////////////////////wAAAAAAAAAAAAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAH/////////AAAAAAAAAAAAAAAAAAAAAAB//////////////////////8AAAAAAAAAAAD///////8AAAAAAAAAAAAAAAAAAAAA/////////////B/////////////gAAAAAAAAAD////////8AAAAAAAAAAAAAB////+AAAH/////////////////////AAAAAAAAAAAP//+AAAP///////////8AAAAP////////////////////8AAAAAAAAAAAAAAAAAH////AAAAAAAAAAwAAAAAAAAAAAAAAB///////////////8AAAAAAAAAAAH/////8AH/////8AAf/////8AAAAAAAAAAAAAAAADgAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAAD/////////4AAAAAAAAf//////////////////AAAAAD///AAAAAAAAAAAf/////////gAAAD/////////////gAAAAAAAAAAAAAAAAAAAAAD///////wAAAAAAAAAAAAAAAAA/////////////////8AAAAAAAAAAAAAAAAD///////////AAAAAAAAAAAAAAAAAAAAAP//gAD////////////////wAAAAAAAAAAAAAAAAAAAA//4AAAAAAD//////////////////////wAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAH///////////////////////gAAAAA//wAAAAAAAAf///////////////////n//////////////wAAAAAAAAAAAAAAB////////////////wAAAAAAAAAAAH/AAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAAAAAAD//gAAAAAAAAAAAAAAAf//////////////gAAAAAAP////////////4AAAAAAAAAAAAAAAAAAAAAB/////////+AAAAAAAAAAAAAAAf////////////////8AAAAAAAAB///////////////+AAAAA///////////////////+AAAAAAAAAAAAD///////////////+AAAAAAAAAAAB///8AAAAAAAAAAAAAAAAAAAAAAA//////wAAAAP//////4AAAAAAAAAAAAAP//////////////////////AAAAAAAAAAAAAAD/////////////////4AAAAAAAAAAAAAAAAAAA////////////AAAAAAAAAAAAAAAAAAf//////////////////+AAAAAAAAAH//////////////8AAAAAAAAAAAAAAAAAAAH///////////8AAAAAAAAAAAAAAAH////////////////gAAAAAAAAA///////////////////////AAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAD///////////////////////4AAAAAAAB//////////////////////AAAAAAAAAAA/////////////////8AAAAAAAf////////////4AAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAAD4AAAAAAAAAP////////////gAAAAAAAAAAAAAAAA////////////wAAAAAAAAAAAAAAAAAD/////////////////AAAAAAD///////////////AAAf////////////AAAAAAAAAAAAD//////////////AAAAAAAAAAAAAAAAA/////////////////////4////////////////////////AAAAAAAAAAAAAAAAB///////////////////////AAAAAAAAAAAAAAAAAAAAAAP////////8AAAAAAAAAAAAAAD//8AAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAD////////+AH//////////////////////wAf////////+/////////////////+AAAAAAAAAAAAAAAAAAD///////////wAAA/gAAAAAAAAH////wAAAAAAAAAAAAAAAAAAAAAAAf/////////////////8A///////8AAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB/////////////////////wAAAAAAAAAAAAAAfwAAAAAAAAAAAAAAAAAAAH////////wAAAAAAAAAAAAAAAf/4AAAAAAAD///////////////////wAAAAAAAAAD////////////////////4AAAAAAAAAAAAAAD/////+AAAAAAAAAAAAAAAA//4AAAAAP////////////////AAAAAAAAAAH//////////////////////+AAAAAAAAAAP///////8AAAAAAAAf////////+AAAAf////////////////////wP//////+AAAAAAAAAD////////////////8AAAAAAAAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAA///////
////////////////4AAAAAAAAAAAAAAAAAAAAAA/8AAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAH//////////4AAAAAAAAAAH//////wAAAAAAAAf/wAAAAACAAAP////////////////4AAAAAAAB/////////////////+AAAAAAAH///////////////////w==", + "sha1": "3cbf8151f3a00b1d5a809cbb8c4f3135055a6bd1", + "sha256": "366cd811c075198d1749db7075c4c333b6f347b64e44b3744ef28a3957a0feb1", + "sha512": "d275f1f6410bf258738b02db1348af55ec1fa524240e9516e5b4ba9066a930e781d7236d7257c998800e64349fa1ffa78d168bf8a626dade502af7472f3f3298" + }, + { + "input": "///////8AAAAB/////////////////////+AAAAAAAAAAAAAAAAAAH////gAAAAAAAAAD////////////////////wAAAAAAAAAAAAAAAH/////////////////////AAAAAAAf/////+AAAAAAAAAAAB////////////////wAAAAAAAAAAAAAAAAAf/////////////////////gAAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAAAAAA///////////////////8AAAAAB/////////////wAAAAAB///////////////4AAAAAAAAAH////////8AAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAB//////////////////gAAAAAAAAAAAD//////8AD//////////////////////+AAAAAAAAAAAAAAAAAAAAAAD//////+AAAAAAAAAAAAAAAAAAAAAf////////////////+AAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAAAAD////////////////////////wAAAAAAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAAAAAAAH////////////////////wAAAAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAH/8AAAAAAAAAAAAP///////////AAAAAAAAAAAAAAAA////////////////+AAAAAAAAAAAAAAAAAAf//////////////////////gH///////////////wAAAAAAAAAAAA///////////////////////+AAD/////AAAAAAAAAAAAAAAAf////////////wAAAAAAAAAAAAAAAAAAAAAAAAf/////8AAAAB///////////////4AAAAAAAAAAAAAAAAAAAA///4AAAAAAP/////////////////////+AAAAAAAAA//////////////AAAAAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAD///////////////////wAAAAAAAD//////////AAAAAAAAAAAAAAAAAAAAf////////////9///////wAAAAAAAAAAAAAAAAAAAAAAAP//////////////+AAAAAAAAAB8AAAAAAAAAAP//////////////////////gH/////////////////AAAAAAAAAAAAAAAf////////////+AAAAAAAAAAA////4AAAAAAAAAAAAAAAAAAP///////////////////gAAAAAAAA///AAAAAAAAAAAAAAAAP////////////4AAAAAAAH/////////////4AAAAB//////////////////////wAAAAAAAAAAAAAAAAAAAAD7///AAAAAAAAAAAAD//4AAAAAAAAAAAAAD/////////////////8AAAAAAAAAAAD/////8AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAf////////////////4////////////////////////+AAf////////gAAf///////////////////////+AAAAAAAAAAH///////8AAAAAAAAAAAAAAAAAAAAH/////8AAAAP////AAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAB///////////////AAAAAP/////////////AAAAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAAH///////////AAAAAAAAADAAAAAAAAAD//////////////////+AP/////////////////wAAAAAAAf/////////AAAAAAAAAAB//gAAAAAAAAAAAAAAH8P/////////////8AAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAP//////////////8AAAAAAAAAAAAAAAH/////+AAAAAAAAAAAAAAAH/gB///////+AAAAAAAAAAAAAAAA/////////////////wAAAAAAAAAAP//////////////////4AAAAAAAAAAAAAAAD///////////////////////wAAAAAAAAAAAAAAAAAAH///////////////////+AAAAAAAAAAAAAAAAAAAD//////wAAAAAAAAAAAAAAAH//gAAAAH///////wAAAAA//////////////8AAAAAAAAAAAAAAH///+AAAAAAAAf/gAAAAAAAAAAAB/////////////gAAAAAAAD//////wB//////////////////////AAAA///////wAAAAAA////////////////////wAAAAAAAAAAAAAP/////////////////wAAA///////////////////////8AAAAAAAAAAAAAAAAAAAAAAA////////////////4AAAAAAB//wAAAAAAAAP8AAAAAAD////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////gAAD////////////+AAAAAAAAAAAAAAAAAAAAH/////////+AAAAAAAH///////////////////wAB////8AAAAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAP//////////8AAAAAAAAAAAAA///////////////////AgAAAAAAAAAAB////////////////////AAA/////+AAAAAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAH//
///////////+AH///////////8AA////+AAAAAAD///////////8AAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAB//////////////wAAAAAAAAAAAAAAAAf///////////////////8AAAAAAAAAH//////////////4AAAAAAAAAAAAAAAAAAAAAAAf////////////AAAAAAAAAAAAAAAAAAAAAAf///////+AAAAAAAAAAAAB///////+AAAAAH//////////////////////4AAAAAAAAAAAAAAAAAAAAAB////wAAf/////////gAAAAAAAAAAAAAAAAAAAAA////////////////////////4AAAAAAAAAD///////////////gAAAAAAAAAAAAAAAAAAAAAAAB//////wAAAAAAAAAAAAAAAAAAAAAP///////4AAAAAAAAAB///////////8AAAAAD//////////////+H//////////////////////8AAAAAAAB////////////////4AAAAAAAAAAAAAAAAAAAH//////8AAAAAAAAAAAAAAAAAAH///////////////////////4AAAAH///////////////gAAAAAAAAAAAP////////////////////4AAAAAAf////////////////////", + "sha1": "da5d6a0319272bbccea63acfa6799756ffda6840", + "sha256": "712157d7d59011c4bf1ee690217f4b0f855816e9bbee6b6aff277b9645340c9a", + "sha512": "9892c6d0f13e39f870d42d1bec54a4cd1823c502026f891227fecf7cd262f10a5b35f3d9b96831ebc4801ffa864ace05b1ba80b31eb91f7031ba8e07f6a91204" + }, + { + "input": "/////+AAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAH/////////////////AAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAAAAAA/////AAAAAAAAAAAH/////////8AAAAAAP///////gAAD/////////////////4H//////AAAAAAAAAAAAAH//////////////////4AAAAAAAAP////////////////////gAAAAAAAA////////////////////////4AB4AAAAAAAAAAAD////////////////////////AAAAAAAAAAAAAAAA////////////////wAH//////////////////////gA////////////////wAAAAAAAAAAAAAAAAAAB//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAAAB///////////8AAAAAAAAAAAAAAAAf/////////////+A///////////////////////gAAAf/wAAAAAf///////AAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAAAAD/////8AAAAAAAAAAAAAAAAAAAHAAAAAAAAH/////////////4AAAAAAAH///////////////////wAAAAAAAAf///////wAB/////////////AAB///////////////////+AAAAAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAP////wAAAAAAf///////////////wAAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAAAAAD//8AB/4AAAAAAAH//4AAf///////gAAAAAAP/+AAAAAAAAB//+AAAAAAD///////4AAAAAAAAAAAAAAAAAAAB///////////////////////+AAAAAAAAAAAAAf//////////////4AAAAAAAAAAAAAAAAAAAAAAAD////////////7/////gAAAAAAAAAAAAAA/////////AAAAAAAD////////////8AAAAAAAD///gAAAAAAAAf//////wAAAAAA////////+AAAB//4AAAAAAAAAAAD//////8AAAAAAAAAAAAAAAP////gAAAAAAH/////gAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////8AAAH////////////////////AA///////////////////////8AAAAAAAAAAAAAAAAAAAAAAA///////gAAAAAP//////////////AAAAB///AAAAAB///////////+AAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAD//////////AAAAAAAAAAAAAAAAAAD/////wAAAAAP//////////////4AAAAAAAAAAAAAAAAAAAH///////+AAAAAAAAAAAAAAAAAAH//////////////////////5/////////////////wAAAAAAAAAP//////////8AAAAAAAAAAAAAAAAAAAAAAH/////////////////////wf//AAAf//////////////////////wAAAAAAAAAAAD/4AAAAAAAAAAAAAAAAAAAH////////////4AAAAAAAAAAAAAAAAAAAAAAAAP/////////AAAAAAAAAAAAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAf/+AAAAAAAAAAAAAAAAAAAAB////////////+AAAf////////8AAAAAAAAAAAAAAAAAAAAAA///////////+AAAAAAAA///////////////AAAAAAAAAAAAD/////////////////8AAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAP//////////8AAAAAAf/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD//////8AAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////4AAAAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAA/////+AAAAAAAAAAAAP/////////4AAAAAAAAAAP///////AAAAAAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAAAB//////////////////8AAAAB//////////wf///////////////AAAAAAAAA///////////+AAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB/////////////gAAAAAf///////////////////////n/////////////////////gAAAAAAAAA//////////
//wAAAAABwAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAAAAAAAAAAAAAAAf//gAAAAAAAAAAAAAAAAAAD/8AAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAAP/////////////////////+AAAAAAAAAAf/////gAAAAAAAAAAD4AAAAAA////////+AAAAAAAAAAAAAAAAAAAAAAAAB////////4AAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////+AAAAB//////////+AAAAAAAAAAAAAAAB////////////////////+AAAAAAAAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAB/gP/////////////8AAAAAAAAAAAAAAAAAAAAAAAP/8B//////////////////wAAAAAAAAAAAAAAAAAAAAAAB///////////////wAAAAAAAAAAAAAAAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAf//////wAP//////////////gAAA///////wB/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAH///////////////////8AAAAAAAAAAAAAAAB///AAAAAAP/////////////////8AAAAAAAAAAAAAAB///////////AAP////8AAAAAAAAAAAAAAAAAA///////////AAAAAAAAAAAP8AAAAAAAAAAAAAAAAAAAAAP/////////////wAAAAAAAAAAAH///////////////////gAAAAAAAAA////AAAAAAB///////////+AAAAAAAAAAAAAAAAAAAA/////////////////////////AAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAP///+AAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAAAAAB/wAAAAAAAB//////////////////AAAAAAAB////////////////////8AAAAAAAAAAAAAAAAAB////////////////////////4B////8AAAAAAAH///////////4AAAAAAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAAAAAAB///////////////wA///wAAAAAAAAP/+AAAAAAAAAAAAH/////////////////////8AAAAAAAAAAAAAAAAAAAAH/////////////////gAAAAAAAAAAAAAAAAAH//////////8=", + "sha1": "fb4429c95f6277b346d3b389413758dfffeedc98", + "sha256": "1dc0a697f2a7c1da301b256e6822879f212beb56fbc7fe1b8e877ccd964c132a", + "sha512": "7408a856c2075aae53b77304272161cf6847cef3d4ae8ba3d12c9c9e51ad345049800b7e663e9e75d8e9f8728bea66d2ad46c292625ac850554b3faf605f9956" + }, + { + "input": "AAAB/wAAAD//////////////////////wAAAB////////wAAAAAAAAAAAAAAAAAAH///////////////////+AAAAAAAf////////////////+B////////////////////////gAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAAAf///////8A////////////////AAAAAAAB/////////gAAAAAAAAAAAAAAAAAAAAAA//////////wAAAAAAAAAf//////////////AAAAAAAAAAAAAAAAAAAAAAAB///////////8AAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAH/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAD////////////////gAAAAAAAAAAAAAAAAAAAAAAAAB////+AAAAAAAAD//////////////////+AAAAAD////////////////////////gAAAAAAAAAAAAAAAAAf///////////////////////+AAAAAAAAAH////////gAAAB/////////////////AAAf//////////////4AAAAAAAAAAAAAAAP////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAA/8AH/8AAAAAAAAAAAAAAAAAAAAAAAAAH//////////8AH/////////////wAAAAAAAAAAAAAAAAf//////////8AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAA///////////////////////4AB///////////////////+AAAAAAAAAAAAAAAAAAAAAP//////////////////////8AAAAAAAAAAAAAAAAAAAH/////////z//////////////4AAAAAAAAAAAD///////////wAAAAAAAAAAAAAf////AAD//////////+AAAAAAAAAAAAAP////v////////wAAAAAAAAAAf///////////////gAAAAAAAAAAAAAAAAAADAAAAAAf///////AAAAAAAAAAAAAAAAAAAAAP////+P//////////////gAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAAAAD///////+AAAAAAAAAAAAB////wAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAB/////////////////////+AAAAAAAAA//////////+AAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAMAAAAAAAAD////////////////wAAAAAAAAAAB///////////////////4AAAf//////4AAD+AAAAAAAAAAAAAAAAAAAAAAAAH///////////////AAAAf////AAAAAAAAAAAAAAAAAAAAH/////////////AAAAAAAAAAD/////////////////////4AAAAAAAAD////9//////////////8AAAAAA////////////////gAAAAAAAAAAAAB//////////4A////8AAAAAAAAAAAAAA////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAAAAAAAAAH//////gAAAAAAAAAAAAAAAAAAAAAAAAB////////////////+H//////////////////AAAAAAAAAAB/wAAAAAAAAAB/////////
8AAAAAAAAAAAAAAAAAAAAP//////////////////8AAAAAAAAAAAD/////////////////gAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAf///////////////wAAAAD///////////////////////+AAAAAAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAAAAAAH///////+AAAAAAA/////8AAAAAAAAAAAAf//4AAAf////////////////////8AAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAH///////////+AAAAAAAAAAAAAAAAAAAAD///////////////////wAAAAAAAAB//8AAA/////////////////////4AAAAAAAAP//////////gAAAAAAAAAP///////////////////////3//wAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAAAAAD////////////////wAAAAAAAAP/+AAAf/////////AAAAAAAAD//////////gAAAAAAAAAAAAAAAAAAAAA///wAAAAAAAAB////////////////////4AAAAD////////////////////4AAAAAAAf////////////////+AAAAAAAAAAAAH//////////////+Af///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAAAAAB//////////////////+AAP//////////////////////8AAAAAAAAAAAAAAAAAAAAAAP//////////////////AAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAB////AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////gAAAAAAAAAAAAH////////////////AAAAAAAAAAAH/////////////wAAAAAAAH//////8AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////8AAB/////////wAAAAAAAAAAAAAAAAAAAD///////////////8AAAADgAAAAAD//////////////////////AAAAAAD/////AAAAAAAAAAAAHwAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAH/////////AAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAAAAAP//wAAAAAAAAAAAAAAAAAAAAH/////AAAB///////////////////gAAAAAAAAAAAAAAAAA/////////////AAAAAAAAAAAAB////////////8AAAAAAD///////////gAAAAAAAAAAH///////4AAAAAAP//////////+AAAAAP///////////+AAAAAAH///////8AAAAAAAAAAAAAAAAAAAAA/////////////////h////////////////////////gAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAf////////////////////////4AAAAH/////+AB/////wf/////8AAAAAAAH//////wAAAAA//////////AAAAAAAAAAAAP//8AAAAAAD/////////////////////////8AAAAAAAAAAB///////4AAAAAAAAB////////////////+AAAAAAAAAAAAAADgAAAAAAAAAAAAAAAAAAAB/////////////wAAAAAAAAAAAAAAAB/////////////////gAAAAAAAAH///////+Af/////////wAAAAAAAAAAAD////////+AAP/////////////+AAAAAAAAAAAAAAAAAB////4AAAAAAAAAAAAAAAAAAAAAAH///////wAAAAAAAAAAAAAAB/////////+f///////////////////+AAAAAAAH////////////////////4AAAAAAAAAAAAAAAAD//////////////+AAAAAAAAAAAAAAAAAAAAAAAA//////AAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB////////w==", + "sha1": "2c6e30d9c895b42dcccfc84c906ec88c09b20de1", + "sha256": "6fb0514a46f06be4bc3798ae82fa6cf14103926f1969b3d70910a9c5d9589e58", + "sha512": "2e132f775423970174eff2ccad923875865ae6b92f1c5d24630dd494c3b8d3fb367ec1e00a67ed63b902338be4ecd4ddcd4a211d0015508d5a1e272a67a9716d" + }, + { + "input": 
"AAAAAAAAAAAAH8AAAAAAAAAAAAAAAAAH/////////////////////////+AAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAB////////////////////8AAAAAAAAAAAAH//////////////////4AAAAAAAAAAA///////////////wH/////////gAAD////////////////AAAAAAAAAAAAAAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAAAAAAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+P//wAAAAAAAAAAAAAAAAAAAH/////////////+AAAAD///////////4AAAAAAAAAAAAD////////////////58AAAP///////////////////////gB////wAAAAAAAAf///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////wAAAAAAAAAAAAAAAAAA/////4AAAf////////////4AAAAAAAAAAAAAAAAAAAA////AAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAP/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////AAAAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAH//////////////AAAAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAAf////////////////////8AAAAAAAAAH//////////////////8AAAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAAAAAAAH/////8AAAf8AAAAAAAAAAAAAAAP/////////wAAAH//////////////wAAAAAAAf////////////wAAAAAAAAAAAAAAAAAD////////+AB///////////////////////8AAAAAAAAAAAAAAAAf/////////////8AAAAAAB//////////////4AAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8AD///////////4AAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAAAAAAAAAAP////////////////+AAAAAAB//////////+P////////////AAAP////////////AAAAAAAAAAAAAAAAB////////////////4AAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAH///+AAAAAAAAAAAAAAAAAAAAAAAD////////////////////////wAAAAD/////////gAAAAAD/////////////////////gAAP////4AAAAAAAAAAAAAH////////////////////f////////////////////////4AAAAAAAAAAAAAAAAAAAAAA+AAAAAAAAB////+AA////////////////////+AAAAAAAAAAAf////gAAAAAAAAAH////////////////wAAAAAAAB///////////////////////4P///////////////AAAAAAAAAAAAAB///gAAAAAAAAAAAAAAAAAAH///4AAAAAAAAAAAAAAAAAP/////////////////4AAAAAAAAAAAAAAAAA///////////////gAAAAAAf//AAAAH///////////////AAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAD//////AAAAAAAAAAAAAAwAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAAAAB////////////////////gAAAAAAAAAAAAAAAAAAAAA///////8AAH/////wAAAAAAAAAAAAAAAD//////8AAAA//////////////////4AAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAAA/////////gAAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAP///////////////////////4AB//+AAAAAAAAAAAAAAAAAAAAAAB//gD/////////////+A///AAAAAAAAAAAAAAAAAAAAAH//////////////8///////////////8AAAAAAAAf/////////////////////wAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAA//////////8AAAAAAf4AAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAH/////////////////+AAAAAAAAAAAAB/AH///////AAAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAA/////////////AAD//////////AAAAAAAAD////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD////////8AAAAAAAAAAAAAAAAAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAA//8AAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAAAAAB///////////////////8AP//////////4AAAAAAAAAAAAAAAAAAAAAAP////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////AAAAD///////+AAAAf////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAB/////+AAAAAAAAAAAAAf/AAAAAAAAAAAAAAAAAAAAAD//////////////////4AAAH//////+AAAAB////////////////+AAAB/////////4AAAAAAAAAAAAAAAAAAAAAD////////////////////AAAAAAAAAAAAAAAAAAAAAAAP///////////////+AAAB/////////////////////////+AAAAAAAAB///////////////////8AAAAAAD//////////////////+AAAAAAAAAAAAAAAAAAAAAAAf//////////////gAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////
///AAAAAAAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAH/////////////////////////AAAAAAAAAAAAAAAAAA////////4AAA/////////8AAAAAAAAAAAP///////////8AAAAAAAAAAAAAAAAAAAAAAEAAAAAAP/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD4AAAAAAAAAAAAAAAAD8AAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAAAAA///////////////////////3///////////4AAAAP////////////////////////8AAAAAAAAAAAAAAAAB//////////////wAAAAAAAB///////////////wAAAAf////////////4AAAAAAAAAAAAAAAAAAAAAH////////////wAAAAAAAAAAAAAAAAAAAB/////////////////////////wAAAAAAAAAAD//////////////////////////gAH///////////////+AAAAAAAAAAAAAAAAAAB//////////////////////////gAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAf/////////////////////", + "sha1": "3de3189a5e19f225cdce254dff23dacd22c61363", + "sha256": "9731a6c8ef6c4d601781f231e5b17c0a5194495d5b01b27aefbb4cd857c0c7d1", + "sha512": "5039be6bc422543c53be17b8045b5290b8b337fc20273d1e0da6ca37f8688d17b8b6f60ad9b9065b8d698e0497a3e9d8ce9c5b1e3e572d0b153dc48cb8581cd5" + }, + { + "input": "AAAAAB///////////////wAAAAAAAAAAAAAAAAAH////////////////////wAAAAAAAAAAAAAAAAAAAf+AH//8AAAAAAAAAAAAAAAAAAAAAAf///4f///////////4AAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAAAAAAAAB/////////////////////wAAAAAAAAAAAAAAAAAAf//////////////4AAP////gAAAAAAA//////////////8AA//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAP/////////////+AAAAAAAP/////wAAAAAAAAAAAB///////////gAAB//////////////////////+AAAf/////////AAAAAAAAAAf///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP/gAAAAAAAAAAAAAAAAA/AAAAAAAAAAAAAAAAAAAAAAAAAH/////////////AAAAAAAAAAAAAAAAAAA///////gAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAD////////////////////////wAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAf////////4AAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAf//////////wAAAAAAAAAAAAAAAD////////////////+AAAAAAAAAP///////////////////////8AAAf/////////AAAAAAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAB////AAB///////8AAAAAAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////wAAAAAAAAAAAAAAAAAf///////8AAAAAAAAAAP////AAAAAAAAAAAH/8AAAAAAAB////////////////////////8AAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAD///////////////gAAAAAAAAAAAAAAAAAAB//////////4AQAAAAAAAAAAH////////////+AAAAAD//////////////////gAP/////////AAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAAAAH////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAD//8AAAAAAAAAAAAAAAAAAAAAAAAAAD8AAAAAAAAAAAAAAAAAAAAAAAAAAD/////g/////8AAAAAAAAAAAAAAAAAAAAAAAP//////gAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////8AAAAAAAAAAAAAAAAAAD///////////wAAAAAAAAAAAAAAD///////////////////////+AAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAAAAAAA///////////////////AAAAAAAAAAAAAf//////4AAAAAH///////4AAAAAAAH///AAP/4AAAAAAf////8AA/////////////////////////AAAAAH////+AAAAAAAAB///////////////////////8AA8AAAAAAAAAAAAAAAAH///////////////////////gAAA///////////////4AAAAAAAAAAAAAAAAAAAAAAf/////////////////////gAAAAAAAAAAAAAAAAAAAf///////////////////////8AAH/////+AAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////+D////////wAAAAAAA/////////////////////////gAAAAAAAAAAAAAAAAAAAAB/////gAAAAAAAAAAAf////+AAAAAAAAAAAAAAAAAAAAH////////////////////////4AAAAAAAA////////////////8AAAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAAAAD//////////4Af///+AAAAAAAAAAAAAAP////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////gAAP///////////////+AAAAAAAAB////////////////////8AAAAAAAAAAAAAAAAAA/////////8AAAAAAAAAAAAAAAAAAAH////gAAAAAAAAAAAAf////////////////////8AAAAAAAAAAAAAAAAA///////////////////////wAAAAAAAAAAAAA
AAAAAAAP/////8AAAAAAAAAAAAAAAAAAAAAAAAAP///+AAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAAAAAAH///////////////+AAAAAAAAAAAAH///////+AAAAAAAAAAAAAAAAAAAAAAAAAAf///////gAAAAAD/////////////wAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAAAAA//////////////////////8AAAAAAAAAAAAAAAAAAD////////wAAAAAH/////wAAAAAAAAAAAB//////8AAf////////////////////////gAAAAAAAAAf///////+AAAAAAAAA/////////////////gAAD///4AAAAAAAAAAAAAAAAAAAAA///////////////+AAAB///////////8AAAAAAAH///////////////8AAAAAAAAAAAAAAAAAAAAAP///////8AAAAAAAAAAAAAAAAAAAAAAA////////////////////////AAAAAAAAAAAAAAAAAD////////////////wB//////////4AAD///////////////////gAAAAAAAAAAAAAAAAAAAH///////////wAAAAAAAAAAAAAAD/AAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAA////////////////////gAAAAAAAAAAAAAAAH/////////////8//////AAAAAAH/////////////////8AAAAAAAAAAAAAAAAAAAAAAAH///wAAAAAAAAAAAAAAAAAAAP/////////////////////////wAAAAAAAAAAAAAAAAAAAAAH///////////////AAAAAAAAAAAAAAAAAAAAAAAAAD/////8AAAAAAAAAAD///////////////////AAAAAAAAAAAAAAAAAAAAAB//////////////////////////4AAAAAAAAAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAH///////gAAAAAAAAAAAAAAAAAAAAAAB//gAAAAAAAAAAAAAAAAAAAAAAD/////////////gAAAAAAAAAAAAAAAAAAAAAAD//////////gAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAAAAAAf/////8AAAAAAAAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAAAAAD////////////////8AAAAAAAAAAAAAAAAAP//////////////4AAAAAAAAA/4AAAAf/////////gAAAAAAAAAP4AAAAAAAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAf///4B////////////////////gAAAAAAAAAAAAH//////+AAAAAAAAAAP//////////8AAAAAAAB//////////////////////////gAAD/////AAAAAAAAAAAAAH////////////////////////4AA///////////////////////4AAAAAAAAAAAAAAAAAH//////////////////////////wAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAP///////////////////+AAAAA////////////////4AAAA/////////////////4AAAAAAAAAAAAAAAAAD////////////////AAAAAAAAAAAAAAAAAAD///////////////////////8=", + "sha1": "93530a9bc9a817f6922518a73a1505c411d05da2", + "sha256": "b18a49b7c4114fb94d16ffdce1e1677e6bde99bba443936af10cbedca6eeaf2a", + "sha512": "ea52123ba216899ff9f5c61241108ef555ac322f69ad0fe48ad57364591b360695cfbdb5682c04fd7f333a902f13d2bd2c2702400738e2f62b1fa02e730a07c0" + }, + { + "input": 
"/////8AAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAf///////gAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAH/////////////////////////4AAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAAAAf/////////gAAAAAAAAAAAAH/////////////4AAAAAAAAAAAAA//////////////////////////8Af/+AAAAAAAAAAAAAAAB///////////////////////////v///////////////gAAAAAAAAAAAAAAAAAB4AAAAAAAAAAAAAB///////////////////////////AAAAAAAAAAAAAAAAAAAP///////////////8AAAAAAAAAAAAAAAAB/////////AAAAAf4AAAAAAAAB//////AAAAAA/////////////////////gAAAAH/AAAAAAAAAAAAAAB/////////////////AAAAAf///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////+AAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAf////////////////////////+AAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAAAA/////////////////////wAAAAAD//////8AAAAAAAAAAAAAAAAAAAAAAAAAH////////////////8Af/////////4AAAAAAAAAAP////////////////wAAAAAAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////AAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAA///////gAAAAAAAAAAAAAAP//////////+AAAAAAAAAAAAAAAAAAAAP/////////////////////wAAAAAf///////////////////w//////////////////wAAAAAAAAAAAAAAf////////////////////+AAAAAAAAAAAP////gAAAAAAAAAAAAAAAAAAAAAAP//////////+AAAAAAAAD///////////wAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////wAAAAAAf8AAAAAAAAAAAAAAAAAD/////wAAAAAAB////////////4AAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////8AAAAAAAAAAAAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////wAAAAAAAAAAAAP///////wAAAAAAAAAAAAAAf8AAAAAA//////////////////////wAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAP////////wAAAAAAAAAAAAAAAAAAAD/////////////AAAAAAP//////////////////AAAAAAAAAf////////wAAAAAAAAAAAAAAAAf//////////+Af////////////////////8AAAAAAA///////////8AAAAAAAAAH///////////////+AAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAAAAAAAAAAAAAAAAAP////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////////4AAAD///////////////////////////wAAAAAAAAAAAAAf///////8AAAAAP//////////////4AAAAAAA/////AAAAAAAAAAAAAAAAAAAD/////////4AAAAf/////////////8AAAAAAAAAAAD///////AAAAAAAAAAAAAAAAAAAP////AAAAAA/////////////////gAAAAAAAAAAAAAAAA//////////////////+AH//////////////////4AAAAAAAAAB//////////////////wAAAAAAAAAAAf//////////AAAAAAAAAAAAAAAAAAAAA////////+AAA///////////////////AAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAH/////4AAAAAAAAA//////////////////////+AAAAAAAAAP/////////////////////////+AAAAAAAAAAAAAAAD//////////////wAAAAAAAAAAAAP///////////wAAD///4AB//////////8AP///AAAAAAAAAAAAAAAAAAAAAH//////////+///////////wAAAAAAAAAAAD////////////gAAAAAAAAAAAAAAAAAfwH////////////////////////AAAAAAAAAAAAAAAAAAf/wAAAAAf///////8AAAAAAAAAAAAAAAAA//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////AAAAAAAAAAAAAAAAAAAAAwAAAAAAADwAAAAAAAAAAAAAAAAAAAAAP////////////////////4AAAAAAAAAP////////AAf////////////8AAA///////+AAAAAAAAAAAAAAAAAD////////////AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////+AAAAAAAH/////////////////gAAAAAAB/////////////4AAAAAAAAAAAAAAAAAAD///////////AAAAAAAAAAAAAAAAAAAAAAAA///////////////////////8AAAAAAAAAD/////////////////AAH//////////////4AAAAAAAAAAAAAAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAB//////////////////////gAAAAAAf/8AAAAAAAAAAAAAAAAAAAAAAAf///////////////+AAAAAAH///////8AAAAAAAB////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAAAAB/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////4AAB///////////////////////AAAAAAAH///////////////////+AAAAAAAAAf/////////////////AAAAAAAAAAAAAA
AAP////////////4AAAAAAAAAAAAAAAAAAAAf//AAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAA///////////////////////////4AAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAA///////////wAAAAAAAf/////wAP////gAAf/////4AAAAAAAAAAAAAAAAAAAAAAAAAB///AAAAAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAAAAAAAAAAAAAH/////////////wAAAAAAAAAAAAAAAAAAAH///////////////4AAAAAAD//////////////////////4AAAAA///////////AAAAAAAAAAAAAAP//////////////////AAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////4P//4AAP////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB/4AAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAD/////////gAB////gAAAAAAAAAAAAAAAAf///////x/////////////////gAAAAAAAAAP/////////gAAAAAAAAAAAH////////+AH///////////////////////////AAAAAf////////gAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAA///////////////////////////4AAAAAAAAAAAAAAAAAAAAB/////////////////////////4AAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAAAAAAAAAAA==", + "sha1": "e31354345f832d31e05c1b842d405d4bd4588ec8", + "sha256": "ce197d61ddae42c8b8447aa698b3e7e5d51d9f0cd2034bc64f1a9d1b2b18e3cd", + "sha512": "10d2eaa4eb9814ebd114aeb9168793b24109d16d3e9e6fab8182623ba2a13f09ee4a1756733a7c3d084a8ea1d75532b1d65fb2156ac13eae4241273548468f96" + }, + { + "input": "//////////gAAAAAAAD//////////4AAAAAAAAAAAAAAAAAAAAAH///wAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAH8AAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAB///////////wAAAAAAAAAAAAAAAAAAAAH///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAB///////////wAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAAf////wAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////4AAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAP//////////////////gAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD////AAAAAAAf//////////AAAAAAAAAAAAAAAAAAAAAf//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAB////////////////gAAAAAAAAAAAAAAAAAAAAAAAf//gAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAP/////////////////////////8P/////////8AAAAAAAAAAAAD///////////////////////////4AAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAAAAAf////////wAAAAAAAAAAAAAAAAAAAAAAB//////////wAAAAf///////////////wAAAAAAAAAAAAAAH///4AAAAAAAAAAAAAAAAAAD///////////////////////////wAAAAAAAAAAAAD///////////gAAAAAAAAAAAAAAAAAAAAAf///////////////9///////////////////+AAAD//////////////wAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAAAAAAAAf/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////AAAAAAAB////////4AAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////8AAAAAAAAAAf//////gAAAAAAP////////AB/+AAAAAAAAAAAAAAAAAAAAAAAAA/////gAf//////4AAAAAAAAAAAAAAAAAB/////gAAAAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAP/////////////+AAAB//////wAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAAAAAAAAAAAH/////////gAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAB///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////gAAAAAAAAAAAAAAAAAAAAB/////gAADgAAAAAAAAAAAAAAAAACAAAAA////////////4AAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAAAAAAAAAB//////////////////////wAAAAAAAAAAAAAAAAAAAAA////4AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////wAAAAAAAAAAAAAAAAAAD///4AH/////////+AAAAAAAAAAAAAD///8AAAAAAAAAAAAAAAAAAAAAB///////////////////////AAAAAAAAf////////
///4AAAAAAAAAAAAAAAAAAAAAAAAAf////////////4AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////gAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAH4AAAAAAAAAAAAAAAAAAAAAAAf///8AAAAAAAAAAAAf///gAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAA/////////////gAAAA//////////////AAAAAAAAAAAAAAAAAAAAAAAAf/////+AAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAH///////////////wAAH//wAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////+AAP//gAAAAAAAAAAAAAf/////////+AD///////////////gB//////////+AAAAAAAAAAAAB////////////AAAAAAAAAAAAAAAAAAAAAf///////////////////////+AAB/////////////////////wAAAAAAAAAAAAAAAAAAAAAAH///////////////////////wAAAAAAAH////////////////////////4A//////////////AAAAAAAAAAAAAD/////////////////////////wAAAAAAAAAAAAAH//AAAAAAAAAAAAAAD/////////////wAAAAAAAAAP/////////////+AAA//AAAAAAAAAAAAAAAAAAAAAAf////////+AAAf/////8AAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAAAAH/////////////////////+AAAAP///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAAAAAAAAAAAAAAP/////////////8AAAAAAAAAD//wAAAAAAD//wAAAAAAAAB/////////////////////+AAB///////////////////////4AAAAAAAAAAAAAD/////////////////AAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAf////////+f//////////gAAAAAAAAAAAAAAAf//////+AAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAB//////////////////gAAAAAAAAAAAAAAAAAAAAf////////////////wAAAAAAAAAAAAAAP///////////////////////wAH///+AAAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAAB////////////////////////AAAAAAAAAAAAAAAAAAAAAAP////////////////4AAAAAAAAAAAAAAAP///wAAAAAAD///gAAAAAAAAAAAAAf//////////////////////4AD///////////////+AAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAA/AAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAB///////////////+AAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAf///////////////////wAAAAAAAAAAAAB//////////////////AD////////4AAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAAAAAAAAAAD/////////////gAAAAD/////////////////////////4AAAAAAAD/////////+AAAAAAf///////////////////f/wAAAAAAAAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAAAAAAAAAP///////////////////////gAAAAAAAAAAP///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAH//////////////wAAAAAAAAAAAAAAAAD//////////////////////gAAAAAAAAAAAf///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAB+AAAAAAAAAABgAAAAAAAAAAA////////////////////4AAAAAAAAAAAAP//////////////////wAAAAAAAAAAAAAAAAAP/////////gAAAAAAAAAAAAAAAAAAAD///////////////////////////wAAAAAAAAAD///////////////8AAAAAAAAAAAAAAAAAAAAAAA///////////////8AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "3ff76957e80b60cf74d015ad431fca147b3af232", + "sha256": "7d9a3aebb470990abb92303f0c2ce5d6c38f9a2198d8f1ae8ab7fbbbf007cc7d", + "sha512": "cc60cf4c4aae634201325ea053e3890b3868b5a8aec71b1a5b764111edbe9c75bf5ce0a1eb1b3886b416c644dc3fdc4da93a4ce37611f08edf430b5bc6b91fd9" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAB//8AAAAAAAAAAAAAAAf////////////+AAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAeAAAAD//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAAAAAAAAB//////////////////8AAAAAAAAA/wAAAAAAAAAAAAAAAAAAAAAAAAAH////wAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAA////////AAAAAAAAP/4AAAAAAAAAAAAAAAAAAAAAAH/wAAAAAAAD/////////+AAAAAAAAAAAAAAAAP/////////////////P/////////////////gf//////////////////8AAAAAAAAAAAAAf//////////////////gAAAAB////////AAAAAAAAAAAAAAAAAAD////////wAAAAAAAAAAAAAAAAAAAAA/////////////8AAAAAAAAAAAB/////////////////+AAAf//+AAAAAAAAAAAAAAAAAAf/////////gA////////////////wAAA//////////wAAAAAAAAA////////AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAA//////////////8AAAAAAAAAAAAAAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAAAD//////////////+AAH//////////////////////////8//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////wAAAAAAAAAAAAAAAA//////////wAAAAAAAB///////4Af///gAAAAAAAAAAAAAAAAAAAAAAAAf///////wAAAAAAAAAAAAAAAAAAAAAf/////////gAAAAAAAAAAAAAAAAAAf/////////8AAAAAAAAAAAAAD//////////////+AAAAAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////8AAAAAAA////////////////////////////8AAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAP///////////8AAAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAAAAAAAAAAAAP///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////8AAAD///////////////////////////+AAH//////////////////8AAAAAAH////////////////////wAAAAAAA//////////////4AAAAAAAAAD///////////////AAAAAAAAAAAAAAAAAAAAAAAAf/////8AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////+AAAAAAAAAAAAB///////////4AAAAAP///+AAAP///////wAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAH//////gAD4AAAAAAAAAAAAAAAAAAAAAH///////////////////////////+AAAD//8AAAAAAAAAAAAAAAAAH//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAf////////4AAAAAAAAAAAAAAAAAAAAAD///4AAAAAAAAAAH/////////////////8AAAAAAAAAAAAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAAf///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAA/////////////////////////gAAAABAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////+D/////8AAAAAP///////////////4AAAAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAD/////////////////////AAAAAAAAAAAAAAAAAAD///////////////////4AAAAAAAAAAAAAAAA////+AAAAAAAAAAAAAAAAAAAAAAB///////////////////////////wAAAAAAP/////wAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////8AAB//////////wAAAAAP//////////////gAAAAAAAAAAAAAAAA//////8AAAAAAAAAAAAAAAgAAAAD///////////////gAAAAAAAAAAAAAAAAH//////////////////wAAAAAAAAAAAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAA//////////////////AAAAAAAAAAAAAAA/////////4AAAAAAAAAAAAAAP////4P//////////////////gAAAAf/////8AAAAAB///////////////+H////////////////////////AAAAAAAAAAAAAAAAAAAAAf////////////////8AAAAAAB///////////////////////////wAAAAAf///////////8AAAH///////////////////////wAAAAAAAAAAAAAAAAB///+AAAA/8AAAAAAAAAAAAAAAP/wAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////AAAAAAAAAAAAAAAB////////////////////////////gAAAAAAAAAAA///wAAAAAAAAAAA//////////////////8AAAH///////////////wAAAAAAAAAAAAAAAAAAAAAAAP/////////4AAAAAAAAAAAD//////////////////////4AAAAAAAACAAAAAAAAAAAAAP///////////////gAAAH//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////+AAAAAAAAAAAAAAAAAAAAAAAAP////////////4AAAAAAAAAAAAAAAAD/////////////AAAAAAD///////////////////+AAAAAB/+AAAAAAAAAAH///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////4AAAAAAAAAA
AAAAAAAA///////////////////gAAAAAAAf////////////////////wB//////wAAAAAAAAAAAAAAAAAf/////////////////////AAAAAAAAAAAAAAAAAAAf//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////8AP///////////////////////AAP///////////////////+AAAAAAAAB///////////////////////wAAAAAP//////gAAAAAAAAAAAAAAAD////////////4AAAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////8AAAB/////////////////4AAAAAAAAA///////////8AAAAAAAAAAP//+AAA//4AAAAAf////////////////////gAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAD//////////////////////////+AAAAAAAAAAAAAAAAAAAAA//////////////wAAAAAf4AAAAAAAAH///wAAAAAA///////////////////////x////////////////////////////AAAAAAAH////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAAAAP///////////////////////gAAAB////4AAAAAAAAAAAAAAAAAAAAf/////////////8AB//////////////+AP/////////////+AAAAAAAAAAAAAAAAAA////////+AAAAAAAAAAA///////////////////////////wAAAA/////////////////////+AAAAAAAAAAAAAAD////////////////////////4AAAAAAH////////8AAAAAAAAAAAAAAAAAAAAD////////////+AAAAAAAAAAAAAAAAAAAAAB////////////gAAP/////////////////////4AAAAH//////////+AAAAAAAAAAAB////////////wAAAAAAAAAAAAAAAAAAAAAAAA/4Af///////////gAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAH///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "34ae3b806be143a84dce82e4b830eb7d3d2bac69", + "sha256": "e52d8c79b31f45d4894e0948089da5fc236d33dd79a80d2304043e8c234cf88c", + "sha512": "d86d8aa698c70e574e1e88a28e1666302b56f5dfae559cb13470944d4a424830dfa9d05527652330891be236caf2a02b2772a7e8411b065027fe83e03145b0a7" + }, + { + "input": "///////////////8AAAAP/4B///gAAAB//+AAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAAAAAAAD//////////////4AAAAAAAAAAD////////////////////////////gAAAAAAAAAB/////////////////////////gAAAAAAAAAAAAAP/////////4AAAAAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAAAB///////////////////AAAAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAB//////////////////gAAH/////AAAAAAAAAAAAAAAAAAAAAP////////////////4AAAAAP/////8AAAAAAAAAf///////////////AD//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP///////////////8AAAAH////////////////////////wH/////////////gAAAAAAAAAAD///////////////8AAAAAAAAAAAAAAAAAAD////AAAAAAAAAAAAAAAAAf///////////////////////////8AAAAAAAAAAAAAAAAH////////wAAAAAAAAAAAAAAAAAAAH///5////////////////////AAAAAAAAH/////wAAAAAAAAAAA/////////////+AAAAD/////////////////////////wAAAAAAAAAAHwAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////8AAP///+AAAAAAAAAH/////////5////////////////wAAAAAAAAAAAAAAAAAAAAD///////////4AAAAA//////////////////////////8AAAAAAAAAAAP///////+P//////8AAAAAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAD/////////////8AAAAAAAAAAAAAAAAH//////8AAD//////////////////4AAAAAAAAAAAP/////8AAAAAAAAAAAAAAAP////////4AAAAAAAf///////////4AAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAP/////gAAAAAAP///////8AAAAAAAAAAAAA////////////////////wP////////////////AAAAAAAAAAAAAAD/////////////////////gAAAAAf///wAAAAAAAAAAAAAAAAAAAAAAAA///////+AAAB/AAAAAAAAAf/////gAAAAf////////////+AAAAAf//wAAAAAAAAAAAAP/////////+AAAAf///////////AAAAD///////////////////////////4AAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////gAAAH//////////4AAAAAP//////////gAAAAAAAAAAAAAAf//////4AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////4AAAAAAAAAP/+AAAAAAAAAAA/////////4AAAAAAAAAAAAAAAAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAB////////gAAAAAAAAAAB////////////+AAAAAB////////////////////////////B////////////////////////
////4AAAAAAAABAAAD/////////////////////gAAAAAAAAAAH///////////////////////////4AAAP///////////8AAAAD////////////////gAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////gAAAAAAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAAAAAAAAAAAAB/gAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAAAD4AAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAD/////////4AAAAAAAAf///////////////////////gAAAAAf//4AAAAAB//////////gAAAA//////////////////8AAAAAAAAAAAAAAAAAP///////AAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAA///////////8AAAAAAH//////////////////////AAAAAAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAAA//4AAAAAAA//////////////////////8AAAAAAAAAAAAAAAf///////////////+AAAAAAAAAAAAAAAAf////////////////////////////A//////////////////////AAAAAAAAAeH////8AAAAAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAAAAAAAD/////////////////////////+AAA///////8AAAAAAAAAAAAAAAD///////////////AAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAAAAAADAAAAAAAAAAAAAAAD//8AAAAAAAAAAAAAA//////wAAAAAAAAAAAAAAf/////////4AAAAAAAAAD////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAAAA//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH////+AAf/////////////wAAAAAAAAAAAAP////4AAAAAAAAAP//9//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAH///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////AAAAAAf////////////////gAAAAf/////////////////wAAAAAAAAAAf/////////////wAAAAAAAB////////////////////////////+AAAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAH////////////wAAAAAAAAAAAA//////////////////4AAAAAAAAAAAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAA+///////////////////////////8AAAAAAAAAAAAAAAAB//////////////////////gAAAAAAAAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAAAAf///////gAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////+AAAAAAAAAAAAAAAAB//H/////////////+AAAAAAA///+AAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////4f/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////4AD////////////////////////gAAAAAAAAAAAAAAAAAAA//////////////////gAAAAAAAAP//////gAAAAAAAAAAAAAAAAAAAAAAH//////////8AAAAf////wAAAB////////8AAAAAAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////AAAAAAAAAAAAAAAAAAAAP//////////////////////////AAAAAAAAAA/gAAAAAAAAAAAAAAH//////////////////wAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////AAAAAAAAAAP/////+AAAAAAP//gAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAAf////////AAAAAAAAAAAAAAAD////////AAAAAAAAB//////////////8AAAAAAAAAf////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAB///////wAAAAAAAAAAAAAAAAAAAf////////////////4AAAAAAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////+AAA//////+AAAAAAAAAAAAAAAAA///////////////////////////8AAAAAAAAAAAAAAAAAD///////////AAAAAAAAAAAAAAAAAAAAA//AAAAAAAAB/////////////////////8AAAAAAAAAAAAAAAAAAAAB////////////////////////4AAAAAAAAf/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////+AAAAAAAB/////////////////////////8AAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAAAA//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////w==", + "sha1": "d7447e53d66bb5e4c26e8b41f83efd107bf4adda", + "sha256": "b1870cae9e54cbe8ddd74782c98f6c9ec6eeb835e2765252530d71779685d4ea", + "sha512": "3d994a9ab4b95d6622f464dbf2ec8725fafe25988bb108cf9bd09b4c07ad0c3d1b3f6a2500b881f2bfd3b323894898014b4b1eab3956854b8ef1a6c991f73a5f" + }, + 
{ + "input": "////////////gAD////////////+AAAAAAAAAAAAAAAAAAAAAAA///////////+AAAAAAAAP/////////////////////wAAAAAAAAAAAAB//////////////////////wAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAD/////+AAAAAAAAAB///////////////////4AAAAAD///////////////AAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAAAAAAA/////////////////////////AAAAAAAAB///////8AAAB//////////////////gAAP//////////////////////gAAAAAAAAAAAAAAAAA////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////AAD///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP/wAAf//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAAAA//////+AAAAAAAAAAAPwAP//////////////////////////8AAAAAAAAAAAAP/////////////////4AAAAAAf////////////////AAAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAAAAAAAAf/////////////////////4AAAAAAAAAAAAAAAAAAD///////////////////////4AAAAAAAAAAAAAAAAAAf////4AAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAAAD//////////////////////////8AAAAB//////////+AAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAAAD//////////////////////gAAAAAAAAAAAAAAB//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////4AAAAAAAAAB//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////8AAAAAAAAAAAAAAAAAAPAAAAAAAAAAB/////////////////////AAAAAAAP/////////////////////gAAAAAAAA///gAAAAAAAAAAAAAAAAD//////////////+AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////wAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////AAAAAAAAAAAAAAf/4AAAAAAAAAAAAD///AAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////4AAAAAAAAAAAH//////////+AAAAAAAAAD////////////////////////////gAA////////////////////8AAAAAAAAAAAAA///////////////////////////+Af////////8AAAAP////////////////////////////AAAAAAAAAAA////////////////////////AAAAAAAA///////AAAAAAf/////////////////wAAAAAAAAAAAAAAAAAAAH//4AAAAAAAAAAAAAAAAAAAH////////////////8AAAAAAAAAAAAAAAAAAA//gAAAAAAAAAAAAAAAAAAAAAf/////8AAAAAAAAAAAB////////////////////////AAAAAAAAAAAAD//////////////////////////AAAAAAAf////////////+AAAAAP///////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////8AAAAAAAAAAAAAAAB//OAAAf///gAAAAAAAAAAAAAAAAAAf///////////////4A////gAAAAAAAAAAAAAAAAAAAAA/////////////////AAAAAAAAAAAAAAAAH/////////////+AAAAAAB//4AAAB//////////////////////////wAAAAAB/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////wAAAAAAAH/////////wAAAAAAf//4AAAAAAAAAAAAAAAAA/////gAAAAAAAAAAAAAAAAAAAAAB////+AAAAAAAAAAAAAAP/AAAAAAAAAAAAAAAAAAAB//////////////////////wAH/////////////wAAAAAAAAAAAAAAAB/////////4AAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH///+AAAAAAAAAA//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAAAAP/////////////////gAAAAAAAAAAAB////////////8AAAD///////AA///////////AAAAAAAAAAAAAAAB////////////////////////wAAAAAAAAAH/////////////+AAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////4AAAAAAAAH///////wAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAAB//+AAAAAAAAAB//////////////+AAAAAAH///////////8AAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAgAAAAA////////////////AAAAAAAAAAAAAAAD/////////////gAAAAAAAAf/8AAAAAAAAAAAAAAAAAAAAA//////8AAAAAAAAMAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAAAAH/////////////////////gAAAAAAAAAAAAAP///////////////gAAAAAAAAAAAAAAAH/////////gAAAAAAAAAAAAAAAAD//////////////////wAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAH///////////4AAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAD//////////////gAAAAAAAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAAAAAAAAAAD///////
///AAAAAAAAAAAAAAAAAAA////////////+AAAAAAAAAAAAAAAAAP/////////+AAAAAAAAf/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB////gAAAAAAAAAAAAf///////////////////4AAAAAAAAAAD///////////////////////AAAAAAAAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAA+AAAAAAAAAAAAAAAP//////////AAAAAAAAP////////////////8AAAAAAAAAAAAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAAAAAAAAAAAAf///////////8AAAAAAAAAAAAAAAAAAAAAPAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////8AAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAAAB//////////////////8AAAAAwAAAAAAAAAAAAAAAAAAAAAH//wAAAAAAH//////////////////////////8AAAAAAAAAAAAAAAAAAAAAf//////////////AAAAAAAAAAAA////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAD////8AAAAAA/gAAAAAAAAAAAAAAAAAAAAAAfAAAAAAAP/////////4AAAAAAAAAAA////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////8D///////////////gAAAAAAAAAAAAD//////////////////8AAAAP///4AAAAAAAAAAAAAf//+AAAAAAAAAAAAAAAAAAAH/////////gAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////AAAAAAAAAAAAAAAP/////wAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA//////wAAAA///////AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////4AAAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAH//////////8AP////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD///////////wAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////AAAAAAAAAAAAAAAD/////AAAAAAAAH/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////4AAAAAAAAAAAAAAAAAAAAAH///////8AAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAH///////AAAAAAAAAAAAA////////////gAAAAAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAAAAP///AAAP/////////////////////////AAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAf///////////gAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "77dd2a4482705bc2e9dc96ec0a13395771ac850c", + "sha256": "69850fe71261572f61d56863a7dc12aeda7931225d0eafb5b7b45aef7b6c8586", + "sha512": "19f812d6a96e6062a71928bc5ed95d84659a4d70f6d47aebdd5eb19be1a8c163a121005228b8eae2fccced12c26b3a295a5c349daed623743b8602364cc2ee2c" + }, + { + "input": 
"//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAA////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////wAB////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////8AAAAAAH//////////////wA//////////AAAAAAAAD///////////////AAAAAAAAAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAH///////wAAAAAAP/////////4AAAB//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////gAAD////////////////////wAAAAAAAAAAAAAAAAAAAAAAB//////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAAAAAA///////////////////////+AAf//////////////////////////4AAAAAAAAAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAH///wAAAAAB/////////////4AAAAAAAAAAAAAAAAAAH/////////////4AAAAAAAAA///////////////////////////gAAAAAAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAB//////////////////////AAD//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf////+AAAAAAAAAAAH////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAP//////////AAAAAAAAAAAAAAAP//////////////////AAAAAAAAAAAAAAAAAAP///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////4AAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////8AAAAAAAAAH//8AAAAf//gAAAAAAAA///////////////wAAA//////////+AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////gAAAAAAAAD/////////////////AAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAAAAAAAAAAAAAA///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAH////////////4AAAAAAD//////////gAAAAAAAAAAAAAAf///////gAAAAAAAAAAAAH///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAf//////////4AH//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////AAAAAAAAAAAAAAAAAAAD///////////////8AAAAAP////////////////////+AAAAAD////////////g/////////////////+AAAAAAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////8AAAAAAAAAAAAABwAAAAAAAAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAAAAB/////////////////////////gAAAAAAAAAAA/////////////////////////////gAAAAAAP///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAAAAAA//8AAAAAAAP////////////////gAAAAAAB///////////////////////////wAAAAAAB///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAPgAAAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAAH/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////AAAAAA///////////////////////////+AAA////////+AAAAAAAAAAAAAAAAAAAAAAAAA///////////AAAAAAAAAAA////////////////j////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD/8AAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAH/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////gAH/////////////////AAAAH///////////////////////AH/////////////////////////////gAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAAAAAAA//////gAB/////////////AA/////8AAAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAAAAAAAAH8AAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAB////gAAAAAAAAAAAAAAAAAAAAAB/////////////4AAAAAAAB////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf//////8AAAAAAAAAAAAAAAAAAAAAAAAAAP////4AAAAAAAAAAAAAAAAAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAP+AAAAAAAAAAAAAAAAAAAAAAAAB////////////////////8AAAAAAAAf////////gAAAAAAAAAAAAAAAAAAAA/////////////////////////////AD////////////////////wAAAAAAAB/////////////////////////////wAAAAAAAAAAAAB////////////////////////////8AAAAAAAAAAAf/////////////////wD///wAAAAAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAD//////////AAAAAAAAAAAAAAAAAAAAAAAAB/////wAAAAAH
/////////////////4AAAAAf///////////////gAAAA/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAP////////wAAAAAAAAAAP/////////////////////AEAAAAAD//////////////////////AAAAAAAAAAAAAAAAAA//////////+AAAAAAAAAAAAAAAA/////////////////////AAAAAAAAAP//////////////////////////4AAAAAAAAAAAP/////////////gAAAAAAAAAAAAAB/7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///wP//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAP//////4AAAAAAAAAAAAAAAAAAAAAAAAA//////8AAAAAAAAAAAAAAAAAAAAAAAA//////////////gAAAAP//////////////gAAAAAAAAAAAH/////////////////////////gAAAAAAAAAAAAAAAAAAAAAA////8AAAAAAAAAAAAAAAAAAB///////////////////wAAAAAAAAAAAAAAAAAAAD/////////////////8AAAAAAAAAAAAAAAA/wAAAB//////////////////gAAAAAAAAAAAAAAAAP////////////////////////////8AAH//+AAAAP////////////gB///////////////wAAAAAAAAAAAAAAAAAAAAAAAH////////////+AAD///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8AAAAAAAAAAAf////////////gAAAAAAAAAAAAAAAAf//4AAAAAAAAAAAAAAD////////////////////////////gAAAAAAAAAAAAAAA////+AAAAAAAAAAAAAAAAAAA/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////gAAAAAD///4AAAAAAAAAAAf/////////////AAAAB////////////////////////4AAAAAAAAAAD//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD/////h//wAAAAAAA///////P//////////+AAAAAAAAAAAAAAAAAf////////AAAAAH//////////////////////////gAAAAAAAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAP//////////////////////////AAAAAAAAAB////////////+AAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////AAAAAAAAAAB//////gAAAAAAAAAAAAB/////////AAAAAAD/////////AAAAAAD//////////////////+AAAAAAAAAAAAAB////8AAAB///////gAAAAAAAAAAH////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////+AAAAAAAAAAH////////////////////AAA////gAAAAAAAAAAAAAAAAAAAAAB///8D////////////////////////////gAAAAP////wAAAAP/////////AAP/4AAAAAAAAAAAH/////////AAP//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "eaa1465db1f59de3f25eb8629602b568e693bb57", + "sha256": "18fdfb38e4f516734cef5de8cba84a54a17cdaf13228621dfcd806c5e822eccd", + "sha512": "c5fa614fa377b61b8e0b6f98367108cae2b599cb6a5ca4ee8917fa8c61589dc67c0c8cc898688845dc992c218477def295ce231200eedb8de65696882f62c163" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAH///////////AAD///////////////////////////4AAAAAAAAAAAAD///////////////////////4AAAAAAAf////////////////////////////+AAAAcAAAAAAAAAAH/////////gAAAAAf/////////////////////8AAAAAAAAAAAAAAAB/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////wAAAAAAAAAAAAAAAAAAAAAAAAD/////////gAAAAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAAAAAAAAAAAAAB/4AAAAAAAAH////////////4f////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////+AAAAAAAB/////////////////////////////wAAA/////////////////////4AAAAAAAAAAB///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAAB////////////////////////////+AAAAAAAAAAAAAAAB////////////wAAAAAAAAAAAAAAAAAAAAB///////////////////////////+AAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////wAAAAAAAAAAAAAA////////////4AAAAAAAAAAAAAAAAP////////////////////////gAAAAAAAAAAAAAAAAP//////////AAAAAAA/////////+AAAAAAAAH//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//4AAAAH///gAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////wAAAAAAAAAAAAAAAD///////wAAAAAAAAAAAAAAAAA/////////////////////////////+AAAAAAAAAAD////8AAH////////////////////8AAAAAAAAAAAAAAAAAAAAH///+AAAAAAAAAAAAAAP/+D//////////////////////wAAAAAAAAAAAAAH////////////////gAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////8AAf/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAB//wAAAAAAAAD////////////////AAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////wAAAAAAAP////////////////////////j//AAAAAAAAAAAAAAAA////////8AAAAAAAAAAAAB///////////////////AAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAD/////////////////////////////wAAAAAAAAP///AAAAAAAAAAAAf////gAAAAAAD//////gAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////8AAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////+B///////wAAAAAAAAAD//////////////////////////////gAAAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAH///////////////////AAAH//////////////4AAAAAAAAAAAAAAAAAAAf/////////////////wAAB//////////gAAB////////////////8AAAAAAAB////////8AH///////////////////////gAAAAAAAAAAAAAH/////////////////////4AAAAAAAAAAH///////////AAAAAAAP////////////4AAAAAAAAAAAAAA///////////////////gAAAAAAAAPgAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAAAP////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB///////////////AAAAAAAAAAAAAAAAAAAAAH//////gAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////4AAAAAAAAAAAAAAAAH///////////////////////////H///////////gAAAD////////////////////////////+AAAD/////////AAAAAAAAAAAAAAAAAAAAA///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB///gAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////4AAAAAAAAAAAAAAAAAA///////////4AAAAAA/////4AAAAAAAAAAAAAAAAAAAAAH//////8AAB/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAD/gAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAD/////////////////////AAAAAAAAD+AAAAAAAAAAAAAB//////////////8AAAAAAAAAP/4AAAAAAAAAAAAAD///////AAAAAAAAAAAAAAAAAAAAAAH////////////////////wAAAAAAAAP/////////////////////////////+AAAD//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAf////AAAAAAAAAAP///////8AB/////////wAAAH////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf//////8AAAAAAAAAAAAAAAH//////////gAAAAAAAAAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAf//////////////////
////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAP///////////////////////////AAAAAAAAAAAP////AAAAAAAAAAAAAAAAA8AAAAAAAAH////////////////////AAAAAAAAAAAAAAAAAAH////////////////////////gAAH////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf////+Af//////////////////////wAAAAD///////////////////AAAAAAAAAAA//gAAAAAAAAAAAAAAAAAA//////////4AAAAAAAAAAAAAAAAAAAAAAAP///////////////4AAAAAAAAAA/8AAAAAAAAAH////////////////////gAAAAAAAAAAf//////AAAAAAAAAAAAAAAAAAAAAA///////////+AAAAAAAAAAAAAAAAAAA/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAB///////4AAAf//////8AAAAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAD////////////////////wAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAAAAAAAAAP//////4AAAAAAAP/AAAAAAAAAAAAAAAAAAP+AAAAAAAAAAAAAAAAAAAAA////////////////gP/////////////////////////wAAAAAAH/////////////////////////////4AAAAAAAAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAAAAAAAAAAAAf///////////////////////wAMAAAAAAAAAAAAAP///////////////////////8AAAAAAAAAAAAAAAAP//////////////////+AAAAf/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////gAAAAAAAAAAAAAA/////AAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAD////////////////////8AAAAAAAAAAAAAAAAAAD///////////////4AAAAAAAAAAAAAAAAAAAAAAAP////////////////wAAAAD////////////////////////4AAAAAAAAAAAAAAAAAAAAAP/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////4AAAAAAAAAD////////////////////gAAAAAAAAAAAAAAAAAAAAAAP/////////wAAAAAAAAAAAAAAAAAAAAAB////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAB////////////////+AAAAAAAAAAAAAAAAAAAAAAD///////////////////////////8AAAAAAAAAA/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////8AAAAAAAAAAAAAAAB///////////////////////////AAAAAAAAAA//////////////////8AAAD///////////////////AAAAAAP/////////////////////////////AAAAAAAAAAAAAAAAAH//////////////8AAAAAAAAAAAAAAAAAAAAAA//////////////wAAAB///////8AAAAAAAAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAP/////////////////////////////+AAAAAAAAAAAAA//4AAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////AAAAAAAAAH////////wAAAAAB//////////////////////////8AAAAAAAAAAAAAAA==", + "sha1": "9329d5b40e0dc43aa25fed69a0fa9c211a948411", + "sha256": "9db6e6591134181c2c19bbc57f24e11ea161165cde584e1f58c4df2fb5ee8c88", + "sha512": "ebf031e509ce9143cd427c92df91c3e744aec4d4455eb69ec80d2dac4aaf1fbb699174d2d2a11f08f118f7f4cabc2ccfc6686eb5ba9a81843e223ce3f6f2d336" + }, + { + "input": 
"///////////////wAAAAAAAAAAAP////////////////////////4AAAAA//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB//////+AAAAAAAAAAAAAAAAAAH////+AHwAAAAAAAAAAAAAAB////////////////AAA//////////////////8AAAAAAAAAAAH//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAB//////////////////////8AAAAAAAAAAA//////////////////////x/////////////4A///gAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAB//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAAAAAAH/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAA/////////////+AAAAAAAAAAAAAAf///4AAf+AAAAAAAAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAD//////////////////////4AAAAAAAAAAAAAAAAAAAAP//gAAP///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gP////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAA////////////4AAHwAAAAAAAAAf/////////////////////gAAAH///AAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAAAAAP///////////////AAAAAAAAAAAAAB////////4AAAAAAAAAD///8AAAAAAA8AAAAAAAA/////////gAAAAAAAAAAAAAAAAAAP////gAAAAAAAAAAB///////////////////////gAAAAAAAAAAAAAAAD//////////////////////////gAAAAAAAAAAAAAAAAAAAAB////////+AAP//////wAAAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAA////8AAAAAAAA//8AAAAAf////////AAAAAAAB/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//8AAAAAAAAAAP////////////wAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////+AAAAAf//////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////+////////////////////////+AAAAAAAAAAAAAA////////////////gAAAAAAAAAAAAAAAAAAf////////////AAAAAAAAAAH/////////gAAAAAAAH//////////////wAAAAAAAAAAAAD///gAAAAAf/////////////gAAAAAAAAAAAf/////////gAAAAAAAAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAAAAAf/////////////////gAD//////////////////////8AAAAAAAAD/////////////////wAAAAAAAAAAAAf///////////////wAAAf//////gAAAAAAAAP////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////8AAAAAAAAAAAAAAAAAP//////4AAAAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAAAAAAAAAAAAD//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////AAAAAAD/wAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAf//////////////////gAAAAAAP///////////////8P///////////////////////8AAA/////////////////+AAAAAAAAAAAAAAAAAAAB//gAAAAAAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAAAAAAgAAP/////////////////////////gAAAAAAAAAAAAAAAAAA/////////4AAAAAAAAAAf//////////////////8AAAAAAAAAAAAAAAAAAAAAH4AAAf//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////+AAAAAB//////////AAAAB/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////+B//////////AAAAAAAAAAAAAH//////////////////4AAB/////////////////////////////+AAAAAAB////////////////////////////AAAAAAAAAAAAAAAAA////wAAAAAAAAAAAH////////////+AAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAB4AP///////////////8AAAAAAAAAAAAAAAAAAAAAAB+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////8AAAAAAAAAAAAAAAAAAAAAAAA///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAH/////z//////////////////////gAAAAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAD/////////////////////////////4AAAAAAAA///AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAD///////+AAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAD///gAAAAAAAAAAB////////AAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAB//////////////////////////////8AAAAAAAAAAAAAAAAAAAA
AAAAAAAAB/////////////////////4AAAAAAAAAAAAAAAAAA//////////////gAAAAAAAAB/////gAAAAAAAAAAAAAAAA//////8AAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAAAAAA///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////gB////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////wAAAAAAf//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////8AAAAAAAH//////////////+AAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAAAAAAD///wAAA/////////wAAB//AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////8AAAf/////////////////wAAAAAAAAAAAAAAAAAAAAAf/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////4AAAAAAAAAAAAAAAAB/////////////////////gD////////////////////////4AAAAAAAA//////////////////////AAAAAAAAAAAAAAAAAAAAP////////////////////////////gAAAAAAAAAAAAAAAAAAA///+AAAB///////////AAAAAAAAAAAAAAAAAAf///////////4AAAAB//////////////////////////gAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAP///////////////////+B///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////gAAAAB/////////AAAAAAAAAD////////////////////////AAAAAAAAA////////////AAAAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAD//////////////////////////wAAAAAAAAAAAAA////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAA//////4AAD+Af////8AAAAAAAAAAAAAAf/gAAAAAAAH///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAAAAAAAAAAAAf/////////////////////wAAAAAAAAAAAAAAAAAP///////////////////////4AAAAAAAAAAB//////gAAAAAH//////////////4AAAAAAD////////////gAAAAAAAAAAAAAAAAAAAAAAAB////////////////////gAAAAAAMAAAAAAf////////////////////////+AAAAAAAAB///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////wAAAAAAAAAAAAAAH////wAAAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAB//////////////////////////////gAAAAAAAAAAAAAD/////4AAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAB/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////8AAAAAAAAAD///////////////4AAAAAAAAAAAAAAAAAAAAAAAf///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAH////gAAAAAAAAAAAAAf///////////////////////////gAAAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////gAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAAAAAAAAAAAf///wAAAAAAAAf///////////////////////", + "sha1": "e94c0b6aa62aa08c625faf817ddf8f51ec645273", + "sha256": "86eafcaf23edfef66753d664eaa7813b5a16d1abc01a95f74ae88a02e42cadfb", + "sha512": "c5fc8a78f9106a42da273a6e95894cae0ebe9814a7e4df178c7962eebf251f285cef64a62546220c9a531233cd1d0a886957441ccbcd6ef3849493d1387870a6" + }, + { + "input": 
"//wAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////gAAAAAAAAAAAAAAAAAAAAB///////wAAAAAAAAAAB///////4AAAAAAAAAAAAAAAAAAAAAAAD/////8AAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA//gAAAAAAAAAAAAAAAAAAAAAD//////8AAAA/////////////4AAAAAAAAAAAAAAAAAAAAAAAAA////////////8AAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAA/////////////////////+AAAAB////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAAAAAAAAAAAAAH///////////////////////gAH//////////////x///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAD////AAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAAAA///////////////////////////////gH////////8AAAAAAAAAAAAAAAAAAAAAAAAAADAAf///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAAAAf////////////////////////////AAAAAAB/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAAB//////+AAB///////////////////////gAAAAAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB////////////////8AH////4AAAAAAAAAAAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////AAAf///////////////////////////wAAAAAAAAAAAH///////////////////////////AAAAAAAAB//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAH/wAAAAAAA//////wAAAAAAAAAAAAAAAAAAAAAAAAf///wAAB///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAAAAAAAAAB//////////////////////wAAAAA//////wAAAAAAAAA///////////////////AH//////////////////////////////D///////////////////wAAAAAAAAAAAA////////gAAAAAAAAAAAAAA/////////////////AAAAAAD//////////////////////////+AAAAAAAD////////////gAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/wAAAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAAP8AAAAAAAAAAAAAAAAAAAAAAD/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAf////////////////////////+AAAAAAAAAAAAAAAAAAAA///wAAAAAAAAAAAAAAAAAAH///////////////AAAAAAAAAAAAAAAAAAA//////////////////AAAAAAAAAAAAAD//////////////////////////////AAAf/////////////4AAAAAAAAAAAAAAAAD/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////+AH///////////AAAAAAAAAAAAAAAAAAAAAAB/////////v/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP//////////8AAAAAAAAAAAf////////gAAAAAAAAAAAAAH/4AAAAAAAAAAAA////////////////////////////4AAAAAAAAAAAAAA///////////////////////////////gAAAAAAAAAAAAAAAAA////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAA///////////////4AAAP4AAAAAAAAAAAf/////////////AAAAB///////////////////////gAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////8AAAAD//8////+AAAAAAAAAAAB////8AAAAAf/////////////////////////+AAAAAAAAAf8AAAAAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAP//////////////////////AAAf//////////+AAAAA/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAB////////////////wAAAAAAAAB//////gAAAAAAAAAAf////////wAAAAD/////AAAA/////////////+AAAAAAAAAB///3/////4AAAAAAAAf////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////+AAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////gAAAAAAAAAAAAAAAAAAAf/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAP///AAAAA////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPgAAAAAAAAAB////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAf/////////////////8AAAAAAAAAf////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAP///AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////8AAAAAP////wAAAAAAAAAAAAAAAAAAB///////////////AAB///////////////////////////AAAAAAH/////////////////////+AAAAAAAAAP//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAA///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////gAAAAAAAAAAAH////////////////////////4AAAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////wAAAAAAAAAB//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAP////AA//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAB////////////4AAAAAAAAA////////////+AAAAAAAAAAAAAAAAAAAAD////////////+AAAAAAH/4AAAAAAAAAP////////AAAAAAAAAAAAAAD/////////////////////////8AAA////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAAAAAAAH//////////////+AAAAAAAH/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////4AAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAf/8AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////+AAAAAAAAAAAAA////////////////8AAAAAAD///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAAAA4AAAAAAAB////////////gAAAAAAAAAAAAAAAAAAAAAH/////////////////////////4AAAAAH///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////4AAAAAAAAAAAAAAAH///gAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAAAAB///////////////////////////gAAAf//////////////////////////+AAAAB/4AAAAAAAAAD8AAAAAf8AAAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////wAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAAH//AAAAAAAAAAA///////////////8AAAAAAAAAAAAAAB/wAAAAD//wAAAAAAAAAAAAAAAf/////wAAAAf///////////////////////////8AAAAAAAAAAAAAAAAAAAP//////////8AAAAAAAAAAAAAAAAAD//////////8AAAAAAAAAAf//4AAAAAAAAD//////////////////////////////8=", + "sha1": "7ff02b909d82ad668e31e547e0fb66cb8e213771", + "sha256": "7be29e433c7e17875c71eea08d10ada5a17eea25ead94d41cb1711e8fd204c06", + "sha512": "4ed3534e84d10d48d7723a6446962fb2f00b977adcce7fc0f59660663961b932133f4c48e3725c2c3e745b6566252d99dd308a8967e67195c1750981d52e61bf" + }, + { + "input": 
"////////////////gB8AAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////wAAP/////////////gAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP////////4AAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAAAAAAAAAAAH///////////////////////AAAAAAAAAAAAAAAAA///////////////////////H////////gD////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////gAAAAAAAAAAAAAAAD///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAAAAf/////////+AAAAAAAAAAAAAAAAAAAAAAAAAA////////////////8AAAAAAAAAAAAAAAAAAAB//////////////////8AAAAAAAAAAAAAAAAD///8AAAf///gAAAAAAAAAAAAAAAAAAAD//wA///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH//8AAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAAAAAAAH///////////////////+AAAAAAAAAAAAAAAf//////wH////////////AAAAAAAAAAP///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////4AAAB///////////////////wAAAAAAAAAAAAAAAAAAAAf//////8AAAAAAAAAAAAAAAAAAAAAAAAfAAAAAAAAAAAAAAAAAAAAB//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////P///8AAAAAAAA//////////////////////////+AAAP/////////////////////////////8AAAAAAAAAAAAAAAAAAAAB/////////////////////////wAAAAAAB///////////gAAP///////////////////4AAAf//////////////+AAAAAAAAAAAAAAAAAf///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAB/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////gAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////+AAAAAAAAAAB/////////////////////////AAD/////////////////////////8AAAAAAAAAAAAAAAAAAAB/////////////////////////////+AAAAAAAAAAAAAAAAf///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAP/////////////////gAAAAAAAAAAAAAAAP//////+///+AAAAAAAAAAAAAAAA/////////4AAAP/////////////////////wAAAAAAAAf////////////wAAAAAAAAAAAAAAAH////////////////////gAAP//////////////////////////////8AAD///////////////////wAAAAAH///gAAAAAAAAAAAAAAAAB///////////////4AAAAAAAAAH///////////////////+AAAA///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////wAAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAAAAAAAAB///////AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////wAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////4AAAAAAAAAAAAAAAAAB/////////////4AAAAAAAAAAAAAAAAAAA////////////////////////////////AAAAAAB////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAH////////wAP////////////////gAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAA/////////////AAAAAAAAAB///////////////////////8AAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAAAP////////////////8AAAAAAAAAAH////////////////////////gAAAAP////////AAAAAAAAAAAAAAAAAAAAH/////////////////////gAAAf/////////////////////8AAAAAA/////////////////////////gAAAAAAAAAAAAAAA///////////////////////8AAAAAAAAAAAAAAAf////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAA////////////////////////8AAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAAAAAAB////////wAAAAAAAAAAAAAH/////////////////////////////8AAAAAAAAAAAAAAH/wAAAAAAAAAAAAAAAAAAAAAAA/////////////////4AAAAAAAAAAAAAAD/////////////////4AAAAAAAB////wAAAAf////////////////gAAAAAf//////wAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////A//////////////AAAAAAAAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAAAAAAH////4AAABwAAAAAAAAAAAAAAAAAAAAAAAf//AAAAAAAAAAAP//////////wAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAf//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAA////AAAAAAAAAAAAAAD///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////+AAAAAAAAAAAAAP////////////4AAH/////////////////gAAAAAAA////////////wAAAAAAAA
AAAAAAAAAAAAAAAAP////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////4AAAAAAAAAP/////////////////////gAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////gAAAAAAAAAAAAAAAP//////////////////////AAAAAAB//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////8AAAAAAAAAAAD//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////wAAAAAAAAAAAB////////////gAAAAAAAAAAA//////////////wAD//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////8AH//////////AAAAAAAAAAAAAAAAAAAB///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////AAf///////////////////////////8AAAAAAAB//////////////////////////////gAAAAAAAAAAAAH///////////////////////////wAAAAAAAAAAAH////////////////////////AAAAAAAAAAAAAAAAAAAB/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAA//////+AAAAAAAAAP/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH////wAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////4AAAAAAAAAAAAAAAAP////////////8AAAAAAAAAAAAP//////////gAAAAB////////AAAA///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////4AAAD//////////////+D/////////wAAAAAAAAAAAAAAAAAAAAAAD////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////wAAAAAAAAP//////////////////////////////AAAAAAA/////////////+AAAAAAAAAAAAAAAAAB/////////////////////////8P//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHAAAAB/////8AAAAAAP/////////////////////////////wH//////gAAAAAAAAAAf///wAP/////////////////////////////wAAAAAAAAAAAAAD////AAAAAAP//////////////////////4AAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAD////////////+AAAAAAAAAAAAAA/////////////////AAAAAAAAAAAB//////wAAAA///////wAAAAAAAAAAAAf////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAf///////////gAAAAf////////////////////////4D//////////////gAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAAAAAAAAAAAAAAB///////+AAAAAAAAAAAAAAAAH////////////wAAAAAAAAAAAAAAH//////////////////////////////4AAAAAH////8AAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////8AAAAAAAA///////////////8AAAB////////////////////+///////////////////AAAAAAAAAAAAAAAAAH///////////////////////////+AAAAAAAAAAAAAAAP///AAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////+AAAAAAAAAAAAAAAAAAAf/////////////8AAAAAAAAAAAAAAAD//+AAAAAAAAAAAAeAAAAAAAAAAAAAAAAAAAAf/////////////////////////////8AAAAAAAAAAAAAH///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAA//////////////////////////////3////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==", + "sha1": "5bb3570858fa1744123bac2873b0bb9810f53fa1", + "sha256": "c1f98aac0cfc98f30c3fa13fb8011b2a1d553e6c03edb8e2a35f47574237fc64", + "sha512": "1263ee50dddda760e3cd1aae4834fcee248b28d18012c45611f30c2f239d267f8d3e6945015a6c739abac78137e1f9f68c0df04a91ffc5616d4b8c8b2fb65e2f" + }, + { + "input": 
"////gAAAAAAAAH/////////////8AAAAAAAAAAAAAAAAAH/////////////////////////////A///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAP/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////A////+AAAAAAAAAAAAAP//////////////////wAP/////////////////////////z//////////////////////4AAAAAAAAAAAAAB///////////////////////////////wAAAAAAAAAAAH//////////gAAAAAAAAAAAAAAAAAAAAP////////////////////+AAAAAAAf///////////////////////////AB////////////////gAAAAAAAAAAAAAAAAA////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAA////wAAAAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAAAAAAAAAAAAAAAAH//////////////gAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAA///////////gAAAAAAAAAAAAf///////////////////////8AAAAAAAAYAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////4AAAAAAAAAAAAAH///////8AAAAAAAAAAAAAAf/////4AAAAAAAH///////////////////////////////4AAAAAAAAAB////AAAAAAAAAAD///////////////////////4AAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////4AAAAAAAAAAAAAAAAAA////////+AAAAAAAAAH+AAAAAAAAAAAAAAAP8AAAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////+f///////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////wAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAAB///wAAAAAB///////////////gAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////gAPgAAAAAAAAAAAAAAAAAAAAAAAAf///////gAAD/////////////+AAAAAAAAAAAAAAAAAAA//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAD/////////////////f////////////////////AAAf//////////////////4AAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAf///AAAAAAAf////////////////AAAAAAAAAD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAB//////////////////////////////8AAAAAAAAAAAAAA/////////////////////////gAAAAAAAAAAAAAAAAAAAAB//////////////////////gAAAAAAAAAAAAAAAAAAAAAAA/////8AAAAB////gAAAAAAAAAAAAAAAAAAAH/////+AAAAAAH//////////////////gAAAAAAAAAAAAAAAP////+AAAAAAAAAAAAAAAAAAAAAAH///////+AAAAAAAAAAAAAAAAP////////////////////4AAAAAAAAAAAAAAAAAAAD//////////////////////////gP//4AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////f////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///4AAAAAAAAAAAAAAAAAAAAAf//////////////////+D/////////////////AAAAAAAB////////////////////+AAAAAAAAAAAAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAAB////////4AAAAH///////////////////////////////8AAAAAAAAAAAAAAD/////4AAAAAAAAAAD///////////////////AAAAAAAAAAAAAAA4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////+AAAAAAAAAAAAAAAAAP/////////////gAAD////////////4AAAAAP///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf///////////////AAAAAAAAAAAA//////////////////////////////+AAAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAf////////////////////////D///////////AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////8B////////AAAAAH////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////gAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAD/////8AAAAAAAAAAAAAAAAAAAAH/wAAAAAP/////////////////
/////+AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////gAAAAAAAAAAAAAAAAAAAA////7///////////4AAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAAAAAAAAAAAAAAAAAB///////////////////////////+AAAAAAAAAAAAAA/////////////////////+D///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAAAAAAAAAAA///////////////////////8AA/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////8AAAAAAAAB/8AAAAAAAAAAAA//////////////AAAAAAAAAAAAAf///////////////////////////H//////gAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////8AAAAAAAAAAAAAAAAP//////////////////////////4AAf////////////////////wAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////+AAAAAAAAAAAAAAAA//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAf////4AAAAAAAAAAAAAAAAAH//4AAAAAAAAAAAAAAAH/////////////////////////gAAAAAAAAAAAAAAAAP////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAD//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/8AAAAAAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAf/////////////////////////+AAAAAAA//////////////////gAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAfgAAD///4AAAAAAAAAAAAAAAAAAAAAAAAP//8AH/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAH//////+AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAAAAAAAAAAAAAf//////////////////wAAAP////AAAAAAAAB/////////////////gD//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////AAAAAAAAAP/////+AAAAAAAAAAAAAB//////////////wAAAAP//////////////////////////4AAAAB///////////4AAAAAAAAAAAA///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAA/wAAAAAAAAAAAAAAAAAAAAAAH/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAH//////////gAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAAAAAAAAAA//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAD/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////gAAAAAAAAAP/////////////////////////AAAAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAAAAAAAf////////////gAAAAAAAAAAAAAAAAAP/////////////////wAAAAAAAAAAAP////////////////////////////+AAf///////////AAAAAAAAAAAAAAAAAf////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB//////gB////////+AAAAAAAAAAAAAAAAAAAAAB//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////wAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAAP/////+AAAAAAAAAAAAH8AAAAAAAAAAH////////////////////////////AAAAAAAAAAAAAD////////////////////////////wAAAAAAAAAAAAAAAAP////////////////////AAAAAAAAAAAAAAAAAAAAAAAP/////////////gAAAAAAAAAAAAAAAAAA", + "sha1": "905f43940b3591ce39d1145acb1eca80ab5e43cd", + "sha256": "981571f9393463f49cd5352c024a8998d7b139bc8aec7a512101edb18a7e0954", + "sha512": "b5adf124a9d519929f4e9dea2d9a1d061451abab6046d51e973d6fd1704cfa8c2f5bf50a950d7d3490ec8032490ed27da5532247c1a7f4831ab355ee28721fa1" + }, + { + "input": 
"///////////////////////////////4AAAAAAAAAAAf/////////////AAAAB/////////////////////+AAAAAAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////AAP///////////////////////////////z//////////////////////////////+AAAAAAHwAAAAB//////////////////////////4AAAAAAAH//////////////////////////////AAAAAAAAAAAAAAAf/////////////////////gAAAAAAAAAAAAA//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAAAAAAAAAAAB///////////////AAAAAAAAf/////4AAAAAAAB/////////AAAA/wD////////gAAAAAf/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////gAAAAAf////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAAAAAAAA////////////////////////////AAAAf////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAB///////////////////////////wAD////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////+AAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf///+AAAAAAAAAAAAA//////+AAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////8AAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////gAAAAAAAAAAwAAA///////////////4AAAAAAAAAAAAAP///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAD///4AAAAAAAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAf/////////////////////AAAAAAAAAAA///////////////////////AAAAP////////////wAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////AAAAAD/////////////////////////////gAAB/////////////////////wAAAAAAAAAAD///////////////////////////8AAAAAAAAAAAAAAAAAAf///////////4AAAAAAAAAAAAAAAAAAAf////////////AAAAAAAAP////////////////////////////wAAAAAAAAP///////gAAAAAAAAAAAAAD///////////////////////AAAP/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////4AAAAAAAAAH/////AAAAAAAH///////////////AAAAAAAB/gAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////wAAAAAAAA/////////wAAAAAAAAAAAAAAAAAAAA///wAA/////////////////////////AAAAAAAAAAAAAAAAAAAH///4AAAAAAAAAAAAAH///////gAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAH/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////4AAAAAAAAAAAAAAAAAA////////////////////////////////8AAAAAAAAAAAB//wAAAAAAAAAAAAAAAAAAAAAH//////////////////////gAAAAAAAAAAA////////////////8AP/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAAAAAAAAA///////////////////AAAAAAAAAAAAAAAP//////////////////////////////gAAAAAAf//////gAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////4AAAAAAAAAAAAAAAH/////////////////////////////4AAAAAD///////////+AAAAAAAAAAAAAAAAAAAAAAf/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////gAAAAAAD////////////////////////////wAAf///////////////////////////AAAAAAAAAAAAAAAf///////////////////////////gAAAAAAAAAAH//////////////wAAAAAAAAAAAAAAAAAAAAAD///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////gAAAAB///////////////////////wAAAAAH////////////////////////+AAAAAAAAAAAAAAAAAAP//////////////////+AAAAAAAAAAAAAAAAA//////////gP/////AAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////AP/////////////+AAAP///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAA//////////////wAAAAAAD////////////wAAAAAAAAAAP////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///wAAAAH//////////////AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////8AAAAAAAAH////////////////////////wAAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAAAAAAAA////wAAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAP///////////////////+P//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////4AAAAAAAAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////wAAAAAAAAAAAAAAAAAAAAA////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAH////////////////8AAAAAAAAAAAAAAAB////////////////////////////////8AH//8AAAAAAAAAAAAAAAAAAAIP///////////////////gAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAB////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAf///////////gAAAAAH8AAAAAAAAAAAP///////gAAAAAAH/////////////////////////wAAAAAAH//gAAAAAAAAAAAAAAAAA////////////////////+AAAAAAH////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAAAAAAAAAAP//////////////////////////////gf///////4AAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAA////////////////////////////////AAAAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAAAP/////////////wAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAH///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAB///////////////////////////////gAAAAAAAA//////////////////gAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAD///////wAAAAAAAAAAAAAAAAAAAAAAAD8AAAAAAAAAAAAAAAAAAAAAAP/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////gAAAAAH///////////////////////////8AAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAB////////////////AAAAAB////////////////////////////////j/////////////wAAAAAAAA//gAAAAAAAAAAAAAAAAAAAAAf//////wAAAAAAAAAf//////////////4AAAAAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4AAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAAAAf/8AAAAAAAH///////////////////////////gAAAAAAAAAAAAAAAAAH/////////////////////////////wAAAAAAAAAAAAAAAB///////////wAAAAAAAAAAAAAAAAAAAAAAAAf///////////////+AAAAAAA///////////////////////AAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAAD//////////////4AH/////////////////////////+AAAAAAAAB//////////////+AAAAAAAAAAAB///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////AAAAAGAAAAAAAAAAAAAAAAD//////////gAAAAAAH//////////////////gAAAAAAAAB//////wAAAAAAAAAAAAAAAAAAAAAAAf///////////4AAAAB/////////////////8AAAAAAAAAAAAAAH////////8AAAAAAAAAAAAAAAAAAAAAAAH////8AAAAAAAH////////////////////AAAAAAAAAAAAAAAAAAAAB4AAAAAAAAAAAAAAAAAAAAAA///AAAAAAAAAAAAAAAAAAAAAAAH////////////+AAAAAAAAAAAAAAAAAAAAAAD//////////////wAAAAAAAAAAAAB/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "336c79fbd82f33e490c577e3f791c3cbfe842aff", + "sha256": "8628162a5c9d34c94e60027175f819e98a356832a3d3898a7f11b95e171e2a73", + "sha512": "d1e420127b234fee4a21777a107b75009621fad13facfe4af57a03de9ca4eeabdf5cf6bd1db312deb2ea2013ba0ca3ae64fa48794464cc12fbb4dfcedcb24a60" + }, + { + "input": 
"AAAB//////////////////////gAAAAAAAAAAAAAAAAP///////4AAAAAAAAAAAAAAAA///////wAAAAAAAAAA//////////////////////////8AAAAAAAAAAA///////////////////////////////8AAAAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAAAAAAAP/////////////8AAAP///+AAP///////////8A///8AAAAAAAAAAAAAAAAAAAAAAAAB/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAP//////////////wAAAAAAAAAAAAAAAAAAAAD/g/////////////////////////////gAAAAAAAAAAAAAAAAAAAAD//4AAAAAAf/////////AAAAAAAAAAAAAAAAAAAAB///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAHAAAAAAAAAA/AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////+AAAAAAAAAAA/////////8AAD////////////////gAAP////////gAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////+AAAAAAAAH////////////////////gAAAAAAAAP///////////////gAAAAAAAAAAAAAAAAAAAAAAf////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAH///////////////////+AA//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////4AAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAA//wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAA//////////AAAAAAAAAH/////////////+AAH//////////////////////////////gAAAAAAAAAAAAAAAf////////////////////////////////AAAAAAA////////////////wAAAAAAAAAAAAAAAAAA///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAH///AAAAAAAAAAAAAAAAAAAAAAAAAAAA////////8AAAAAAAAAAAAAAAAAAAAAAAA///////////AAAAAAAAAAAAAAAAAAAAA///////////////////+AAAAAAAAAAAAAAAP////////////////////////wAAD/////8AAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////wAAAD/////////////////////////8AAAAAAAAAAAAAAAAAAD/////////////AAAAAAAAAAAAAP////////4AAAAAAB//AAAAAB///+AAAAAAAf///////////////////////////////AAAA///////////////////////////////8AAAAAAAAAAf////////////////////////////8AAAAAAA///////////////////////wAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAAAAAAAD////////8AAAAAAAAAAAAAAAAAAAAAAAAAAB//////+AAAAAAAAAAAAH////////////////wAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAA///gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////wAP////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////wAAH////8AAAAAAAAAAAAAAAAAAAP/////////v///////////////////+AAAAAAAAAAAB///////////8AAAAAAAAAAAAAAP//////////AB////////////////////////////////AAAAAAf/////////8AAAAP///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////8AAAAAAAAAAAD////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAA/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////wAAAAAAf//////////////8AAAAAAAAAAAAH/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAB////8f/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAH/////4AAAAAAAAAAAAB4AAAAAAAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAAP///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////3///////////////////////8AAAAAAAAAAAAAH////////////////4AAAAAAAAAAH////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////4f//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAH////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAP/////////////+D////////gAAAAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////+
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///wAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////4AAAAAAAAAAAAAAAAAPwB/////////////wAAAAAAAAAAAAAAAAAAP4AAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////4AAAAAAAAAAAAAAAAAAA////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////+AAAAAAAAB//////////////////4AAAAAAAAAAAAAAAAAAH////wAAAAAAAAAAAAAAAAAAAAAAAAD/4AAAAAAAAAAAAAAAB//////////////wAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////4AAA////////////////////////wAAAAAAf//////////////////wAAAAAAAAAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////AAAAAAAAAAAAAAAAAAf//////////wH//////////////4AAAAAAAAf///////////wAAAAAAAAAAAAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAAAAAAAAAAAB///////////wAAAAAAAH////////////wAAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////gB//////////4AAAAAAAAAAAAAAAAAAAAAAH/////////AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////gAAAAAAAAAAAAAAP/////////////////+AAAAAP/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////8AAAAAAAAAAAAAAAAAAAP/////////////////8AAAAAAAAAAAAAAAAAAAAD////////////////////////////AAA////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////AAAAAD//////4AAP//////////////////////wP/////AAAAAAAAAAAAAAAP///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAB/gAAAAAAAAAAAAAAAAAAAAAAH///////4AAA///////////8AAAAAAAAAAAAAAAAAf/////+AAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////wAAAAAAAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAAA////////////////////gAAAAAAAAAAAAAAAAB///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////gAAAAAAAAAAAAAAAf///////gAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////gAAAAAAAAB///////////////AAAAAB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////+AAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAH//gf/////////////////AAAAD///AAAAAAAAAAAAAAAAAAD///////////////AAA////////////////////gAAAAf/////////////gAAAAAAAAAAAAAAAAA////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////4AAD/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////AAAAAAAAAAAAP//////////////////////////////wAf///////////////////gAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAAAA///////////////8AAAAAAAAAAAAAP///////////////////gAAAAAAf///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////gAAAAf/////////4AAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAD////////////////8AAf////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAAAP//AAAAAAAAAH/////wAAAAAAAAAAAB////////////////////////////AAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAA////////////////////////w==", + "sha1": "5c6d07a6b44f7a75a64f6ce592f3bae91e022210", + "sha256": "1c4530860ec79ab73b141a7e64b0de775192a002fa2f3832b6c24972797d5161", + "sha512": "56ad639b61fe8f834de37d5c760cd436add2a7fb046b78ca87cc6434f3e251d902a8b0d1a7dacef2d156c28ee22ba52c3d90a3640dd0cea4e7a4b5d72f7ca0ee" + }, + { + "input": 
"//////////////////////////////+AAAAAAAAAAAH/4AAAAAAAAAAAAAAAP/////////////////8AAAAAAAAAAAH/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////+AAAAAAAAAAAAAAAAAAAAAAH////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//AAAAf///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAH/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAA///gAAAAAAP//8AAAAAAAAAAAAAAAf////////////////////////////gAAAAAf////////////////////AAAAAAAAAAAAAAAAAAAH////////////////////////8AAAAAAAAAAAAAAAAABwAAAAAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAD/////+AAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////8AAAAAAAAAAAAAAAf//////////////////////+AAf/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB///////////////4AAAAAA///////////////////////////////8AAAAAAAA///////////wAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////+AAAAAAAAAAAf///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////+AAAAAAAAAAAA//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAAAA////AAAAAAAAAAAAD/wAAAAAAAAAAAAD////////////////////////AAAAAAAAAAAAAf////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////AAAAAAAAAAAAAB//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAB/////////////////////////gAAAD/////////////////+AAAAAAAAAAAAAAAAAH///////////////////4AAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////4AAAAAAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAAAAAAAAAB/////////////8A//AAAAAAAH//////////////gD//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA///////////////wAAAP///gAAAAAAAAAAAB//////////wAD//////////////////////////+Af/////////4AAAAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAD/AAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAAAAAAAAH////+AAAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAAAAAD//////////////////////////gAAAAAAAAAAAAAAAAAAAA//////////////gAAAAAAAAAAAAAAAAAAAAD///////4AAAAAAAAP/////////////////////////AAAAAAAAAAAAAAAB/AAAAAAAAAAAAAAAAAAAf////////gAAAAAAAAAAAAAAAA//////////////////AAAAAAAAAP///////////////////////////4H///////wAAAAAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////8AAAAAAAAAAAAAAAAAAAAH/+AH////4AAAAAAAAAAAAAAAAAAAAf//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////AAAAAAAAAAAAAAAAA/////////gAAAAAAAAAAD//gAAAAAAAAAf////////////////////////////////+AAAAAAAA////////////////////////////4AAAAAAAAAAAB////////////////////////4AAAAAAAAAAAAAAAf//////////////4AAAAAAAAAAAAAAAAAAAAf//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////wAAAAAAAAAAA/////////////+AAAAD////////////////////+AAAAAAAAAAA////////////////8AAAAAAAAAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////8P/////////////////////////////8///////////////////////////+A
AAf///////////////////////////////wAAAAB//////////////////////////4AAAAAAD////////////////////////////4AAAAAAAAAAAP//////////////////AAAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAAf/////////////4AAAAAAB//////gAAAAAB/////////AAAf///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////4AAA/wAAAAAAAAAAAAAAAAAAAAAAAAAAB///8AAAA//////+AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAA////gAAAAAAAAAAAAAAB///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAP+AAAAB/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAA//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAA/////////+AAAAA////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////wAAAAAAAAAAAAAAAAB////////////////////////////8AAAAAAAAAAAAAAAAAAAB//////////////////////gAAAAAAAAAAAAAAAAAAAAB//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//wAAAAAAAAAAAAP////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////wAH///////////////wAAAAAAAAB//////////////////////8AAAAAAAAAAAAAAAAAAA////////4AAAAAAAAAAAAAAAAAAAf/+AAAAAAAAAAAAB/////////////////////8AAAAAAAAAAAAAAAAAAB/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAD////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////8AAAAAAAAAAAA////////////////wAAAAAAAAAAAP////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAA////////////AAAAAAAAAAAAAAAAAAAAAD//gAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAAAAAAAAA////////wAAP///4AAAAAAAAAAAD////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////AAAAAAAAAB////////4AAAAAH////////////////+AAAf/AAAAAAAAAAAAAAAAAAAAAD///////////////+H////////////8AAAAAAAP/////////////////AAAAAAAAAAAAAAAAAAAAAAAH///////gAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////gAD//////////////////4AAAAAAAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAB////////////////////////////////AAAAAAAAAAB//////////////////////////////gAAAAAAAAAAAAAAAAD////////////////////gAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////+AAAH/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAAAAB///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAAAAAAAAAAAAAAAH8AAAAAAAAAAAAAAAAAAAAAAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////8AAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAAAAAB///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAA////AAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAB//////////////4AAAAAAMAA///////AAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAA4AAAAAAAAAAAAAf/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////+AAAAAAAAAAB////8AAAAAAAAA///////////AAAAAAAAP///////////////////////////+P/gAAAAAAAAAAAA///////////////////////////g/////////////////////////AAAAAAAAAAAAAAAAAAD/////////////////////", + "sha1": "7e0d3e9d33127f4a30eb8d9c134a58409fa8695b", + "sha256": "d97097c16c4d0cf169e61cde78e807cd318b8938992066bfe4e266e14146fbba", + "sha512": "9c3c1aa2aa6fbb1dca6ebc4be7a8aeca2284f3a4729227dab1c4cab359bcb6750c228e754b170a2f20fc06f7c64789647f26dc0c61f0b62a9dc98fff5bef2289" 
+ }, + { + "input": "///////+AAAA//////////////////////4AAAAAAAAAAAAAAAAAAAAA/gAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAAAAAf//////////////////8AAAAAAAAAAAAAAAAD////////////////8f///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////AAAAAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAAAP/////////////////////////////+AAAAAAAAAAAAAAAAAAAf///+AAAAAAAAAAAAAAAAAAAAAAAAAA////////8AAAAAAAAAAAB//////////////8AAAAAAAAAAAAAAD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAA//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////+AAAAAAAAAAAAAAAAAAAf////////////////////////////////4AAAAAAAAAAAAAAAD///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB///////gAAAAP4AAAAAAAAAA////wAAAAAH/////////////////////+AAAGAAAAAAAAAAAAAAAP////////////////////gAAAAD////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////AAAAAAAAAAAAAAAAAAAAD/////////////////////4AAAAAAAAAAAAAAAf///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAA///////4AAAAB///////////////////////////gAAAAAAAAAAAAAAAAAAAP/////////////////////////////////gAAAAAAAAB/////////gAAAAAAAAAAAAAAAAAf////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAP//////////////////////////////8AAAAAAAAH////////////////4AAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAf//gAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////gAAAAAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAAAAAAAAAAP///////////8AAAD/////////////////////////4AAAAAAAAAAAP/////////+AAAAAAAAAD///////////////+AAAP///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAAP/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAP////gAAAAP/////////////4AAAAAAAAAAAAAAf//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////AAAAAAAAAAAAAAAAAAAD/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAAIAAAAAAAP////////////////////////8AAAAAAAAAAAAAAAAAA////////////////////////////8AAAAAAAAAAAAAB/////////AAAAAAAAAAAAAAAAAAAAAP///////////wAAAAAAH///////////////////AAAAAAB//////+AAAAAAAAAAAAAAAAAAAf/////////wAP///////////////////////wAAAAAA//////////wAAAAAAAAA///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH//wAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////B////////////////////////////////AAAAAAAAAAAAAAAAB/////////gAAAAf////////////////8AAAAD/4AAAAAAAAAAAAAAAAAAAAAAAP/////////wAAAAD//////////////AAAAAAAAAAAP//////gAAAAAAAAAAAAAAAAAAAAAB/////AAAAAAD////////////////////gAAAAAAAAAAAAAAAAAAB//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////AAAAAAAAAAAAAAAAAAAAAf/////////AAAAAAAAAAAAAAAAAAAAAD/////////////gAAAAAAAA////////////////////////4AAAAAAAAAB//AAAAAAAAAAAAAAAAAAAAD////////////////AAAAAD////////////+AAAAAAP///////4AAAAAAAAAAAAAAAAAAAAAAAD////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////gAAAAAAAAAAAAAAAAD//////////////+AAAAAAAAAAADAAAAHwAAAAAAAAP///////////////////////////////4D///////////////////////gAAAAAAAAAAAAP////////////////////////////////4AAAAAAAAAAAAAH////////////AAAAAAAAAAAD/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////gAA///wAAAAAAAAP//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////
//////////wAAAAAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////wAAAAAAAAAAAAAAAAAAAAAAP////////+AAAAAAA///AAAAAAAAAAAAAAAD/////////////////////////////////8AAAAAA///////////////////AAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAf///////AAAAAAAAAAAAAAAAAAAB////////gAAAAAAAAAAAAH///////////////////gAAAAAAAAAAAAB//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAB////////AAAAAAAAAAAAAAAAAAD////////////////////////////////8AAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAH///////////////wAAAAAAAAAAAAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAAAAAAAP///+AAAAAAAAf///////AAAAAAAAAAAAAAP///////////////////////////////+AAH/////////////////////////////wAAAAAAAAAAAAB///////////////////////////////+AAAAP/////////////gAAAAAAAAAAAAAAAAA/////////////////////////AAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAf//////////////////////4P//+AAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////AAAAP//////////////////////AAAAAAAAAAAAAAAAAAf//////////AAAAAAAAAAAAAP//////AAAAB/////////////////////////////////8AAAB///+AAAAAAAf///////////////////////////////AAP///////////////////////////8AAAAAAB//////////////////////////+AAAAAH//////////////////////+AAAAAAAAAAAAAAAAP//////////////////////+AAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAB//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////8AHgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB////////gAH///+AAAAAAAAAAAAAAAAA////////v//////////////////gAAAAAAAAAAP/////////4AAAAAAAAAAAAH/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAf////////AP/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////AAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////gAAAAAf/////////////+AAAAAAAAAAAAD////////////+AAAAAAAAAAAAAAAAAAAAAAB///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////AAAAAAAAAAAAAAAAAAAAAf///4AAAAAAAAAAAf///////////////////////////////4AAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAB///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////+AAAAAAAAAAAAB///////////////gAAAAAAAD/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////w////////////wAAAAAAAAAAAAAAAAAAAAAB/////8AAAAAAAAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAAAAAAAA//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAD////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////+AAAAAAAP////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//+AAAAAAAAAA////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAfgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////AAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAP/////////////////8=", + "sha1": "9a5f50dfcfb19286206c229019f0abf25283028c", + "sha256": 
"3a18179cf693d234a8aa913b7362505533b414d60bf7eaf02427157759defaba", + "sha512": "4e041ec1fc9208aa3ef65bd5abfaf8db48e0d6f1b1de52a47351d827057bff253effb63c5e868dbdddd56ff93b32e750896db45a534b1e2016cf1f380f564262" + }, + { + "input": "//////////////gAAAAAAAAAf////////////////////////gAAAAAAAAAAAP/////8AAAAAAAAAAAAAAP//////////////wf////////////////////4AP///////////////8AAAAAAAAB/////////////////////4AAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAf8AAAAAAAAAAAAAAB//////////////4AAAAAAAAAAAAAAAAAP//////////////////////4AAf/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAH///////////////////////////////wAAAAAAAAAAP////AAAAAAAAAAB///////////////8AAAAAAAAf/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAH/////////AAAAAAAH///////8AAAAAAAAAB////AH///////////////////////////gAAAAAAAP///////////////////////////////8AAAAAAAAAAP/////////////////////AAAAAH///////////////////////wAAAAAAAAAAAAAAAAAA///////////gAAAAAAAAAAAAAAAAACAAAAAAAAAH/////////////////////////////AAAAAAAAAAAAAP//////////////////////////8AAAAAAAAAH/////////////////+AAAAAAAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH/gAAAAAA////////////////////////////////gAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAf//////////////4AAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////AAAAAAAA///4AAAAAAAf//////////////AAAAAAD/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////gAAAAAAAAAA////////////////////////////////8AAAAAAAAAAAAAA/////////////////////////////gAAAAAAAAAAAAAAf////////////4AAAAAAAAAAAAAAP///////////////8AAAH///////////////////////////8P//////////////AAAAAP/////////////////////+AAAAAAAAAAAAAAAAAAAAAAD////AAAAAAAAAAAAH/wAAAAAAAAAAAAB////////////////////////////4AAAAAf////////////////////////wAAAAAAAAAAAAAAAAAAAAA//////////////////AAAAAAAAAAAAAAAAAP/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////8AAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAB/////////////////////////////////4AAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAAAAA////////////4AAAAAAAAAAA//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAAAAAD////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////AAAAAAAAAAAAAAAAAP//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////4B////////////////////+AAAAAAAAAAAH//////////4AAAAAAAAAAAAAAAAD////AAB///////////////////////wAAAAAAAAAAAAAAP///AAAAAAAAA//////////8AAAAAA////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAAAAAH////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAP///////////////////////+AAAAAAAAAAAAAAAf//////////+Af///wAAAAAf///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAAD//////+AAAAAAAAAD//////////////////////////////4AAAAAAAAD/////////////////////////////////+AAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAAf///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////4AAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAAAAAAAB///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////AAAP//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////wAAAAAAAAAf///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////AAAAAAAAAAAAAAAAAAD/gAAAAD/wAAAAAA
AAAAAH/////////////////////////gAAAAAf//////////////////wAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAH///gAAAAAAAAAAAAAAAAAB///////////////////////4AAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAA/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////8AAAAAAAAAAAAD///////////////////////wAD////////4AAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////gAAAAAAAAAAAAAAAAAAAAAAP///////////8AAAAH/////////////////////////////4AAAAAP/////////AAAAH//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAB//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////wAAAAAAAP//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////8AAAAAAAAAAAAAAAAAAAAAD/////////////////////////8AAAAAAAAAP//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAD///////////AAAAAAAAAAAAD//+AAAAAAAAAfAAAAAAAAAAAD///////////////////////gAAAAAAAAAB//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAD///////4AAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAB///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////gAAAAAAAAP///////////////////////+AAD/////////////////+AAAAAAAAAAAAAAf///////////////+AAAAAAAAAAAAAAAAAAAAAP////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAA/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////8AAAAAAAAAAAAAAAAAAP////////////////////////////////8AAAAAAAAAAAAAAAAAH///////////////////+AAAAAAAAAAAAAAAAAAAAAA//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgAAAAAB////////////+AP/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAf////////////+H/8AAAAAAAAAAAH///////4f//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////wAAAAAAAAAD///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAf4AAAAAAAAAAAAAAAAf/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////4AAAAAAAAAAAAAAAAAAA//AAAAAAAAAAAAAAB////////////////////////wAAAAAAAAAAAB///////////////////////+AAAAAAAAAAAAAAAAAAAB//////////////AAAAAAAAAAAAAAAAAAf/////wAAAAAAAP/////////////////////////AAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAf/////////////////gAAAAAAB///////////////////////////v////8AAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAf/////////////////////////////////8AP/+AAAAAAAAAAAAAAAAAAADAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////wAAAAAAAAAAAAAB////////gAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////gAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAAAAAf//////////////////gAAAAAAAAD////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAH////wAAAAAAAAAAAAAAAAAAAD//////////+AAAAAAAAAAAAAA//////////////////////4AAAAAAAAAAAAAAAAAA//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////+AAAAAP//////////////////////////AAAAAAP//////////////////////////4AAAAAAAA///////////////wAAAAAAAAf////////////+AAAAAAAAAAAAAAAAAAAAAAD////////wAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////8AAAAAAAAAAAAAAAA/////////////4AAAAAA//////wAAAAP////////4AB//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/wOAAAAAAAAAAAAAAAAAAAAAAAAAD//8AAP///8AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////+AAAAAAAAAAAAAAAAAAAAAAAA
H///8AAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAAAAAAAAB4AAAAH//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////AAAAAAAD////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAD///////////////////////////////w==", + "sha1": "dca737e269f9d8626d488988c996e06b352c0708", + "sha256": "8ee61f98cea2659f5ab9d8ec444de3b3e843ab02baa7806e96230a64dee93774", + "sha512": "b6e75a0815130a7d690bf587251c88850838f6ae834979e941eabc624529022b181ae7fecba9580193621520c65c416735d85542d5ab635be6bdc6b7c9dd87af" + }, + { + "input": "//+AAAAAAAAAAAAAAAB///////////////////////////8AAAAf//////////////wAAAAAAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAAAAAAAA//////////////////gAAAAAAAAAA///////////////////////////wA/////////4AAAAAAAAAAAP///////////////////////////gAAAAAAAAAAAAAAAAAAAD//////////////////////////////////4AAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAf///////////////AAAAA///////////////P////////////wAAAAAAAAAAAAAAAAAD////////////////4AAAAAAAAAAAAAAAAAAAAAAAA///////////////////////AAAD/////////////////////////////gAAB/////////////////////wAAAAAAAAAAD///////////////////////////8AAAAAAAAAAf///////////4AAAAAAAAAAAf//////////P////////////////////////////wAAP///gAAAAAAAD///////////////////AP///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////4AAAAAH//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAAH/////////wAAAAAAAA/wAAAAAAAAAAAA///w/////////////////AAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAH///////gAAAAAAAAAAAAAAAAB/////////////////////AAAAAAH/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD////////////////4AAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAB//wAAAAAAAAAAAAAH//////////////////////gAAAAAAAAA////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////gAAAAAAAAAAAAAAA/////////////AAAAAAAP////////////////////////////gAAAAf/////////////////////////////////gAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////4AAAAAAAAAAAAAAAH/////////////////////7/////+AAAAAAAAAAAAAAAAAAAAAAf///////////////////////gAAAAAAAAAAAAAAAAAAAAAH////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////gAAD//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAf/////////////////////////gAAAAAAH////////////wAAAAAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////gB/////////////////wAAAH////////////////////+AAAAAAAAAAAAP//////////////+AAAAAAAAAAA//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////AAP////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAAAAAA//////wD////wAAAAAAAAAAP//////4AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////AAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAP////8H////////////////////wAAAAAAAAAAAAAAAAAAAA////////////////////////////////+AAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAH/////////////////wAAAAAAAAAAAAAAAAP/////////////+P//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAf////////4AAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAD/////////////////////////////wAAAAAAAAAAAAAAAAA//gAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAH////////////8AAAAA
AAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH8AAAAAAAAAAAAAAAAAP////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAAAB////////////////////////////////AAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAf///gAAAH8AAAAAAAAAP/gAAAAH///////////////////wH/////////////////////////////////gAAAAAAAAAAAAA////////////////////+AAAAH////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAf///////////////////wAAAAAAAAAAAAAP//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAA///////////////////////////gAAAAAAAAAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD///////gAA////////////////////////AAAAAAAAAAAAAAAAAAAAf////////////////////////////////gAAAAAAP/////wAAAAAAAAAAAAAAB///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAB/////////////////+AAAB///////////////////////////////gAAAAAAAA////////////////gAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAD//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////8AAAAAAAAAAAAAAP/////////////+AAAAAAAAAAAAAAAAAAH//////////gAH///////////////////////8AAAAAAAAAB//////////wAAAAAB////////////////AB////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD///////wAAAAAA/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAf//wAf////////////4AAAAAAAAAAAD/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////4AAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAB///////////+AAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAf////////////////////////////////8AAAAAAAH///////////////////////gAAAAAAAAAAAAAAAAAH///////////////////////////wAAAAAAAAAAAB///////wAAAAAAAAAAAAAAAAAAf///////+AAAAAAA/////////////////AAAH///gAAAAAAAAAAAAAAAAAD//////4AH/////////////////////+AAAAB//////+AAAAAAAB///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAAAAAAAAAD////////gAAH////////////////gB/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAB///////////8AAAAAAAAH////8AAAAAAAAAAAAAAAAAAAAAH////8AAAAAH////////////////////AAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAAA//////////////////////////////AAAAAAAAAAAAAAAAAAAH//////+AAAAAAAAAAAAAAAAAAAAD////////////wAAAAB///////////////////////4AAAAAAAAABwAAAAAAAAAAAAAAAAAAP//////////////+AAA/////////+AAAAAH/////gAAAAAAAAAAAAAAAAAAAAB//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////+AAAAAAAAAAAAAAAAAP//////////////4AAAAAAAAB///////////////////////////////+AAAAP/////////////////////////////////4AAAAAf////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAH////////////////////////////////8AAAAAAAAAAAAAD//////////4AAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAD/////////////////+AAD///////+AAAAAAAAAAAAAAAAAP//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////4AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////4AAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAB////////////////////wAAAAAAAf///+AAB//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///AAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////8AAAAAAD//////////////////8AAAAAAAf//////////AAAAAAAAAAAAAAAAP/////////////AAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////+AAAAD////////////////8AAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAH//////////////wAAAAAAAAAAAB///////////////////////wAAB//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////Af/////+AAAAAA//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAAAAAAAD//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////4AH//////////////////+AAAAAAAAAf///////////////gAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////gf//////////////////AAAAAAAAAAAAAAAAD/////////////////////////wAAAAAAAAAB////gAAAAAAAAAAAAAAAAAAH////////////////+AAAAAAAAf////////////4AAAD//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////", + "sha1": "b8ffc1d4972fce63241e0e77850ac46dde75dbfa", + "sha256": "bc69420ce99aa58de5d5c9ae32c3528b02546347e8f85dca651187142bc2a40b", + "sha512": "05a4482e9c87bc49e5c55a087d2d670fec5e3eb0ed2d7fa9d836485c94b477fa50f027f2013a638d9668c4c8a48770415ef5319460c2f4af1b9f628615ccde8d" + }, + { + "input": "////AAAAAAAP///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAf//////////////////////gAAf//////////////////////4AAAAAAAAAAAAAAAA//////////////////////gAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAD//////////////wAAAAAAAAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAAAAAAAAf////////////////////////wAAAAAAAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf///4H//4AAAAAAAAAAAAD/////v///////////////4AAAAAAAAAP//////+AAAAAAAAAAf///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////+AAAB//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAPwAAAAAAAAAAAP///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////gAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAAAAAAP/////////////8AAAAP////////////8AAAAAAAAAAAAH///////////wAAAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////4AAAAAAAAAAAAAAAAD/wAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAAAAD//////////////////8AAAAAP/////////////4AAAAAAAAAAAAAAAAAAAAAAAAH///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAP////////////AAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////w///////AAAAAAAAAAAAAAAAAcAAAAAAAAAAAAAAAAAAAD///////////////+AAAAAAAAAAAAAAAAAAD/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////8AAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAA//////wAH////////wAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/8AAAAAAAAAB//////////////////4AAAAAAAAAAAAAAAAAAAP////////////////////////////4AAAAAAAAAAAAAAAAAPwAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////gAAAAAAAAAAAAAAAAAAf/////////////////////////////////8AAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAP///////8AAAAAAAAAAAAAAAAAAAAAAf///////////wAAAA////////////gAAAAAAAAAAAD/////////////////////////////////4AAAAAAAAAAAAAAAAAAP/gAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAAAAAAAAH///////////wD////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAA/////////////////AAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAP/////////////
/////////////////+AAAAAAAAAAAAAD//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////+AA//////////AAAAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////8AAAAAB////+AAAAAAAA////////4AAB//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAP//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////AAAAAAAAAAAAAAAAAAAAB//+AAAAAAAAAAAAAAAAAAAAAA////////////////gAAAAAAAB//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////8AAAAAAAAAAAAAAAAAAAAAAAAH///////8AAAAAAAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAAAAAA////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///gAAAAAAAAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAD/////////////////////4AAAAH//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////4AAAAAAAH///AAAAAf////////////wAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////AAAAAAAAP////////////////////////////////AAAAAAAAAAAB//////////////////////////4AAAAAAAAAAAAB///////////4AAAAAAAAAAA///////////////4AAAP/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////wAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAfgAAAAAAAAAA//////////////////////////////////+AAAAAAAAAAAD///////////////////////////4AB/////////////////////////AAAAAAAAAAAAAAAAAAAAAD////////////////AAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////4AAAAAAAAAAAAD///////////////////////////////8AAAAAAAAAAAAAP///////////////////////////////gAAAAAAD////////+AAAAAAAA//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAAAAAAD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAH/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAAAA/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////8AAAAAAAAB/////////gAAAAAAAAAAAAAAD//wAP///////////////////4AAAAAAAAAAAAAAH//wAAAAAAAH//////+AAB////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA//////4AAAAAAAB///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAf///////////////////////8AAAAAAAAAAAAAAf//////////HwAAAAAf///////////8AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////wAAAAAAAAAAAAAAAAAAA////////////+AAAAAAAAAAAAAAAAAAAH///wAAAAAAAAP//////////////////////////////gAAAAAB////////////////////////////////wAAAAAAAAAAAAAAAf//////////////gAAAAAAAAAAA//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAD/////////////////8AAAAAAAAAAAAAAAAAAAAAAAD////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////wAB//////////////////////gAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAP/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAAAAAAAAAAAAAAB///////////////////////////////////gAAA////////+AAAAAAAAAAAAAAAAAAAAAAf/////gAAAAAAAAAAAAAAAAD////////////4AAAAAAAAAAAAAAAAAA//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAP/////////4D////////////////8AAAAAAAAAAAAAAAAAAAAAD//////////+AAAAAAAAAAAAAAAAAAAAAD/////////////////////AAAAAAAAAAAAA///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAH////////////////////////AAAAAAAAAAAAAAAAAAAAAAAH/////////////////////AAAAAAAAA///gAAAAAAAAAAAAAAAAAAAAAAAAAAAH//+AAAAAAAB/AAAAAAAAAAAAAAAB/////
///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAAAAAAf/////////////////////////wAAAAAAAAAAAD////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////gAAAAAAAAAAAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf//////wAAAAAAAAAAAAAAAAAAAAAAD//////////////////////+AAAAAAAB//////////AAP/////wAAAAAAAAAP//////////////////////AAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAH////+AAAAAAAAAAAAAAAAAA/////////////////////////////AAAAAAAAAAH/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAH///////////////////////gP/////////////////4AAAAAAAAAAH////////////+AAAAAAAAAAAAAAAAAAAAH/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////4AAAAAAAAAAAAAAB//////////////////////////////8AAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAAAAAAA///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////+AAAAAD///////////AH//////////////////////////////AAAAAAAAAAAAAAAAAAAAB///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgAAAAAAAAAAf////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////+AAAAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAP8AAAAAAAAAAAAAB/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////4AAAAAAAAAAAAAAAAAP/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "e9c9bf41c8549354151b977003ce1d830be667db", + "sha256": "79b2d4da202168d2c6f7dbff6dba414f71e405731a287a23b58af903f9b1c770", + "sha512": "e6684cb464c37e901848d0dd497df81ee31f709dd8304ccb2158783c981f8a8151f60d67271667b159ab11b0912e63900e99dfd8fdcc6c3d24a93dc899919537" + }, + { + "input": 
"/////////////////////AAAAAAAAA////////////////////4AAAAAAAAAAAAAAAAAAP/////////////4AAAAAAAAAAAAA///+AAAAAAD/////////////////////////wAAAAAAAAAAAf//////////////////////////////////gAAAAAAAAAAAAAAAAAAP//wAAAAAAAAAAAAAAf/////////////////gAAAAH/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/wAAAAAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAAAAAAAAAA//////////////////////8AAAAAAAAAAAAH///////////////////////////////8AP//////////////////////////////////gAAAAAAAAAAAAAAAAAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAH//////8AAAAAAAAAAAf///////////////////////////////4AAAAAH/////////////////////////////////gAAAP///////////////////////////+AAAAAAAAAAf/////////////////+AAAAAAAAAAAAAH//////+AAAAAAAAAAAAAAAAH//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////8AAAAAAAAAAAP/////////////gAAAA///////////////////gAAAAAAAAAAP//////////AAAAAAAAAAAAAAAAAAAAAP/////////////gAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////+f///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////wAAAAB//////////////////////////4AAAAD////////////////////////4AAAP//////////AAAAAP/////////4AAAAAAAAAAAAAAAAAD///////gAAAAAAAAAAAAAAAAAAAP////////////////wAAAAAAAAAAAAf///////////4AAAAB//////gAB/////////Af///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP/nwAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAf//////////////gAAAAAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAAAAAB///////////////////////////////////gAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAAAAP///////////////////////////////+AAAAAAP///wAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////8AAAAP/gP////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAB//8AAAAAAAAAAAAAf//////////////////////AAAAAAAAAAAAAAf/////////////4AAAAAAAAAAAAAAAf//////////AAAAAAAAAAAAAAAAAAAAAAAAB8AAAAAAD//////////8AAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////4AAAAAAAAA//////////////////////+AAAAAAAAAAAAAf/8AAAAAAAAAAAP/////////////////////////////////+AAAAAAAAAAAAB///////////////8AAAAAAAAAAAAAAB///////////////////////4AAAAAAAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////8AAAAAAAAAAAA////////wAAAAAAAAAP////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAcAB/////+AAAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAf/////////////////////////4AAAAAAAAAAAAAAAAAAB////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////AAAAf///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP////////////////wAAAAAAAAAf///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////AAAP///////////////////////////////////AAAAAAAAAAAAAD///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAf////////////////+AAAAAAAAAAAAAAAAAAAP/////AAAAAAAAAAAAAAA///////////////////////gAAAAAAAAAAAAAAAf//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////AAH//////////wAAAAAAAAAAAAAAAAAAAAAA/////////AAAAAAAAAAAAAAAAD/////////////////////////+AAD//////////////////////////////AAAAAAAAAAAAAAAH////////////AAAAAAAAAAH///4AAAAAAAAAAAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////8AAAAAAAAAAAAAAAAAAAP/////////////////////////wAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAAAAAAAAAH//////////////////////////////
///AAAD//////////////////////AAAAAAAAAAAAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/8AAAAAAAAAAAAAAAAAAAB////////////////////////////////+AAAAAAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAAAAAf////+AAAAAAAAAAAB///////////////////wAAAAAAAA///////////////////////////////8AAAAAAAAAAAAAAAAAf//////////////gAAH///////////////////gAAAAAAAAAf/////////////AAAAAAAAD//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////gAAAAAAAAAAA///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/AAAAAAAAAAA//////wAAAAAAAAAAAAAAAAAAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////8AAAAAAAAAAAAAAAAAAB8AAAAAAAAAAAAAAAAAAAAAAD//////////////wAAAAAAAAAAAAAAAAAAAB//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAA//////////////////////////////wAAAAAAAAAAAAAAAD/gAAAAAAAAAAAAAD///////////////+AAAAAAAAAAAAAAAAB///////////+P8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAAAAAAAAAAH////////+AAAAAP//////////////////////////+AAAAAAAAAAAAAAAAB////////////////////////////8AAAAAAAAAAAAAAAD///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP/wAAAAB///////////wAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAP//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////gAAAAAAAAAAAAAH///////////////////////////////gAAAAAAAAAAAAAD///////////////////////////////+AAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAeAAAP4AAAAAAAAP//////////////////////////////////4AAAA/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////8AAAAAAAAAAAB////////////////////8AAAH/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////+AAAAAAAAAAAAAAAAAAB//////////////////wAAAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP///////8AAAAAAAAAAAB////////////////////////4AAAAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAH///////P////////////////////AAAAAAAAAAAAAAAAAAAAf///////////////////////////////wAAAA//8AAAAAAAAAAAAD////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAAAAAAAAAP//////////////AH//////////////////////////////+AAAAAAAAD///////////////AAAAAAAAf///////////////////////+AAAAAAAAAAAAAAAAAAP///////////////////////////////4AAAAAAAAAAAAAAAAAAAAA///////////////////////////////gAAAAAAAAAAH////////////wAAAAAAAAAAAAAAD////////8P/////////////////////+AAAAAAAAP/////////+AAA////////////////v///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAD////+AAAAAD////////////////////////////////AAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////wAAAAAAAAA////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAAAAAAAH/////8AAAAAAAAAAAAAAAAAAAAB//////////gAAAAAAAAAAAAAAB/////////+AAAAAAAAAAAAB////////////////////////////////4AAAAAAAP/////////////////////wAAAAAAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAH/////wAAAAAAAAAAAAAAAD/////AAAAAAAf/////////////9gAAAAAAAAAAAAAAAAB///wAP////////////////////AAAP///AAAAAAP//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////AAAAAAAAAAAAAAAAB///////4Af///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////gAAAAAAAAAAAAAA
AAAAAAAAA//////8AAAAA/////////wAAAAAD///gAAAAAAAAAAAAAAAAAAAAf////wAAAAP///////////////////+AAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAB////8AAAAAAAAAAAAAAAAAAAD///////////4AD//////////////////////4AAAAAAAAAB//////////////////////////////////4AAAAAAAAAAAAAAAAB//////////////4f/////8AAAAH//8AAAAAAAAAAAAAAAAAAB///8AAAAAAAAAAAAAAAAAAAAAAAAAAP////////wAAAAAAAAAAAAAAAAB///////////////AAAAAAB/////////////////////////////wAAAB/////////////////////////////////gAH//////////////////////////8AAAAAAAAAAAAAAAAAAAAA==", + "sha1": "0942908960b54f96cb43452e583f4f9cb66e398a", + "sha256": "068c65431e6010461cd77e3d2859fbc978857d1195dc1506ab1b5c9344e1099f", + "sha512": "b2bf183fbd9e04d480c35d733a2f325a2ee3c936b173d1f925fa545c2dbd02f8a0f0c34b505b8289ce64b621f1f7d53da6b3a5419e7f4f5af531153f1e262137" + }, + { + "input": "AAAAAAAAAAAAAAAAAAAf////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////4AAAAAAAD//8AAAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////gAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////wAAAAAAAAAAAAAAf//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAAAAAAAAAAAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////wAAAAAAAAAAAAAAAAD////////////////gAAAAPAAA/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////gAAAAAAAAAAAD////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAH/////////////////gAAAAAP//////gAAAAAAAAAAf////////////+AAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD/////8AAAAAAAAAAAAAAAAAAAAAAAP//////////gAAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAH///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAf/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAAAAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////wAAAAAAD///////////////wAAAAAAAAAAAAAAAAAB/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////AAAAAAAA////////////AAAAAAAAAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAP//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////+AAAAAAAAAAAf///////////////////4AAAAAA////wAAAAAAAAAAAAAP///////////////wAAAD/////////////f/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////4AAAH//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAAAAAAAAAAAAAAD//////////////////////////////////4AAAAAAAAAAAAAAAAAAAA/wAAAAAAAAAAAAAAAAAAAAAAD////////////////gAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAA//////////////////gAAAAAAf////////+AAAAAAAAAAAAAD/////////////AAAAAAAAAAAAAAAAAAAAP//////////////////gAAAAAAAAAAAAAAAAAAAAAAD///////////////8AAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////AAD///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////gAgAP//////////AAABAAAAAAAAAAAAAAH////////4AAAAAP////////AAAAAAAD/////////////////////////+AAAAAAAAAAAAAAAAAAAA///gAAAD////////////////////////////8AAAAAAAAAAAAAAAAAAB/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////wAAAAAAAAAAAAAAAAAAAB/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAf/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAP//////////////////////8AAAAAAAAAAAAP///gAAAAAAAAAAAD////////////4AAAAAAAAAAH/////////////+AD////////////////////////4AAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAf///////////////////////////gAAAAAAAAAAAAAAAA///////////////8AA/////////////wAAAAAAAAAAAAAAAAAAAAAAAH///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////8AAAAAAAAAAH//////////4AH///AAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////+H///gAAAAAAAAAAAAA/////////////////////////////////4AAAAAAAAAAAAAAAAAAAH////////////wAAAAAAAAAAAAAAAAAAf//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////8AAAAAAAAAAAAAAAAAAAAP///////+AAAAAAAAAAAAAAAAAAAAAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAAAAAAAAAB/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfAAAAAAAAAAf//////////////4AAAAAAAAAAAAAAAA/////////////////////////////gAAAAAAAAAAAAAAAAA/AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////wAAAAAAAAAAAAAAAAAAP/////////////////////////////////+AAAAAAAAAAAAAAP////////////////+AAAAAAAP//////AAAAAAAAAAAAAAAAAAAA////////////gAAf////////+AAAAAAAAAB////////////////////////////////AAAAAAAAAAAAAAAAP/gAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAAAAD////////j////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAAA////////////////gAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAB///////////////////////////////wAAAAAAAAAAAH///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////8AAAAAAAAB////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAf/8AAAAAAAB//////8AAf////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////4AAAAAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAH/////////////gAAAAAP/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////wAAAAAAAAAAAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAD////////////AAAAAAAAAAAAAAAAAA////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAAA////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////+AAAAAAAAAAAAAA///////////////////wAAAH//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////+AAAAAAAB///wAA///////////4AAAAAAf///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////8AAAAAH////////////////////////////////gAAAAAAAAH///////////////////////8AAAAAAAAAAAP/////////wAAAAAAAH//////////////gAAA////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////+AB///////////////8AAAAAAAAAAAAAAAAAAAAB/////////////////////////////////+AAAAAAAAA/////////////////////////////////gAAAAAAAAAP//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAH//////////////gAAAAAAAAAAAH/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAP//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////8AAAAAAAAAAAA//////////////////////////////wAAAAAAAAAAH////////////////////////////+AAAAB/////8AAAAH//////////////////////////AAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////gAAAAAAAAAAAAAf///4AAAAAAAAAAAAAAAAAAAAAP////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//
//////////////////wAAAAAA///////8AAAAAAAAAAAAAH4D////////////////4AAAAAAAAAAAAAAH/4AAAAAAB///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAB//////wAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAf///////////////////////8AAAAAAAAAAAAAP/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////gAAAAA////////////4AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////+AAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAAAHAAAAAAAAf//////////////////////////////AAAAf//////////////////////////////AAAAAAAAAAAAAAAB/////////////gAAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD///wAAAAAAAAAAAAAAAAA//////////////8AAAAAAAAAAAAAAAAAAAAAAAD////////8AAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////+AH/////////////////////AAAAAAAAAAAAAAAAAAAB/////////////////////////////4AAD////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////gAAAAAAAAAAAAAAAAf/////////////////////////////////8AB//////////////////////////////////4AAAAA///////////////////AAAAAA///////////////+AAAAAAAAAP//////////////4AAAAAAAAAAAAD/////////////////////////////AAAAAAAAAAAAAAAAAP///4AAAAAAP//////////////////////////////////wAAAAAAAAAAAAAAD////////////////////AAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAAAAf///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////gAAAAAAAf//////////////////////+AAf////8AAAAAAAAAAAAAAAAAAAAAAH///////////////////////AAAAAAAAAAAAAAAAAAAB/////4A//////////////////////////8H/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////AAAAAAAAP/////////////////wAAAAAAAAAAAAAAAAAA", + "sha1": "fce34051c34d4b81b85ddc4b543cde8007e284b3", + "sha256": "918a1d14de8c5fa363fb3137cf5014020646a1a2235f78ef3ed0d034c74f5761", + "sha512": "6123a636eaaa0de3ff0c09043f9a9d7601f264433c9b3deb5964c42c1892c62f4184271a15ccbd2c017a2e4b4bacab3badbf46393875804a1cf2371e43df643a" + }, + { + "input": 
"////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////8AAAAAAAAAAAAAAAAAAH//////////////////AAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAP///////////////////AAAP//////Af4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAf///+AAAAAAAAAAAAD////////////////////////8AH/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAD///////+AAAAAAAAAB/////////////////////////4AAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAAAAAAAAAAAAAAAf//////wAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////wAAAAAAAAAAAAAAAAAH/////+AAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////8AAAAA//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////+AAAAAAAAAAAAAAAAf/////////////////////wAB////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///+AAAAAAAAB/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////gAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////+AB/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////+AAAAAAAP///////8AAAAAAAAAAAAAAAAAAA////////////////////////gH//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////gAAAAAAAAAAAH///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////4AAAAAAAAAAAAAAAAAA//////4AAAAAAAAAAAAAAAAAH/////////////////4AAAAAAAAAAAAAD/////////+AA//////AAAAAAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////H/////////////4AAAAAAAAAAAAAAAAAH///gAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAAP////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////+AAAAAAAAAAAAAAAAAP///////+AAAAAAf//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////+AAAAAAAAAAAAAAAA////////////+AAAB//////////////+AAAAAP/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////wAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAP//////////4AAAAAAAD////////////////////gAAAAAAAAAD////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAA////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf////8AAAAAAAAAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAAAAAAABwAAAAAAD//4AAAAAAAf//////////////AAAAAAD////////////////4AAAAAAAAAAAAAA//////////wAAAAAAAAAAB////wAAAAAD8AAAAAAAABgAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////A///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////wAAAAAAAAAAAAAH////////////////////gAAAP/////////wAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////+AAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////AAAf/////////////////////////4AAAAAAAAAAAP////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAD///////////////////////////4D///////////4AAAAAAAAAAAAAAAAAAAB///////////4AAAAAAAAAAH////////////8AAAAAAAAAAH/////////////////////+AAAAP/////////4AAAAAAAAAAAAAAAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////8AAAAAAAA
Af//////////////////////AAAAAAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAf////////////8AAAAAAAAAAAAAAAAAAAAAAH////////////////////+AB/////////////////////////////wAAA/////////////////////4AAAAAAAAAAB///////////////////////////+AAAD////////////AAAAA/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAcAAAf///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////AAD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAPgAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////wAAAAAAAAAAAAA/////////+AAAAAAAAH//////////////////////////////+AAAAAB///gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////gAAAA//////////////////////////AAAAAAAAAAP///////AAAAAAAAAAAAAAAAAD/////////////////4AAAAP///////////AAAAAAAAAAAAAAAAAAAAAH//////////////8AAAAAAAAAAAAAAH////////////////////////////4AAAAAAAA//4AAAAAAA//////////////////////8AAAAAAAB////////////////4AAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////wAAAAAAAAH///////4f//////////////////////////wAAD////////////////////////////8AAAAAAAAAAAA//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAAAAAAAD///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/AAAAAAAAAAAAAAAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAAAAAAAAD////////////////////////////f////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////gAAAP///////////////////////8AAAD//////////+AAAAAAAAAAAAAAAAAAH////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////gAD/////////////////8AAAAAAAf///////////wAAAAAH///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////8AAAAAAAAf///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////gAAAAAAAAAAf////8AAAAAAAAAAAAAAAAAAAD/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD//////+AAAAAAAAAAAAAAAD4AAAAAAAAAAAAAAAAAAAAAD////////////8AAAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////4AAAAAAAAAAAAAAAAAAH///////////////////////////wAAAAAAAAAAAAAAB4AAAAAAAAAAAAP/////////////+AAAAAAAAAAAAAAAAB/////////x////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAA/////////wAAP//////////////////////////+AAAAAAAAAAAAAAAf///////////////////////////gAAAAAAAAAAAAAH/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////+AA/////////+AAAAAAAAAAAAP/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////+AAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAA////////////////////////////////gAAAAAAAAAAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAD/////////////////////////////////8AAP4AAAAAAAH/////////////////////////////////AAAD//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////wAAAAAAAAAB////////////////////8AAD//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD//4AAAAAAAAAAAAAAAAAD////////////////4AAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAAAAAAAD////////AAAAAAAAB//////////////////////AAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAH////////////////+AAAAAAAAAAAAAAAAAAAA///////////////////////////////wAH///////////////////////////////////4AAAAAAAAAA//////////////gAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////wAAAAAAAAAAAAAAAAAP//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////8AAAAAAAAH//////////////AAAAAAH////////////////////////gAAAAAAAAAAAAAAAf////////////////////////////+AAAAAAAAAAAAAAAAAAAAH/////////////////////////////+AAAAAAAB///////////AAAAAAAAAAAA///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////+AAAAAAD//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAB//4AAAAH///////////////////////////////AAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////AAAAAAAf/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "61e8916532503627f4024d13884640a46f1d61d4", + "sha256": "ef88b649d012178186dcf0244835232b5b7392e0c1f8f141f5107e9ead559e74", + "sha512": "7b700b34117c7d5d54f678dcbab709b5cb0e43c46ec5909d237d76c915677f6398232a2786b9e2acb44bccf30902ebd21bf5ccb009325ffa2d9fe323af956dd3" + }, + { + "input": "AAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////gAAAAAP///////////////+AAAAAAAB///////////////4AAAAAAAAAH//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////4AAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP///////+AAAP////////////h///////AAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAB+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///+AAAAAAAAAAAAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////4AAAAAAAAAAAAAAAAA///////////////////////////wAAAAAAAAAAAA//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////8AAAAAAf///////////////////////////////////4AAAAAAAAAAAAAAf/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////gAAAH////+AAAAAD//4D///////////////////4AAAAD///4AAAAAAAAAAAAAAAAAAAA////////////////+AAAAAAAAAAAAAAAAAAAAAH//////////////////////AAAAAAAAAAAAAAAAAAAAAAAA///////////////4B//////////////////8AAAAAAAAAAP/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////wAAAAAAAAAAAAB//////////////////////////////////AB///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////+f///////////////////////4AAAB/////////////////wAAAAAAAAAAAAAAH//+AAAAAAAAAAAAAAAAAAAAAAAAAB////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////4AAAAAAAAAAAAAAAAAP////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////8AAAAAAGAAAAAAAAAAAAAAAAAAAAAf//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////8AAAAAAAAAAAAAB//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////+AAAAAAAAAAAAAP//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH////////////4AAAAAAAAAAAAAAAAAAAAA//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////4AAAAAAAAAAAAAAB//////////////wAAAf//////////////////////////////////+AAB////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////AAAAAAAAAAH///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////8AAAAD////////////////8AAAAAf////AAAAAAAAP/////////////AAAAAAAAAAAAAAAAAAAAA////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////8AAAAAAAAAAAAAAAAAAAAAAAAf//4AAAAAAAAAAAAAAAAAAAAAAH////////wAAAB///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////
////////////4AAAAAAAAAAAAAAAAAAAAAAAD/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/+AA//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAAAAAAAAAAAAAAD////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////+AAAAAf//////////////gAAAAAAAAAAAAAAAD//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////+AAAAAAAf/////////gAAAAAAAAAAAAAAAAAAA////////gAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////AAAAAAAAD/////////////////wAAAB////gAAAAAAAAAAH//////////////+AAf////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////8AAAD///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////4AAAAAAAAAAAAAAAAH////////////////////////////////wAAAAAAAAAAAAAAAAAB///////////////////////gAAAAAAAAAAAAAAAD////////////////////wAAAAAAAAAAAAAAAf////////gAAAAAAAAAAAAAAAP//////wAAAAAAAAP////////////4AAAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAP/////////////////////4AAAAAAAAAAAAB//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAH+AAAAAAAAAAAAAAAAAAAAAD////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/gAAAAAAAB//3////////////8AAAAAAAA////4AAAAAAB//////+AAAAAAAAAAAAAAAAAAAAAAAA/////////////////4AA///gAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////wAAAAAAAAAAAAAAAAAAA/////////////////////////////////4AAAAAAAAAAAH/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////wAAAAAAAAAAAAAB///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAH//////////+AAA///////////+AAAAAAAAAAAAD//////////AAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAAD//////////////////8AAAAAAAAAAAAP///////////////////////////////////AAAAAA//////////////////////////8AAAAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////gAAAAAAAAAAAAAAAAAAAAAAD///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////////+AAAAAAAAAA/////////8Af////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8D//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////wAAAAAAAAAH//////////////////wAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAA///////////////////////gAAAA/////////////////////////////////gAAAH////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////9//////////AAAAAAAAAAAAD/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////4H///////////////////////wAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAD////4AAAAB/////////////////+AAAAAAAAAAAA/8AAAAAAAP///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////4AP///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////gAAAAA////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAAAAAAH//////////////gAAAAAAAAAAAAAAD//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////AAAA//////////////////////////////////8AAAAAAAAf/////8AAAAAAAAf///////////////////////////4AAAAAAAAAAAAAAAAAAAAABH///////8AAAAAf/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////4AAAAAP///////////////////////////AAAAAAAAAAD///////////////////A
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////8AAAAAAAAAAAP///4AAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAH////////////////////////wAAAAAAAAAAP///////////////////8AAAAB///////////////+AAAAAAAAAAB/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAD////////////////////////+AAAAAAAAAAAB//////////////////////////+AB///////////////gAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP//////////+AAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////8AAAAAAAAAAAAAAAD///////8AAAeAAAAAAAAAA//////8AAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////4AAAAAf///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////AAAAAAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAf////8AAAAAAAAAAAAAD///4AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAA8AAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////+AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////wAAAAAAAAAAAAD//////////AAAAAD////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////4AAAAAAAAAAAAAAAAAAf//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP//wAAH/8AAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////wAAH//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////gAAAAAAAB///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8AP///////////////////gAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAAAAAAAAAAAAAB////4AAAAAB////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4AAAAAAAAAAAAAD//////////////8A//////8A//////////////////////////////////AAAAAAAAAAAAAAA/////////////////////////8AAAAAAf/4AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAB////////////+AAAAAAAAAf///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAP///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////8AAAAAAAA////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////wAAAAAAAAAAAAAAAAAAAAAAAAA==", + "sha1": "f008d5d7853b6a17b7466cd9e18bd135e520faf4", + "sha256": "326f14fc54954b73d704935b213dc797311f7c8fcd88c238c8ab767286dc3f94", + "sha512": "18bdb647bf2cb2b8817a896757820d7dcc18b0f4c1220b3820cae338fc974253d8f92dfbe3f8c2f20bd7298be0a055684a4af91adc02f080c78d376f7676b175" + }, + { + "input": 
"AAAAAAAAAAAB//////////////8AAAAAAAAAAAAAAAAAAAAAAB/gAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAAAAAAAAAH///+AAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////j////////////////////AAAAAAAAAAAAAAAAP//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////wAAAAAAAAAAAAAAAf////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////wf/////////////4AAAAAH/////////////AAAAB//////////wAAAAAAAAAAf//////////////////////////gAAAAAAAAAAAB/8AB////////////////////////////////4AAAAAAAAAAAAH/////////////////4AAAAAAAAD/////////////////////4AAAAAAAAAAAAB//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////8AAAAf//////////////////////+AAf//wAAAAAAAAAAAAAAAAAAAB///////////////////////wAAAAAAAAAAAAAAAAB+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAA//////////////4AAAAAAAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAD////////////////////Af//////////////4AAAAAAAAAAAAAAAAD///+f//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////gAAAAAAAAAAAAAAAAAAAB//AAAAAAAAAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAP////////////////////////gAAAAAAAAAAAAAAAAAAAB/////////////////gAAAAAAAAAAAAAAAAAAAAAH/////+AAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////gAAAAAAAAAAAAAAAAAP//wAAAAD//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////gAAB///////gAAAAAAAAAAAAAAAAAAAAAAAA//////4AAAAAAAAAAAAAAf/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///4AAAAAAB/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////8AAAAAAAD//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAD///////gAAAAAAAAAAAAAAAAB///////////////////////gH///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////+AAAAAAAAAAAP/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAAAAAAAAAf///gAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAA//////+P////4AAAAAAAAAAAAAAAAAAAAAAAAAf///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////4AAAAAAAAAAAAAAAAAH/4AAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////4AAAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////wAAAAAAAAAAAAAAAA//////AAAAAH///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPgAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////wAAAAAAAAAAAAA////////////D/////////////AAAB/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAA////+AAAAAAAAAAAAAAAAAP//////////4AAAAAf//////////////////+AAAAAAA///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAMAAAAAAAAAAAAB//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB/////wAA
AAAAAAAAAAAAAAAAAAAAAAAA///4AAAAAAAAAAAAAAAAAAAAA//////////////////////////////////8AAADAAAAAf//////////8AAAAAH/////////////AAAAAAAAAAAA///////+AAAAAAAAAAH///gAAAAAH////////////////////////////////////wAAAAAAAAH////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////f/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////gAAAAAAAAAAAAAAAAAAAAD///////////////////gAAAAAAAAAAAAH//////////////////4AAAD////////AAAAAAAAAAAAD////////////////8AAAAAAAAAAAAAB/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAAAAAAAAAAAAAD//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////wAD/////////////////////////gAAAAAAAD////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////wAAAAAP///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAAAAAAf////////wAAAAAAA//////////8AAAAAAAAAAH///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//AAAAAAAAAP//////////////////////gAAAAAAAAAAAAAAD////////////////////////////////AAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////wAAAAAAAAAAf///////////AAAAAAAAAAAAAAAAAAAAAA////////////////////4D/////////////////////////////gAAB/////////////////////wAAAAAAAAAAD///////////////////////////8f///////////4Af///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////8D//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////4H/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////gAAAAAAAAAH/////////wAAAAAAAA////////////////////////////+AAH//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////4A///////////////////////4AAAAAAAAA///////8AAAAAAAAAAAAAAAAAP///////////////4AAAH///////+AAAAAAAAAAAAAAAAAAAD//////////////AAAAAAAAAAAAAA////////////////////////////gAAAAAAB//wAAAH//////////////////////gAAAAAAH////////////////gAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////gAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAf/////////////////////////8AAAAAAAAAAP/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////gAAAAAAAAAAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAAAAA////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAH///////////////4AAAAAAAAAAAAAAAAAAD///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////4Af//////////////////////8AA//////////wAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////+AH///////////////+AAAAB//////////wAAA/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////wAAAAH/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////+AAAAAAAAAAB////4AAAAAAAAAAAAAAAAA///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAA/////4AAAAAAAAAAAA+AAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAP//////////////////////////////
////8AAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAAA/////////////////////////wAAAAAAAAAAAAAA/////////////////////////////////////wAAAAAAAAAAH////////////wAAAAAAAAAAAAAAAAP//////x///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////wAAAAAAAAAAAAAAAAAAAAf////////4///////////////////////////4AAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAA////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////8AAAAAAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////8AAAAAAAAAAAAAAB////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/8AAAAAAAAA//////////////////////////////AAAAAAAAAAAAB////////////////////////////////AAAAAAAAAAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAD///////////////////////////////gA/gAAAAAAP///////////////////////////////gAA////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAAAAH////////////////////wAH//8AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////wAAAAAAAAAAAAAAAAD//////////////+AAAAAAAAB/////////////////////8AAAAAAAAAAAAAAAAAAAAf///////4AAAAA///////////////////8AAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "bd8d2e873cf659b5c77aac1616827ef8a3b1a3b3", + "sha256": "0087e37129b9a2d58b0987a218a3c1be67cb1e08142cbcf889aa617ca3e4640d", + "sha512": "3b2e6dedcb51a29fff2bfaa8067002e407aaf8409b0f1db895de04b9f99135d02c6c1585bb95adcdc580c5246e11a19d96f4eaf6690d7395ecc183a97e5567b9" + }, + { + "input": "///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAAAAAAAH/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAD//////////wAAAAAAAAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////8AAAAAAAAH////////////+AAAA////////////////////////8AAAAAAAAAAAAf/////////////////////////wAAAAAAAAAAAAAAAAAAB/////////////////////////////AAAP////////gAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////4AAAA//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////AAAAAAAAAAAAAAAAAAA/////////////////////////////////////AAAB//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////wAAA////////////////////////AAAAAAAAAAAAAAAAAAAA///////////////////////+AAAAAAAAAAAAAAAAAAAH/////8AAAAAAAAAAAAB//////gAAAAAAAAAAAAAAB/////////+AAAAAAAAB//////////////////////////////4AAAAAAAP/////////////////wAAAAAAAAAAAAAAAAAD////////////////////////8AAAAAAH/wAAAAAAAAAD//////////////////////////////////+AAAAAAA////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////8AAAAAAAAAAAAAAAP////////////////////////////////8AD////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////AAP//////////////////+AAAAAAAAAAAAAAAAAAAAH////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAD///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB////////////4AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////AAAAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAB//4AAAAB///n////////////////////////////////////+AAAAAAAAAAAAAAAAAAB/////AAA////////////////////4AAAAAAAAAAAAAAAH//////////////////8AAAAAAAAAAAAAAAAAAAAAAf////////////////////8AAAAAAAAAAAAAH////////////////////////////////////gAAAAAAAAAAAAAAAAAf/////////AAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAA////////////////////+AAAAAAAAAAf///////////////////////////////+AAAAAAAAAAAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////8AAH//////////////////////////////////4AAAAAAAAAAAAD///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAA/AAAAAAAAAAAAAAAAAH//////////////8H///////////////////////AAAAH//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAH////////////////////////////////8AAAAAAAAAAAAAD////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAD////////////gAA//////////////////////////4AAAAB///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////gAAAAAAAAAD////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//+AAAAAAAAAAAB//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////AAAAAA/////////////////////////////+AAAAf///wAAAAAAAAAAAAAAAH////gAH////4AAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAB///////////////////////////////gAAAAAAAAAAH/////////////////////4AAAAAAAAAAAAAAAAAAAAAD//////Af/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////AAAAAAAAAAAAAAAAAAAAAAAP/wAAAAAAAAAAAAAAAAAAAAAAAAAAA/H/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAAAAAAAAAH//AAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////8AAH/////////////8AAAAAAAAAAAAAD///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAAAAAAAAAH//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////+AAAAAAP//////+AAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAf//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////8AAAAA//////////////wAP///8AAAAAAAD//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////4AAAH//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////4AAAAAAAAAAAAAf/////////////////////////////4AAAAAAAAAAAAAAAH///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAP//////////////wAAAAAAAAP/////////////AAAAAAA////////gAAAAAAAAAAAH////////wAAAAAAAAAAAAAAAf////////////AAAAAAAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////8AAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////4f/////wQAAAAAAAAAAB///////wAAf////+AAAAAA/////////////////////8AAAAAAAAAAAAAB/4A////////////////////////4AAAAAAAAAAAAAAf////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////gAAAAAAAAAAAAAD/////////////////////////AAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////AAAAAAAAAD//AAAAAAAAAAA////////////+AAAAAAAAAP///////8////////////////////wAAAAAAAAAAAAAAAAAH/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///+AAAAAAAAAAAAAAAAAAAAAAAAB///////////////gAAAAAAAAB//////////////////////////////////AAAAAAAAAAAB//////////////////////4AAAAAAAAAAAAAAA////////////////////////4AAAAAD/////////AAAAAAAA///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAA8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////
///////////AAAAAAB//////////////////8AAAAAAAAAAB//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAD//////////////////+AAAAAP////////////////////AAAA//////////////////////////////8AH//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////4AAAAAAAAB//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////wP////////////////////8AAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////8AAAAAAAAAAAf8AAAP//////////////+AAAAAAAAAAAA/////gAAAAD////////////////////////////+AAAAAAAAAAAD/4AAAAAAAAAAAAAAAB//////////////////////////////////gAAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAD/////wAAAAAAAAAAAAAA//////////////8AAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////+AAAAAAAAAAAAAAAAB///////////////////////////////wAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////gAAP///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////wAAAAAAAH///wP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAH//////////gAAAAAAAAAAAAAAAAAD/////////////////////////////////4AAAAAAAAAAAAAAAAA/////////gB//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD/////////wAAAAAAAAAAAAAAAAAAAAAAH/////////8AAAAAAAAAAAAAAAAAH///AAAAAAAAAAAAAAAf////////4AAAAAAAAAAAAAAAA////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAAAAAAAf////////8AAAAAAAAAAAAAAAAAAAAAAAAA/gAAAAAAAAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////+AAAAAAAA//////////////+AAH//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////+AAAAAAAB///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////+AAAAAAH/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///4B////////////////////////////////4AAAAP///////////////////AAAAAAAAD//////8AB/////////////AAAB/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////+AAAAAAAAAAAAAAAAAAAH/////////////////////////////AAAAAAf/////////////////////////////wAAAAAAH/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAD///////////wAAAAAAAf////+AAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////8AAAA/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAD//////////////////wAAAAAAAAAAf//////////////////////////4AAAAAAf////////////////////////H/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAA///gAAAAAAAAAAAH/////////////////////////wAAAAAAAAAAAAAAAA///////+AAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + "sha1": "b25a04dd425302ed211a1c2412d2410fa10c63b6", + "sha256": "93294033c9de9361a3c6cc0df539e2e459f6d2babbbc0623859e18af0d0ccf4e", + "sha512": "fb111732088f48dbd779104f1de037bdc5ce349fbbbbfe2b79965e545ab968b347f6403492eb5adfcc347a283c9a58795ee6d0e758e8014a9702c3f1d58910fe" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAB////////////////////////////+AAAAAAAAAAAAAAAAAAAAB/AAAA//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////gAAAAAAAAD//////////////9/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////8AAAAAAAAAA////////////////////////4AAAAAA///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////gAAAAAAAAAAAAAAAAAAAAAAAD///////////4AAAAAAAD///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAP//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAP////////wAAAAf/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAH///////////wAAAAAAAAAAAAB/////////////////////8AAAAAAAAAAAAAD///////AAAAAAAAAAAAAAAAAAAAAAAA/8AAAAAAAAAAAAAAAAAAAAAA///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////wAAAAAAAAAAAAAD/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////AAAAAAAAAAAAAAAf///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////wAAAAAP///////////4AD///////4AAAAAAAAA/////////////////////////8AAAAAAAAADgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////gAAAAAAAAAAB////////////////4AAAAAAAA/////////////////////+AAAAAAAAAAH/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////8AAf//////////////////////+AAf/AAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAB////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAAAAAAAAAAAAAAAAAAAAAf////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////gf///////////8AAAAAAAAAAAAAAAAAAAAH///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////8AAAAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////gAAAAAAAAAAAAAAP//////////////////////////////////+AA/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////+AAAAH///////////////////7////////AAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////4AAAAAAA///////////////////////8AAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAH////////////////////gAAAAAAAAAAAAAAH/////////////////////gAAAAB////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////P//AAAAAAAAAAAAAAAAAA/////////////////////////////////4AAAAAAAAAAAAB////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAf/4AAAAf//////////////////////AAAB/////////////gAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////wAAH/////4A//////////////////////////////4AAAAAAAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////gAAAAAAAH///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD/////8AAAAAAH///////////////+AAAAAAAAAAAAAAAAAAAAAAH+AAAAAA//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAB//////////////////////////AAAAAAAAAAAAAAB/////////////+AB/////////////wAAAAAAAAAAAAAAAB//////////////AAAAf////////////////////////////////////4AAAA/////////////////////////8AAAAAAB//////////////////////////////////+AAAAAAAAA
AAAAAAAAAA//////////////////////////////////gAAAAAAAAAAAD/////////////////8P///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAAAAAAAAAAAAAAf////////////////////////////4AAAAAAAAAAAAAAAAA//////////////////AAAH///////////////////////////AD/////////////////////////////+AAAAAAAAAAAAAAAAf///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH///////////////4AAAH////wAAAAAAAAAAB///////////////////////////4B/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////8AAAAAAAD///////////wAAAAAAAAAAD///////////////////////////////////+AAAAAAAAAAAB//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAP/////////////gAAAA////////////////+AAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAD////gAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////n//////////AAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAf/////////////////////////+AAD///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//4AAAAAAAAP////4AAAAAAAAAAP/////////wAAAAAf/////////gAAf/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////+AAAAAAAAAAAAAAAAAAD/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////gAAAAAAAAAAAAAAAAAAAAD///////////////////gAAAAAAAAAAAAAAAAD////////////wAAAAAAAAAAAAAAAAA////gAAAAAAf/////////+AAAAAAAAAAAAAAAH////////////wAAAAAAAAAAAP//////////////////////////////////8AAAAD////////////////gAAAAAAAAAAAAAAAAf///////////////////////////////8AAAB/4AAAAAAAAAAAAAAAAAAAAAAAAP/////gAAAB//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//4AAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////8AAAAAAAAH///////////////wAAAAAAH////4AAAAAAAAf//////////AAAAAAAAAAAAAAAAAB////////////////////////////////////4AAAAAAAAf////////////////////8AAAAAAAAAAAAAB///////////////////////////////4AAP////////+AAAAAAAAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////gAAAAAAAA//////////4AAAAAAAAAAAAAAAAAAAAB///////////////////8f////////////////////////////8AAAP////////////////////+AAAAAAAAAAAf///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////wAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAB//////////////////8AAAAAAAP/////////gAAAAAAAB//////////////////////////8AAA///////////////////////////////////8AAAA/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAH////////wAAAAAAA/////////////////8AAAAAAAAAAAAAAH//+AAAAAH//////////////////AAAAAAAAAAAAAD/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////4AA/4AAAAAAAAAAAAAAAAAAAAAAf/////+AAAAAAAAAAAAAAAAH//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////8AAAAAAAAAAAAB///AAAH////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAAAAAP////////8AAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////+AAAAAAAAAAAAAAAAAf//////////////////////+AAAAAAAAAAP///////////////wAAAA
AAAAD////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAAAAAAH/////////gAAAAAAAAAAAAAB/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////gAAAAAAAAAAAAP//////////////////////////////AAAAAAAf/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAB+AAAAAAAAAH////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAA/////////////////////////////////4AAAAAAAH4AAAAAAAAAAAAAAH//4AAAAAAAAD/wAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////8AAAAAAAAAAD//////////////////////////////////AAAAAAAAAP//////////////////////////////8AAAAAAAAAAAAAAAAD//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////gAAAf//////////////8AAAAAAAAAAAAAAAAAAAAA//////////////////////8AAAB/////////8H////////////////////gAAAAAAAAAf//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAAf////////////////8AAAAAAAAAAAAAAAAAAAAAAA//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////gAAAAAAAAAAD////////////////8AAAADgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////8AAAAAAP//////////////////+AAAAAAAAAf///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////8AAAAAAH//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA==", + "sha1": "a404e21588123e0893718b4b44e91414a785b91f", + "sha256": "49640215294d9263bc464538c3c29e42edea637d1427c2f04ebcd828d6fcb480", + "sha512": "ffa4fc1884d61bb097c2481cbfa1e247498627458b792c0abb37d6923345bbcaf51c9720bd4e81109d54e86f822d6cb8e3786876868a1097dd18a34b1f129892" + }, + { + "input": 
"/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////8AAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////wAAAAAAAAAAAAAAAAAAD////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAHwH/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/wAAAAAAAAD////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////+AAAAAAAAAAAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAD//////8AAAAAAAAAAAAAAAAAAAH/////////////////AAAAAAAAP+AAAAAAAAAAAAAAAAH//////////+AAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAA//////////////////////////////+AAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAAAB//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////gAAAf///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////wAAAAAAAAAAAA//+AAAAAAAAAAAAAAAAAAAAAAAAH//////////AAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAHgAAAAAAD////wAAA/////////////////////////////////+AAAAAAAAAAAAAAAAA///gAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAB/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////AAAAAAAAAB/4AAAAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAAAAAAAP///////////////////AAAAAAf/////////8AAAAD///////////////wAAAAAAAAAP///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////+AAAAAAAAAAAAAAAAH/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf////8f///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////+AAAAAAP///////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAAAf///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfgAAAAAAAAAAAAAAAAAAD///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////+AAAAAAAAAAAAAAP//////////////////////////8AAAAAAAAAAAAP/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////AAAAA//////////////////////////////////+AAAAAAAAAAAAA//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAf///////////////////////AP////8A////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////+AAAAA//wAAAAAAAAAAAAAAAAAAAB///////////////gAAAAAAAAAAAAAAAAP/////////////////////+AAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////4AAAAAAAAD/////////////////////////////8AAAAAAAAAAAAAAAAAAAB//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////gAAAAAAAAAAAf//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////8AAAAAAAAAAAAAAB/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAD////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////AAAAAAAAAAAAAAAAAAAf/////////////////////////+AAAAAAAAAAAAAAD////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////AAAB////////////////////////////////8AAAAAAAAAAAAAAAAAAAAA////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAP/////////////AAAAAAAAAAAf//AAAAAAAAAAAAAAAAAAAAAAAAH////gAAAAAAAAAD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAB////////wAAAAAAAAAAAAAAAAAAAP//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD//////
//////////////////////+AAAAAAAAAAD/////////8H////////////////////////////////gAAf//////////4AAAAAAAAAAAAAAAAAAAAH///////////////////////////////wAAAAAP////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/4AAA///////////////4AAAGAAf////////////+AAAAAAAAAAAAAAAAAB////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAAAAAH///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAB////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////wAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAAAAAAAAAAAAAAAAAA4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////g/////////////4AAAAAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////wAAAAAAAAAAAAAAAAAAAAD/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAB////////8AAAAAH/////AAAAAAAAAAAAAAAAAAAB/////AAAAAAAAAAAAAAD//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////+AAH///////////gAAAH////+AAAAAAAAAAAAAD////////////////////////////////////AAAAAAAAAAAAAP///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////AAAAAAAP///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAA///////////////////gAAAAAAAAAAAAAH////////4AAAAAAAAAAAf//////gAAAAAAD///////////wAAAAAAf/////////////AAAAAAAAAAf//////////////////wAAAAAAD/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////4AAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAAAAAAAAAAAAH/////////////////////AAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAH//////wB////gAAAAD///////////////////wAAAAAAAAAAB+//////////////////////4AAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////+AAAAAAAAAAAD/////////////////////////AAAAAAAAAAAAAAAAAAAAD//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////wAAAAAAAD/wAAAAAAAAAD////////////4AAAAAAAAP/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////4AAAAAAAAAAAAAAAAA////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//wAAAAAAAAAAAAAAAAAAAAAAA//////////////8AAAAAAD///////////////////////////////+AAAAAAAAAAA//////////////////////AAAAAAAAAAAAAAAH//////////////////////8AAAf///////+AAAAAAH/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////wAAAAAAAAAAAAAAAAAAAAAAAAB4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////gAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////gAAAP//////////////////gAAAAAAAAP/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAAAAAAAAAf/////////////////8AAAB//////////////////4AAB////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAH///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////wP//////////////////8AAAAAAAAAAAAAf//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////8AAAAAAAAH////////////////////////////////////+AAf////////////8AAAAAAAAAAAB/////////////////////////////////////AAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////wAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////AAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////gAAB//////////////4AAAAAAAAAAAAAH//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////4AAAAAAHAAAAAAAAH//////////////////////////wAAAAAAAAAAAAAAAAAAAAf///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////4AAwAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAf///////////////////////+AAAAAAAAAAH/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////AAAAAAAf/////////////////////////////////////wAAAAAAAAAAAAAAAAAB///////////////////////gAAAAAAAAB/////////////////////8AAAAAAAAAf///////////////AAf/////////////8AAAAAAAD///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAH////////////////////////////gAAAAAA////////////////////8AAAAAAAf///////////////////////j///////////////AAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAD/////gAAAAAAAAAAAA//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////AAAAAAAAAAAAA////////AAAH//////////////////////////////////gAAAAAAP//////AAAAAAAAAAAAAAAAAAAAAD/////////////////////////////wAA/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//wAAAAAAAAAAAAAAAAA///////wAAAAA//////////////////////////////////////AAAAAAAAAAAAH///wAAAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAAP///////////////////////////////////4AAAAAAD//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "a1e13bc55bf6dad83cf3aabda3287ad68681ea64", + "sha256": "ce5cfa5b3b0485805cf5bcc8c24594a6b6fec9249698d317ce20bc84d857eafe", + "sha512": "a2e8bb021193a23df9f3a1778735827121fdc94f80a9b603f650465c8f62feac24044b73cbbfbcc2aa351b45381eba3ac8a878f284fc76a5d8d25ac4934b1dea" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////AAAAAAAAAAAAAAAAAAAAAD/////////wAAAAf////+AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////8A////////////////////////////////4AAAAAAAAAAAAAAAAAAAD//////////////////wAAAAAAAAAf////////////////////////4AAAf/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////gAAAAAAAAAAAAAB///AAAAAAAAAP////////////////////////gAAAAAAAAAAAAB//////8AAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAP////////////////////////////////4AAAAAAAAAAAAAB/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAP//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAAAH/////////////////////////////////////+AAAAD////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAf///////////////////////8AAAAAAAAAAB///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////8AAAAAH////////////AAAAAAAAAAAAAAAAAAAAAAD//////////////////////wAAAAAD//4AAAAAAAAAAAAH////////////////////////////+AAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAH///////+AAAAAAAAAAAD///////////+AAAAAAAAAAAAAAAAAAAAP////////////////////////////////////+AAAAAAAAAAAAH////gAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAA/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////8AAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////8AAAAAAAAAAAAAAf///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////8AAAAAD//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////AAAAAAAAf///////////////////////4AAAAAAB/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////4AAAAAAAAAB///////////////gAAAAAAA/////////////////////+AAAAAAAB/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////x///////////////////////4ABwAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAH//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf4AAAAAAAAAAAAAAAAAAAAAAAAAAf/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////gAAAAAAAAA//////////////+AAAAAAAAAAAAAAAAAD/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////wAAAAAAAAAAAAH/////////////////////////////////f/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////AAAP//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////AAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////gAAAA//////////////////////wAAAf//8AAAAAAAAAAAAAAAAAAAAAAAf///////////////////+AAAAAAAAAAAAAf////////////////////gAAAAf///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////AAAAAAAAAAAAAAAAAAAAAAAD////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////wP/wAAAAAAAAAAAAAAAAAAAAfwAAAAAAAAAAAP/////////////////////4AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAf/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/8AAAAP+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////gAAAAH//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////4AAAAB//////AAAAAAAAAAAAAAA//////////////////////AP///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////wAAAAAAAAAH//////////8AAAAAAAAAAAAAAAAAAAAAAAB//////////////8AAAAAAAAAAAD///////////gAAAAAAAAAAAAAAA//////////////gD////////////////////////////////////wAAAf////////////////////////+AAAAAD//////////////////////////////////8AAAAAAAAAAAAAAAAAAB////////////////////////////////8AAAAAAAAAAAH/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////wAAAAAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAAAA////////////////8H/////////////////////////8AP////////////////////////////gAAAAAAAAAAAAAAB//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAf//////////////4AH///8AAAAAAAAAAAf/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////AAAAAAAAAD//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////gP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAP///////////AAAAAAAAAAAAAB////8AAAAAAAAAAAAAAAB///////////AD//////////////////AB//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////gAAAAAAAAAAAAAAAAAAAGAAAAAAAD/////////////////////+AAAAAAAAAAAAAAAAAAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAf/wAAAAAAAAAAAAAAAAAAf/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////4D/////+AAAAP///////4AAAAAAAH///wAAAAAAAAD//AAAAAAP//////////////////////////////////+AAAAAAAAA/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAD////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAAAAAAAA///////////////4AAAAAAAAAAAH///////////////+AAAA/////8AAAAAAAAP/////////////8AAAAAAAAAAAf/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAAAA///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//8D///////////////////////+AAA////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////w///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAB/////AAP//////AAAAAAAAAAB///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAAAf///////////////////wAAAAAAAAAAAAf//////////////////////////////g/////////4AAAAAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////AAAAAAB/////////AAAAAAAAAAAAAAAAAAAAD//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////AAAD/////////////////////gAAAAAAAAAAH///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////AAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////+AAAAAAAAAAAAAAAAA
AAAAAAf////////////////AAAAA/////////+AAAAAAAAH////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////8AAAAAAAB///////4AAAAAAAAAAAAAAAAAf////////////8AAP//wAAAAAAAAAAAAAAAAH////////////4AAAAAAAAAAAAf//////////////////////////AAAAAAP/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////gAAAAAf///////////////+AAAAAAAAAAAAAAAAA/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////4/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wH//////////////////////AAAAAAAA//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////gAAAAAAAAAAAAAAAAAAAAAAf///////8AAAAAAAAAAAAAAAD//////wAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAB/////////////gAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////AAAAAAAAAAAAAAAAH///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////P/////////////A///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAD////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////AAAAAAAAAAA///wAAAAAAAAAAAAB///////////////////gAAAAAAAAAAAAAAAAAAAB//8AAAAAAB8AAAAAAAAAAAAAAAAAAAD////////wAAAAAAAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB/////////////AAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAH////////////////////////////////////+AAAAAAAAAAA=", + "sha1": "d5fd35ffabed6733c92365929df0fb4cae864d15", + "sha256": "c9ac43870e02c7b36bb1e7ba3ce2e234507c0076f8a77494f268777edf5ebffc", + "sha512": "8223437b80c6b88e5397f138b9831fb3c06db9ea99ac4cc9e49e082b7b648c58a4dab10d089530d08d9afedd0a7facb27331f2ef51879e7c04758f30a1b7e968" + }, + { + "input": 
"/////////gAAAAAAAAAAAAAAAAf8f////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAP//4AAAAAAAAAAAAAAAAD/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////+AAAAAAAAAB/////////////////////////gAAAAAAAAf///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////4AAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////4AAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////AAAB////////////////////////////AAAAAAAAAAA////////////////////////////////gAAAAAAAAAAAAAAAAP//////////////////8AAAAAAAAAAAAAAAAAAAA//////////////////////////Af////4AAAAAAAAAAAAAAAAAAAAAAAAAAAD8AAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////4AAAAAAAAAAAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAf////4AAAAAAAAAAAAAAAAAD//////////////8AAAAAAAA/////////////////////////////////////4AAAAAAAAAAAAAD////////4AAAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAD////////////////////////////+AAAAAAH///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB//wAAAAAH////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////D//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////gAAAAAAAAAAA/gAAAAAAAAAAAAAAAAAAAAAAAB///////+AAAAAAAAAAAAAAAAAAAAAH/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////+AAAAAD/AA/////////////////////////////////AAAAAAAAAAAAAAAH/AAAAAAAAAAH////////////////////////////AAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////wAAAAAAAAP/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAB////////////////+AAAAAAAAAAAAAAAAAAAAD///////////////////wAAAAAH//////8AAA///////////////8AAAAAAAAAD/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAD//////////////////AAAAAAAAAAAAAAAAAAAAAAAH///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////gAAAf////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/wAAAAAAAAAAAAA///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAAAAAB/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////+AAAA//////////////////////////////////AAAAAAAAAAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAH///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////wAAAAH/AAAAAAAAAAAAAAAAAAAAH//////////////AAAAAAAAAAAAAAD//////////////////////gAAAAAAAAAAAAAAAAf//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////gAAAAAAAH///////////////////////////AAAAAAAAAAAAAAAAAAAP/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////gAAAAAAAAAAP////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAA///////////////////////////////////8AAAAAAAAAAAAAAAAAAD////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////8AAAAAAAAAAAAAAAAP
////////////////////////wAAAAAAAAAAAAH////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////+AA//////////////////////////////wAAAAAAAAAAAAAAAAAAAAD//////////////////////4AAAAAAAAAAAAAAAAAAAAAD//////////+AAAAAAAAAP//gAAAAAAAAAAAAAAAAAAAAAf///AAAAAAAAB///////////////////////////////AAAAAAAAAAAAAAAAAAAAP/////wAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////AAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////wAAP/////////+AAAAAAAAAAAAAAAAAAP/////////////////////////////4AAA///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////4AAf/////////////+AAA/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////gAAAAAAAAAAAAAAAH/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAA/////////////////////////////////wAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////8AAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////AAAAAAAAAH/gAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////4AAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAAAAAAAAAAP/////8AAAAD//8AAAAAAAAAAAAAAAAAAAH///AAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///8AD////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////4AAAH//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////AAAAAAAD/////////////////////////4AAAAAAAAAAA//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAB/////////////wAAAAAAAAP////////4AAAAAAH//////gAAAAAAAAAA////wAAAAAAAAAAD//////4AAAAAAAAAAAAAAAAA////AAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//wAAAA/////////////////gAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////wAAAAAAAAAH////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////AAAAAAAAH////////////////////////+AAAAAAAAAAAAAAAAAAB//////////////wAAAAAAAAAAAAAAAAAAAAAAAA/////////////////wAAAAAA+AAAAAAAAAP////////////gAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////4AAAAAAAAAAAAAAAAf////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf4AAAAAAAAAAAAAAAAAAAAAH/////////////wAAA/////////////////////////////8AAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAD/////////////////////v///////gAAAAf///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////gAAAAAAAAAAAAAAAAAAAAAAeAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/8AAAAAAAAAAAAAAAAAAAAAAAAf///////////////////g//////////////////+AAAAAAH/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAAP/////////////////AAH///////////////8AAf/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+A
AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////4AAB////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////wP////////////////gAAAAAAAAAAAAD//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////8AAAAAf//////////////////////////////////D//////////8AAAAAAAAAAAB////////////////////////////////////h+AAAAAAAAAAAAAAAAAAAAAAAAAA///////wAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAA//////////////////////gP//////////////AAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////+AAAAAA////////////////////////////////////4AAAAAAAA//////////////////////////AAAAAAAAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////g//////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////AAAH//////////////////////4AAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAf///////////////////////////////////8AAAAAAAAAAAAAAAAAAf////////////////////AAAAAAAAAD///////////////////+AAAAAAAAH////////////+P/////////////AAAAAAP//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////AAAAAAAAAAAAAAAAAAAAA///////////////////////////AAAAAAB//////////////////AAAAA//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////8AAAAAAAAAAAAAAAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAH/8AAAAAAAAAA/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////gAAAAAAAAAAH///////4AAA/////////////////////////////////AAAAAH//////gAAAAAAAAAAAAAAAAAAAA/////////////////////////////8D///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAA/////8AAAD///////////////////////////////////gAAAAAAAAAAB///8AAAAAAAAAAAAAAAAAAH//////////////////////////w==", + "sha1": "c12e9c280ee9c079e0506ff89f9b20536e0a83ef", + "sha256": "30eb195e3aad4c288af76c66e26c6096f5f7de1b56b43d638ab7119d73cfd214", + "sha512": "5969edd5e6f7413f921bedb46e63077e0456fdbebea85324f7dbc40b21efda8e05de1c9fe32dd42d777e800ca2b502db8ff5d8117b07cca4a431821456309145" + }, + { + "input": 
"AAAAAAAAAD//////////////////////////////////8AAf/////////////////AAAAAAAAAAAAAAAAAAAAAAAA//4AAAAAAAAAAAAAAAAAAAAAAAAAA///4AAAAAAAAAAAAAAAAAAAAAAAf////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////wAAAAAAAP/8AAAAAP//////////////////////8AAAAAAAAAAAAAAA//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////+AAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAAAfwAAAAAAAAAAAAAAAAAAD//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf///////////wAAH//////wAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAH////////////////////////gAAAAAAAAAAAAAAAAAAAAAD///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////4AAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAB////////////////////////////////////gf//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////+AAAP//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAP//////////////////////wAAAAAB///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAP//////////////////AAAAAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAAH///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAP/////gAAAAAAAAAAAD/////////////////////4AAAAAAAAAAAAAAAAAAAD//8///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAB///////////////8AAAAAAP///////////gAAAAAAAAAD//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////gH///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////gAAAAAAAAAAAAAAAAAAP/////////////////////////////////////wAAAAAAAAAAAAAAAAAH/////////wAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////+AAAAAAAAAAAAAAAAAB////////////////////////////4AAD/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////+AAAAAH/////4AAAAAAAAD///////////////////////////////////4AP/////+AAAAAAAAAAAAAAAAAAAAAAH///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////+AAAAAAAAAAAAAH//////4AAAAAAAAAAAAAAAAAAAAAP////AAAAAAAAB//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////AAAAAAAAAAAAAAAAAAAAAAAH////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////gAAAAAAAAAAAAAAAAAAAf/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////4AAAAAAAH/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////wAAAAAAAAAAAAf/////8AAAAAAAAAAAAAD////////////////8AAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAD//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/8AAAAAAAAAAAAAAAAAAf//////////////////////////////////4AAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/wAAAAAAAAAAAAAAAAAAAA//gf/////////////////////+AAAAAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAB/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////8AAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAf//////4AAAAAAAAAAAAAAAAAAAAAH////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////8AAAAAAAAAAAAAAAAAA/////////////////////////////////////4AAAAAAAAAB//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/gAAf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAf///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////+AAAAP////8AAAAAAAAAAAAA/////////////////////gH/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////gAAAAAAAAH/////////AAAAAAAAAAAAAAAAAAAAAD//////////////4AAAAAAAAA/////////////////////////////////////gAAAAAAAAf//////////////gAAAAAAAAAAAAAP/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///AAAAAAAAAAAAAAAAAAAAAAAAAD///+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAAAf////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////8AAAAAAAAAAAAAAD////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAP/AAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////AAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////+AAAAAAAAAAAAAAP//////////////////////////////////////4AAB///////////wAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAA/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAH////wAAAAAAAAAAAAAAD//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////gAAAAAAAAAAAAAAAAABgAAAD///////////////////wAAAAAAAAAAAAAAAAAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAAAA/wAAAAAAAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//wAAA////+AAAAAP8AAAAAAAAf8AAAAAA//////////////////////////////////8AAAAAAAAB////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAA/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////8AAAAAAAAAAAAAAD////////////8AAAAAAAAAAB//////////////4AAAD///8AAAAAB////////////4AAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//3//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAf/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///8AAAAAAAAAAH////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////8AAAAAAB//////////////////wAAAAAAAAAAH/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////gAAAAAAAAAAAAAAAAAAAAAA/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////gAAAH///////AAAAAAAAAAAAAAAAAAAB//////////////////gA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////wAAA/////////////////////4AAAAAAAAAAB///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAAAB////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////+AAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////gAAAAAAAAAAAAAAAAAAAD////////////////////////////gAAAAAAAAAAAAAAAAAAAA//////////////wA/////////+AAAAAAAAH/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////AAAAAAAP///////AAAAAAAAAAAAAAAAAD///////////4AP//////////////////////////////////////8AAAAAAAAAAAAAAAf///////////wAAAAAAAAAAAf/////////////////////////gAAAAD//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////AAAAAf///////////////+AAAAAAAAAAAAAAH//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////8////////////////////AAAAAAP/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAAAAAAA//////AAAAAAAAAAAAAAAA///wAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAP//////////////////8AAAP///////////AAAAAAAAAAAAD/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////+AAAAAAAAAAAAAAAH////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAH////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////", + "sha1": "e22769dc00748a9bbd6c05bbc8e81f2cd1dc4e2d", + "sha256": "338225e3b94025d2b327d72ed3d763a66856e1d1ebcb632bc4d8752000ad9966", + "sha512": "7de4de5d39e0f1efc62d9f5343f1b77657281adb46fc189813196eed279ab0cee8a7fa2cb58ebd31f906c11f82418c83f374055cb745c715c5532ac0f62a6899" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAA//gAAAAAAAAAf///////////////AAAAAAAAAAAAAAAAAcAAfAAAAAAAAAAAAAAAAAAB//////gAAA//////////////////////////////wAAAAAAAAAAAAAAAAAAAAf//////////AAAAAAAAAAAAAAAD/////////////////4AAAAAAAAAAAD/////////////////////////////////////AAAAAAf///////AAAAAAAAAAAAAAAAA///////////////////////////////////////4/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB/AAAAAAAAAAAAAAAB/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////8AAAAAAAAP////////////////////////AAAAAAAD//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//wAAAAAAAAH////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////+AD///////////////////////////gAAAAAAAAAH///////////////////////////////8AAAAAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfwAAAA//////////////////////////+f////AAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////+AAAD////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////AAAAAAAAAAAAAAB//////////AAAA////////////////+AAAAAAAAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////+AAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAAAAAAAAAAAAAAB///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////H///8AAAAAAAAAAAAAAf//////////////////////////+AAAAH/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAD//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////wAAAAAP///////////////////AAAAAAAAAAAAAAAAAP//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////3//////////AAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAAAAP////////////////////////////////AD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////AAAAAAAAAAAAAAAD///////////////////gAAAAAAAAAAAAAAAAAAB//////AAAAH/4AAAAAAAAAAAAAAAf/////////gAAAB////////////////////////////wAAAAAAAf////////////+AAAAAAAAAAAAAAAAAAf//////////////////////AAH/////////////////////////////////////AAAB////////////////////////////4AAAAAAD/8AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////+AAAAAAAAAAAAAP//////////////////////////gAf///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAP//////////////////////////////4AAAAAAAAAAAAAAAAAAAAf//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8AAAAAAAAAAAAAAAAAf/////////////////////////4AAAAAAAAAAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////+AAAAAAAAAAAAAAAAAAAAAH//////////////////AAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////gAAAAH////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////wAAAAAAAAAAAAAAAAf////wf///////////////////8AAAAAAAAAAAAAH////////////gAAAAAAAAAAAAAAAAAAAAAD//////////////8AAAAAAAAAf///////////////////////////////4AAAAAAAAAAAAAAAP///////AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////gAAAAAAAAAH//////////////////////////////AAAAAAAAA//////////
4AAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////4f/////////////////////////////+AAAAAAH//////////////////////////////+AAAAAAAAAAAAAAAAAAf///////////////////////////////4AAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////4AAAA/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAB//////////////4AAAAAAAAAAAAAAAAAAAAAAAf///////////AAAAAAAAAAAAAf////////////////////////////////wAAAAAAAAAAAAAPgAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAAAAAP////////////////////gAAAAAAAAP///////4AAP///////////////////wAAH///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////gAAAf/////////////////8AAAAAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////4AAAAA/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//4AAAAAAAAAP///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/gB/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/wAAAAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////AAAAAAAAAAAAAD//////////////8AAAAAAAH////////////////////////wAAAAAAP//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAA////////////////////////////4AAAAAAAAAAAAAAAf////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////8AAAAAAAAAAAAAA//////////////////////////8AAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////+AAAAAAAAAAAB///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAAAD////////////////4AAAAAAAAAAAAAAf/////////////////////gAAAH///+B///////////////////8AB///////////4AAAAAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////wAAAAA///////////////////////////////////4AAAB///////////////////////////////////////gAAAAAAAAAAAH////////////////////////////////+AAAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wAAAAAAD///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAAAAAAAAAAAAAAAAAAA/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////+AAAAAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAB////////+AAAAAAA///////AAAAAA//////////gAH////////+AAAA////////////////gH/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////+AAAAAAAAAAAAAAAf//////////////////////////////////4AAAAAAAAAAAAP/////////////////+AAAAAAAAAAAAAAAAAAAAAA//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////f//////wAAAAD//////////////////////////////////////gH///4AAAAAAAAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////+AAAAAAAAAAAAAAAAAB////////AAAAAAAAAAD///////////////////////////gAAAAAAAAAAAf/////AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////AAAAAAAAAAAAAAH//////////////////////gAAAAAAAAAAAAAAAf////7////////4AAAAAAAAAAAAP//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////4AAAAAAAAAAAAAP///////////////wAAA////////////////////////////////8AAAAAAAAAAAAAAAAAAAAP////////////4H///////////////////////////gAAAAAAAAB////////////////////wAAAAAAAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAH//////8AAAP//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////8AAAAAAAAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAD/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAP/////////////////////+AAAAAAAAAAAAAAAAH////////////////4D/////////////+AD///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////8P/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////gf//////////////AAAAAAAAAAAAAH//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///4AAP////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAD///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB/////gA////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAf/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAAAAAAAAAH////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////wAAAAB//////////////////////////////////wAAAAAAAB/////////////////////////gAAAAAAAAAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////gAA/////////////////////8AAAAAAAAAAP///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////wAA///////////////////////////////////gAAAAAAAAAAAAAAAAAD//////////////////4AAAAAAAAAf//////////////////AAAAAAAA///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////4AAAAH/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////4AAAAAAAAAAAAAAAAAAAB/////////////////////////4AAAAAAP///////////////4AAH////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////+AAAAAAAAAAAAAAAAAAAAAAAA////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////4AAAAAAAB//////////////////////////////////8=", + "sha1": "f29835a93475740e888e8c14318f3ca45a3c8606", + "sha256": "69b6cc0729b2d2877d46a08f3c251ae18f043949a33797c3027668b23c969d68", + "sha512": "a10cacb49c9425b942e716f4640fca68421eafb13aa318b0c80e19b5b30f82b3395f28ce18d7f5865d16a3f61934918d742b4ffda2f34a2999298078e058bb47" + }, + { + "input": 
"//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////wAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///wAAAAAA///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////4AAAf/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////wAAAf////////////////AAAAAAAAAAAAAAAAAAAAAAAA/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////4AAAAAAAAAAAAAAAAD/////////////////////8B/////////////////////////AAf//////////////////////gAAAAAAAAAAAAAAAAAAAAAAH////////////////////////8AAAAAAAAAAAAAAAAAAAAAP//////P/////AAAAAAAAAAAAAAAAAAAAAA/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////4AAAAAAAAAAAAAAAf/////////////////+AAAAAAAAf/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////4AAAAAAAAAD//+AAAAD///////////////////////AAAAAAAAAf////4AAAAAAAAAAAAAAAAAAAAAAAH////////////////////8AAAAAD////////////////////////////wAAAAAAAAAf////////////////4AAAAAAAAAAAAAAAAAAAAAAAAf////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////////gAAAAA/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB8AAAAAAAAAAAAAAD//////////////////////////////////////AAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAAAAAAAAAD//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAP///////////////////////+AAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAf///////////8AAAAAAAAAAAAAAAAAAAAB//////////////////P//////////////////////////////////////AAAAAAAAAA////////////////////////+AAAA//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////gAAAAAAAAAAAAAAA/////wAAAAAAAAAAAf////////wAAAAAAAAAAAAAAAAB///////////////////////////////////+AAAAAAAAAH//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////+AAAAAAAAAAAAAAAAAB/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////4AAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB/AAAAAAAAAAAAA//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//4AAAAAH///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////8AAAAB/////////////////////gH////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////gAAAAAAH///////////+AAAAAAf/////////////////////AA////////gAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////AAP/////////////////////////////////////gAAAAAAAAAAAAAA///////////////////////4AAAAAAAAAAAP/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////+AAAAAAAAAAAAAAAAP/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA///////////4AAAP////////////8AAAAAAAAAAAAAAAA//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAAAP////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////
/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////8/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/8AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////wf//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAH////////////////////gAAAAAAAA///////////////////4AAA//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////gAAAAAAAAAAAAAAA/////////////////////////////////4AAAAf///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAD///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////+B/4A//////////////////////////////4AAAAAB//gAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAB///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////AAAAAAAAAAAAAAAAAAAAAAf///AAAAD///////////8AAAAAAAAAAAAAAAAAAAAf4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAB////////4AAAAAAAD///////////////////wAAAAAAAAAAAAAAf//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////4AAAAAAAAAAAAAB//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////AAP/////////////////////////AAB//////////////////////////////////+AAAAAAAAAAAAAAAAAAA/////////////////////////////+AAAAAAAAAAf/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////AAAAAAAAAAAAAAAAAf////////////////////////////4AAAAAAAAAAAAD/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////8AP/////////////////////////gAAAAAAAAAAAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAAAAf/////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//4AAAAAAAAAAA///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////wAAAAAf////wAAAAAAAAH/////////////////////////////gAAAAAAB//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAAAA/////////////+AAAAD//////////////wAAAAAAAAAAH/////////////////////////////////////AAAAAAAAAAAAAAAAB////////////////////////////////////AAAAAAAAAAAAAAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////n/wAAAAAAAAAAAAAAAAA///////////////////gAAAAD//////////////////////////w///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////v//wAH///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////4P/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////AAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////4AAAAAAAAAAAAAA/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAAAAAAAP////////////wAAAAAAAAAAP/////////+AAAAAAAAAAAAAf///wAA////gAAAAAAAAAAH////////AAAAAH//////////////////////////////////+AAAAB/////////+AAAAAAAAAAAAH////////////////////////////////AA///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////AAAAD///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////wAAAA
AAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////wAAAB/////////h////////////////////////////////////4AD//////////4AAAAAAAAAAB////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////+AAAAAAAAAAAAAAAAA/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////AAAAAAAAAH//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////+AAAAAAAAAAAAB//gAAAAAP//////////////////4AAAAAAAAAAAAAAAAAf/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///AAAAAAAAAAAAAAAAAAAAAH//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////wAAAAAAAAAAAD//////////////////////////////wAAAAB///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////+AAAAAAAAH///////////////////////////////////+AAAAAAAH/////////////////////////+AAAAAAAAAAAAAAD///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAf/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAP//////////////////////////////////////8AAAAAAAAAH////////gAAAAAAAAAAAAAAAAAAAH///////////////////////////////gAAH///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////8AAAAAAAAAAAAAAD//////wAAAAAAA/////////////////8AAAAAAAAAAfwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////+AAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAAAAB////8AAf////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAA////4AAAAAAAAAAAAAAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////wAAAAAAAAAAP//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////wAAAAAAAAAAAAAAAAAP////wAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////wAAAAAAAAAAAAAAH/////////////////////AAAA////////////////D////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAAD8AAAAAAAAAA//////////8AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////4AAAAAAf///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////AAAAAAAAAAAAAAAAAAAAD////////////////////////////////////8AAAAAAAf//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////AH//////////////////////////////////////8AAAAAAAAAAP////////////////////////////////////gAAAA///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAD/////////////////////////////w==", + "sha1": "1a1d77c6d0f97c4b620faa90f3f8644408e4b13d", + "sha256": "3d35c10650a2da8413a2a11b8e7fb891af5da3a9763584caa6cd71bbe68de6ba", + "sha512": "b39ffcd51b0fdf9095ec7e38467a4aa87917fdda9acac5a95217ec3f378d4a6c5c5e2b643ca97151a0ab370dcc1af9f2e84cbf1c2535778ffbe45fc320b787d0" + }, + { + "input": 
"///////////////8AAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAA/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////8AAAAAAAAAAD/gAAAAAAAf/////////////AAAAAAAAAAAAAAAf///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAAH////4D////////////////////////////8AAAAAAAAAAAAAAAAAAH/////////AAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAP////////////////////////////////////8AAAAH//////AAAAAAAAAAAAAAAAA//////////////////////////////////////j////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////////AAAAAAAAAAAAAAH////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////AAAAAAAP///////////////////////wAAAAAD/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB/8AAAAAAAAf/wAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////8AAAAAAAAAAAD/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAf///////////////////////////////wAAAAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfwAAAP////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//AAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////+AAP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////AAAAAAAAAAAAAAf////////AAD///////////////gAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////gAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAAAAAAAAAAAAAAAAAAAD/AAAAAAAAAAAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/8AAAAAAAAAAAAf//////////////////////////+AB///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAD//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAA/////////////////8AAAAAAAAAAAAAAAAP/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////8AAAAAAAAAAAAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAAP//////////////////////////////8D///////////////////////////gAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAH///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////wAAAAAAAAAAAAAD/////////////////+AAAAAAAAAAAAAAAAAAAH/////8AHgAAAAAAAAAAAAAAB/////////+AAAf///////////////////////////AAAAAAAB////////////gAAAAAAAAAAAAAAAAAH/////////////////////8B////////////////////////////////////8AH//////////////////////////gAAAAAAP////////////////////+AAAAAAAAAAAAAAAAAAAAAH////////////wAAAAAAAAAAAAAAAAAAAAAAAAf/gAAAAAAAAAAf/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////+AAAAAAAAAAAAAAAAAAAP//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////+AAAAAAAAAA///////////////////////////wAAAAAAAAAAAA///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////AAB////////////////////////////////4AAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAD///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////4AAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////gAAAAIAAAAAAAAAAAAAAAAAAAA/////////////gAAAAAAAAAB//////////////////////wAAAAAAAAAAAAP///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////8AAAAAAD///////////////////////gAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////AAAAAAAAAB//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////+AAAAH/////////////////////////////4AAAAAAAAAAAAAAAAH/////////////////////////////+AAAAAAAAAAAAAAAAAP//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAA////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAf/////////8AAAAAAAAAAAAB/////////////////////////////////AAAAAAAAAAAAAA////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////+AAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAD//////4AAP/////////////////wAB//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////+AB/////////////////8AAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAf///4AAA/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH//4AAAAAAAAD//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////+f/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/AAAAAAAAAAAAAP/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAAAAAAAAD/////////////wAAAAAf//////////////////////AAAAAD/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////+AAAAAAAAAAAA///////////////////////////gAAAAAAAAAAAAB////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////wAAAAAAAAAAAAAD/////////////////////////8AAP//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////4AAAAAAAAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAD//////////////4AAAAAAAAAAAAB///////////////////+AAf//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////wf/////////4AAAAAAAAAAAAAAAAAAAAAH////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////8AAD/////////////////////////////////+AAAAf////////////////////////////////////4AAAAAAAAAAAf///////////////////////////////gAAAAAAAAAAAAH///////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////AAAAAAAP///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAA/////////////////AAAAAAAAAAAAf////////gAAAAP//////wAAAAD/////////n//////+AP//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAAAAAAAAAAAH//////////////////////////////////gAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAAAAAAAD/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////3/////wAAAA/////////////////////////////////////+AAAf////////////wAAf//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////gAAAAAP////////AAAAAAAAAAAAAAAAAAAAAAAAAAf///////////gAAP////////////////////////8AAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAf/////////////+AAAB///
/////////////////////////////////////AAAAAAAAf////////////AAAAAAD/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////wAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////////8AAAAAAAAAAAAAAAAAAA////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////gAAAAAAAAf///////////////////AAAAAAAAAAAAAAAH//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////wAD/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////wAAAAAAAAAAAAAAAAAAPAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAD///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////8AAP/////////////////////////////4AAAAAAAAAAAAAAAAAAA/////////////////////+AAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gP/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////8AAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////4H////////////wAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////4AAAAAAAAAAAD//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAf//4AP///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAH///gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAf/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAD//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAf///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////AAAAB////////////////////////////////wAAAAAAAB////////////////////////4AAAAAAAAAAAAAAAAAAf/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAH///////////////gAP///////////////////8AAAAAAAAAAP///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////D/////////////////////////////////4AAAAAAAAAAAAAAAAAA////////////////+AAAAAAAAAH/////////////////AAAAAAAP////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////4AAAf////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////4AAAAAAAAAAAAAAAAAAAf///////////////////////4AAAAAAP/////////////4H///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////4AAAAAAAAAAAAAAAAAAAAAAAD///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "4ec84870e9bdd25f523c6dfb6edd605052ca4eaa", + "sha256": "baf3bcf323f3e5b91649eeb5f1be977a8bd91915a66297a22fdb1a906d1a7e53", + "sha512": "44a5845ade0e82bb34b1ada3b7875193515d5c3c19765f11e5e6d990f869adee5be8e9d995b18bd2d9ff5a308dc22d883198a527e6418833fbc715eccf939469" + }, + { + "input": 
"////////////////////////////////////4AAAAB////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////wAAAAAAAP///////wAAB/////////////////////////////4AD//////wAAAAAAAAAAAAAAAAAAH/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////8AAAAAAAAAB//h//////////////////////////////wAAAAAAAAAP///gAAAAAAAAAAAAAAD//////////////////////wAAAAAAAD/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAAAAAA////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAD4AAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////AAAAAP//////////////////////////////////////8AAAAAP////////////////////AAAAAAAAAAAP///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////gAAAAAAAAAAAAAAAAAB///////4AAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////8AAAAAAAAAAAAAAA////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAB////////AAAf//AAAAAAAAAAAAAAAAAAAAAAD//////+AAAAH//////////////////////+AAAAAAAAAAAAAAAAAAAD////wAAAAAAAAAAAAAAAAAAAAAAAAAA///////+AAAAAAAAAAAAAAAAAf//////////////////////4AAAAAAAAAAAAAAAAAAAAH/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////wAAAAAB///////////////////////////gAAAAAAAAAAAAAAAAAAAD/////////////////AAAAAAAAAAAAAAAAAAAAAAAAH////////4AAAAB///////////////////////////////////+AAAAAAAAAAAAAAAAAAAD/////4AAAAAAAAAAAA////////////////////4AAAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////wAAAAAAAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAf///////////////AAAA///////////+AAAAAAAD///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////4AAAAAAAAAAAAAAD/////////////////////////////////////8AAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAAAAAAAH////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////n/////4AAAAAA///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAH/////4AAAAAAAAAAH/////gAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////////wAAAAAAB//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////wAAAAAAAAAAAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////4AAAAAAAAAAAAAAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAB//////////////////////////////4AAAH//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////wAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////AAAAAAAAAAfwAAAAAAAAAAAA///////////////8AAAAAAAAAAAAA///////////////////////////////8A//////8AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////8AAAAAAAAAAAAAAAAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////wAAAAAAAAAAAAAAAAAP///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAB//////8AAAAAAAAAAAAAAAAAAH//wAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAAAAAAAAAAAAAB/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////8AAAAAAAAAAAAAA/////////////////////////////////4AAAAAAAf/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAZ/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////gf///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAD///8AAAAAAAAAAP///////////////////gH//////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////+AAAAAAAB//////8AAAAAAAAAAAAAAAAAP//////////////gAAAAD//////////////////////////////////gAAAAf////////////+AAAAAAAAAAAAAA/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/AAAAAAAAAAAAAAAAAAAAAAAAAD/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////4AAAAAAAAAAAAAAAAAH///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAH//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////AAAAAAAAAAAAP////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAP/AAAAAAAAAAAAAAAAAAAAAAAAH/////////////AAAAAAAAAAAAAAAAAAAAAAP/////////////////////+AAAAAAAAAAAAA////////////////////////////////////4AH///////////AAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD////+AAA////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAH////wAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////wAAAAAAAAAAAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAAAABgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAAAAAAAAAAAAAAB/////wAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////7//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////8AAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////+AAAAAAAAAAAAA/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////AAAAAAAAAAAAAAAAAAD//////////8AAAAAAAAD/////////4AAAAAAAAAAAH///8A//gAAAAAAAAAf//////
wAAB///////////////////////////////////gAAAAf///////gAAAAAAAAAAH////////////////////////////////AP///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////8AAAAP/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////8AAB///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////7//////////4AAAAAAAAB////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAAAAD////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////wAAAAAAAAB//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////+AAAAAAAAAAAABgAAAA//////////////////4AAAAAAAAAAAAAAAAH//////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///wAAAAAAAAAAAAAAAAAAAAB///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////8AAAAAAAAAAAA////////////////////////////8AAAAH////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////gAAAAAAH///////////////////////////////////+AAAAAH/////////////////////////+AAAAAAAAAAAAAA///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAAAAAAAAAAH////////////////wAAAAAAAAAD/////////////////////////////////////AAAAAAAAAB////////4AAAAAAAAAAAAAAAAAB/////////////////////////////4AAB///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////AAAAAAAAAAAAA//////AAAAAAAD/////////////////wAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////gAAAAAAAAAA//////////4AAAAAAAAAAAAAAAAAAAAAAAAf///wAB///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAD///4AAAAAAAAAAAAAAAAf/////////+AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////wAAAAAAAAAD////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////8AAAAAAAAAAAAAAAD///wAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAf////////////////////AA////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAAAD///////////////////////////////////////AAAAAAAAA////////8AAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////gAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////8AAAAAAAAAAAAAAAAAAAD////////////////////////////////////AAAAAAB//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////AAAAAAAAAP//////////////////////////////////+AAAP/////////////////////////////////4AAAAAAAAAAAAAAAAAAD/////////////////////////////////+AAAAAAAAAAAA=", + "sha1": "d689513fed08b80c39b67371959bc4e3fecb0537", + "sha256": "7fd9b3abc4684e6f8ee591bbac5a36c5060bb09ef7899690416e5300cc14fcd3", + "sha512": "fa3948c29572d0e7c9103e435fc9f298dcd5a68d0e5174a84aaeb58af8564c473d64d8295347ded8fa5c10e02a96a30b26a6fa7435b449b78bb91105ad043ba0" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAP//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAP////////////////////AAAAAAAAAAA+AAAAAB///////////8AAAAAAAAAAAAB///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAAAAB////////AAAAAAAAAAAAAAP/////////////gAAAAAAAAAA/////////////////////////////////////wAAB/////AAAAAAAAAAAAAAAAA////////////////////////////////////+P///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////AAAAAAAAAAAAAf////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////wAAAAAP//////////////////////8AAAAD////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAHAAAAAAAB////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAAAD////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////gAAAAAAAAf///////////////////////////////wAAAAAAAAAAAAAAH//////////////////+AAAAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH8AAA////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////gP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////wAAAAAAAAAAAAB///////wD/////////////+AAAAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////+AAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAf/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAD/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////8AD///////////////wAAAAAAAAAAAAAAAP/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAAAAAAAAAB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAP/////////////////////////////z//////////////////////////4AAAAAAAAAAAAAAAAAAAAAH///////////////////////////////wAAAAAAAAAAAAAAAAAf//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////8AAAAAAAAAAAAD////////////////4AAAAAAAAAAAAAAAAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////4AAAAAAAAAAAAAAAf/////////gAf//////////////////////////wAAAAAAAf//////////gAAAAAAAAAAAAAAAAAH/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////gAAAAAAP//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAf//////////////////+AAAAAAAAAAAA//////////////////////+AB/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////wAAAAAAAAAAAAA/////////////////////////////+AAAAAAAAAAAAAAAAAAAf////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////
//AAAAAAAAAAAAAH////////////////////////4AAAAAAAAAAAAAAAAP/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////+AAAAAAAAAAAAAAAAAH//////////////AAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////+AAAAAf/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////wAAAAAAAAAAAAAAB/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////wAAAAAAAAAAAB////////4AAAAAAAAAAAAAAAAAAAAAA///////////AAAAAAB/////////////////////////////4AAAAAAAAAAAAAA//////wAAAAAAAAAAAAAAAAAAAAB/////////////////gAAAAAAAAAH////////////////////////////8AAAAAA/////////gAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////gAB////////////////////////////4AAAAAAAAAAAAAAAf///////////////////////////4AAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////+AAAAP///////////////////////////gAAAAAAAAAAAAAAAAAAAAB//////////4AAAAAAAAAAAAAAAAAAAAH////////8AAAAAAAAAAAAB/////////////////////////////////AAAAAAAAAAAAAA///////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAA//////////////////gAAAAP////+AAD///////////////8AH////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////+AAAAAAAAAAAAAAAAA/////////////////+AAAAAAAAAAAAAAAAAAAf/////////////////////////4AB/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////+AAB////////wAAAAAAAAAAAB//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////8P////////////Af////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////wAAAAAAAAAAAD///4AAAAAAAAAAAAAAAAAAAAP///8AAAAAAAAAAAf//////////////////////////4AAAAAAAAAAAAAAAAAAf////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////8AAAAAAAAAAAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAA/////////////////////////////wAAAAAAAAAAAAAAAAH//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////AAAAAAAAAAAAAAAAAAAAAAAAAP//AAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////4AAA///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAf/////////wAAAAAAAAAAAAf///////////gAAAAAAAAAAAAAAAAAYAAH/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAD////////////////////////////////////////4AAAAAAAAH////////////////////gAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAD///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////AAAAAH////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAAP//////////+AAAAAAAAB///AAAAAAA////8AAAAAAAAH///////////////////////////////////////gAAAAH///////////////////////////////////////8AAAAAAAAAAAAAf////////////////////////////////////4AAAAAAAAA////////////////////////////+AAAAAAAAAAAAAAAAAAAAP/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAH//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////8AAAAAAAAAAAAAAAH////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAA
P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////////4AAf//////////3///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////+AAAA//////8AAAAAAAAAAAAAAAAAAAAAAAAH///////////4/////////////////////////wAAAAAAAAAAAAAAP/////////////+AAAAAAAAAAAAAAAAAAH////////////+AAH///////////////////////////////////////8AAAAAAAf////////////AAAAAA///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////8AAAAAAAAAAAAAAP////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAAAAP///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////+AAAAAAAAf//////////////////wAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////8D////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////4AAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////D/////////////////////////////+AAAAAAAAAAAAAAAAAP////////////////////4AAAAAAAAAAAAAH///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////j///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////wAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////4H//////////wAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////+AAAAAAAAAAAA//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAB/gA////////////////wAAAAAAAAAAAAAAAAAAAAAAAf+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////gAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/8AAAAAAAAAAAAAAAAAAAAf/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////AAAAf/////////////////////////////8AAAAAAAAf///////////////////////gAAAAAAAAAAAAAAAAAf///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAf/////////////4A///////////////////AAAAAAAAAAD//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////4AAAAAAAAAAAAAAAAAA//////////////+AAAAAAAAAH///////////////8AAAAAAP//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////gAH////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////gAAAAAAAAAAAAAAAAAAf//////////////////////gAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////w==", + "sha1": "c4fed58f209fc3c34ad19f86a6dacadc86c04d33", + "sha256": "429e454c0cd5d874d7887f8f8def3390a6e54af783c102af6bc3c75c62f3661f", + "sha512": "288a6fa4607292fd3e4ee38d2007863cb11fd4fa5ffa5cc94cfcceefe9bdd1c5fdd092a0f9db45daeff6702b3d7dee5fd81d821e8f84c0ce21c3df1f0e1b58b2" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////AAAP///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////+AAAAAAH///////4AAA////////////////////////////wf/////+AAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////gAAAAAAAPwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////+AAAAAAAAAf///AAAAAAAAAAAAAAf////////////////////4AAAAAAAf////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////gAAAAAAAAAAAAAAAAAf///////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAD///////////////////AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////+AAAB/////////////////////////////////////+AAAAAH//////////////////+AAAAAAAAAf/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////AAAAAAAAAAAAAAAAAAD//////8AAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAAAAAAAAH//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAA//////gAAPgAAAAAAAAAAAAAAAAAAAAAf////wAAAP//////////////////////AAAAAAAAAAAAAAAAAAH//+AAAAAAAAAAAAAAAAAAAAAAAAH/////wAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAAAAAAAAAAD////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/+AAAAAAAAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////gAAAAAD/////////////////////////AAAAAAAAAAAAAAAAAAAf///////////////+AAAAAAAAAAAAAAAAAAAAAAAAP///////8AAAAP///////////////////////////////////wAAAAAAAAAAAAAAAAAf/////AAAAAAAAAAAAH///////////////////wAAAAAAAAAAAAAH////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////4AAAAAAAAAAAAAAAAAAAAP//gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////+AAAAAAAAAAAAAAAAAAAA///////////////+AAH///////////wAAAAAB//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////AAAAAAAAAAAAAf/////////////////////////////////////gAAAAAAAAAAAAAAAP///gAAAAAAAAAAAAAAAAAAf//////////////////////8AAAAAAAAAAAAAD////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////8AAAAB/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/8AAAAAAAAAAAAAAAAAAP/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAD////wAAAAAAAAA/////AAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////gAAAAAA///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////wAAAAAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////gAAAAAAAAAAAAAAAAAAAA////////////////////////////8AD/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////
/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////4AAAAAAAAP///////////////////////////////////////4AAAAAAAAAAAH//////////////4AAAAAAAAAAAH/////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///+AAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////+AAAAAAAAAAAAAAAAP//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////+AAAAAAAAAAAAAAAAH///////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////8AAAAAAAAAAAAAAAAAAAAP////gAAAAAAAAAAAAAAAAAP/4AAAAAAAAAAAAAAAAA//////////////////////////////4AAAAAAAAAAAAAAAAAD///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////////AAAAAAAAAAAAAAAAAA////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////gAAAAAAAAAAAH///////////////////////////////AAAAAAP/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////wAB///gAAAAAAAAH//////////////////8A/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////wAAAAAAD/////gAAAAAAAAAAAAAAB//////////////8AAAf////////////////////////////////wAAP////////////wAAAAAAAAAAAAAH///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfgAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP////////wAAAAAAAAAAAAAAAAAP//////////////////////////////////8AAAAAAAAAAAAAAAAAAAA//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAB///////////////////////////////AAAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAAAAAAAAP////////////gAAAAAAAAAAAAAAAAAAAAf/////////////////////AAAAAAAAAAAAH//////////////////////////////////8A///////////4AAAAAAAAAAAAAAAAAAAAf///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAH////Af///////AAAAAAAAAAAAAAAAAAAAAAAAAD////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAf//////////+AAAAAAAAAAAAAD////4AAAAAAAAAAAAB///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////gAAAAAAAAAAAAAAAAAAAAD//////////////////////////wAAAAAAAAAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////gAAAAAAAAAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////+AAAAAAAAAAAAA//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////4Af/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////+AAAAAAP////////////////////////////////////////4AAAAAB////////////////////////////////4AAAAAAAAD////////////////////////////////////8AAAAAAAAAAAAAAAAD/////////////////////////wAAAAAAAAAAAAAAB////////
/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH/////////////////4AAAAAAAAH//////4AAAAAAAAD//////////wAAAEAAAAAAAAP/////gD///////////////////////////////////AAAAA//////AAAAAAAAAA////////////////////////////////4f///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAP////////////////////////////////+AAAAH///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////4AP////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////wAAAAAAD////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAAAAf//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////4AAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////8AAAAAAAAAAAAD///////////////////////////////////////8AAAf/////////////////AAAAAAAAAAAAAAAAP//////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///gAAAAAAAAAAAAAAAAAAAAD///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAAB//////////////////////////4AAAD///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////AAAAAA////////////////////////////////////wAAA//////////////////////////wAAAAAAAAAAAAB//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAP/////////////+AAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAAAH//////////////////////////////////+AAAAAAAAAD////////wAAAAAAAAAAAAAAAD///////////////////////////wAAD///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAB/////gAAAAAAB/////////////////4AAAAAAAP////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////8AAAAAAAAAB/////////8AAAAAAAAAAAAAAAAAAAAAAAD///gAD////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAH//8AAAAAAAAAAAAAAAAP////////AAAAAAAAAAAAAAAAAAAAAf/////////////////////////////4AAAAAAAAAf//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////////gAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////gAAAAAAAAAAAAA////////////////////gAAAAAAAAAAAAAAAf////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAf////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////4AAAAAAf//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//AAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////AAAAAAAAAAAAAD//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////8AAAAAAAAAAAAAAAAA//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "051888c6d00029c176de792b84dece2dc1c74b00", + "sha256": "0987d19048e10b925bcf394dfffcf259fc1a15e403673a80bfd4e7fd4f43cef0", + "sha512": 
"ea2b4ce965af861dc357b045bcdefe1016bde2245dc9b0d13bd330da25fe5fe95b121163139bcc3cc7306c2a3e3f64f42deccade1c216592e6b94ddbca8d9262" + }, + { + "input": "AAAAAH///////////////////////////AAAAAH////////////////////////////8AAAAAAAAAAAAAAAAAAAA//////////////////////wAAAAAAAAAAAAAAAAAA///////////f///gAAAAAAAAA///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////g////////////////+AAf/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAAA/////////////8AAAAAAAAAAAH/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//gAAA/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////+AAAAAAAAAf////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD////wAAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAAAAAB//////////////////////////AAAAAAAAAAAAAAAAAAAP//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////AAAAAAAAAAAAAAAf/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////gAAAAAAAAAAAAAAAAAAH///////////////////+AAAAAAAAAAAAAAAAAB///////////////////////////////////////gP//AAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAH///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////4AAAAAAAAAAAAAAAAAAAAAAA////////////+AAAAAA////////////+AAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////+D////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////////8AAAAAAAAf//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////+AAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////wAAAAAAAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////H////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////gAAAAAAAAAP/////////////////////////////////////4AAAAAAAAAA////////////////////4AAAAAAAAAAAAAAAD/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAf////////////////////////////////wAAAAAAAAAAAAAAA///////////8AAAAAAAAAAAAAAAf//////////////////+AAAAAA//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////gAAAAAAAAAeAAAAAAAAAAAAAAAAAAAAAAAAAAP///////gAAAAAAAAB//////////////////gAAAAAAAAAAAAAAAAAAAA////////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAH//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////gAAAAAAAAAAAAAAAAB///////////wAAAAAAAAAAAAAAAAAAAAf/gAAAAAAAH////////////////////////////////4AAAAAAAAAAAAAAAAAAAAH///////////////////////////////////4AAAAAAAAAAAAAAAAAAA////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////
//////gAAAAAAD///////////////////////////AAAAAAAAAAAAD///////////gAAAAAAAAAAAAAAAAAAAAAAAH////////////////wB///////////////////////////////gAAAAAAAAB///////////////4AAAAAAAAAAAA///////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////gAAAAP////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAD///////////4AAAAAAf/////////////////////8AAAAAAAAD/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////wAAAAA///////////////////4AAAAAAAAAAAAAAAAf/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA/////AAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAB4AAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////AAAAAAAAAAAAAAAf/////////////////////////////4AAAAAAAAAAH////AAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////+AAAAAAAAH///////////////////gAAAAAAA/////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////4AAAAAAAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAAAf//wAAAH//wAAAAAAAAAAAAAP4AAA///////////////////////////////gAAAAAAAAAAAAAAf///////////////////////////////////////gAAAAAAAAAAAAAAAD////////////////gAAAAAAAAAAAAAAAAH/////////////////////////n/////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////4AAH///////wAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////wAAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////AAAAAAAAAAAAAD//////////8AH//////////////////wAAP////////+AAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////4AAAAAAAAA////////////////////////4AAAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////8AAAAAAAAAAAAAA////////////////////////8AAP/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////4AAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////+AAAAAAAAAB/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////+AAAAAAAAD//////////4AAAAAAAAAAf///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////4AAAAAAAAAAAAAAAAAAB/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////4AAAB/////////////////////////////////gAAAAAAAAAH////////////////////////////+AAAAAAAAAAAAAf////////////////////////wAAAAAAAAAAAAAAAAAAAAAAD/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///wAAAAAAD///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////gAAAAAAAAAAAAAAAAA///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////+AAAAAAAAAAAAAAAAP///////////////AAAAAAAAAAB////////+A///////AAAA////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////+AAAAAAAAAf////////////////////////////////4AAAAAAP/////////////+AAAAAAAAAAAAAAAAAAAA//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////
//////////////////////f//wAAD////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/4AAAAAAAAD///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////+AAAAAAAAAAAB//AAAAD///////////////////////gAAAAAAAAAAAf///////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////AAAAAAAAAAAAAAH////////////////gAAAAAAAAAAAf4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////4AAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////4AAAAAAAP/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAH//////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAf//////+AAAAAAAAAAAAAAAAAAP//////////////+AAAAAAAAAAAAAAAA///////////////////////////////4AAAAD/////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////+AAAAAD//////////////8P//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAAAAAA////////////////////////////////gAAAAAAAAAAAAAAAAAA////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////wAAAAAAAAAAAAAAAAAAAB////////////4AAAAAAAAAAAAAAH//////////////////////////////////////8AAAAAAAB/////////////////////////////////////////8AAAAAAAAAAAAAAAAAB///////////////////////////////gAAAAAAAAAD///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////4AAAAAAAAAD//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAfgAAAAAAAA/////////////4AAAAAAAAAAAAAAAAAAH//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////4AAH///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/AAAAAAAAAAAAAAAAP/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAAAAA///////////////////////////////////gAAAAAAAAAAAAAB//////////wAAAAAAAAAAAAAAAAAAAAAAH//////////////////////gAAAD///////////////////////////////g//////////////////4AAAAAAAAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////+AAAAAAAAAAAAAAAAAAAAAAAf////////////////4AAAAAAAAAAAAAAAAAAAAAD///////////////////////////gAAAAAAAP////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////gAAAAAAAAAAAAfAAAAAAAAAAAAAAAAAD//////////wAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAP//////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////8AAAAAAAAAAAAH/////////gAAAAAAAAAAAAAA//////AAAAAP////////////////////////////////////4AAAAAAAAAAH4AAAAAAAAAAAB////////////////////////////gAAAAAAAAAH////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////+AAAAAAAAAAAAAAAAAAAAAH//////wAAAAAAAAAH//////////////////////////////////////4AAAAAAAAAAAAAAAAH//////////////////////////////////////4AAAAAAAAAAAAAAB////////////////////////AAAAAAAAAAAAAAAAAAAAAAf/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////wAAAAAAAP//+AAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAP//
///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////8=", + "sha1": "1a3540bee05518505827954f58b751c475aeece0", + "sha256": "fc774a081d9c93d52bb6a8d99a06ccd7bf32a10154d302524b8c5c5dc1b2969f", + "sha512": "57cdfebacae17394501dc67e5a12253e18ff29e8fe15bbd716edd28d070674e60d3fa0d6fd2ca574e670f45b3e9baab384b953e7005db7800bfdf825940f62de" + }, + { + "input": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAH////////AAAB////////////wAAAAAAAAAAAAAAAAAAA//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////8AAAAAAAAAAAAAAD///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAAAAAAAP////////////////////////4AAAAAAAAAAAAAAAAAAAAAf/4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////4AAAAAAAAAAAAAAAAAf//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////8AAAAAAAAB//////////////////4AAAAAD/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////wAAAAAAAAAAAAAAAAAAAAAA//////////////////////////4AAAf//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////wAA///gAAAAAAAAAAAAAAAAAAAAA////////////////+AH/////////////////////8AAA/////////////////wAAAAAAAAAAAAAAAAAAAD////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////gD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////AAAAAAAAAAAAAAA//////////////////////////////////////wH/////////////////////wAAAAAAAAAAAAAAAAB///////////////////////////////////wAAAAAAAAAAAAAAAAAAAH//////AAAAAAAAAAAAAAAAAAAAAAB/////////////////wAAAAAAAAAAAAAAAf///////////////////////8AAAAAAAf///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////wAAAAAf///////////8AAAAAAAAAAAAAAAAAAD///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////+AAAAAH/////////////////+AB//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////wAAAAAAAAAAAAAAAfgAAAAAAAAAAA////+AAAAAAAAAAAAA/////////////////////////////////+AAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAAAAAf/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////gH////////////////+AAAAAAAAAAAAAAAAAAAAB////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////AAAAAAAAAAB////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////gAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////B////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////8AAD///////8AAAB/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////4AAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////AAP///////////////////////////////////AAAAAAAAAAH///////////////////////AAAAAAAH///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////wAAAAAAAAAAAAAAA//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////////8AAAAAAAAAAAB/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////4AAAAAAAAAAAAAAAA
AAD//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////8AAAAAAAAAAAAAB//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////8AB//////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAP//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////AAAAAAAAAAAAAAAAAAAB////////////////////4AB/////////////////gH////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////wAAAAAAAAAAAAA/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////wAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///+AAAAAAAAAAAAAAAAAAAAAA/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////4A//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////wAAAAAAAAAAAf////////////////////////////gAAAf//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////+AAAAAAAAAAAAAAAAAD/wAB///////4AAAAAAAAAAAAAAAAAB/gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////+AAAAAAAAAAAAAAAAAAA//////4AAAP////////////4AAAAAAAAAAAAAAPwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABwAAAAAAAAAAAH/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////f////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////+AAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAD/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////+AAAAAAAAAAAAAAAAA/////////////////////////////wAAAAAAAAf////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AD/////////////////////gAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAAH///////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgAAAAAAAAAAD//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////wAAA/////////////////////////////////////////gAAAAAAf//////////////////////wAAD////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////8AAAAAAAAAAAP/////////////gAAAA////////////4AAAAAAAAAAD////////////////////////////////+AAAAAAAAAAAAAAH/////////////////////////////gAAAAAAAAAAAAAAAAAA////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAH////////////5///////////////////////////////////4AAAAAAAAAAD//////////////4AAAAA//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////gAAAAAAAAAAAAAAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAAAAAAAB///////////////////////////////////AAAAAAAAAAf////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAA
AP//////////////////////8AAAAAAAAAAAAAAAAB//////wAAAB////////gAAAAAAAAf///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////////wAAAAAAP///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////wAAAAP//+AAAAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAP//////////////////////////////wAAAA/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////3//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////gAAAA////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///4AAAAAAAAAAAAP////+AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////4AAAAAAAAA//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAAA//////////////////////////////////////wAf////////////////gAAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAA///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////wAAAAAAAAAAAD///////////////////////+AAAf////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////+AAAAf///////////////////////////////////4D//////////////////////////AAAAAAAAAAAAD/////////////////////////////////gAAAAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAAAAAAD////////////AAAAAB////////////////////////////////8AAAAAAAAAH////////gAAAAAAAAAAAAA/////////////////////////gAAH///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////8AAAAAAAAf///8AAAAAAAP/////////////////AAAAAAf////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////wAAAAAAAAD////////8AAAAAAAAAAAAAAAAAAAAAAB//4AA//////////////////////////8AAAAAAAAAAAAAAAAAAAAAAP/8AAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAAf/////////////////////////////4AAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////4AAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////+AAAAAAAAAAAPAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////+AAAAAAAAAAAAB///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////wAAAAAAAAAAAAAAAB//////////////////////////////////8AAAAAD///+AAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////3/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAB//////////////////////////////////8AAAAA////////////+AAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////gAAAAAH////////////////////////////////H//////////////////////////////8AAAAAAAAAAAAAAP/////////////////////////////////4AAAAAAAAAD//////////////4AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////4AAAAH//////////////////////////4AAAH///////////////////////////gAAAAAAAAAAAAAAAAAA/////////////////////wAAAAAAAAAAAAAAAAH//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//gAAAAAAAH/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////g////////////////wD////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAP/////////
///8AAAAAH////////////8AAAAAAAAAA/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/gAA/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////+AAAAAAAAAf///////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAD///wAAAAAAAAAAAAAAAAAAAAAH//gAAAAAAAAAAAAAAAAAAAAAP/////////////////////////4AAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/w==", + "sha1": "dfa19180359d5a7a38e842f172359caf4208fc05", + "sha256": "6c6afa35f1aad6301dfde6c4ababe2da47d92033a9a41e84ca6f00e5eb29bc60", + "sha512": "ae75a779c9034a0048caebad4983d870ea61fa2fd5adc8f7eb1c6f0bfaadd13685dcca0eaccee903b857893a17edfc6a08226b35e95804303845434d442f7cc7" + }, + { + "input": "////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////4AAAAAAAAAP////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////+AAAAAAAP///////////////////////////////4AAAAAAAAAAAAAf//////////////////4AAAAAAAAAAAAAAAAAAAB///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD+AD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAB/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////+AAAAAAAAAAAAAAAAAAAAAAf////////////////////gAAAAAAAAAAAf///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////AAAAH///////+AAAAAAAAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAP//////+AAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////4AAAAAAAAAAAAAAAAAAAD////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////gAAAAAf//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////gAAAAAAAA////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////4AAAAAAAAAAAAAA////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAAAAAAAAB////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////wAAAP//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////wAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////8AAAAAAAAAAAAA/////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////AAAAAAAAAA/////////////+AAAAAAAAAAAAAAAAAAAH/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////+AAAAAAAAAAAAAAAH/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////AAAAAAAB///////+AAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////+AAAAAAA////////////////////////////////////wAAAAAAAAAAAAAAP//////////////4AAAAAAAAAAf/////////////////4AH//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAAAAAf///////////////////////////4AAAAAAAAAAAAAAAAAP/////gAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////gAAAAAAAAf//////////////////////8AAAAAAAAAAAAAAAAHgAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAD////////4AAAAAAAAAAAAD/////////8AAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////AAAAAP//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////AAAAAAAAAAAAAA/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////4AAAAAAAAAAH///8AAAAAAAAAAAAAAAAAAAAAAf/////8AAAH//////////////////////////gAAAAAAAAAAAAf////AAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAf//////////////////////////+AAAf//////+AAAAAAAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////AAAAAAAAAAAAD///////////////////////4AAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////wAAAB//////////////////////////gAAAAAAAAAAAAAAAAP/////4AAAAAAAAAAAAAAAAAH/////8AAAAAAAAAAAAB/////////////////////////////////AAAAAAAAAAAAAA//////////////////////////////////////AAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAA///////////////h//wAAf//////////8/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////wAAAAAAAAAAAAAAf//////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAP//wAAAAAAP///////wAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////////gAAAAAAAAAAP/////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAH////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////wD//////8AAAAAAAAAAAAAAAAAAAAAAH////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////+AAAAAAAD//////////////////////4AAAAAAD//////////////////AAAAAAAAAAAAAAAAAAAAAAAAB////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////8AAAAAAAAAAAAAA///////////////////////+AAH////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////wAAAAAAAAAAAAAAAAAAAAD//////////////////////////////wAAAAAAAAH/////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////4AAAAAAD////////AAAAAAAAA/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////gAAAAAAAAAAAAAAAAB////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////8AAAA//////////////////////////////+AAAAAAAAAP///////////////////////////AAAAAAAAAAAAAP//////////////////////+AAAAAAAAAAAAAAAAAAAAAH////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///8AAAAAAA///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////gAAAAAAAAAAAAAAH4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////AAAAAAAAAAAAAAAAH//////////////wAAAAAAAAAP////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////+AAA///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////+AAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////wAAAAAAf///////////////////////////////8AAAA////////////+AAAAAAAAAAAAAAAAAAAf////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////f8AAf//////////////////////////////////+AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP4AAAAAAf////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAAAAH//////////////////////////////////////////AAf/////////////////////AAAAAAAAAAAA/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAD///////////8AAAAAAAAAAAAAAf/////////////wAAAAAAAAAD//////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////wAAAAAAAAAAAAf///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////AAAAAP////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////4AAAAAAAAAB////////////////////////////////wAAAAAAAAAAAAAAAAAAH//////wAAAAAAAAAAAAAAAAA///////////////4AAAAAAAAAAAAAA/////////////////////////////AAAAP////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////wAAD////////////h////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////AAAAAAAAAAAAAAAAAAAA/////////////////////////////8AAAAAAAAAAAAAAAAAAH//////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////8AAAAAAAAAAAAAAAAAAAP//////////wAAAAAAAAAAAAAH/////////////////////////////////////+AAAAAH/////////////////////////////////////////wAAAAAAAAAAAAAAA/////////////////////////////8AAAAAAAAH//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////4AAAAAAAAAD///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////wAAAAAAAAAAAAAAAAAfgAAAAAH/////////////AAAAAAAAAAAAAAAAAAA//////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////4////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/AAAAAAAAAAAAAAAAP//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////gAAAAAAAAAAB/////////////////////////////////wAAAAAAAAAAAAAA/////////8AAAAAAAAAAAAAAAAAAAAAAB////////////////////+AB///////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////wAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAf/+AAAAAAAAAAAAAAAAAAAAAAAAAB////////wAAAAAAAAAAAAAAAAAAAAAB////////////////wAAAAAAAAAAAAAAAAAAA/////////////////////////+AAAAAAAA////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////wAAAAAAAAAAD///////////////8AAAAAAAAAAP////////////+AAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////8AAAAAAAAAAAAAAAAAAf/////////4AAAAAAAAAf////////////wAAAAH//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////////////////8AAAAAAAAAAAAAAAAAf///////////////////gAAAAAA///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////8AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////gAAAH///////4AAA//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////AAAAAAAAAAAAAA
AAAP/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////wAAAA/////////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAH///wAAAAAAAAAAAf//////////////////gAAAAAD///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////+AAAAAAAAAAAAAAP//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAH///////////////////////////////////////4AAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////8AP///////////////////////////////////AAAAAD////////////////8AAAAAH///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////wAAAAAAAAAAAAAAAAAA/////+AAAAAAAAAAAAAAAAAf/////////////////////////////////8AAAAAAAAAH///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAH/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD//wAAH///////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAH///////////////////////////////////", + "sha1": "7b0fa84ebbcff7d7f4500f73d79660c4a3431b67", + "sha256": "1e858dd15069f54478023c4d8518cd5aa814fb15c9eb8df45c44efbb050587ed", + "sha512": "9d807978287e8964df204f412628975a29079e0f9e6ea2602b4bcb1b18ebc4a4566197c5ebdec2b6f86c71bc49c8e6423f2dc48f66e14a0595ae89f68713070c" + }, + { + "input": "//4AAAAAAAAAAAAAAAAAAAAD///////////////n///////////////////8A/////////////////wAAAAAAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////////+//////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////8AAAAAAAAAAAAAAD//////////////////////////////////////H///////////////////wAAAAAAAAAAAAAAB/////////////////////////////////wAAAAAAAAAAAAAAAAAAB//////wAAAAAAAAAAAAAAAAAAAAf///////////////wAAAAAAAAAAAAAAH////////////////////////AAAAAAB///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////wAAAAAf///////////8AAAAAAAAAAAAAAAAAA/////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////+AAAAf///////////////4B//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////8AAAAAAAAAAAAAAAH//////////////////////////////////////////wAAAAAAAAAAAf//8AAAAAAAAAAAH////////////////////////////////8AAAD//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAAAAAP////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////gAAAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////8AAAAAAAAAB///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////AAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////gAAAAAf/wAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAH////////////////////+AH//////gAAD/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////8AAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////+AAf//////////////////////////////////gAAAAAAAAP//////////////////////+AAAAAA/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////4AAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////+AAAAAAAAAAAAAH///////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////8AAAAAAAAAAAAAAAAAAAAAAAB////////////////////////////////////////4AAAAAAAAAAP///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////
///////8AAAAAAAAAAAAAAAAB/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAAAAP////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////7////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////wAAAAAAAAAAAAAAAAAB///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////+AAAAAAAAAAAAAAAAAAA////////////////////8////////////////8P///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////4AAAAAAAAAAAAAAAAAAAAAAA/////////////////////////+AAAAAAAAAAAAB/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////+AAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH///////wAAAAAAAAAAAAAAAAAAAAAAAAAAP//8AAAAAAAAAAAAAAAAAAAAAB///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////////wB//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////gAAAAAAAAA////////////////////////////wAAD/////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////wAAAAAAAAAAAAAAAB+AD//////AAAAAAAAAAAAAAAAAD/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////wAAAAAAAAAAAAAAAAAB/////8AAf//////////wAAAAAAAAAAAAAAf/////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////////gAAAAAAAAAAD/////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////gAAAAAAAAAAAAAAAAAAP///////////////////////4AAAAAAA//////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////4AAAAAAAAAAAAAAAAD/////////////////////////////AAAAAAAH///////4AAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////8AP///////////////////4AAAAAAAAAAB/////////////////+AAAAAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////////////wAAAAAAAAAAB/////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////+AAB///////////////////////////////////////8AAAAAA/////////////////////gAf//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////gAAAAAAAAAAB/////////////8AAAAH///////////wAAAAAAAAAAH///////////////////////////////wAAAAAAAAAAAAAP///////////////////////////AAAAAAAAAAAAAAAAAAf///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAP///////////P/////////////////////////////////AAAAAAAAAf////////////8AAAAAf/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////AAAAAAAAAAAAAAAAAD////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////gAAAAAAAAAAAAAAAAAAAAAAAP//////////wAAAAAAAAAAAAAAAAAA///////////////////////////////////gAAAAAAAA/////////////////
////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////4AAAAAAAAAAAAAAAA////4AA///////8AAAAAAAP///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////4AAAAAf//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////4AAAAH/AAAAAAA////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////8AAAAAAAAAAAAAAAAAAAH////////////////////////////4AAAAf//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAfAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////gAA////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAA////gAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////+AAAAAAAAAP//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////AAAAAAAAAAAAA/////////////////////////////////////H///////////////+AAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///4AAAAAAAAAAAAAAAAAAAAA///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////wAAAAAAAAAAAD/////////////////////+AAH//////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////+AAB////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////+AAAAAAAAAAAB////////////////////////////////8AAAAAAAAAAAAAAAAAB/////////+AAAAAAAAAAAAAAAAAB//////////gAAA//////////////////////////////+AAAAAAAAAD////////wAAAAAAAAAAAf//////////////////////wAAD///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////+AAAAAAP///gAAAAAAB/////////////////4AAAAP////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////gAAAAAAAB////////gAAAAAAAAAAAAAAAAAAAAAD/8AAf///////////////////////+AAAAAAAAAAAAAAAAAAAAAAH/gAAAAAAAAAAAAAAAB////AAAAAAAAAAAAAAAAD//////////////////////////////AAAAAAAAf/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////AAAAAAAAAAAAAAAAAAAAD///////////////////////////////////////8AAAAAAAAAf/////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////+AAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////+AAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAB////////////////////////////////8AAAAP/4AAAAAAAAAAAAAAAAAAAAAP/////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////+AAAAAAAAAAAAAAAAAD/////////////////////////////////+AAAAH///////////8AAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////AAAAA///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////wAAAAAAAAAAAA//////////////////////////////////gAAAAAAAP////////////gAAAAAAAAAAAAAAAAAAAAAAA/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////gAAAH/////////////////////////+AB/////////////////////////4AAAAAAAAAAAAAAAAP///////////////////wAAAAAAAAAAAAAAH//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgAAAAAH///////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////4P/
//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAD////gAAAAAAAAAAAD////////AAAAAAAAA/////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+AAAAAAAAAAAAAAAAB/////////////////////////////////H/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////+AAAAAAAAAP////////8AAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////+AAf////////////////////8A////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAAAAf///////////////////////////////////////gAAAAAf//////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////////wAAAAAAAAB////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////8AAAAAAB////////////////////////////////AAAAAAAAAAAAAP//////////////////8AAAAAAAAAAAAAAAAAAAA//////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/AH////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////AAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAD///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAP///////8AAAAAAAAAAAAAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAH///////AAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////AAAAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////////wAAAAP///////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////8=", + "sha1": "9e886081c9acaad0f97b10810d1de6fcdce6b5f4", + "sha256": "5d73820315ebd00f0e419a7fe418ff109664add82a68387daffff4239a2c1b23", + "sha512": "2e66fadfa7cba0704d58989e24a63d3bef3138479a205cf6d3cd2884fb84c1bcb0ee7cbb74e64165d86860c3fea48df8fa1c7ebf6c757b7b7b51d75eb4f60e9e" + }, + { + "input": 
"AAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////8AAAAAAAAH///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////4AAAAAAAAAAAAAH///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////+AAAAAAAAAAAAAAAAP///////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////wAB/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////wAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAA/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////4AAAAAAAAH////////////wAAAAAAAAAAAAAAAAAAA//////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////wAAAAAAAAAAAAAAA//////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////4AAAAAAAP//////wAAAAAAAAAAAAAAAAAD///////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////wAAAAAAH//////////////////////////////////+AAAAAAAAAAAAAP/////////////AAAAAAAAAAf////////////////AA/////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////4AAAAAAAAAf///////////////////////////AAAAAAAAAAAAAAAAAP////gAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////gAAAAAAD//////////////////////8AAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////AAAAAAAAAAAD////////gAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////AAAAAP/////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////4AAAAAAAAAAAAA/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////4AAAAAAAAAA//8AAAAAAAAAAAAAAAAAAAAAAf////gAA/////////////////////////8AAAAAAAAAAAAf///4AAAAAAAAAAAAAAA///////////////wAAAAAAAAAD//////////////////////////+AAf//////wAAAAAAAAAAAAAAAAAAAAB///////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////AAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAAAH//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////wAAAB/////////////////////////8AAAAAAAAAAAAAAAP////AAAAAAAAAAAAAAAAA/////gAAAAAAAAAAAAP////////////////////////////////4AAAAAAAAAAAAAH/////////////////////////////////////AAAAAAAAAAAAAAAAAAP////////////////////8AAAAAAAAAAAAAAH/////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf8AAH/////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////4AAAAAAAAAAAAAP////////////8AAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAA///AAAAAAH//////AAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////////+AAAAAAAAAAH//////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////4AAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////5/////wAAAAAAAAAAAAAAAAAAAAD////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////AAAAAAB/////////////////////8AAAAAP/////////////////gAAAAAAAAAAAAAAAAAAAAAAH//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////+AAAAAAAAAAAAAAf//////////////////////4AAf///////////////////+AAAAAAAAAA
AAAAAAAAAAAAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAAAAAAP/////////////////////////////AAAAAAAAD////////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////8AAAAAB//////8AAAAAAAD///////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////wAAAAAAAAAAAAAAAA///////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////wAAAD/////////////////////////////AAAAAAAAA//////////////////////////8AAAAAAAAAAAAA//////////////////////4AAAAAAAAAAAAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAB///+AAAAAAAf///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////wAAAAAAAAAAAAAf//////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////AAAAAAAAAAAAAAAAH/////////////+AAAAAAAAAP////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////+AAH//////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///gAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////8AAAAA////////////////////////////////AAB///////////8AAAAAAAAAAAAAAAAAAH///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////+4AH//////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADwAAAAH///////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////////////wAAAAAAAP////////////////////////////////////////+H////////////////////wAAAAAAAAAAAP///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAH//////////4AAAAAAAAAAAAAA////////////8AAAAAAAAA//////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////8AAAAAAAAAAAAH///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////wAAAf///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////+AAAAAAAAAD//////////////////////////////8AAAAAAAAAAAAAAAAAP/////8AAAAAAAAAAAAAAAAB///////////////wAAAAAAAAAAAAB///////////////////////////wAAAf///////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////8AH//////////4f//////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////+AAAAAAAAAAAAAAAAAAAB////////////////////////////AAAAAAAAAAAAAAAAAAB/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////4AAAAAAAAAAAAAAAAAAD/////////8AAAAAAAAAAAAAP/////////////////////////////////////gAAAP/////////////////////////////////////////gAAAAAAAAAAAAAP////////////////////////////AAAAAAAB//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////wAAAAAAAAAH//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////gAAAAAAAAAAAAAAAAA/AAAAB/////////////wAAAAAAAAAAAAAAAAAAP/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/gAAAAAAAAAAAAAAAH/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAP///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////wAAAAAAAAAH////////////////////////////////AAAAAAAAAAAAAAD////////+AAAAAAAAAAAAAAAAAAAAAAA////////////////////H///////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////AAAAAAAAAAAAAAH///////////////////////////+AAAAAAAAAAAAAAAAAAAAAB//AAAAAAAAAAAAAAAAAAAAAAAAH////////AAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAf////////////////////////AAAAAAAAf///////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAP//////////////////////////////////////////gAAAAAAAAAAAAAAf/////////+AAAAAAAAAAAAB//////////////////////8AAAAAAf///////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////gAAAAAAAA/////////8AAAAAAAAAAAB////4B///////////////////////////////////8AAAAAAAAAP/////////////////////////////////////////AAAAAAAAAAA////////////////////////////wAAAAAD////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////8AAAAAAAAAAAAAAAAAAD//////4AAAAAD////////////////////////////////////AAAAAAAAAAAAAAP//////////////////////////////////////wAAAAAAAAAAAAAAD///////////////////////+AAAAAAAAAAAAAAAAAAAAD/////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//4AAAAAAAH///AAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////////+AAAAAAH////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAB///////4AAAP//////////gAAAAAAAAAAAAAAAAAf/////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////8AAAAAAAAAAAAAB////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////AAAAAAAAAAAAAAP////////////////////////4AAAAAAAAAAAAAAAAAAAAAeAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////gAAAAAAAAAAAAAAAf///4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////8AAAAAAP//////////////////AAAAAP///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////AAAAAAAAAAAAAAAAAAAAf/////////////////////////8B///AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////////7//AAAAAAAAAAAAAAAAAAAAA///////////////gAAAAAAAAAAAAAAAAAAB8AAAAAAAAAAAAAAAAA//////////////////8AAAAAAAAAAAAAAAAAAAAD////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAAAB//////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////gAAAAAD////////////////////AAAAAAAAAAAAAAAB//////////////+AAAAAAAAAAAAAAAAAAAAAAAAP//////AAA////////////////////////////////////AAAAAAAAAAAAB/////8AAAAAAAAAAAAf/////////////////wAAAAAAAAH///////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///+AAAAAAAAAAAAAAAD+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////gAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH///////////wAAH///////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////8AAAAAAAAAAAAAAAAAAAAAAAA////////////AAAAAAAAf/////////////////////////////////////gAAAAAAAAAAAAAD
/////////////////////////////////////////+AAAAAAAAAAAAAAAH//////////////////AAAAAAAAAAD////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////8AH////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////////wAAAAAAAAAAAAAAD////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////AAAAAAAAAAAAAAAAAAAAA/wAAAAAD///AAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////w==", + "sha1": "a4d46e4ba0ae4b012f75b1b50d0534d578ae9cb6", + "sha256": "f7f4721be31524d014bacf105b06bacc4bdb953bc04d5a048e1fd4ddc395667e", + "sha512": "79ade28ad07bfef57f84f8dd5b8fcf05e887e897c3f10ede2a27990720e70487fdaa29d11fb806e4a7d4920f7b015bf7390437396ef28edd3f52afd67d1cb908" + }, + { + "input": "AAAAB//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAA//wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////AAAAAAAAAAAAAAAAAAAAAAAH////8AAAAAAAAAAAAAAAAAAAD//////////////////////////////gAAAAAAAAAAAf/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAAAAAAAAAAAAAB//////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////AAAAB//////////////////////////////////AAAAAAAAAA/////////////AAAAAAAA//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////////AAAAAAAAAAAAAAAAAAA/////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////AAAAAAAAAAAAAAH//////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////AAAAAAAAAAAAD/////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////+AAAAAAAAAAAAAAAAAAAAH//////////////////////////////////////////AAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAB////////////////////////////wAAAAAAAAAAAAAAAAAH/////////////////////4AAAAAAAAAAAAAAAAAAAAAAAB////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////+AAAAAAAAAAAAAB////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////AAAAAAP////////////////////////+AAf/////////////////////wAAAAAAAAAAAAAAAAAH////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////4AAAAAAAAAAAAAAAAAAAAAAAAAAAP//gAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////+P8AAAAA/////////////////gH////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////////+AAAAAf8AAAAAAAAAP//////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////gAAAAAAAAAAAAAP/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////wAAAAAAAAAAAAAAAAAAAAAAH//////4AAAAAAAAAAAAAAAAAH////////////////////////////////+AAAAAAAAAA
AAAAAAAAf//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAA/////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////AAAAAAA/////////////////////////gAAAAAAAAAAAAAAAD/wAAAAAAAAAAAAAAAAAAH//////////wAAAAAAAAAAAAAAAAP///////////////////gAAAAAAAAAD//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////wAAAAAAAAAAAAAAAA//////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAP/+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////4AAAAAAAAAAAAAAAAAAAfgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////+AAAAAAAAAAAAAAAAAAAAD///////////wAAAAAAAAAAAAAf////AAAAAAAAAAAP//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////+AAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////8AAAAAAAAAAAAAAAAAAAf///////////////////////+AAAAAAAAAAGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////8AAAAAAAAAAAAf////8AAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////////////wAAAAAAAAH///////////////8AAAAAAAAAAAAAAAAAAAAAAD/////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////////////////AAAAH////////////////////////////////////////8AAAAAA//////////////////////////////8AAAAAAAAB////////////////////////////////////+AAAAAAAAAAAAB///////////////////////4AAAAAAAAA/////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///////////////////////////8AAAAAAAAAAAAAAAAAAAAD///////////////8AAD8AAAAAAB//////4AAAD///////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///+AD//////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////gAAAAAf///////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////+AAAAAAAAAAAAAAAAAAAf//////////////////////////8AAAAP/////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAD////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////4B////////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAD//+AAAAAAAAAAP//4AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////8AAAAAAAAAf//////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////gAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////+AAAAAAAAAAAAB///////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////+AAAAAAAAAAAAAAf/////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP///AAAAAAAAAAAAAAAAAAAAAH//////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////wAAAAAAAAAAAD////////////////////wAH////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////+AB////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////////////////////////+AAAAAAAAAAAP///////////////////////////////8AAAAAAAAAAAAAAAAB////////wAAAAAAAAAAAAAAAB////////8AA/////////////////////////////wAAAAAAAAAf///////+AAAAAAAAAAf////////////////////+AAAf//////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////wAAAAP//8AAAAAAAP/////////////////AAAB/////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAD///////gAAAAAAAP///////gAAAAAAAAAAAAAAAAAAAAAf8AAf//////////////////////wAAAAAAAAAAAAAAAAAAAAAA/gAAAAAAAAAAAAAAAB//4AAAAAAAAAAAAAAD/////
/////////////////////////AAAAAAAD////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////////////4AAAAAAAAAAAAAAAAAAAD///////////////////////////////////////8AAAAAAAD/////////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////wAAAAAAAAAAAf/////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////+AAAAAAAAAAAAAAAAAAAAAAAD//////////////////////AAAAAAAAAAAAAAAP//////////////////////////////8AAAPAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAH//////////////////////////wAAAAAAAAAAAAAAAAD/////////////////////////////////wAAAH///////////gAAAAAAAAAAAAAAAAAAAAf//////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB///////////////////////////////////////AAAA//////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////+AAAAAAAAAAA//////////////////////////////////gAAAAAB///////////gAAAAAAAAAAAAAAAAAAAAAA/////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////////////gAAA/////////////////////////+P///////////////////////4AAAAAAAAAAAAAAB//////////////////+AAAAAAAAAAAAAH//////////4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//////////////////////////////////////////wAAAAf/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////////////////8H//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAf/////////AAAH///////////4AAAAAAP///////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////////8f////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf///////8AAAAAAAAA/////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAf4AAAAAAAAAAAAAAAAAAAAGAAAAAAAAAAAAAAAAAAAD/////////////////////////+AAAAAAAAAAAAAAf/////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf////////gAAAAAAAAAAA/////////////////////////////////+AAAAAAAAAAAAAAAAAAAAD//////////////////////////////////gAAAAAAAAAAAAAAAAAAAAAAA//////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////8AAAAAAAAAAAAAAAAAAA////////////////////wAAAAAAAAAAAAP//////////////////////////////////////8B+AAAAAAAAAAAAAAAAAAAP///////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//////////////////AAAAAAAAAAAAAAAAAAAAAAAAA////////////////////////////////////////gAAAAAAAAAAAAAAAAAAAA///////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAB/////////////////////wAAAAAAAAAAAAAAAAAB///////////AAB////////////////////////////////////////gAAAAAAAAD/8AAAAAAAA////////////////////////4D///////////////////////////////////////gAAAAAAAAAAAAAAAAAAAAP////////////////////////+AAAAAAAP///////////////////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB////////////////////AAAAAAAAAAAAAAAAAAAAAAAAP////////////////////////gAAAAAAAAAAAAAAf////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAf//8AAAAAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////////+AAAAAAAP///////////////////////////////////4AAAAAAAAAAAAAAAAAAAAAAAAf/////////////////////////////////////+AAAAAAAAH/////////////gAAAAAAAAAAAAAAAAAAAAAAH///////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA///////////////////////////8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAD////////////////////////////wAAAAAAf///////////////////////////////////8AAAAAAAAAAf//////////////8AAAAAAAAAAAAAAAB////////////////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+AAAAAAAAAAAAAAAAAAAAAAAAAf////////////////////////////////////////+AAAAAAAAAAAAAAAAAAAAAAAAP/////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////8AAAAP/////////////////////////////gAAAAAAAAAAAH///////+AAAAAAAAAAAA///////////////////8AAAAAB/////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///////////////wAAAAAAAAAP////////////////////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAf///////AAAAAAP/////////////////8AAAAAAAAAAAAAAAAAAB//////////////////////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////////////////////////////AAAAAAAAAAAAAAAAAD//////+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH////////////AAAAAAAAAAAAD/////////4AAAAAAAAAAAAAAAP/wAAAAP//////////////////////////////8AAAAAAAAAAAAAAAD//////////////////////////////////AAAAAAAAAAAAAAAAAAAH////////4AAAAAAAAAAAAAAAAAAAAAAAAAAA/////////////////AAAAf//////////////////////////4AAAAAAAAAAAAf/////8AAAAAAAAAAAAAAAAAAAAAAP////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////wAAAAAAAP//////////AAAAAAAH///////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//////////////////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB/////////////AAAAAf////////////////////////////////////////AAAAAAAAAAAAAAAAAAAAH/////////8AP/////////////////////+AAAB//AAAAAAAAAAAAAAAAAAAAAAAAA////////////gAAAf/////////////8AAAAAAAAAAAAAAD//////////AAAAAAAAAAAAAAAAAAAAAAAAAAB/4AAAAAAP////////////////////gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", + "sha1": "6342b199ee64c7b2c9cbcd4f2dcb65acef51516f", + "sha256": "426cbfa5a10024c4f5deae9146222146c2d75a5bf13e8215c04d7dd17f455743", + "sha512": "a38b0e7a64c8ce3fc050e35bde8dc0df0e47acbfe8be29a0f464d91f1130792c11556f3edd52627753d47fe2b7f257f8406de95cd90857dff2c9cb1d5ba25141" + } +] diff --git a/node_modules/sha.js/test/prepare/vectors.js b/node_modules/sha.js/test/prepare/vectors.js new file mode 100644 index 0000000..63ec0e6 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors.js @@ -0,0 +1,50 @@ +var assert = require('assert') +var crypto = require('crypto') +var fs = require('fs') +var path = require('path') + +var dir = path.join(__dirname, 'vectors') + +var vectors = fs.readdirSync(dir) + .sort() + .filter(function (f) { + return f.match(/\.dat$/) + }) + .map(function (f) { + return fs.readFileSync(path.join(dir, f)) + }) + +var hashes = {} + +;['sha1', 'sha256', 'md5'].forEach(function (name) { + hashes[name] = + fs.readFileSync(path.join(dir, 'byte-hashes.' 
+ name), 'ascii') + .split(/\r?\n/) +}) + +var expected = [] + +for (var i = 0; i < vectors.length; i++) { + var sha512 = crypto.createHash('sha512').update(new Buffer(vectors[i])).digest('hex') + + expected.push({ + input : vectors[i].toString('base64'), + sha1 : hashes.sha1[i], + sha256 : hashes.sha256[i], + sha512 : sha512 + }) + + assert.equal( + crypto.createHash('sha1').update(new Buffer(vectors[i])).digest('hex'), + hashes.sha1[i]) + + assert.equal( + crypto.createHash('sha256').update(new Buffer(vectors[i])).digest('hex'), + hashes.sha256[i]) + + assert.equal( + crypto.createHash('md5').update(new Buffer(vectors[i])).digest('hex'), + hashes.md5[i]) +} + +console.log(JSON.stringify(expected, null, 2)) diff --git a/node_modules/sha.js/test/prepare/vectors/Readme.txt b/node_modules/sha.js/test/prepare/vectors/Readme.txt new file mode 100644 index 0000000..99d14c9 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/Readme.txt @@ -0,0 +1,25 @@ + +File formats: + +There are two files included for this byte-oriented test. +One file contains the messages and the other file contains the hashes. + +The message files provided use "compact strings" to store the message values. +Compact strings are used to represent the messages in a compact form. +A compact string has the form + z || b || n(1) || n(2) || ... || n(z) +where z>=0 represents the number of n(i), b is either 0 or 1, and +each n(i) is a decimal integer representing a positive number. +The length of the compact string is given by the summation of the n(i). + +The compact string is interpreted as the representation of the bit string +consisting of b repeated n(1) times, followed by 1-b repeated n(2) times, +followed by b repeated n(3) times, and so on. + +Example: + M = 5 1 7 13 5 1 2 + where z = 5 and b = 1. Then the compact string M represents the bit string + 1111111000000000000011111011 + where 1 is repeated 7 times, 0 is repeated 13 times, 1 is repeated 5 times, + 0 is repeated 1 time, and 1 is repeated 2 times.
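For reference only (not part of this diff or of the sha.js package), a minimal sketch of how such a compact string expands into its bit string, assuming the compact string is supplied as an array of decimal integers [z, b, n(1), ..., n(z)] exactly as described in the Readme above. The helper name expandCompactString is hypothetical; the .dat vector files that test/prepare/vectors.js reads already contain the expanded message bytes, so this is purely to illustrate the encoding:

// Illustrative sketch -- not part of this repository.
// Expands a NIST "compact string" [z, b, n1, ..., nz] into its bit string:
// b repeated n1 times, then 1-b repeated n2 times, and so on, alternating.
function expandCompactString (compact) {
  var z = compact[0]                      // number of run lengths
  var b = compact[1]                      // starting bit, 0 or 1
  var bits = ''
  for (var i = 0; i < z; i++) {
    var run = compact[2 + i]
    var bit = (i % 2 === 0) ? b : 1 - b   // runs alternate between b and 1-b
    for (var j = 0; j < run; j++) bits += bit
  }
  return bits
}

// Readme example: M = 5 1 7 13 5 1 2
// expandCompactString([5, 1, 7, 13, 5, 1, 2]) === '1111111000000000000011111011'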
+ diff --git a/node_modules/sha.js/test/prepare/vectors/byte-hashes.md5 b/node_modules/sha.js/test/prepare/vectors/byte-hashes.md5 new file mode 100644 index 0000000..97a913a --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte-hashes.md5 @@ -0,0 +1,196 @@ +d41d8cd98f00b204e9800998ecf8427e +c3e97dd6e97fb5125688c97f36720cbe +038701ca277a9d4de87bff428dd30a12 +bc60c6192e361d99b59d47250668a852 +542c3a0ab6b51bc6a88fa7bb567bca3e +e035f9e748a2a09a4fbdcf18c4f58bf1 +3b4cc9226a236742d72578c5915b6c3c +35950208a022baac90056636827158ce +84cedff2ed1b78b395cc8651094f4ce3 +7badf748f4cb700272a72edfea22e9bf +a1bb6e142739dbdb0925747d95e0a1ad +0cd9b72dfdee8efd2e1515f4c5a62284 +ef07c13e75d50578d09052aa21a7cffb +cf3b261af9344bf83b4dd82b30242c78 +530710f65fb98fff8eb927e2938cb8c5 +4e6d73658b27e19d4bb4500625001e39 +c8e5f2f272b1ef88ec62dd0d9d54e902 +031cbf1fb05b4ec09f3c93235d0f49ac +8c0e1400df02ba8c4809b705e5f5e114 +57ec48278e19f71f54c570a5ab306df7 +ecd3dc346a2337b95389a094a031610f +f11d91eae492225cbd82ef356aa96f9f +26bd8b480216c723ce75da98b9bd430c +80999c2d12f623e4f87e0550a8e3523a +00945c1bd739ce389ac24bb93f6f9a85 +7ab55f0bd5dca5b17ecaa7fef73ed87b +e3cedd606ad51dd18532abd3079a3e0c +df5ecc6732e22cc25836398a10222e97 +863b6d9962ee3761bbb9cd8a8367589e +683c9384e29efe82dd3ac847904c28e8 +b3d948e72159ddc9c600d75512c5f115 +ce8633a6cf189b07e022147bbbd0f350 +8df17372eb32a0afa4fc47837262ff61 +62c63ca91890ce6f78a59c0bdb1e7bab +1eda4bb0259a939548ec4ceb39facde4 +c4f37a2c450f2a23322513b372e668a5 +cab8f06436c5ad45f982490215836f4e +3a43bc720714a2a42a73a76085542f86 +03f2f4033b258e6eb1e101f1ed4c24b4 +2ceb33cec5ecad4a50f6bd3a831ae77c +dd808f695d28f93562cfcb164bc3cce4 +01c6d7a87e94bf685205ec8d7c5196af +ef0e93e8928f8bae1b216da8e661fc9b +c8da55117d7d4b7ee8ddc8dc4ba73aa6 +bbfc64583c6d4c2ef4b0358464d4d028 +3bb5864481f2e66387419dd1a168aadc +0d725d3a1d3d97d7b5ea8293bbbf32ba +915eb22a15f7673f983672b6c353b6c8 +13b51da3e8a1422bfd58b79c4e19de64 +e69d6c03102464f22c395f9fa27108de +132fa4cbedaa7bd965b0b5900211be48 +e37ff5d9f14249f327a19dd5296e6c7e +4881a65cf107b1d034ff3ecd64ab9cb4 +547e92d01c0b699cfdf43f91714cfe2d +aa2b3a055b56845f19109f21d3c783f4 +eb1f01cc647ece73b2192537200bb8b9 +1db274ef41b1ad71f713df2b05207e1a +d8b4ec343b4310345efc6da9cee8a2ec +082ee3b2be7910f7350368e395a63d90 +d247c4070ae1de106bcb438a2dacac23 +f8cbc4f3af45befc792679f2b113f1cb +9031006a437019c5dcd987a31731ebd9 +a6b62759ee3883258fbdeeb8b56e6283 +4933898605b4a1b970b674a2dde92292 +f0684ca20de4607232f3e158e81a37f2 +c0b3fdecb3bb7b4ff0c936f378ccb027 +50652123b5e0e51bb5bc3fdde3c6a750 +ed4526ba8226d969f47edbb27b2f1144 +80e6f61dff9da8673fa16dbbdb14d03d +1d52744bf1450d7c5cfdf1f0bbf967c1 +3438a953124960bcc44611923a8844ee +b2f341296dd7aabbd4fd8e011be68a7d +322dba69658a92e9a9ace4d7177fb97d +b94a434a98efa493fbbc989360671bb9 +cd9ce9a01ed810af70999d8ce4c63811 +4c639abb75a0ae0f22c3384cb9c68441 +fe31ffcced1717988c854c2f3492466e +b56d81337f9bbf0d838df831e9b40216 +0be9161adfeb2dd1c3f20338bfb3ec4b +be7b7c9fa1ab09d6578a3f2a82bfafe3 +f6bdc04b4611ddf0aa8403bcb04292f7 +1c7146a10f3c76b0c1dd4af354b14982 +0d3d987f94aee65f84436696bcf33ea4 +1a5c9ac3ee859361ad5477ea792506a3 +e827d60f27e35d8e5b05af748ba897dd +5b7899bf7a6267d9b3b8c82f241a1d7b +6dc9fe740cf4a4b93cb0953a3c2a6026 +27adf814806fd4a51c1ffc84122c5c8a +f74e94ab992c8f27de264993a09ab429 +5eee0f1591d10c159763749ec86b9ecb +46898964a3889615d9f7c22a81e0a0e7 +8fb58d6770971b0f12e40b31ad65b4a9 +eb4ce130268dc13731dcd16ff492d0a9 +23532a54e8005860ad5e77f4e3392827 +07fedc4dc4891d1a90c501a781a666f2 +83e8341035b37dd70a92a6eed5406927 
+6c9f7b3b25734d58f21f5050642874a5 +ef661042e6624f4052ce86d8f233d780 +efe794cdfad5cb86656e29854a1f5c92 +e5f19a0045481443bae165f03598a9ba +b8fe8691321edbf308a9d60bb817c6af +f31fdd0f1aef106005e6d29b72229fa1 +239ed45c3cb734db446adfbbe3dab8a1 +2c2303411c7d25617a54106aca18070d +de179c41aca8bcdc388964024948ff8e +ca335b74d59bd50832267d3bf28f81df +dabda7a1cbaa8ea5104c57c8950b703a +076352a22ecea5ebc876812f62c1cb8d +ee0a2bdec712a9413623d8a920714b96 +a927c3a99f2843de4133377c690db9b7 +1fa98cff485549d49799dc8cf987a8af +74013a076a786a26c7e04217bb51031d +a44ca9661e967bb2e98af65277dac72f +d30897726b635548dbfa5cebffd9cd63 +4ad04a250b8029c9a7bf6529ee8793c3 +de41e337d96fd23619121ea709861e1a +18e070fd32cf732b9f37a0083320eec2 +7dd4b27ca8906182f684d0ee4ddb98c4 +70a440a8bd06ff40f6e9135946eb174d +b8d052366e752ce7c803abd24854e934 +8ab9dfff746ce3e62c6e04feb7b48528 +ecfca8b371616efe78e9916dbf825f5b +5f76da828c37fc4edb4557953539c92a +ecad54f76ce3bc233e02fc6fd7f94628 +e8a1cc06bfec7f677f36a693e1342400 +9ad0fe040e44a8e7146c3dd8582b6752 +4e56f978f94cf72158fd4311831b4f9f +3b95686fe49f50006607d5978aaa3efc +fa354daecc45f14b82b0e7e567d24282 +b7c30cf902e74c10e3d5c3af7e854f6b +e9369a7ec98e63186bdae77025cb5519 +57b441e2f3397d2628657e636cd2fc80 +8ae3a1e880ffb884260ec26e8fcd71a5 +eb7d8f9199945e8a1e5c3708da45e08b +d7dd1997c20a1029f9bd0fd1e2d2ed92 +a986ef62ef378583985cf0d0a34d17d0 +ad5bef0d6ad3434f871983ed09aaa43c +326f662a5c18a14d26c3d35131ea4b4e +ea4bf919aebf4add0024d91ee6f640d0 +9cc49e156084d2c757bd6d502bae8309 +9c18d4c75cc02337c277532ecea4b9fa +4159a65b7db275742e998fb855e7b9f3 +df34d37f6b4ef078bd9570efdd8fd2e2 +84d2c12c4f0c28d288464d33a23f227c +17b55bbd4222066960e54182e1e95f0b +75eb69b22793852bc892ce264c421a1e +de4abe78e28e2718200c76237f2ed42f +1149c8fc988799f43f6e5069355e108b +4129891ff13ddd62820f6f3cdbfa95da +c8758df3c9ad4d311516ea39fe734052 +360ddf0b658fd764ef5ae9bf7a8a1a12 +ad054e0e84e2b8e2b02ce4dee7688226 +cb434f8c5fad9793ed142805afa861a0 +83a3d5436f96cb2cb31d929794425f31 +34dde0f0fe7d4fdb359df1fccbf5fcde +7b77219e9549fad49e97c380f7e1f362 +053f4e89ae2355c5cb259d21e85eb9cd +fc45c5118f642cc479e6a550756f1a4e +0138351089a87a2ddc2d98255ce6b8cc +1f3e42daa4b315f2a0e6a530e0cc6976 +aec4974f238a6e04dcb07e20ad861230 +7a27fedaeec41b5832bda3169d76cd05 +154bd1371ae66ad3ab9a9ee6b1324e36 +a4594c9e974eed1fc159cc306dd7378a +431acd1a4a4d6036057c9906da8add5e +f6afe47bdedf075c7e188b2640152cf7 +8bc3bd8625778f64ed7c29698025f292 +51f6bb4db8e6e61cc4333450c6035139 +0baff1c675866bf259d3ac9417a33464 +6e8a56a9a005c6c6239ccbdf48f59aa8 +6565bceb49f962f797f49084f3f819a1 +2267037a7f3e753c653218fcf67ce9c7 +aca1ae6237f498986991565b0307f0da +785bb09a5f25730a3aed4de12da4d9ea +4eb5472f4e5243fcd4a76533789e829a +7d725ae9a8e569f49c56194226b64dee +7396f5d4491e79ec1ac0ce7a105bb233 +aa64644a4877da34e2197c5f2dc375c5 +2165718fc24bf21f1c4e0623c8e8d811 +e1f45852024724f00ced7935e297983a +deac06cde1f6b18a53a2cf0b03998da2 +8371f0970efbc6099c50afbbd4f0e477 +985d909280bc20607f4cb4941ae535f2 +abcdd18a791546544b52c0587dbd6107 +23e8b5a657c962a3e77979859ae1400e +cc4fab29cc180ffa888be396ce6aa6f5 +b553506daedf701ccdc437fbf3e6bbe4 +d707ae093ab94607010ddda09fc8a5a8 +76bdae04521ba996636c4dc431040031 +556c14fd0f3ff7bd6b435bd630e48811 +b500501957d4b8b412ea0102c842dd5e +d18506a74c66e4d8537269c10c783923 +c9b4b691f4d88b7d2b4d5b770b05c8bf +ba915c678f944fe5a480364ddc3382a8 +78134c91a1ffb2e21594daa2c2a932fc +6fc6c8790dfc301ee38b8b63e18def5c diff --git a/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha1 b/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha1 new file mode 100644 
index 0000000..f251291 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha1 @@ -0,0 +1,196 @@ +da39a3ee5e6b4b0d3255bfef95601890afd80709 +3cdf2936da2fc556bfa533ab1eb59ce710ac80e5 +19c1e2048fa7393cfbf2d310ad8209ec11d996e5 +ca775d8c80faa6f87fa62beca6ca6089d63b56e5 +71ac973d0e4b50ae9e5043ff4d615381120a25a0 +a6b5b9f854cfb76701c3bddbf374b3094ea49cba +d87a0ee74e4b9ad72e6847c87bdeeb3d07844380 +1976b8dd509fe66bf09c9a8d33534d4ef4f63bfd +5a78f439b6db845bb8a558e4ceb106cd7b7ff783 +f871bce62436c1e280357416695ee2ef9b83695c +62b243d1b780e1d31cf1ba2de3f01c72aeea0e47 +1698994a273404848e56e7fda4457b5900de1342 +056f4cdc02791da7ed1eb2303314f7667518deef +9fe2da967bd8441eea1c32df68ddaa9dc1fc8e4b +73a31777b4ace9384efa8bbead45c51a71aba6dd +3f9d7c4e2384eddabff5dd8a31e23de3d03f42ac +4814908f72b93ffd011135bee347de9a08da838f +0978374b67a412a3102c5aa0b10e1a6596fc68eb +44ad6cb618bd935460d46d3f921d87b99ab91c1e +02dc989af265b09cf8485640842128dcf95e9f39 +67507b8d497b35d6e99fc01976d73f54aeca75cf +1eae0373c1317cb60c36a42a867b716039d441f5 +9c3834589e5bffac9f50950e0199b3ec2620bec8 +209f7abc7f3b878ee46cdf3a1fbb9c21c3474f32 +05fc054b00d97753a9b3e2da8fbba3ee808cef22 +0c4980ea3a46c757dfbfc5baa38ac6c8e72ddce7 +96a460d2972d276928b69864445bea353bdcffd2 +f3ef04d8fa8c6fa9850f394a4554c080956fa64b +f2a31d875d1d7b30874d416c4d2ea6baf0ffbafe +f4942d3b9e9588dcfdc6312a84df75d05f111c20 +310207df35b014e4676d30806fa34424813734dd +4da1955b2fa7c7e74e3f47d7360ce530bbf57ca3 +74c4bc5b26fb4a08602d40ccec6c6161b6c11478 +0b103ce297338dfc7395f7715ee47539b556ddb6 +efc72d99e3d2311ce14190c0b726bdc68f4b0821 +660edac0a8f4ce33da0d8dbae597650e97687250 +fe0a55a988b3b93946a63eb36b23785a5e6efc3e +0cbdf2a5781c59f907513147a0de3cc774b54bf3 +663e40fee5a44bfcb1c99ea5935a6b5bc9f583b0 +00162134256952dd9ae6b51efb159b35c3c138c7 +ceb88e4736e354416e2010fc1061b3b53b81664b +a6a2c4b6bcc41ddc67278f3df4d8d0b9dd7784ef +c23d083cd8820b57800a869f5f261d45e02dc55d +e8ac31927b78ddec41a31ca7a44eb7177165e7ab +e864ec5dbab0f9ff6984ab6ad43a8c9b81cc9f9c +cfed6269069417a84d6de2347220f4b858bcd530 +d9217bfb46c96348722c3783d29d4b1a3feda38c +dec24e5554f79697218d317315fa986229ce3350 +83a099df7071437ba5495a5b0bfbfefe1c0ef7f3 +aa3198e30891a83e33ce3bfa0587d86a197d4f80 +9b6acbeb4989cbee7015c7d515a75672ffde3442 +b021eb08a436b02658eaa7ba3c88d49f1219c035 +cae36dab8aea29f62e0855d9cb3cd8e7d39094b1 +02de8ba699f3c1b0cb5ad89a01f2346e630459d7 +88021458847dd39b4495368f7254941859fad44b +91a165295c666fe85c2adbc5a10329daf0cb81a0 +4b31312eaf8b506811151a9dbd162961f7548c4b +3fe70971b20558f7e9bac303ed2bc14bde659a62 +93fb769d5bf49d6c563685954e2aecc024dc02d6 +bc8827c3e614d515e83dea503989dea4fda6ea13 +e83868dbe4a389ab48e61cfc4ed894f32ae112ac +55c95459cde4b33791b4b2bcaaf840930af3f3bd +36bb0e2ba438a3e03214d9ed2b28a4d5c578fcaa +3acbf874199763eba20f3789dfc59572aca4cf33 +86be037c4d509c9202020767d860dab039cadace +51b57d7080a87394eec3eb2e0b242e553f2827c9 +1efbfa78866315ce6a71e457f3a750a38facab41 +57d6cb41aeec20236f365b3a490c61d0cfa39611 +c532cb64b4ba826372bccf2b4b5793d5b88bb715 +15833b5631032663e783686a209c6a2b47a1080e +d04f2043c96e10cd83b574b1e1c217052cd4a6b2 +e8882627c64db743f7db8b4413dd033fc63beb20 +cd2d32286b8867bc124a0af2236fc74be3622199 +019b70d745375091ed5c7b218445ec986d0f5a82 +e5ff5fec1dadbaed02bf2dad4026be6a96b3f2af +6f4e23b3f2e2c068d13921fe4e5e053ffed4e146 +25e179602a575c915067566fba6da930e97f8678 +67ded0e68e235c8a523e051e86108eeb757efbfd +af78536ea83c822796745556d62a3ee82c7be098 +64d7ac52e47834be72455f6c64325f9c358b610d +9d4866baa3639c13e541f250ffa3d8bc157a491f 
+2e258811961d3eb876f30e7019241a01f9517bec +8e0ebc487146f83bc9077a1630e0fb3ab3c89e63 +ce8953741fff3425d2311fbbf4ab481b669def70 +789d1d2dab52086bd90c0e137e2515ed9c6b59b5 +b76ce7472700dd68d6328b7aa8437fb051d15745 +f218669b596c5ffb0b1c14bd03c467fc873230a0 +1ff3bdbe0d504cb0cdfab17e6c37aba6b3cffded +2f3cbacbb14405a4652ed52793c1814fd8c4fce0 +982c8ab6ce164f481915af59aaed9fff2a391752 +5cd92012d488a07ece0e47901d0e083b6bd93e3f +69603fec02920851d4b3b8782e07b92bb2963009 +3e90f76437b1ea44cf98a08d83ea24cecf6e6191 +34c09f107c42d990eb4881d4bf2dddcab01563ae +474be0e5892eb2382109bfc5e3c8249a9283b03d +a04b4f75051786682483252438f6a75bf4705ec6 +be88a6716083eb50ed9416719d6a247661299383 +c67e38717fee1a5f65ec6c7c7c42afc00cd37f04 +959ac4082388e19e9be5de571c047ef10c174a8d +baa7aa7b7753fa0abdc4a541842b5d238d949f0a +351394dcebc08155d100fcd488578e6ae71d0e9c +ab8be94c5af60d9477ef1252d604e58e27b2a9ee +3429ec74a695fdd3228f152564952308afe0680a +907fa46c029bc67eaa8e4f46e3c2a232f85bd122 +2644c87d1fbbbc0fc8d65f64bca2492da15baae4 +110a3eeb408756e2e81abaf4c5dcd4d4c6afcf6d +cd4fdc35fac7e1adb5de40f47f256ef74d584959 +8e6e273208ac256f9eccf296f3f5a37bc8a0f9f7 +fe0606100bdbc268db39b503e0fdfe3766185828 +6c63c3e58047bcdb35a17f74eeba4e9b14420809 +bcc2bd305f0bcda8cf2d478ef9fe080486cb265f +ce5223fd3dd920a3b666481d5625b16457dcb5e8 +948886776e42e4f5fae1b2d0c906ac3759e3f8b0 +4c12a51fcfe242f832e3d7329304b11b75161efb +c54bdd2050504d92f551d378ad5fc72c9ed03932 +8f53e8fa79ea09fd1b682af5ed1515eca965604c +2d7e17f6294524ce78b33eab72cdd08e5ff6e313 +64582b4b57f782c9302bfe7d07f74aa176627a3a +6d88795b71d3e386bbd1eb830fb9f161ba98869f +86ad34a6463f12cee6de9596aba72f0df1397fd1 +7eb46685a57c0d466152dc339c8122548c757ed1 +e7a98fb0692684054407cc221abc60c199d6f52a +34df1306662206fd0a5fc2969a4beec4eb0197f7 +56cf7ebf08d10f0cb9fe7ee3b63a5c3a02bcb450 +3bae5cb8226642088da760a6f78b0cf8eddea9f1 +6475df681e061fa506672c27cbabfa9aa6ddff62 +79d81991fa4e4957c8062753439dbfd47bbb277d +bae224477b20302e881f5249f52ec6c34da8ecef +ede4deb4293cfe4138c2c056b7c46ff821cc0acc +a771fa5c812bd0c9596d869ec99e4f4ac988b13f +e99d566212bbbceee903946f6100c9c96039a8f4 +b48ce6b1d13903e3925ae0c88cb931388c013f9c +e647d5baf670d4bf3afc0a6b72a2424b0c64f194 +65c1cd932a06b05cd0b43afb3bc7891f6bcef45c +70ffae353a5cd0f8a65a8b2746d0f16281b25ec7 +cc8221f2b829b8cf39646bf46888317c3eb378ea +26accc2d6d51ff7bf3e5895588907765111bb69b +01072915b8e868d9b28e759cf2bc1aea4bb92165 +3016115711d74236adf0c371e47992f87a428598 +bf30417999c1368f008c1f19feca4d18a5e1c3c9 +62ba49087185f2742c26e1c1f4844112178bf673 +e1f6b9536f384dd3098285bbfd495a474140dc5a +b522dae1d67726eba7c4136d4e2f6d6d645ac43e +e9a021c3eb0b9f2c710554d4bf21b19f78e09478 +df13573188f3bf705e697a3e1f580145f2183377 +188835cfe52ecfa0c4135c2825f245dc29973970 +41b615a34ee2cec9d84a91b141cfab115821950b +ab3dd6221d2afe6613b815da1c389eec74aa0337 +0706d414b4aa7fb4a9051aa70d6856a7264054fb +3cbf8151f3a00b1d5a809cbb8c4f3135055a6bd1 +da5d6a0319272bbccea63acfa6799756ffda6840 +fb4429c95f6277b346d3b389413758dfffeedc98 +2c6e30d9c895b42dcccfc84c906ec88c09b20de1 +3de3189a5e19f225cdce254dff23dacd22c61363 +93530a9bc9a817f6922518a73a1505c411d05da2 +e31354345f832d31e05c1b842d405d4bd4588ec8 +3ff76957e80b60cf74d015ad431fca147b3af232 +34ae3b806be143a84dce82e4b830eb7d3d2bac69 +d7447e53d66bb5e4c26e8b41f83efd107bf4adda +77dd2a4482705bc2e9dc96ec0a13395771ac850c +eaa1465db1f59de3f25eb8629602b568e693bb57 +9329d5b40e0dc43aa25fed69a0fa9c211a948411 +e94c0b6aa62aa08c625faf817ddf8f51ec645273 +7ff02b909d82ad668e31e547e0fb66cb8e213771 +5bb3570858fa1744123bac2873b0bb9810f53fa1 
+905f43940b3591ce39d1145acb1eca80ab5e43cd +336c79fbd82f33e490c577e3f791c3cbfe842aff +5c6d07a6b44f7a75a64f6ce592f3bae91e022210 +7e0d3e9d33127f4a30eb8d9c134a58409fa8695b +9a5f50dfcfb19286206c229019f0abf25283028c +dca737e269f9d8626d488988c996e06b352c0708 +b8ffc1d4972fce63241e0e77850ac46dde75dbfa +e9c9bf41c8549354151b977003ce1d830be667db +0942908960b54f96cb43452e583f4f9cb66e398a +fce34051c34d4b81b85ddc4b543cde8007e284b3 +61e8916532503627f4024d13884640a46f1d61d4 +f008d5d7853b6a17b7466cd9e18bd135e520faf4 +bd8d2e873cf659b5c77aac1616827ef8a3b1a3b3 +b25a04dd425302ed211a1c2412d2410fa10c63b6 +a404e21588123e0893718b4b44e91414a785b91f +a1e13bc55bf6dad83cf3aabda3287ad68681ea64 +d5fd35ffabed6733c92365929df0fb4cae864d15 +c12e9c280ee9c079e0506ff89f9b20536e0a83ef +e22769dc00748a9bbd6c05bbc8e81f2cd1dc4e2d +f29835a93475740e888e8c14318f3ca45a3c8606 +1a1d77c6d0f97c4b620faa90f3f8644408e4b13d +4ec84870e9bdd25f523c6dfb6edd605052ca4eaa +d689513fed08b80c39b67371959bc4e3fecb0537 +c4fed58f209fc3c34ad19f86a6dacadc86c04d33 +051888c6d00029c176de792b84dece2dc1c74b00 +1a3540bee05518505827954f58b751c475aeece0 +dfa19180359d5a7a38e842f172359caf4208fc05 +7b0fa84ebbcff7d7f4500f73d79660c4a3431b67 +9e886081c9acaad0f97b10810d1de6fcdce6b5f4 +a4d46e4ba0ae4b012f75b1b50d0534d578ae9cb6 +6342b199ee64c7b2c9cbcd4f2dcb65acef51516f diff --git a/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha256 b/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha256 new file mode 100644 index 0000000..7332dea --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte-hashes.sha256 @@ -0,0 +1,196 @@ +e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 +09fc96082d34c2dfc1295d92073b5ea1dc8ef8da95f14dfded011ffb96d3e54b +33a633841666a5c291a82bfae65deac5c537d05f9fe926cbb5b7281bf90ad150 +8e2cc699f7e677265069f172d4cba15c146e954d7e4f2a8c176576035275b7a0 +8096d72b968a2dbb7ceee163c1981f7f1ec11ee10051b2dc2a8d7601d4e56971 +2cc06402328f034d1909fa7b95f34cdb5585ce7f9096bc4082c97904921f6304 +99a8d6823b803a8d41ed7c26322b4ac8fdc86ce4457ffaaf8600e99595f1577a +5d73704556bd458af1b90383d98591c1d01894d99b394fb7647d3d0cbd45f9a0 +3b9606c772ea20bf2889732b034f9fd476ceddefe8ec4e3704c5993e38dace1b +b858d7c61b67e1688c267ca83b57dd0947c4e5acc4eb3d130fbd92222b66a9ab +87574390294ef6d212b6d8c44ebd5c88e932036dadd0b827c6cb25cd120bfdf6 +afe90242f095e967523c12333b0093d4e532a0db0f27dcc25d44d23ffb62094b +55731252db2418c49f15d7f0f146c6506589f016a82c72cf8a6276ac6bd123c7 +c2e0ed603bfa67292b78a29264e409a2e0c98482cdb59cf4fe7cafff69853d11 +517f007a8b65d4197411c35b14edb1340490a9be7a6c66b8c827b1e101a05b5c +76c977fdc97261cd0956ce1319476d314bc57d8691c7884cd0a7ff3cf825c31f +f10c26637ea8ca2d0898fa661f087f13f174fe0ca1c91862ce3b6127c3430f51 +679b95521601c0ba63ce882046abd7a8cdb8e78d5c0ee9f38c21dc47ca846195 +af369f2162152e43847b4d0c595dcf2d27059563909425f37928bc01090f2f34 +7e1f6f080a60c402bb9c39578f75afc148a0746c656ca243f75038b82304bdf5 +8d6df02738597d95e2eb9e870d4177339728d9ab8b8d61aa96f0b6d1b5ad6efd +d0a9699291dead3f6fba3b648c28537a04caea4b96b145802c06125a17c3faba +d504026213b322cbcb0dbadd6a1fc6c708825019da9bac7aec973f750cbf2d3d +66c1a2578b41c3a200296e85d4d30a1876f8ca5cb941ffb1420e04d8e37149a5 +b8a87b047350912e4861e4aab7d1046d5372797ecea81d187f8e2c117db535dd +8d2b52d4d4074d471d037cdf9eeb13c18ef9ce4949fce00d106ef0880f2db5ed +3d182e9928b2433c94255452170e59e3f4cca3dd29ea2e9b01e94e89da595393 +17c3f5d88ed7f3f62be0e28913357d65916389c1633db8fb62b92e14230d3611 +93788128441c894247bd9ccd6fc8af146c0ee76cdbe4e1c5a8dfa81dd0c338b4 
+c7855ac54d2c5767273eec327efe39fdb3bad74121bdd8d2065484268727539e +d1e1f2aca9cfe8c6460f576661190a8008705ef13207c4c7200a2d6b0605f519 +b1843454b0258016558abdbd899319c1fd12d03e0c3d9e882da03de9cb981777 +2834dcb6957b97fdde61b532d151ee4482bface8714fe36cd072b4783765901d +47d61de59879013f64eb78fa8f6c8b906f7d25b8e1c3ca888f32421749c0d042 +855fc59aa873328501ab0b1ce9c60a7d5582662c725605ebe02b64a13e34b3bc +e05939e8ff4ed77a11522dde249a74841c54970d984e0bb6f77ac64f1fa313b0 +078778fed0e382da5d7dd36f585e1f1aa9b92d4caf20b85c0f6dd346de8d3998 +263140810ae46430e19ac1a4a98b6204b63031b282ecb28594bd837268104308 +ed39f65ea0e6cd8fb39bc5d94a1554dfd0002733e01618161d58a7b7dc8be834 +89830272d1fd54040f9329a39c7f491f15eea851095e0bd2d0bb412baeda7445 +c9b63e8ac2e87494f98c9ece5d74f4540090c286166efdaedc6d59a0f623e5f8 +509e6b7dacb70bfa62f3964eba990b9c576fd2485c1040fefc8eab5f07269f4d +8bb7546f64ce1cab770407de791ab25f7bfbac3e071810310ee674c2824e59ac +ac5bf3cad821e7ad8b2ed7bbfbaaa5e7abb30606afd8ed5d6a18a0d0eba343d3 +f7995f433d17bee25f44c918de82eb147b3bfef24045ff8fb17ffedf559e06b7 +2c465ddc53e88894a2279e30b9e6feb064c66b15dcf5a38722f5c92d65a84bab +b0ced82dc52c4f9b1dec098a80a23a4a711f3a8c9c3684f0761b0e8a29ba560a +f8dce75572495bc241288c07246acf7a157d462a9c01d1491618f073e57f47e2 +46d89d780f178334d19e02c41d5af2e265e2190896dce94822b99c19adc3ff6f +968954bafff8e2a118d3aedffc6283b30efddbac0af2245195c2a32a665a5d54 +33f78a8a6adc466fed41020fa799aa537cc1c1bb4e938c06a1baec97f7b3c26c +5f7f49d1c307363de95d450b558275f8d5a6780ee47246268e6729f7733e535a +9f126c6e07ed2744cef5de1f468b2ed1c51a13ec3c8351935b9656458a3dc40f +b8e3d23621cb02bcae060bdf5b6b7db1f024651f98ec63766c20b7883bc033d2 +689c608602d5e5d37a0285eeb5006d97addd7c2b8e006770fad588eff621c971 +17c6f0073c4f92d5eedda57ca2506aa6002695c6b7bf12e4dbf4dd1a7fbceb08 +504472bf96d0a3da1098dacacac48886d1ab92929187de95c7f42eab9907801d +ad3a49ab7ad5b69182301d9ef971feab72f770f4d9f60f6db308ffea746db005 +5cdcb342f26857e8db5ac97a89da6197759adf384ab241a8112795241983238f +4ec9883c8ad72131c79f14e4f1e75042a61100a5bc290fc344ee3c2adc99c143 +375c64eb3361f34b4d89078fa95d082c74bced92436aa3d50031839375d6473e +8a0a36538da941bea6c614b2c038424588d8d2505039f70cbff291d4f0f9f6a1 +a4dd6338174ebeda6a25b88d754fa5b95cad27902eb0bc8321b76db62efb1abc +a644092a1de8e05e17908ce565d55fcf39e30585565d96bf1c13eeb9f3401803 +7697b1435a5bdc094038469fc5268615cbe94641b2165bee62466426ab414c97 +62f249a85b14b477e764e63e9821d3f44dd2c745293f3586eff976266311a39f +eb2c75aa7330a6589d09f58231d1218e4124ba49b7b0c5245a76a5101d136449 +90c096f9852990cf0fcfbd36ffeb577b4d106d66e9c7a18abdc6f7a3b1ddbab1 +327b0e47ba3bc200579ac67ac38968e0df655e2d22ffe3adf238f7ac9029a1de +bdf4ef8fcafbe13b772ca217eef56a316210e71f69cd943433087c68d9a67bb9 +72c955a5adaf9e49d565342b41b36ee5ab9b5a394d003b804e4e361a46bda571 +cbd287d6a6707e2cdc8e63a29f758facbdab375bb252183d3af877dea8d25260 +7aa856fd19741a16ec634b1f653cfd5ac224278652e0b0a2903e274be20a048f +8410cdb01c659f05741fd29469d0dbb0251b4fd8e708abeec4a879047fba7c37 +b5f811baf9c441d04f010f76bcd7eae80c5bb249a40ce37436f0a0296849b8ab +9be38d9ac8a9c30e8a5e86e3ede291b23bb381ee41dc662421e394f6b8b9881e +ef45cac2d6f325a523c40a989f5554e152f8d65cbd22d35824d1f28378658432 +8e3d126f3a316e0ec49741a3ae6215e29c4acaee364272b7087d9b766579e00c +a43ee360b1dc90c573bef4145e1d4557166d7cce6ea1ed33e0cbd909643c3621 +5396745f9645dad55b732efde57de49c2ae40624fee192579014dc2b79d814d5 +b617be050dedb47be64d82dc19e3d84b6799b5bada18944df5417759a85e445e +17d5520a82dd7c945de6a92200d036cd95bb16330f0f95df802d23e90c8e5c2c 
+b71e5a677801057ec719ae2655732720644bc8f999a8698876c92e4323d4ae0f +80c6a41efdfe452d1ad6f3b0d5eb31b962c332a9bb7e4f7ee6f4aaa18a3b81d4 +32ae8512b486d4523ca7a630556758655a5cff12aa5cfcb8dc5e65b21a257f4a +0f14c68ffe8c26e9d2ecdd5ea8027b6549b3e8742023ffbdc7547227cc27ec2c +85dff510ebd3f1fa617a2273ed67ef5abe4774cfe95657fe380e75b25090664d +7a852eb3b59ad350c9d47adf1ce0812d06866cca8e1f2c65c893e7952a62eea4 +49cfe8b6302a2d45b866a26c4940d777df4f588ebe1bafeb275a8a03a1eeb0aa +e6e49ebcd83acd3a9ec0b100e26c4d82388eb9378ecfcbf967a31c4951ad0c01 +9edf4d38cdd6e73e857f1ec91132499e7f930d2cdee6b3583a8f062ff7e9d848 +768dbebcd6aa66337810b7457964c63322904e9242229e5d98b808799f7f4cd4 +867a5ab42d15f9843d67438db495a8a581eddd39c3753f3d203225b60eaa9a3e +d7acd8d042b8c6802f6d9262055a6e296f3254674745f18cd1b21244e1acb9f0 +fe781c4d49e73ca9f82389b6d58f3def857cffe624acfb6a2a5a8e9559623f37 +96fb72ddb440bb1f00dbafc97768f9890effcc172fcc395de4f2a19318c46c86 +b63555a77fefcadecfa88a770e70f1d51d46ae68fc672ad4770804495eb1b867 +161d1a609fdeb2fa425761bf0b751dfd25e7a0a02995920921f99f63331b76d9 +c80931a1263d7f192937eea3e453006b19525ed981314ec3fd561d256e8e135d +a3b6ba9a5cba6071a99b1a43454053bfc3e6d1338ccf0063d5d71247a6b57566 +7d057dc07ed5a7c11590262a0a18c8cd614a029ca12fe08bfedc87307b5f65b9 +0b7744d3394c04618e6376cd450cc3fc81586493ab5081a9b3b155938d98563c +e8d1ea7154ec53c175761311295f69037865db32ec22976b6ddb981d226760ad +40aa287bdf661317439fa5ffa77cc9fa9ca3df504aae74b0ba83b2fbebbaac83 +ee2e8fb7206e2e8fdae91afcc3e903d534b304069f232c68f53407cfc6d0bbae +b940c011eaef2b772ba03659581d525e0b6148f9c59cb7120db55ce18bf6d695 +9574545ba02bd75bb1dcf038884bf9d7892bc017215308f01ebba7932c014a62 +da685c53ddf810225507141759e3c74ffeaa1c5eecbe150386a83027e7014077 +5c0769369e4fb9f9d9e599612923554fb2f1e6d87eaeed283f6106845b66b532 +19056a3d33ebe1b84a100c27fc72d0265ceeb9c573d7942a4d44983238d34ea7 +8a5e6e6cae30d4283fd70af96d9c53d8ea45ca48892d313981fe208b1384f0dc +880992dadfeccb31f289522214209eb87f41fea5fd3155ab274e9a6fdc6f9f64 +ea7a2b0e780fc6dc8843643a2bc18a17226a1bb3d9e1467cf0be2201decce2c7 +2077395cd7562dd5e9965ea620cedf32c805f50f748c4ee6e82af960c5ce2d66 +2dbdc632baa5d0831808518beb80e3737de5bbec3dd0438e75dd30b2ea7fbb90 +ced4cf34982e0abbef40e876659544c4ed01f1975351490984aaa429fef321b0 +69339b4534eecca329ef2af397ede2a882d7e315a871dd2b781b8e0f4277ee66 +79cfbb9b52e573e22cd3427ec258d69e2d19fd27de15df96ca9006ccebe7b58e +1203d54626871bccfa8abb8bbd740b9af3c7266bc8490a210074d7f2b0806ae8 +0c15140d3b5e4b180b0b1517a51fa08f82458c02185ef2bc59fae37543ef9011 +ab71b18daceecc7c8fde7cf5f77eacf118262d760bcd383dd7bfa2170895d518 +fa3174d3432fe38241a34a8387811b54c3d0f183468cef5cd6d3fb325b270b66 +c13fd9ed22d33aa45f73748782e4dbb835d180dc0662e160c0a6c445c76f0c72 +b88a842dc14c41c2b5bd74e48fdd2bd0d43cfeea1eb9b154bebfc4f03d8a102d +45ba1056e49828a0385b0b5f9e4933905973f15b2713fee1c1755e2a7a3e8d79 +a0d7d4fda9435ef292b761aed2c9fea576519437a824e96150a4324dcc757605 +7906439843a1c1758c232182eaa66d5e6bd5ad2fbc0157fdc5438e1038966dc4 +35e5a6c17906646cf15c2bed4884129b5134eb2b411400e4d8797126f51a4cb7 +d19ddbd98476519a07cd8917b95eb609e5b50e8088ad68cd7426e8139d5bffc2 +cafea6a1183ca8934867692684194ce9903b375a8036c4c5deb8fb379c3423cb +7e4734ce7e22a515bfe60e296640dec121a089f75034240408fe7be2ed9e5c87 +b09436c29cc3823c434a4689bb67a73a49164bd23ab40c4e04ed99320fd138d2 +d6752f5e2738cbaa2e154b749216babc990297af411dfa2b66c9f942480ff4b9 +0f0cc4994a2a88f58cc38afdf61ff43952473c437d235cec426139c8f43ffb5a +eb9dd875ecf9ec930b1482b8a50c337d48b31590f99cdfb80bbdd160ad4fb49b 
+ec7435c1c8e3eb1de2871cdc797bff6969b863e5b9fb005b3a7af4ff96680c63 +e5afd502015d80bb43ab7f92f138b35ce5fafa980c5fba78412ffcfa281f9d8d +14e7975021e84497b4daf367f6861c79820308883c4e1254c038a7458a3f2913 +bcefd79629a6d7a8afbf0c8ccfc2c5889f627989e71c3a212d900e3015e460fa +74661206cb19ec00619e1fcdcae443486adfa69a564672c9ee9f48f8ea35d5b2 +341bf4dff841088f3b902c2471b67b49498bda5c045e9befd58af93ade0a8df8 +e5239ecec9ea7737f614ebab502df1248c0a9a0183fc70441fd9ac88040846ce +58e09b4047770bc998b86a4191b7a11eec6fc65bd5e5d0fb6f30d4b7ee0cd683 +9e01666f3284aec585338d0b452aa1712b9d1392c4a265a2ecfc9dc4cadd002b +33aa52b6be6991965ae18124232f108ec7b400528e848e5d8a8d7cf75783ee19 +f854ce37a0821dee06b616d2e86383271c91e09328f884dfef2107712d267601 +a58035c4921e7114b97fde8d4cf04224971d49fc6b23ed5d61a29e133684c809 +4d8963b832c44bab059a206f162890fff4eafd71e535a03609a67fe3c31de9e3 +6ebf98b52fc3c4e77257d176b47d10729baeec4066a9bc78a89d7d02af7ab2cf +366cd811c075198d1749db7075c4c333b6f347b64e44b3744ef28a3957a0feb1 +712157d7d59011c4bf1ee690217f4b0f855816e9bbee6b6aff277b9645340c9a +1dc0a697f2a7c1da301b256e6822879f212beb56fbc7fe1b8e877ccd964c132a +6fb0514a46f06be4bc3798ae82fa6cf14103926f1969b3d70910a9c5d9589e58 +9731a6c8ef6c4d601781f231e5b17c0a5194495d5b01b27aefbb4cd857c0c7d1 +b18a49b7c4114fb94d16ffdce1e1677e6bde99bba443936af10cbedca6eeaf2a +ce197d61ddae42c8b8447aa698b3e7e5d51d9f0cd2034bc64f1a9d1b2b18e3cd +7d9a3aebb470990abb92303f0c2ce5d6c38f9a2198d8f1ae8ab7fbbbf007cc7d +e52d8c79b31f45d4894e0948089da5fc236d33dd79a80d2304043e8c234cf88c +b1870cae9e54cbe8ddd74782c98f6c9ec6eeb835e2765252530d71779685d4ea +69850fe71261572f61d56863a7dc12aeda7931225d0eafb5b7b45aef7b6c8586 +18fdfb38e4f516734cef5de8cba84a54a17cdaf13228621dfcd806c5e822eccd +9db6e6591134181c2c19bbc57f24e11ea161165cde584e1f58c4df2fb5ee8c88 +86eafcaf23edfef66753d664eaa7813b5a16d1abc01a95f74ae88a02e42cadfb +7be29e433c7e17875c71eea08d10ada5a17eea25ead94d41cb1711e8fd204c06 +c1f98aac0cfc98f30c3fa13fb8011b2a1d553e6c03edb8e2a35f47574237fc64 +981571f9393463f49cd5352c024a8998d7b139bc8aec7a512101edb18a7e0954 +8628162a5c9d34c94e60027175f819e98a356832a3d3898a7f11b95e171e2a73 +1c4530860ec79ab73b141a7e64b0de775192a002fa2f3832b6c24972797d5161 +d97097c16c4d0cf169e61cde78e807cd318b8938992066bfe4e266e14146fbba +3a18179cf693d234a8aa913b7362505533b414d60bf7eaf02427157759defaba +8ee61f98cea2659f5ab9d8ec444de3b3e843ab02baa7806e96230a64dee93774 +bc69420ce99aa58de5d5c9ae32c3528b02546347e8f85dca651187142bc2a40b +79b2d4da202168d2c6f7dbff6dba414f71e405731a287a23b58af903f9b1c770 +068c65431e6010461cd77e3d2859fbc978857d1195dc1506ab1b5c9344e1099f +918a1d14de8c5fa363fb3137cf5014020646a1a2235f78ef3ed0d034c74f5761 +ef88b649d012178186dcf0244835232b5b7392e0c1f8f141f5107e9ead559e74 +326f14fc54954b73d704935b213dc797311f7c8fcd88c238c8ab767286dc3f94 +0087e37129b9a2d58b0987a218a3c1be67cb1e08142cbcf889aa617ca3e4640d +93294033c9de9361a3c6cc0df539e2e459f6d2babbbc0623859e18af0d0ccf4e +49640215294d9263bc464538c3c29e42edea637d1427c2f04ebcd828d6fcb480 +ce5cfa5b3b0485805cf5bcc8c24594a6b6fec9249698d317ce20bc84d857eafe +c9ac43870e02c7b36bb1e7ba3ce2e234507c0076f8a77494f268777edf5ebffc +30eb195e3aad4c288af76c66e26c6096f5f7de1b56b43d638ab7119d73cfd214 +338225e3b94025d2b327d72ed3d763a66856e1d1ebcb632bc4d8752000ad9966 +69b6cc0729b2d2877d46a08f3c251ae18f043949a33797c3027668b23c969d68 +3d35c10650a2da8413a2a11b8e7fb891af5da3a9763584caa6cd71bbe68de6ba +baf3bcf323f3e5b91649eeb5f1be977a8bd91915a66297a22fdb1a906d1a7e53 +7fd9b3abc4684e6f8ee591bbac5a36c5060bb09ef7899690416e5300cc14fcd3 
+429e454c0cd5d874d7887f8f8def3390a6e54af783c102af6bc3c75c62f3661f +0987d19048e10b925bcf394dfffcf259fc1a15e403673a80bfd4e7fd4f43cef0 +fc774a081d9c93d52bb6a8d99a06ccd7bf32a10154d302524b8c5c5dc1b2969f +6c6afa35f1aad6301dfde6c4ababe2da47d92033a9a41e84ca6f00e5eb29bc60 +1e858dd15069f54478023c4d8518cd5aa814fb15c9eb8df45c44efbb050587ed +5d73820315ebd00f0e419a7fe418ff109664add82a68387daffff4239a2c1b23 +f7f4721be31524d014bacf105b06bacc4bdb953bc04d5a048e1fd4ddc395667e +426cbfa5a10024c4f5deae9146222146c2d75a5bf13e8215c04d7dd17f455743 diff --git a/node_modules/sha.js/test/prepare/vectors/byte0000.dat b/node_modules/sha.js/test/prepare/vectors/byte0000.dat new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/sha.js/test/prepare/vectors/byte0001.dat b/node_modules/sha.js/test/prepare/vectors/byte0001.dat new file mode 100644 index 0000000..6f4f765 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0001.dat @@ -0,0 +1 @@ +$ \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0002.dat b/node_modules/sha.js/test/prepare/vectors/byte0002.dat new file mode 100644 index 0000000..26136c5 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0002.dat @@ -0,0 +1 @@ +p \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0003.dat b/node_modules/sha.js/test/prepare/vectors/byte0003.dat new file mode 100644 index 0000000..d3b1468 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0003.dat @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0004.dat b/node_modules/sha.js/test/prepare/vectors/byte0004.dat new file mode 100644 index 0000000..f00c578 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0004.dat @@ -0,0 +1 @@ +8x \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0005.dat b/node_modules/sha.js/test/prepare/vectors/byte0005.dat new file mode 100644 index 0000000..9b3ac32 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0005.dat @@ -0,0 +1 @@ +> \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0006.dat b/node_modules/sha.js/test/prepare/vectors/byte0006.dat new file mode 100644 index 0000000..611a56b --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0006.dat @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0007.dat b/node_modules/sha.js/test/prepare/vectors/byte0007.dat new file mode 100644 index 0000000..0af0f7a --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0007.dat @@ -0,0 +1 @@ +q \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0008.dat b/node_modules/sha.js/test/prepare/vectors/byte0008.dat new file mode 100644 index 0000000..90e3f79 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0008.dat @@ -0,0 +1 @@ +|`< \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0009.dat b/node_modules/sha.js/test/prepare/vectors/byte0009.dat new file mode 100644 index 0000000..6012eb5 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0009.dat @@ -0,0 +1 @@ +?` \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0010.dat b/node_modules/sha.js/test/prepare/vectors/byte0010.dat new file mode 100644 index 0000000..ebe5263 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0010.dat differ diff --git 
a/node_modules/sha.js/test/prepare/vectors/byte0011.dat b/node_modules/sha.js/test/prepare/vectors/byte0011.dat new file mode 100644 index 0000000..bc046e1 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0011.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0012.dat b/node_modules/sha.js/test/prepare/vectors/byte0012.dat new file mode 100644 index 0000000..3464653 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0012.dat @@ -0,0 +1 @@ +0 \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0013.dat b/node_modules/sha.js/test/prepare/vectors/byte0013.dat new file mode 100644 index 0000000..648b68f --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0013.dat @@ -0,0 +1 @@ +A \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0014.dat b/node_modules/sha.js/test/prepare/vectors/byte0014.dat new file mode 100644 index 0000000..b19f410 --- /dev/null +++ b/node_modules/sha.js/test/prepare/vectors/byte0014.dat @@ -0,0 +1 @@ +G`0< \ No newline at end of file diff --git a/node_modules/sha.js/test/prepare/vectors/byte0015.dat b/node_modules/sha.js/test/prepare/vectors/byte0015.dat new file mode 100644 index 0000000..1e4de6c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0015.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0016.dat b/node_modules/sha.js/test/prepare/vectors/byte0016.dat new file mode 100644 index 0000000..7c46183 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0016.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0017.dat b/node_modules/sha.js/test/prepare/vectors/byte0017.dat new file mode 100644 index 0000000..5b45703 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0017.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0018.dat b/node_modules/sha.js/test/prepare/vectors/byte0018.dat new file mode 100644 index 0000000..47c17ed Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0018.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0019.dat b/node_modules/sha.js/test/prepare/vectors/byte0019.dat new file mode 100644 index 0000000..4ed952a Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0019.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0020.dat b/node_modules/sha.js/test/prepare/vectors/byte0020.dat new file mode 100644 index 0000000..a7387d7 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0020.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0021.dat b/node_modules/sha.js/test/prepare/vectors/byte0021.dat new file mode 100644 index 0000000..a330c33 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0021.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0022.dat b/node_modules/sha.js/test/prepare/vectors/byte0022.dat new file mode 100644 index 0000000..70bdf45 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0022.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0023.dat b/node_modules/sha.js/test/prepare/vectors/byte0023.dat new file mode 100644 index 0000000..895cec6 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0023.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0024.dat b/node_modules/sha.js/test/prepare/vectors/byte0024.dat new file 
mode 100644 index 0000000..37ab0ba Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0024.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0025.dat b/node_modules/sha.js/test/prepare/vectors/byte0025.dat new file mode 100644 index 0000000..c5773a5 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0025.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0026.dat b/node_modules/sha.js/test/prepare/vectors/byte0026.dat new file mode 100644 index 0000000..04b568d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0026.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0027.dat b/node_modules/sha.js/test/prepare/vectors/byte0027.dat new file mode 100644 index 0000000..b068ef3 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0027.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0028.dat b/node_modules/sha.js/test/prepare/vectors/byte0028.dat new file mode 100644 index 0000000..98bc2d9 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0028.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0029.dat b/node_modules/sha.js/test/prepare/vectors/byte0029.dat new file mode 100644 index 0000000..97bfc3d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0029.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0030.dat b/node_modules/sha.js/test/prepare/vectors/byte0030.dat new file mode 100644 index 0000000..25f09c0 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0030.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0031.dat b/node_modules/sha.js/test/prepare/vectors/byte0031.dat new file mode 100644 index 0000000..988f3ac Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0031.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0032.dat b/node_modules/sha.js/test/prepare/vectors/byte0032.dat new file mode 100644 index 0000000..f31fdcb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0032.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0033.dat b/node_modules/sha.js/test/prepare/vectors/byte0033.dat new file mode 100644 index 0000000..396509b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0033.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0034.dat b/node_modules/sha.js/test/prepare/vectors/byte0034.dat new file mode 100644 index 0000000..55c11cb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0034.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0035.dat b/node_modules/sha.js/test/prepare/vectors/byte0035.dat new file mode 100644 index 0000000..cedccf7 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0035.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0036.dat b/node_modules/sha.js/test/prepare/vectors/byte0036.dat new file mode 100644 index 0000000..f597deb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0036.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0037.dat b/node_modules/sha.js/test/prepare/vectors/byte0037.dat new file mode 100644 index 0000000..6bcc7eb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0037.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0038.dat 
b/node_modules/sha.js/test/prepare/vectors/byte0038.dat new file mode 100644 index 0000000..48e731d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0038.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0039.dat b/node_modules/sha.js/test/prepare/vectors/byte0039.dat new file mode 100644 index 0000000..5ebdf8c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0039.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0040.dat b/node_modules/sha.js/test/prepare/vectors/byte0040.dat new file mode 100644 index 0000000..4ee0307 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0040.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0041.dat b/node_modules/sha.js/test/prepare/vectors/byte0041.dat new file mode 100644 index 0000000..1f7c825 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0041.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0042.dat b/node_modules/sha.js/test/prepare/vectors/byte0042.dat new file mode 100644 index 0000000..9c9044f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0042.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0043.dat b/node_modules/sha.js/test/prepare/vectors/byte0043.dat new file mode 100644 index 0000000..5721861 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0043.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0044.dat b/node_modules/sha.js/test/prepare/vectors/byte0044.dat new file mode 100644 index 0000000..963f6a8 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0044.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0045.dat b/node_modules/sha.js/test/prepare/vectors/byte0045.dat new file mode 100644 index 0000000..8bd9b41 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0045.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0046.dat b/node_modules/sha.js/test/prepare/vectors/byte0046.dat new file mode 100644 index 0000000..47ecdd2 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0046.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0047.dat b/node_modules/sha.js/test/prepare/vectors/byte0047.dat new file mode 100644 index 0000000..9d1116f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0047.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0048.dat b/node_modules/sha.js/test/prepare/vectors/byte0048.dat new file mode 100644 index 0000000..13f3bd7 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0048.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0049.dat b/node_modules/sha.js/test/prepare/vectors/byte0049.dat new file mode 100644 index 0000000..5d6a89d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0049.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0050.dat b/node_modules/sha.js/test/prepare/vectors/byte0050.dat new file mode 100644 index 0000000..1dcf3ab Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0050.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0051.dat b/node_modules/sha.js/test/prepare/vectors/byte0051.dat new file mode 100644 index 0000000..f9c5499 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0051.dat differ diff --git 
a/node_modules/sha.js/test/prepare/vectors/byte0052.dat b/node_modules/sha.js/test/prepare/vectors/byte0052.dat new file mode 100644 index 0000000..31af0d4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0052.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0053.dat b/node_modules/sha.js/test/prepare/vectors/byte0053.dat new file mode 100644 index 0000000..79876b8 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0053.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0054.dat b/node_modules/sha.js/test/prepare/vectors/byte0054.dat new file mode 100644 index 0000000..89373a1 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0054.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0055.dat b/node_modules/sha.js/test/prepare/vectors/byte0055.dat new file mode 100644 index 0000000..fb225eb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0055.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0056.dat b/node_modules/sha.js/test/prepare/vectors/byte0056.dat new file mode 100644 index 0000000..d0bee1c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0056.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0057.dat b/node_modules/sha.js/test/prepare/vectors/byte0057.dat new file mode 100644 index 0000000..e13d4de Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0057.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0058.dat b/node_modules/sha.js/test/prepare/vectors/byte0058.dat new file mode 100644 index 0000000..011f9fc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0058.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0059.dat b/node_modules/sha.js/test/prepare/vectors/byte0059.dat new file mode 100644 index 0000000..372e461 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0059.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0060.dat b/node_modules/sha.js/test/prepare/vectors/byte0060.dat new file mode 100644 index 0000000..48cce5b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0060.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0061.dat b/node_modules/sha.js/test/prepare/vectors/byte0061.dat new file mode 100644 index 0000000..814eed2 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0061.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0062.dat b/node_modules/sha.js/test/prepare/vectors/byte0062.dat new file mode 100644 index 0000000..039ee6a Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0062.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0063.dat b/node_modules/sha.js/test/prepare/vectors/byte0063.dat new file mode 100644 index 0000000..a7f3d9c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0063.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0064.dat b/node_modules/sha.js/test/prepare/vectors/byte0064.dat new file mode 100644 index 0000000..5f46b83 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0064.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0065.dat b/node_modules/sha.js/test/prepare/vectors/byte0065.dat new file mode 100644 index 0000000..1bf5eda Binary files /dev/null and 
b/node_modules/sha.js/test/prepare/vectors/byte0065.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0066.dat b/node_modules/sha.js/test/prepare/vectors/byte0066.dat new file mode 100644 index 0000000..0d9dda4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0066.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0067.dat b/node_modules/sha.js/test/prepare/vectors/byte0067.dat new file mode 100644 index 0000000..7b2b995 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0067.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0068.dat b/node_modules/sha.js/test/prepare/vectors/byte0068.dat new file mode 100644 index 0000000..2c67326 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0068.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0069.dat b/node_modules/sha.js/test/prepare/vectors/byte0069.dat new file mode 100644 index 0000000..5609f91 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0069.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0070.dat b/node_modules/sha.js/test/prepare/vectors/byte0070.dat new file mode 100644 index 0000000..13620c8 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0070.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0071.dat b/node_modules/sha.js/test/prepare/vectors/byte0071.dat new file mode 100644 index 0000000..0644e65 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0071.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0072.dat b/node_modules/sha.js/test/prepare/vectors/byte0072.dat new file mode 100644 index 0000000..6fa76cc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0072.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0073.dat b/node_modules/sha.js/test/prepare/vectors/byte0073.dat new file mode 100644 index 0000000..2a7b170 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0073.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0074.dat b/node_modules/sha.js/test/prepare/vectors/byte0074.dat new file mode 100644 index 0000000..516f7e2 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0074.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0075.dat b/node_modules/sha.js/test/prepare/vectors/byte0075.dat new file mode 100644 index 0000000..fda8bbc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0075.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0076.dat b/node_modules/sha.js/test/prepare/vectors/byte0076.dat new file mode 100644 index 0000000..c311f89 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0076.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0077.dat b/node_modules/sha.js/test/prepare/vectors/byte0077.dat new file mode 100644 index 0000000..88a1675 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0077.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0078.dat b/node_modules/sha.js/test/prepare/vectors/byte0078.dat new file mode 100644 index 0000000..bc59710 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0078.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0079.dat b/node_modules/sha.js/test/prepare/vectors/byte0079.dat new file mode 100644 index 
0000000..bb83930 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0079.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0080.dat b/node_modules/sha.js/test/prepare/vectors/byte0080.dat new file mode 100644 index 0000000..cd8620b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0080.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0081.dat b/node_modules/sha.js/test/prepare/vectors/byte0081.dat new file mode 100644 index 0000000..3ec8953 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0081.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0082.dat b/node_modules/sha.js/test/prepare/vectors/byte0082.dat new file mode 100644 index 0000000..a7f1edc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0082.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0083.dat b/node_modules/sha.js/test/prepare/vectors/byte0083.dat new file mode 100644 index 0000000..7ab631c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0083.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0084.dat b/node_modules/sha.js/test/prepare/vectors/byte0084.dat new file mode 100644 index 0000000..32ba3e3 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0084.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0085.dat b/node_modules/sha.js/test/prepare/vectors/byte0085.dat new file mode 100644 index 0000000..e2f3c4b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0085.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0086.dat b/node_modules/sha.js/test/prepare/vectors/byte0086.dat new file mode 100644 index 0000000..b2093aa Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0086.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0087.dat b/node_modules/sha.js/test/prepare/vectors/byte0087.dat new file mode 100644 index 0000000..c289f0d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0087.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0088.dat b/node_modules/sha.js/test/prepare/vectors/byte0088.dat new file mode 100644 index 0000000..839cc69 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0088.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0089.dat b/node_modules/sha.js/test/prepare/vectors/byte0089.dat new file mode 100644 index 0000000..990cc8e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0089.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0090.dat b/node_modules/sha.js/test/prepare/vectors/byte0090.dat new file mode 100644 index 0000000..689939c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0090.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0091.dat b/node_modules/sha.js/test/prepare/vectors/byte0091.dat new file mode 100644 index 0000000..47dd878 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0091.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0092.dat b/node_modules/sha.js/test/prepare/vectors/byte0092.dat new file mode 100644 index 0000000..cd092f6 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0092.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0093.dat 
b/node_modules/sha.js/test/prepare/vectors/byte0093.dat new file mode 100644 index 0000000..0c4a5eb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0093.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0094.dat b/node_modules/sha.js/test/prepare/vectors/byte0094.dat new file mode 100644 index 0000000..650f3d0 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0094.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0095.dat b/node_modules/sha.js/test/prepare/vectors/byte0095.dat new file mode 100644 index 0000000..37723e4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0095.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0096.dat b/node_modules/sha.js/test/prepare/vectors/byte0096.dat new file mode 100644 index 0000000..d200428 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0096.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0097.dat b/node_modules/sha.js/test/prepare/vectors/byte0097.dat new file mode 100644 index 0000000..11cec37 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0097.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0098.dat b/node_modules/sha.js/test/prepare/vectors/byte0098.dat new file mode 100644 index 0000000..33d8657 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0098.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0099.dat b/node_modules/sha.js/test/prepare/vectors/byte0099.dat new file mode 100644 index 0000000..4a33315 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0099.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0100.dat b/node_modules/sha.js/test/prepare/vectors/byte0100.dat new file mode 100644 index 0000000..12b6aa4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0100.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0101.dat b/node_modules/sha.js/test/prepare/vectors/byte0101.dat new file mode 100644 index 0000000..620966b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0101.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0102.dat b/node_modules/sha.js/test/prepare/vectors/byte0102.dat new file mode 100644 index 0000000..12de2a5 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0102.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0103.dat b/node_modules/sha.js/test/prepare/vectors/byte0103.dat new file mode 100644 index 0000000..f1f74fb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0103.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0104.dat b/node_modules/sha.js/test/prepare/vectors/byte0104.dat new file mode 100644 index 0000000..1a7557f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0104.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0105.dat b/node_modules/sha.js/test/prepare/vectors/byte0105.dat new file mode 100644 index 0000000..c021ab7 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0105.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0106.dat b/node_modules/sha.js/test/prepare/vectors/byte0106.dat new file mode 100644 index 0000000..db0552b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0106.dat differ diff --git 
a/node_modules/sha.js/test/prepare/vectors/byte0107.dat b/node_modules/sha.js/test/prepare/vectors/byte0107.dat new file mode 100644 index 0000000..f656eb3 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0107.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0108.dat b/node_modules/sha.js/test/prepare/vectors/byte0108.dat new file mode 100644 index 0000000..cb49945 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0108.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0109.dat b/node_modules/sha.js/test/prepare/vectors/byte0109.dat new file mode 100644 index 0000000..90bd802 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0109.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0110.dat b/node_modules/sha.js/test/prepare/vectors/byte0110.dat new file mode 100644 index 0000000..e114f9e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0110.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0111.dat b/node_modules/sha.js/test/prepare/vectors/byte0111.dat new file mode 100644 index 0000000..17ee790 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0111.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0112.dat b/node_modules/sha.js/test/prepare/vectors/byte0112.dat new file mode 100644 index 0000000..5b5507e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0112.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0113.dat b/node_modules/sha.js/test/prepare/vectors/byte0113.dat new file mode 100644 index 0000000..31bcd32 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0113.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0114.dat b/node_modules/sha.js/test/prepare/vectors/byte0114.dat new file mode 100644 index 0000000..14cf88e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0114.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0115.dat b/node_modules/sha.js/test/prepare/vectors/byte0115.dat new file mode 100644 index 0000000..939a09c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0115.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0116.dat b/node_modules/sha.js/test/prepare/vectors/byte0116.dat new file mode 100644 index 0000000..defa29c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0116.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0117.dat b/node_modules/sha.js/test/prepare/vectors/byte0117.dat new file mode 100644 index 0000000..68c1e9c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0117.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0118.dat b/node_modules/sha.js/test/prepare/vectors/byte0118.dat new file mode 100644 index 0000000..83d18dc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0118.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0119.dat b/node_modules/sha.js/test/prepare/vectors/byte0119.dat new file mode 100644 index 0000000..d697b62 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0119.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0120.dat b/node_modules/sha.js/test/prepare/vectors/byte0120.dat new file mode 100644 index 0000000..b6bdfb8 Binary files /dev/null and 
b/node_modules/sha.js/test/prepare/vectors/byte0120.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0121.dat b/node_modules/sha.js/test/prepare/vectors/byte0121.dat new file mode 100644 index 0000000..c8c5a60 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0121.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0122.dat b/node_modules/sha.js/test/prepare/vectors/byte0122.dat new file mode 100644 index 0000000..d8cae0d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0122.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0123.dat b/node_modules/sha.js/test/prepare/vectors/byte0123.dat new file mode 100644 index 0000000..1c41f96 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0123.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0124.dat b/node_modules/sha.js/test/prepare/vectors/byte0124.dat new file mode 100644 index 0000000..7821c66 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0124.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0125.dat b/node_modules/sha.js/test/prepare/vectors/byte0125.dat new file mode 100644 index 0000000..8a426aa Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0125.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0126.dat b/node_modules/sha.js/test/prepare/vectors/byte0126.dat new file mode 100644 index 0000000..76ff47a Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0126.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0127.dat b/node_modules/sha.js/test/prepare/vectors/byte0127.dat new file mode 100644 index 0000000..77a0481 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0127.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0128.dat b/node_modules/sha.js/test/prepare/vectors/byte0128.dat new file mode 100644 index 0000000..6227f74 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0128.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0129.dat b/node_modules/sha.js/test/prepare/vectors/byte0129.dat new file mode 100644 index 0000000..4ce3fd4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0129.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0130.dat b/node_modules/sha.js/test/prepare/vectors/byte0130.dat new file mode 100644 index 0000000..c413d09 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0130.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0131.dat b/node_modules/sha.js/test/prepare/vectors/byte0131.dat new file mode 100644 index 0000000..8f74a10 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0131.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0132.dat b/node_modules/sha.js/test/prepare/vectors/byte0132.dat new file mode 100644 index 0000000..34f4858 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0132.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0133.dat b/node_modules/sha.js/test/prepare/vectors/byte0133.dat new file mode 100644 index 0000000..a033139 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0133.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0134.dat b/node_modules/sha.js/test/prepare/vectors/byte0134.dat new file mode 100644 index 
0000000..66cd42d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0134.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0135.dat b/node_modules/sha.js/test/prepare/vectors/byte0135.dat new file mode 100644 index 0000000..66758ce Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0135.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0136.dat b/node_modules/sha.js/test/prepare/vectors/byte0136.dat new file mode 100644 index 0000000..6e7cddc Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0136.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0137.dat b/node_modules/sha.js/test/prepare/vectors/byte0137.dat new file mode 100644 index 0000000..18dffd1 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0137.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0138.dat b/node_modules/sha.js/test/prepare/vectors/byte0138.dat new file mode 100644 index 0000000..4079790 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0138.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0139.dat b/node_modules/sha.js/test/prepare/vectors/byte0139.dat new file mode 100644 index 0000000..f076de6 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0139.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0140.dat b/node_modules/sha.js/test/prepare/vectors/byte0140.dat new file mode 100644 index 0000000..07b7bf9 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0140.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0141.dat b/node_modules/sha.js/test/prepare/vectors/byte0141.dat new file mode 100644 index 0000000..22a8ca0 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0141.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0142.dat b/node_modules/sha.js/test/prepare/vectors/byte0142.dat new file mode 100644 index 0000000..fa95449 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0142.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0143.dat b/node_modules/sha.js/test/prepare/vectors/byte0143.dat new file mode 100644 index 0000000..907d2de Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0143.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0144.dat b/node_modules/sha.js/test/prepare/vectors/byte0144.dat new file mode 100644 index 0000000..7f4c091 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0144.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0145.dat b/node_modules/sha.js/test/prepare/vectors/byte0145.dat new file mode 100644 index 0000000..b20f4ab Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0145.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0146.dat b/node_modules/sha.js/test/prepare/vectors/byte0146.dat new file mode 100644 index 0000000..c04174b Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0146.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0147.dat b/node_modules/sha.js/test/prepare/vectors/byte0147.dat new file mode 100644 index 0000000..3c84c27 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0147.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0148.dat 
b/node_modules/sha.js/test/prepare/vectors/byte0148.dat new file mode 100644 index 0000000..e11058c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0148.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0149.dat b/node_modules/sha.js/test/prepare/vectors/byte0149.dat new file mode 100644 index 0000000..a058ae2 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0149.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0150.dat b/node_modules/sha.js/test/prepare/vectors/byte0150.dat new file mode 100644 index 0000000..64c1231 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0150.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0151.dat b/node_modules/sha.js/test/prepare/vectors/byte0151.dat new file mode 100644 index 0000000..58b85ad Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0151.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0152.dat b/node_modules/sha.js/test/prepare/vectors/byte0152.dat new file mode 100644 index 0000000..4db244f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0152.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0153.dat b/node_modules/sha.js/test/prepare/vectors/byte0153.dat new file mode 100644 index 0000000..bc27593 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0153.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0154.dat b/node_modules/sha.js/test/prepare/vectors/byte0154.dat new file mode 100644 index 0000000..be5e0a4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0154.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0155.dat b/node_modules/sha.js/test/prepare/vectors/byte0155.dat new file mode 100644 index 0000000..eaf5f96 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0155.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0156.dat b/node_modules/sha.js/test/prepare/vectors/byte0156.dat new file mode 100644 index 0000000..8ac675f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0156.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0157.dat b/node_modules/sha.js/test/prepare/vectors/byte0157.dat new file mode 100644 index 0000000..c742c47 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0157.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0158.dat b/node_modules/sha.js/test/prepare/vectors/byte0158.dat new file mode 100644 index 0000000..2998df4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0158.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0159.dat b/node_modules/sha.js/test/prepare/vectors/byte0159.dat new file mode 100644 index 0000000..87d68a4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0159.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0160.dat b/node_modules/sha.js/test/prepare/vectors/byte0160.dat new file mode 100644 index 0000000..668a383 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0160.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0161.dat b/node_modules/sha.js/test/prepare/vectors/byte0161.dat new file mode 100644 index 0000000..22968e0 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0161.dat differ diff --git 
a/node_modules/sha.js/test/prepare/vectors/byte0162.dat b/node_modules/sha.js/test/prepare/vectors/byte0162.dat new file mode 100644 index 0000000..cca9a81 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0162.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0163.dat b/node_modules/sha.js/test/prepare/vectors/byte0163.dat new file mode 100644 index 0000000..2f62067 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0163.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0164.dat b/node_modules/sha.js/test/prepare/vectors/byte0164.dat new file mode 100644 index 0000000..a7f5981 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0164.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0165.dat b/node_modules/sha.js/test/prepare/vectors/byte0165.dat new file mode 100644 index 0000000..eb72edb Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0165.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0166.dat b/node_modules/sha.js/test/prepare/vectors/byte0166.dat new file mode 100644 index 0000000..c6baf33 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0166.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0167.dat b/node_modules/sha.js/test/prepare/vectors/byte0167.dat new file mode 100644 index 0000000..b10d746 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0167.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0168.dat b/node_modules/sha.js/test/prepare/vectors/byte0168.dat new file mode 100644 index 0000000..1029311 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0168.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0169.dat b/node_modules/sha.js/test/prepare/vectors/byte0169.dat new file mode 100644 index 0000000..caf4906 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0169.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0170.dat b/node_modules/sha.js/test/prepare/vectors/byte0170.dat new file mode 100644 index 0000000..40beda0 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0170.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0171.dat b/node_modules/sha.js/test/prepare/vectors/byte0171.dat new file mode 100644 index 0000000..a3b0686 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0171.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0172.dat b/node_modules/sha.js/test/prepare/vectors/byte0172.dat new file mode 100644 index 0000000..d10b184 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0172.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0173.dat b/node_modules/sha.js/test/prepare/vectors/byte0173.dat new file mode 100644 index 0000000..b9d784e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0173.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0174.dat b/node_modules/sha.js/test/prepare/vectors/byte0174.dat new file mode 100644 index 0000000..0754a29 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0174.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0175.dat b/node_modules/sha.js/test/prepare/vectors/byte0175.dat new file mode 100644 index 0000000..5c37de2 Binary files /dev/null and 
b/node_modules/sha.js/test/prepare/vectors/byte0175.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0176.dat b/node_modules/sha.js/test/prepare/vectors/byte0176.dat new file mode 100644 index 0000000..a189735 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0176.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0177.dat b/node_modules/sha.js/test/prepare/vectors/byte0177.dat new file mode 100644 index 0000000..bbb0311 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0177.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0178.dat b/node_modules/sha.js/test/prepare/vectors/byte0178.dat new file mode 100644 index 0000000..01ab4f4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0178.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0179.dat b/node_modules/sha.js/test/prepare/vectors/byte0179.dat new file mode 100644 index 0000000..baa00b6 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0179.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0180.dat b/node_modules/sha.js/test/prepare/vectors/byte0180.dat new file mode 100644 index 0000000..157703c Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0180.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0181.dat b/node_modules/sha.js/test/prepare/vectors/byte0181.dat new file mode 100644 index 0000000..2c08362 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0181.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0182.dat b/node_modules/sha.js/test/prepare/vectors/byte0182.dat new file mode 100644 index 0000000..935f574 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0182.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0183.dat b/node_modules/sha.js/test/prepare/vectors/byte0183.dat new file mode 100644 index 0000000..7f76afe Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0183.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0184.dat b/node_modules/sha.js/test/prepare/vectors/byte0184.dat new file mode 100644 index 0000000..2ccd7d4 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0184.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0185.dat b/node_modules/sha.js/test/prepare/vectors/byte0185.dat new file mode 100644 index 0000000..e6f9444 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0185.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0186.dat b/node_modules/sha.js/test/prepare/vectors/byte0186.dat new file mode 100644 index 0000000..f89a548 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0186.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0187.dat b/node_modules/sha.js/test/prepare/vectors/byte0187.dat new file mode 100644 index 0000000..8b2326d Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0187.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0188.dat b/node_modules/sha.js/test/prepare/vectors/byte0188.dat new file mode 100644 index 0000000..be3ef38 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0188.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0189.dat b/node_modules/sha.js/test/prepare/vectors/byte0189.dat new file mode 100644 index 
0000000..01061ce Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0189.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0190.dat b/node_modules/sha.js/test/prepare/vectors/byte0190.dat new file mode 100644 index 0000000..5df1d2e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0190.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0191.dat b/node_modules/sha.js/test/prepare/vectors/byte0191.dat new file mode 100644 index 0000000..9b23e57 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0191.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0192.dat b/node_modules/sha.js/test/prepare/vectors/byte0192.dat new file mode 100644 index 0000000..3e1b730 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0192.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0193.dat b/node_modules/sha.js/test/prepare/vectors/byte0193.dat new file mode 100644 index 0000000..1c53876 Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0193.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0194.dat b/node_modules/sha.js/test/prepare/vectors/byte0194.dat new file mode 100644 index 0000000..12e1e1f Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0194.dat differ diff --git a/node_modules/sha.js/test/prepare/vectors/byte0195.dat b/node_modules/sha.js/test/prepare/vectors/byte0195.dat new file mode 100644 index 0000000..b858c0e Binary files /dev/null and b/node_modules/sha.js/test/prepare/vectors/byte0195.dat differ diff --git a/node_modules/sha.js/test/test.js b/node_modules/sha.js/test/test.js new file mode 100644 index 0000000..bc2156f --- /dev/null +++ b/node_modules/sha.js/test/test.js @@ -0,0 +1,99 @@ + +var crypto = require('crypto') +var tape = require('tape') +var Sha1 = require('../').sha1 +var Uint32toHex = Sha1.Uint32toHex + +function generateCount (m) { + var s = '' + for(var i = 0; i < m/8; i++) { + console.log('GENERATE', i, Uint32toHex(i)) + s+=i + } + return s +} + +var inputs = [ + ['', 'ascii'], + ['abc', 'ascii'], + ['123', 'ascii'], + ['123456789abcdef123456789abcdef123456789abcdef123456789abcdef', 'ascii'], + ['123456789abcdef123456789abcdef123456789abcdef123456789abc', 'ascii'], + ['123456789abcdef123456789abcdef123456789abcdef123456789ab', 'ascii'], + ['0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde', 'ascii'], + ['0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', 'ascii'], + ['foobarbaz', 'ascii'] +] + +tape("hash is the same as node's crypto", function (t) { + + inputs.forEach(function (v) { + var a = new Sha1().update(v[0], v[1]).digest('hex') + var e = crypto.createHash('sha1').update(v[0], v[1]).digest('hex') + console.log(a, '==', e) + t.equal(a, e) + }) + + t.end() + +}) + +tape('call update multiple times', function (t) { + var n = 1 + inputs.forEach(function (v) { + var hash = new Sha1() + var _hash = crypto.createHash('sha1') + for(var i = 0; i < v[0].length; i=(i+1)*2) { + var s = v[0].substring(i, (i+1)*2) + hash.update(s, v[1]) + _hash.update(s, v[1]) + } + var a = hash.digest('hex') + var e = _hash.digest('hex') + console.log(a, '==', e) + t.equal(a, e) + }) + t.end() +}) + + +tape('call update twice', function (t) { + + var _hash = crypto.createHash('sha1') + var hash = new Sha1() + + _hash.update('foo', 'ascii') + hash.update('foo', 'ascii') + + _hash.update('bar', 'ascii') + hash.update('bar', 'ascii') + + 
_hash.update('baz', 'ascii') + hash.update('baz', 'ascii') + + var a = hash.digest('hex') + var e = _hash.digest('hex') + + t.equal(a, e) + t.end() +}) + + +tape('hex encoding', function (t) { + var n = 1 + inputs.forEach(function (v) { + var hash = new Sha1() + var _hash = crypto.createHash('sha1') + for(var i = 0; i < v[0].length; i=(i+1)*2) { + var s = v[0].substring(i, (i+1)*2) + hash.update(new Buffer(s, 'ascii').toString('hex'), 'hex') + _hash.update(new Buffer(s, 'ascii').toString('hex'), 'hex') + } + var a = hash.digest('hex') + var e = _hash.digest('hex') + console.log(a, '==', e) + t.equal(a, e) + }) + t.end() +}) + diff --git a/node_modules/sha.js/test/vectors.js b/node_modules/sha.js/test/vectors.js new file mode 100644 index 0000000..34d17f0 --- /dev/null +++ b/node_modules/sha.js/test/vectors.js @@ -0,0 +1,74 @@ + +var vectors = require('./nist-vectors.json') +var tape = require('tape') +//var from = require('bops/typedarray/from') +var Buffer = require('buffer').Buffer +var hexpp = require('../hexpp') + +var createHash = require('../') + +function makeTest(alg, i, verbose) { + var v = vectors[i] + + tape(alg + ': NIST vector ' + i, function (t) { + if(verbose) { + console.log(v) + console.log('VECTOR', i) + console.log('INPUT', v.input) + console.log(hexpp(new Buffer(v.input, 'base64'))) + console.log(new Buffer(v.input, 'base64').toString('hex')) + } + var buf = new Buffer(v.input, 'base64') + t.equal(createHash(alg).update(buf).digest('hex'), v[alg]) + + i = ~~(buf.length / 2) + var buf1 = buf.slice(0, i) + var buf2 = buf.slice(i, buf.length) + + console.log(buf1.length, buf2.length, buf.length) + console.log(createHash(alg)._block.length) + + t.equal( + createHash(alg) + .update(buf1) + .update(buf2) + .digest('hex'), + v[alg] + ) + + var j, buf3 + + i = ~~(buf.length / 3) + j = ~~(buf.length * 2 / 3) + buf1 = buf.slice(0, i) + buf2 = buf.slice(i, j) + buf3 = buf.slice(j, buf.length) + + t.equal( + createHash(alg) + .update(buf1) + .update(buf2) + .update(buf3) + .digest('hex'), + v[alg] + ) + + setTimeout(function () { + //avoid "too much recursion" errors in tape in firefox + t.end() + }) + }) + +} + +if(process.argv[2]) + makeTest(process.argv[2], parseInt(process.argv[3]), true) +else + vectors.forEach(function (v, i) { + makeTest('sha1', i) + makeTest('sha256', i) + makeTest('sha512', i) + }) + + + diff --git a/node_modules/signal-exit/CHANGELOG.md b/node_modules/signal-exit/CHANGELOG.md new file mode 100644 index 0000000..e2f70d2 --- /dev/null +++ b/node_modules/signal-exit/CHANGELOG.md @@ -0,0 +1,27 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ + +## [3.0.1](https://github.com/tapjs/signal-exit/compare/v3.0.0...v3.0.1) (2016-09-08) + + +### Bug Fixes + +* do not listen on SIGBUS, SIGFPE, SIGSEGV and SIGILL ([#40](https://github.com/tapjs/signal-exit/issues/40)) ([5b105fb](https://github.com/tapjs/signal-exit/commit/5b105fb)) + + + + +# [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) + + +### Bug Fixes + +* get our test suite running on Windows ([#23](https://github.com/tapjs/signal-exit/issues/23)) ([6f3eda8](https://github.com/tapjs/signal-exit/commit/6f3eda8)) +* hooking SIGPROF was interfering with profilers see [#21](https://github.com/tapjs/signal-exit/issues/21) ([#24](https://github.com/tapjs/signal-exit/issues/24)) ([1248a4c](https://github.com/tapjs/signal-exit/commit/1248a4c)) + + +### BREAKING CHANGES + +* signal-exit no longer wires into SIGPROF diff --git a/node_modules/signal-exit/LICENSE.txt b/node_modules/signal-exit/LICENSE.txt new file mode 100644 index 0000000..eead04a --- /dev/null +++ b/node_modules/signal-exit/LICENSE.txt @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/signal-exit/README.md b/node_modules/signal-exit/README.md new file mode 100644 index 0000000..8ebccab --- /dev/null +++ b/node_modules/signal-exit/README.md @@ -0,0 +1,40 @@ +# signal-exit + +[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) +[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) +[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) +[![Windows Tests](https://img.shields.io/appveyor/ci/bcoe/signal-exit/master.svg?label=Windows%20Tests)](https://ci.appveyor.com/project/bcoe/signal-exit) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +When you want to fire an event no matter how a process exits: + +* reaching the end of execution. +* explicitly having `process.exit(code)` called. +* having `process.kill(pid, sig)` called. +* receiving a fatal signal from outside the process + +Use `signal-exit`. + +```js +var onExit = require('signal-exit') + +onExit(function (code, signal) { + console.log('process exited!') +}) +``` + +## API + +`var remove = onExit(function (code, signal) {}, options)` + +The return value of the function is a function that will remove the +handler. + +Note that the function *only* fires for signals if the signal would +cause the proces to exit. That is, there are no other listeners, and +it is a fatal signal. + +## Options + +* `alwaysLast`: Run this handler after any other signal or exit + handlers. 
This causes `process.emit` to be monkeypatched. diff --git a/node_modules/signal-exit/index.js b/node_modules/signal-exit/index.js new file mode 100644 index 0000000..7dd8d91 --- /dev/null +++ b/node_modules/signal-exit/index.js @@ -0,0 +1,148 @@ +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = require('assert') +var signals = require('./signals.js') + +var EE = require('events') +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter +} + +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} +} + +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove +} + +module.exports.unload = unload +function unload () { + if (!loaded) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} + +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) +} + +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + process.kill(process.pid, sig) + } + } +}) + +module.exports.signals = function () { + return signals +} + +module.exports.load = load + +var loaded = false + +function load () { + if (loaded) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit +} + +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) +} + +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } +} diff --git a/node_modules/signal-exit/package.json b/node_modules/signal-exit/package.json new file mode 100644 index 0000000..8429fac --- /dev/null +++ b/node_modules/signal-exit/package.json @@ -0,0 +1,38 @@ +{ + "name": "signal-exit", + "version": "3.0.1", + "description": "when you want to fire an event no matter how a process exits.", + "main": "index.js", + "scripts": { + "pretest": "standard", + "test": "tap --timeout=240 ./test/*.js --cov", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "release": "standard-version" + }, + "files": [ + "index.js", + "signals.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/tapjs/signal-exit.git" + }, + "keywords": [ + "signal", + "exit" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/tapjs/signal-exit/issues" + }, + "homepage": "https://github.com/tapjs/signal-exit", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.10", + "nyc": "^8.1.0", + "standard": "^7.1.2", + "standard-version": "^2.3.0", + "tap": "^7.1.0" + } +} diff --git a/node_modules/signal-exit/signals.js b/node_modules/signal-exit/signals.js new file mode 100644 index 0000000..3bd67a8 --- /dev/null +++ b/node_modules/signal-exit/signals.js @@ -0,0 +1,53 @@ +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. 
+ // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/node_modules/slash/index.js b/node_modules/slash/index.js new file mode 100644 index 0000000..b946a08 --- /dev/null +++ b/node_modules/slash/index.js @@ -0,0 +1,11 @@ +'use strict'; +module.exports = function (str) { + var isExtendedLengthPath = /^\\\\\?\\/.test(str); + var hasNonAscii = /[^\x00-\x80]+/.test(str); + + if (isExtendedLengthPath || hasNonAscii) { + return str; + } + + return str.replace(/\\/g, '/'); +}; diff --git a/node_modules/slash/package.json b/node_modules/slash/package.json new file mode 100644 index 0000000..7e744a4 --- /dev/null +++ b/node_modules/slash/package.json @@ -0,0 +1,33 @@ +{ + "name": "slash", + "version": "1.0.0", + "description": "Convert Windows backslash paths to slash paths", + "keywords": [ + "path", + "seperator", + "sep", + "slash", + "backslash", + "windows", + "win" + ], + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "repository": "sindresorhus/slash", + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "mocha": "*" + }, + "engines": { + "node": ">=0.10.0" + }, + "license": "MIT", + "files": [ + "index.js" + ] +} diff --git a/node_modules/slash/readme.md b/node_modules/slash/readme.md new file mode 100644 index 0000000..15672f0 --- /dev/null +++ b/node_modules/slash/readme.md @@ -0,0 +1,44 @@ +# slash [![Build Status](https://travis-ci.org/sindresorhus/slash.svg?branch=master)](https://travis-ci.org/sindresorhus/slash) + +> Convert Windows backslash paths to slash paths: `foo\\bar` ➔ `foo/bar` + +[Forward-slash paths can be used in Windows](http://superuser.com/a/176395/6877) as long as they're not extended-length paths and don't contain any non-ascii characters. + +This was created since the `path` methods in Node outputs `\\` paths on Windows. + + +## Install + +```sh +$ npm install --save slash +``` + + +## Usage + +```js +var path = require('path'); +var slash = require('slash'); + +var str = path.join('foo', 'bar'); +// Unix => foo/bar +// Windows => foo\\bar + +slash(str); +// Unix => foo/bar +// Windows => foo/bar +``` + + +## API + +### slash(path) + +Type: `string` + +Accepts a Windows backslash path and returns a slash path. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/sntp/.npmignore b/node_modules/sntp/.npmignore new file mode 100644 index 0000000..77ba16c --- /dev/null +++ b/node_modules/sntp/.npmignore @@ -0,0 +1,18 @@ +.idea +*.iml +npm-debug.log +dump.rdb +node_modules +results.tap +results.xml +npm-shrinkwrap.json +config.json +.DS_Store +*/.DS_Store +*/*/.DS_Store +._* +*/._* +*/*/._* +coverage.* +lib-cov + diff --git a/node_modules/sntp/.travis.yml b/node_modules/sntp/.travis.yml new file mode 100755 index 0000000..047f7e3 --- /dev/null +++ b/node_modules/sntp/.travis.yml @@ -0,0 +1,5 @@ +language: node_js + +node_js: + - 0.10 + diff --git a/node_modules/sntp/LICENSE b/node_modules/sntp/LICENSE new file mode 100755 index 0000000..b0d8774 --- /dev/null +++ b/node_modules/sntp/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012-2014, Eran Hammer and other contributors. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * The names of any contributors may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + * * * + +The complete list of contributors can be found at: https://github.com/hueniverse/sntp/graphs/contributors diff --git a/node_modules/sntp/Makefile b/node_modules/sntp/Makefile new file mode 100755 index 0000000..417fd93 --- /dev/null +++ b/node_modules/sntp/Makefile @@ -0,0 +1,9 @@ +test: + @node node_modules/lab/bin/lab +test-cov: + @node node_modules/lab/bin/lab -t 100 -m 3000 +test-cov-html: + @node node_modules/lab/bin/lab -r html -o coverage.html + +.PHONY: test test-cov test-cov-html + diff --git a/node_modules/sntp/README.md b/node_modules/sntp/README.md new file mode 100755 index 0000000..98a6e02 --- /dev/null +++ b/node_modules/sntp/README.md @@ -0,0 +1,68 @@ +# sntp + +An SNTP v4 client (RFC4330) for node. Simpy connects to the NTP or SNTP server requested and returns the server time +along with the roundtrip duration and clock offset. To adjust the local time to the NTP time, add the returned `t` offset +to the local time. + +[![Build Status](https://secure.travis-ci.org/hueniverse/sntp.png)](http://travis-ci.org/hueniverse/sntp) + +# Usage + +```javascript +var Sntp = require('sntp'); + +// All options are optional + +var options = { + host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org + port: 123, // Defaults to 123 (NTP) + resolveReference: true, // Default to false (not resolving) + timeout: 1000 // Defaults to zero (no timeout) +}; + +// Request server time + +Sntp.time(options, function (err, time) { + + if (err) { + console.log('Failed: ' + err.message); + process.exit(1); + } + + console.log('Local clock is off by: ' + time.t + ' milliseconds'); + process.exit(0); +}); +``` + +If an application needs to maintain continuous time synchronization, the module provides a stateful method for +querying the current offset only when the last one is too old (defaults to daily). 
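For illustration, a minimal sketch of shortening that cache lifetime when calling `offset` directly; the `clockSyncRefresh` option name is taken from the `lib/index.js` source further down in this diff, and the one-hour value is only an example:

```javascript
var Sntp = require('sntp');

// Hedged sketch: request the clock offset, but let the cached value
// expire after one hour instead of the daily default.
Sntp.offset({ clockSyncRefresh: 60 * 60 * 1000 }, function (err, offset) {

    if (err) {
        console.log('Failed: ' + err.message);
        return;
    }

    console.log('Local clock is off by: ' + offset + ' milliseconds');
});
```

The README's own example below shows the default (daily) caching behaviour.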
+ +```javascript +// Request offset once + +Sntp.offset(function (err, offset) { + + console.log(offset); // New (served fresh) + + // Request offset again + + Sntp.offset(function (err, offset) { + + console.log(offset); // Identical (served from cache) + }); +}); +``` + +To set a background offset refresh, start the interval and use the provided now() method. If for any reason the +client fails to obtain an up-to-date offset, the current system clock is used. + +```javascript +var before = Sntp.now(); // System time without offset + +Sntp.start(function () { + + var now = Sntp.now(); // With offset + Sntp.stop(); +}); +``` + diff --git a/node_modules/sntp/examples/offset.js b/node_modules/sntp/examples/offset.js new file mode 100755 index 0000000..0303f6d --- /dev/null +++ b/node_modules/sntp/examples/offset.js @@ -0,0 +1,16 @@ +var Sntp = require('../lib'); + +// Request offset once + +Sntp.offset(function (err, offset) { + + console.log(offset); // New (served fresh) + + // Request offset again + + Sntp.offset(function (err, offset) { + + console.log(offset); // Identical (served from cache) + }); +}); + diff --git a/node_modules/sntp/examples/time.js b/node_modules/sntp/examples/time.js new file mode 100755 index 0000000..bd70d0e --- /dev/null +++ b/node_modules/sntp/examples/time.js @@ -0,0 +1,25 @@ +var Sntp = require('../lib'); + +// All options are optional + +var options = { + host: 'nist1-sj.ustiming.org', // Defaults to pool.ntp.org + port: 123, // Defaults to 123 (NTP) + resolveReference: true, // Default to false (not resolving) + timeout: 1000 // Defaults to zero (no timeout) +}; + +// Request server time + +Sntp.time(options, function (err, time) { + + if (err) { + console.log('Failed: ' + err.message); + process.exit(1); + } + + console.log(time); + console.log('Local clock is off by: ' + time.t + ' milliseconds'); + process.exit(0); +}); + diff --git a/node_modules/sntp/index.js b/node_modules/sntp/index.js new file mode 100755 index 0000000..4cc88b3 --- /dev/null +++ b/node_modules/sntp/index.js @@ -0,0 +1 @@ +module.exports = require('./lib'); \ No newline at end of file diff --git a/node_modules/sntp/lib/index.js b/node_modules/sntp/lib/index.js new file mode 100755 index 0000000..e91718b --- /dev/null +++ b/node_modules/sntp/lib/index.js @@ -0,0 +1,412 @@ +// Load modules + +var Dgram = require('dgram'); +var Dns = require('dns'); +var Hoek = require('hoek'); + + +// Declare internals + +var internals = {}; + + +exports.time = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + var settings = Hoek.clone(options); + settings.host = settings.host || 'pool.ntp.org'; + settings.port = settings.port || 123; + settings.resolveReference = settings.resolveReference || false; + + // Declare variables used by callback + + var timeoutId = 0; + var sent = 0; + + // Ensure callback is only called once + + var finish = function (err, result) { + + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = 0; + } + + socket.removeAllListeners(); + socket.once('error', internals.ignore); + socket.close(); + return callback(err, result); + }; + + finish = Hoek.once(finish); + + // Create UDP socket + + var socket = Dgram.createSocket('udp4'); + + socket.once('error', function (err) { + + return finish(err); + }); + + // Listen to incoming messages + + socket.on('message', function (buffer, rinfo) { + + var received = Date.now(); + + var message = new internals.NtpMessage(buffer); + if (!message.isValid) { + return 
finish(new Error('Invalid server response'), message); + } + + if (message.originateTimestamp !== sent) { + return finish(new Error('Wrong originate timestamp'), message); + } + + // Timestamp Name ID When Generated + // ------------------------------------------------------------ + // Originate Timestamp T1 time request sent by client + // Receive Timestamp T2 time request received by server + // Transmit Timestamp T3 time reply sent by server + // Destination Timestamp T4 time reply received by client + // + // The roundtrip delay d and system clock offset t are defined as: + // + // d = (T4 - T1) - (T3 - T2) t = ((T2 - T1) + (T3 - T4)) / 2 + + var T1 = message.originateTimestamp; + var T2 = message.receiveTimestamp; + var T3 = message.transmitTimestamp; + var T4 = received; + + message.d = (T4 - T1) - (T3 - T2); + message.t = ((T2 - T1) + (T3 - T4)) / 2; + message.receivedLocally = received; + + if (!settings.resolveReference || + message.stratum !== 'secondary') { + + return finish(null, message); + } + + // Resolve reference IP address + + Dns.reverse(message.referenceId, function (err, domains) { + + if (/* $lab:coverage:off$ */ !err /* $lab:coverage:on$ */) { + message.referenceHost = domains[0]; + } + + return finish(null, message); + }); + }); + + // Set timeout + + if (settings.timeout) { + timeoutId = setTimeout(function () { + + timeoutId = 0; + return finish(new Error('Timeout')); + }, settings.timeout); + } + + // Construct NTP message + + var message = new Buffer(48); + for (var i = 0; i < 48; i++) { // Zero message + message[i] = 0; + } + + message[0] = (0 << 6) + (4 << 3) + (3 << 0) // Set version number to 4 and Mode to 3 (client) + sent = Date.now(); + internals.fromMsecs(sent, message, 40); // Set transmit timestamp (returns as originate) + + // Send NTP request + + socket.send(message, 0, message.length, settings.port, settings.host, function (err, bytes) { + + if (err || + bytes !== 48) { + + return finish(err || new Error('Could not send entire message')); + } + }); +}; + + +internals.NtpMessage = function (buffer) { + + this.isValid = false; + + // Validate + + if (buffer.length !== 48) { + return; + } + + // Leap indicator + + var li = (buffer[0] >> 6); + switch (li) { + case 0: this.leapIndicator = 'no-warning'; break; + case 1: this.leapIndicator = 'last-minute-61'; break; + case 2: this.leapIndicator = 'last-minute-59'; break; + case 3: this.leapIndicator = 'alarm'; break; + } + + // Version + + var vn = ((buffer[0] & 0x38) >> 3); + this.version = vn; + + // Mode + + var mode = (buffer[0] & 0x7); + switch (mode) { + case 1: this.mode = 'symmetric-active'; break; + case 2: this.mode = 'symmetric-passive'; break; + case 3: this.mode = 'client'; break; + case 4: this.mode = 'server'; break; + case 5: this.mode = 'broadcast'; break; + case 0: + case 6: + case 7: this.mode = 'reserved'; break; + } + + // Stratum + + var stratum = buffer[1]; + if (stratum === 0) { + this.stratum = 'death'; + } + else if (stratum === 1) { + this.stratum = 'primary'; + } + else if (stratum <= 15) { + this.stratum = 'secondary'; + } + else { + this.stratum = 'reserved'; + } + + // Poll interval (msec) + + this.pollInterval = Math.round(Math.pow(2, buffer[2])) * 1000; + + // Precision (msecs) + + this.precision = Math.pow(2, buffer[3]) * 1000; + + // Root delay (msecs) + + var rootDelay = 256 * (256 * (256 * buffer[4] + buffer[5]) + buffer[6]) + buffer[7]; + this.rootDelay = 1000 * (rootDelay / 0x10000); + + // Root dispersion (msecs) + + this.rootDispersion = ((buffer[8] << 8) + 
buffer[9] + ((buffer[10] << 8) + buffer[11]) / Math.pow(2, 16)) * 1000; + + // Reference identifier + + this.referenceId = ''; + switch (this.stratum) { + case 'death': + case 'primary': + this.referenceId = String.fromCharCode(buffer[12]) + String.fromCharCode(buffer[13]) + String.fromCharCode(buffer[14]) + String.fromCharCode(buffer[15]); + break; + case 'secondary': + this.referenceId = '' + buffer[12] + '.' + buffer[13] + '.' + buffer[14] + '.' + buffer[15]; + break; + } + + // Reference timestamp + + this.referenceTimestamp = internals.toMsecs(buffer, 16); + + // Originate timestamp + + this.originateTimestamp = internals.toMsecs(buffer, 24); + + // Receive timestamp + + this.receiveTimestamp = internals.toMsecs(buffer, 32); + + // Transmit timestamp + + this.transmitTimestamp = internals.toMsecs(buffer, 40); + + // Validate + + if (this.version === 4 && + this.stratum !== 'reserved' && + this.mode === 'server' && + this.originateTimestamp && + this.receiveTimestamp && + this.transmitTimestamp) { + + this.isValid = true; + } + + return this; +}; + + +internals.toMsecs = function (buffer, offset) { + + var seconds = 0; + var fraction = 0; + + for (var i = 0; i < 4; ++i) { + seconds = (seconds * 256) + buffer[offset + i]; + } + + for (i = 4; i < 8; ++i) { + fraction = (fraction * 256) + buffer[offset + i]; + } + + return ((seconds - 2208988800 + (fraction / Math.pow(2, 32))) * 1000); +}; + + +internals.fromMsecs = function (ts, buffer, offset) { + + var seconds = Math.floor(ts / 1000) + 2208988800; + var fraction = Math.round((ts % 1000) / 1000 * Math.pow(2, 32)); + + buffer[offset + 0] = (seconds & 0xFF000000) >> 24; + buffer[offset + 1] = (seconds & 0x00FF0000) >> 16; + buffer[offset + 2] = (seconds & 0x0000FF00) >> 8; + buffer[offset + 3] = (seconds & 0x000000FF); + + buffer[offset + 4] = (fraction & 0xFF000000) >> 24; + buffer[offset + 5] = (fraction & 0x00FF0000) >> 16; + buffer[offset + 6] = (fraction & 0x0000FF00) >> 8; + buffer[offset + 7] = (fraction & 0x000000FF); +}; + + +// Offset singleton + +internals.last = { + offset: 0, + expires: 0, + host: '', + port: 0 +}; + + +exports.offset = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + var now = Date.now(); + var clockSyncRefresh = options.clockSyncRefresh || 24 * 60 * 60 * 1000; // Daily + + if (internals.last.offset && + internals.last.host === options.host && + internals.last.port === options.port && + now < internals.last.expires) { + + process.nextTick(function () { + + callback(null, internals.last.offset); + }); + + return; + } + + exports.time(options, function (err, time) { + + if (err) { + return callback(err, 0); + } + + internals.last = { + offset: Math.round(time.t), + expires: now + clockSyncRefresh, + host: options.host, + port: options.port + }; + + return callback(null, internals.last.offset); + }); +}; + + +// Now singleton + +internals.now = { + intervalId: 0 +}; + + +exports.start = function (options, callback) { + + if (arguments.length !== 2) { + callback = arguments[0]; + options = {}; + } + + if (internals.now.intervalId) { + process.nextTick(function () { + + callback(); + }); + + return; + } + + exports.offset(options, function (err, offset) { + + internals.now.intervalId = setInterval(function () { + + exports.offset(options, function () { }); + }, options.clockSyncRefresh || 24 * 60 * 60 * 1000); // Daily + + return callback(); + }); +}; + + +exports.stop = function () { + + if (!internals.now.intervalId) { + return; + } + + 
clearInterval(internals.now.intervalId); + internals.now.intervalId = 0; +}; + + +exports.isLive = function () { + + return !!internals.now.intervalId; +}; + + +exports.now = function () { + + var now = Date.now(); + if (!exports.isLive() || + now >= internals.last.expires) { + + return now; + } + + return now + internals.last.offset; +}; + + +internals.ignore = function () { + +}; diff --git a/node_modules/sntp/package.json b/node_modules/sntp/package.json new file mode 100755 index 0000000..0d57e3f --- /dev/null +++ b/node_modules/sntp/package.json @@ -0,0 +1,32 @@ +{ + "name": "sntp", + "description": "SNTP Client", + "version": "1.0.9", + "author": "Eran Hammer (http://hueniverse.com)", + "contributors": [], + "repository": "git://github.com/hueniverse/sntp", + "main": "index", + "keywords": [ + "sntp", + "ntp", + "time" + ], + "engines": { + "node": ">=0.8.0" + }, + "dependencies": { + "hoek": "2.x.x" + }, + "devDependencies": { + "lab": "4.x.x" + }, + "scripts": { + "test": "make test-cov" + }, + "licenses": [ + { + "type": "BSD", + "url": "http://github.com/hueniverse/sntp/raw/master/LICENSE" + } + ] +} diff --git a/node_modules/sntp/test/index.js b/node_modules/sntp/test/index.js new file mode 100755 index 0000000..f1d1cda --- /dev/null +++ b/node_modules/sntp/test/index.js @@ -0,0 +1,435 @@ +// Load modules + +var Dns = require('dns'); +var Dgram = require('dgram'); +var Lab = require('lab'); +var Sntp = require('../lib'); + + +// Declare internals + +var internals = {}; + + +// Test shortcuts + +var lab = exports.lab = Lab.script(); +var before = lab.before; +var after = lab.after; +var describe = lab.experiment; +var it = lab.test; +var expect = Lab.expect; + + +describe('SNTP', function () { + + describe('#time', function () { + + it('returns consistent result over multiple tries', function (done) { + + Sntp.time(function (err, time) { + + expect(err).to.not.exist; + expect(time).to.exist; + var t1 = time.t; + + Sntp.time(function (err, time) { + + expect(err).to.not.exist; + expect(time).to.exist; + var t2 = time.t; + expect(Math.abs(t1 - t2)).is.below(200); + done(); + }); + }); + }); + + it('resolves reference IP', function (done) { + + Sntp.time({ host: 'ntp.exnet.com', resolveReference: true }, function (err, time) { + + expect(err).to.not.exist; + expect(time).to.exist; + expect(time.referenceHost).to.exist; + done(); + }); + }); + + it('times out on no response', function (done) { + + Sntp.time({ port: 124, timeout: 100 }, function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Timeout'); + done(); + }); + }); + + it('errors on error event', { parallel: false }, function (done) { + + var orig = Dgram.createSocket; + Dgram.createSocket = function (type) { + + Dgram.createSocket = orig; + var socket = Dgram.createSocket(type); + setImmediate(function () { socket.emit('error', new Error('Fake')) }); + return socket; + }; + + Sntp.time(function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Fake'); + done(); + }); + }); + + it('errors on incorrect sent size', { parallel: false }, function (done) { + + var orig = Dgram.Socket.prototype.send; + Dgram.Socket.prototype.send = function (buf, offset, length, port, address, callback) { + + Dgram.Socket.prototype.send = orig; + return callback(null, 40); + }; + + Sntp.time(function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.equal('Could not send entire message'); + done(); + 
}); + }); + + it('times out on invalid host', function (done) { + + Sntp.time({ host: 'error', timeout: 10000 }, function (err, time) { + + expect(err).to.exist; + expect(time).to.not.exist; + expect(err.message).to.contain('getaddrinfo'); + done(); + }); + }); + + it('fails on bad response buffer size', function (done) { + + var server = Dgram.createSocket('udp4'); + server.on('message', function (message, remote) { + var message = new Buffer(10); + server.send(message, 0, message.length, remote.port, remote.address, function (err, bytes) { + + server.close(); + }); + }); + + server.bind(49123); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + var messup = function (bytes) { + + var server = Dgram.createSocket('udp4'); + server.on('message', function (message, remote) { + + var message = new Buffer([ + 0x24, 0x01, 0x00, 0xe3, + 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + 0x41, 0x43, 0x54, 0x53, + 0xd4, 0xa8, 0x2d, 0xc7, + 0x1c, 0x5d, 0x49, 0x1b, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x67, 0xef, 0x9d, 0xb2, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x71, 0xed, 0xb5, 0xfb, + 0xd4, 0xa8, 0x2d, 0xe6, + 0x71, 0xee, 0x6c, 0xc5 + ]); + + for (var i = 0, il = bytes.length; i < il; ++i) { + message[bytes[i][0]] = bytes[i][1]; + } + + server.send(message, 0, message.length, remote.port, remote.address, function (err, bytes) { + + server.close(); + }); + }); + + server.bind(49123); + }; + + it('fails on bad version', function (done) { + + messup([[0, (0 << 6) + (3 << 3) + (4 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.version).to.equal(3); + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad originateTimestamp', function (done) { + + messup([[24, 0x83], [25, 0xaa], [26, 0x7e], [27, 0x80], [28, 0], [29, 0], [30, 0], [31, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad receiveTimestamp', function (done) { + + messup([[32, 0x83], [33, 0xaa], [34, 0x7e], [35, 0x80], [36, 0], [37, 0], [38, 0], [39, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Invalid server response'); + done(); + }); + }); + + it('fails on bad originate timestamp and alarm li', function (done) { + + messup([[0, (3 << 6) + (4 << 3) + (4 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(err.message).to.equal('Wrong originate timestamp'); + expect(time.leapIndicator).to.equal('alarm'); + done(); + }); + }); + + it('returns time with death stratum and last61 li', function (done) { + + messup([[0, (1 << 6) + (4 << 3) + (4 << 0)], [1, 0]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(time.stratum).to.equal('death'); + expect(time.leapIndicator).to.equal('last-minute-61'); + done(); + }); + }); + + it('returns time with reserved stratum and last59 li', function (done) { + + messup([[0, (2 << 6) + (4 << 3) + (4 << 0)], [1, 0x1f]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(time.stratum).to.equal('reserved'); + expect(time.leapIndicator).to.equal('last-minute-59'); + done(); + }); + }); + + it('fails on bad 
mode (symmetric-active)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (1 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('symmetric-active'); + done(); + }); + }); + + it('fails on bad mode (symmetric-passive)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (2 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('symmetric-passive'); + done(); + }); + }); + + it('fails on bad mode (client)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (3 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('client'); + done(); + }); + }); + + it('fails on bad mode (broadcast)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (5 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('broadcast'); + done(); + }); + }); + + it('fails on bad mode (reserved)', function (done) { + + messup([[0, (0 << 6) + (4 << 3) + (6 << 0)]]); + + Sntp.time({ host: 'localhost', port: 49123 }, function (err, time) { + + expect(err).to.exist; + expect(time.mode).to.equal('reserved'); + done(); + }); + }); + }); + + describe('#offset', function () { + + it('gets the current offset', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + done(); + }); + }); + + it('gets the current offset from cache', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({}, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.equal(offset1); + done(); + }); + }); + }); + + it('gets the new offset on different server', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({ host: 'nist1-sj.ustiming.org' }, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(offset1); + done(); + }); + }); + }); + + it('gets the new offset on different server', function (done) { + + Sntp.offset(function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(0); + var offset1 = offset; + Sntp.offset({ port: 123 }, function (err, offset) { + + expect(err).to.not.exist; + expect(offset).to.not.equal(offset1); + done(); + }); + }); + }); + + it('fails getting the current offset on invalid server', function (done) { + + Sntp.offset({ host: 'error' }, function (err, offset) { + + expect(err).to.exist; + expect(offset).to.equal(0); + done(); + }); + }); + }); + + describe('#now', function () { + + it('starts auto-sync, gets now, then stops', function (done) { + + Sntp.stop(); + + var before = Sntp.now(); + expect(before).to.equal(Date.now()); + + Sntp.start(function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + Sntp.stop(); + + done(); + }); + }); + + it('starts twice', function (done) { + + Sntp.start(function () { + + Sntp.start(function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + Sntp.stop(); + + done(); + }); + }); + }); + + it('starts auto-sync, gets now, waits, gets again after timeout', function (done) { + + Sntp.stop(); + + var before = Sntp.now(); + 
expect(before).to.equal(Date.now()); + + Sntp.start({ clockSyncRefresh: 100 }, function () { + + var now = Sntp.now(); + expect(now).to.not.equal(Date.now()); + expect(now).to.equal(Sntp.now()); + + setTimeout(function () { + + expect(Sntp.now()).to.not.equal(now); + Sntp.stop(); + done(); + }, 110); + }); + }); + }); +}); + diff --git a/node_modules/source-list-map/.editorconfig b/node_modules/source-list-map/.editorconfig new file mode 100644 index 0000000..5be457d --- /dev/null +++ b/node_modules/source-list-map/.editorconfig @@ -0,0 +1,4 @@ +root = true + +[*.js] +indent_style=tab diff --git a/node_modules/source-list-map/.npmignore b/node_modules/source-list-map/.npmignore new file mode 100644 index 0000000..fc31e21 --- /dev/null +++ b/node_modules/source-list-map/.npmignore @@ -0,0 +1,3 @@ +/node_modules +/coverage +.DS_Store diff --git a/node_modules/source-list-map/.travis.yml b/node_modules/source-list-map/.travis.yml new file mode 100644 index 0000000..448420a --- /dev/null +++ b/node_modules/source-list-map/.travis.yml @@ -0,0 +1,5 @@ +sudo: false +language: node_js +node_js: + - "0.10" + - "0.12" diff --git a/node_modules/source-list-map/README.md b/node_modules/source-list-map/README.md new file mode 100644 index 0000000..b5c87bf --- /dev/null +++ b/node_modules/source-list-map/README.md @@ -0,0 +1,89 @@ +# source-list-map + +## API + +### Example + +``` js +var SourceListMap = require("source-list-map").SourceListMap; + +// Create a new map +var map = new SourceListMap(); + +// Add generated code that is map line to line to some soure +map.add("Generated\ncode1\n", "source-code.js", "Orginal\nsource"); + +// Add generated code that isn't mapped +map.add("Generated\ncode2\n"); + +// Get SourceMap and generated source +map.toStringWithSourceMap({ file: "generated-code.js" }); +// { +// source: 'Generated\ncode1\nGenerated\ncode2\n', +// map: { +// version: 3, +// file: 'generated-code.js', +// sources: [ 'source-code.js' ], +// sourcesContent: [ 'Orginal\nsource' ], +// mappings: 'AAAA;AACA;;;' +// } +// } + +// Convert existing SourceMap into SourceListMap +// (Only the first mapping per line is preserved) +var fromStringWithSourceMap = require("source-list-map").fromStringWithSourceMap; +var map = fromStringWithSourceMap("Generated\ncode", { version: 3, ... }); + +``` + +### `new SourceListMap()` + +### `SourceListMap.prototype.add` + +``` js +SourceListMap.prototype.add(generatedCode: string) +SourceListMap.prototype.add(generatedCode: string, source: string, originalSource: string) +SourceListMap.prototype.add(sourceListMap: SourceListMap) +``` + +Append some stuff. + +### `SourceListMap.prototype.prepend` + +``` js +SourceListMap.prototype.prepend(generatedCode: string) +SourceListMap.prototype.prepend(generatedCode: string, source: string, originalSource: string) +SourceListMap.prototype.prepend(sourceListMap: SourceListMap) +``` + +Prepend some stuff. + +### `SourceListMap.prototype.toString()` + +Get generated code. + +### `SourceListMap.prototype.toStringWithSourceMap` + +``` js +SourceListMap.prototype.toStringWithSourceMap(options: object) +``` + +Get generated code and SourceMap. `options` can contains `file` property which defines the `file` property of the SourceMap. + +### `SourceListMap.prototype.mapGeneratedCode` + +``` js +SourceListMap.prototype.mapGeneratedCode(fn: function) +``` + +Applies `fn` to each generated code block. The returned value is set as new generated code. The number of lines must not change. 
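As a quick illustration of that constraint — a minimal sketch using only the API documented above, with made-up file names and strings — a transform that keeps the line count intact could look like this:

``` js
var SourceListMap = require("source-list-map").SourceListMap;

var map = new SourceListMap();
map.add("var a = 1;\nvar b = 2;\n", "example.js", "let a = 1;\nlet b = 2;\n");

// Transform each generated code block; the callback must return the
// same number of lines so the line-to-line mapping stays valid.
map.mapGeneratedCode(function(code) {
	return code.toUpperCase();
});

map.toString();
// => "VAR A = 1;\nVAR B = 2;\n"
```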
+ +## Test + +[![Build Status](https://travis-ci.org/webpack/source-list-map.svg)](https://travis-ci.org/webpack/source-list-map) + +## License + +Copyright (c) 2015 Tobias Koppers + +MIT (http://www.opensource.org/licenses/mit-license.php) \ No newline at end of file diff --git a/node_modules/source-list-map/lib/CodeNode.js b/node_modules/source-list-map/lib/CodeNode.js new file mode 100644 index 0000000..7923ca1 --- /dev/null +++ b/node_modules/source-list-map/lib/CodeNode.js @@ -0,0 +1,31 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var getNumberOfLines = require("./helpers").getNumberOfLines; + +function CodeNode(generatedCode) { + this.generatedCode = generatedCode; +} +module.exports = CodeNode; + +CodeNode.prototype.clone = function() { + return new CodeNode(this.generatedCode); +} + +CodeNode.prototype.getGeneratedCode = function() { + return this.generatedCode; +}; + +CodeNode.prototype.getMappings = function(mappingsContext) { + var lines = getNumberOfLines(this.generatedCode); + return Array(lines+1).join(";"); +}; + +CodeNode.prototype.addGeneratedCode = function(generatedCode) { + this.generatedCode += generatedCode; +}; + +CodeNode.prototype.mapGeneratedCode = function(fn) { + this.generatedCode = fn(this.generatedCode); +}; diff --git a/node_modules/source-list-map/lib/MappingsContext.js b/node_modules/source-list-map/lib/MappingsContext.js new file mode 100644 index 0000000..f313f60 --- /dev/null +++ b/node_modules/source-list-map/lib/MappingsContext.js @@ -0,0 +1,24 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function MappingsContext() { + this.sources = []; + this.sourcesContent = []; + this.hasSourceContent = false; + this.currentOriginalLine = 1; + this.currentSource = 0; +} +module.exports = MappingsContext; + +MappingsContext.prototype.ensureSource = function(source, originalSource) { + var idx = this.sources.indexOf(source); + if(idx >= 0) + return idx; + idx = this.sources.length; + this.sources.push(source); + this.sourcesContent.push(originalSource); + if(typeof originalSource === "string") + this.hasSourceContent = true; + return idx; +}; diff --git a/node_modules/source-list-map/lib/SourceListMap.js b/node_modules/source-list-map/lib/SourceListMap.js new file mode 100644 index 0000000..7600da3 --- /dev/null +++ b/node_modules/source-list-map/lib/SourceListMap.js @@ -0,0 +1,90 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var CodeNode = require("./CodeNode"); +var SourceNode = require("./SourceNode"); +var MappingsContext = require("./MappingsContext"); + +function SourceListMap(generatedCode, source, originalSource) { + if(Array.isArray(generatedCode)) { + this.children = generatedCode; + } else { + this.children = []; + if(generatedCode || source) + this.add(generatedCode, source, originalSource); + } +} +module.exports = SourceListMap; + +SourceListMap.prototype.add = function(generatedCode, source, originalSource) { + if(typeof generatedCode === "string") { + if(source) { + this.children.push(new SourceNode(generatedCode, source, originalSource)); + } else if(this.children.length > 0 && this.children[this.children.length - 1].addGeneratedCode) { + this.children[this.children.length - 1].addGeneratedCode(generatedCode); + } else { + this.children.push(new CodeNode(generatedCode)); + } + } else if(generatedCode.getMappings && generatedCode.getGeneratedCode) { + 
this.children.push(generatedCode); + } else if(generatedCode.children) { + generatedCode.children.forEach(function(sln) { + this.children.push(sln); + }, this); + } else { + throw new Error("Invalid arguments to SourceListMap.prototype.add: Expected string, Node or SourceListMap"); + } +}; + +SourceListMap.prototype.preprend = function(source) { + if(typeof generatedCode === "string") { + if(source) { + this.children.unshift(new SourceNode(generatedCode, source, originalSource)); + } else if(this.children.length > 0 && this.children[this.children.length - 1].preprendGeneratedCode) { + this.children[this.children.length - 1].preprendGeneratedCode(generatedCode); + } else { + this.children.unshift(new CodeNode(generatedCode)); + } + } else if(generatedCode.getMappings && generatedCode.getGeneratedCode) { + this.children.unshift(generatedCode); + } else if(generatedCode.children) { + generatedCode.children.slice().reverse().forEach(function(sln) { + this.children.unshift(sln); + }, this); + } else { + throw new Error("Invalid arguments to SourceListMap.prototype.prerend: Expected string, Node or SourceListMap"); + } +}; + +SourceListMap.prototype.mapGeneratedCode = function(fn) { + this.children.forEach(function(sln) { + sln.mapGeneratedCode(fn); + }); +}; + +SourceListMap.prototype.toString = function() { + return this.children.map(function(sln) { + return sln.getGeneratedCode(); + }).join(""); +}; + +SourceListMap.prototype.toStringWithSourceMap = function(options) { + var mappingsContext = new MappingsContext(); + var source = this.children.map(function(sln) { + return sln.generatedCode; + }).join(""); + var mappings = this.children.map(function(sln) { + return sln.getMappings(mappingsContext); + }).join(""); + return { + source: source, + map: { + version: 3, + file: options && options.file, + sources: mappingsContext.sources, + sourcesContent: mappingsContext.hasSourceContent ? 
mappingsContext.sourcesContent : undefined, + mappings: mappings + } + }; +} diff --git a/node_modules/source-list-map/lib/SourceNode.js b/node_modules/source-list-map/lib/SourceNode.js new file mode 100644 index 0000000..2cc569e --- /dev/null +++ b/node_modules/source-list-map/lib/SourceNode.js @@ -0,0 +1,48 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var base64VLQ = require("./base64-vlq"); +var getNumberOfLines = require("./helpers").getNumberOfLines; + +function SourceNode(generatedCode, source, originalSource, startingLine) { + this.generatedCode = generatedCode; + this.originalSource = originalSource; + this.source = source; + this.startingLine = startingLine || 1; +} +module.exports = SourceNode; + +SourceNode.prototype.clone = function() { + return new SourceNode(this.generatedCode, this.source, this.originalSource, this.startingLine); +} + +var LINE_MAPPING = "AACA;"; +var LAST_LINE_MAPPING = "AACA"; + +SourceNode.prototype.getGeneratedCode = function() { + return this.generatedCode; +}; + +SourceNode.prototype.getMappings = function(mappingsContext) { + var lines = getNumberOfLines(this.generatedCode); + var sourceIdx = mappingsContext.ensureSource(this.source, this.originalSource); + var mappings = "A"; // generated column 0 + mappings += base64VLQ.encode(sourceIdx - mappingsContext.currentSource); // source index + mappings += base64VLQ.encode(this.startingLine - mappingsContext.currentOriginalLine); // original line index + mappings += "A"; // original column 0 + if(lines !== 0) + mappings += ";" + mappingsContext.currentSource = sourceIdx; + mappingsContext.currentOriginalLine = (lines || 1) + this.startingLine - 1; + mappings += Array(lines).join(LINE_MAPPING); + if(lines !== 0 && this.generatedCode[this.generatedCode.length - 1] !== "\n") { + mappings += LAST_LINE_MAPPING; + mappingsContext.currentOriginalLine++; + } + return mappings; +}; + +SourceNode.prototype.mapGeneratedCode = function(fn) { + this.generatedCode = fn(this.generatedCode); +}; diff --git a/node_modules/source-list-map/lib/base64-vlq.js b/node_modules/source-list-map/lib/base64-vlq.js new file mode 100644 index 0000000..b16ec8c --- /dev/null +++ b/node_modules/source-list-map/lib/base64-vlq.js @@ -0,0 +1,169 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/*eslint no-bitwise:0,quotes:0,global-strict:0*/ + +var charToIntMap = {}; +var intToCharMap = {}; + +'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' + .split('') + .forEach(function (ch, index) { + charToIntMap[ch] = index; + intToCharMap[index] = ch; + }); + +var base64 = {}; +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +base64.encode = function base64_encode(aNumber) { + if (aNumber in intToCharMap) { + return intToCharMap[aNumber]; + } + throw new TypeError("Must be between 0 and 63: " + aNumber); +}; + +/** + * Decode a single base 64 digit to an integer. + */ +base64.decode = function base64_decode(aChar) { + if (aChar in charToIntMap) { + return charToIntMap[aChar]; + } + throw new TypeError("Not a valid base 64 digit: " + aChar); +}; + + + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ +exports.decode = function base64VLQ_decode(aStr, aOutParam) { + var i = 0; + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (i >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + digit = base64.decode(aStr.charAt(i++)); + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aStr.slice(i); +}; diff --git a/node_modules/source-list-map/lib/fromStringWithSourceMap.js b/node_modules/source-list-map/lib/fromStringWithSourceMap.js new file mode 100644 index 0000000..b5af247 --- /dev/null +++ b/node_modules/source-list-map/lib/fromStringWithSourceMap.js @@ -0,0 +1,98 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var base64VLQ = require("./base64-vlq"); +var SourceNode = require("./SourceNode"); +var CodeNode = require("./CodeNode"); +var SourceListMap = require("./SourceListMap"); + +module.exports = function fromStringWithSourceMap(code, map) { + var sources = map.sources; + var sourcesContent = map.sourcesContent; + var mappings = map.mappings.split(";"); + var lines = code.split("\n"); + var nodes = []; + var currentNode = null; + var currentLine = 1; + var currentSourceIdx = 0; + var currentSourceNodeLine; + mappings.forEach(function(mapping, idx) { + var line = lines[idx]; + if(typeof line === 'undefined') return; + if(idx !== lines.length - 1) line += "\n"; + if(!mapping) + return addCode(line); + mapping = { value: 0, rest: mapping }; + var lineAdded = false; + while(mapping.rest) + lineAdded = processMapping(mapping, line, lineAdded) || lineAdded; + if(!lineAdded) + addCode(line); + }); + if(mappings.length < lines.length) { + var idx = mappings.length; + while(!lines[idx].trim() && idx < lines.length-1) { + addCode(lines[idx] + "\n"); + idx++; + } + addCode(lines.slice(idx).join("\n")); + } + return new SourceListMap(nodes); + function processMapping(mapping, line, ignore) { + if(mapping.rest && mapping.rest[0] !== ",") { + base64VLQ.decode(mapping.rest, mapping); + } + if(!mapping.rest) + return false; + if(mapping.rest[0] === ",") { + mapping.rest = mapping.rest.substr(1); + return false; + } + + base64VLQ.decode(mapping.rest, mapping); + var sourceIdx = mapping.value + currentSourceIdx; + currentSourceIdx = sourceIdx; + + if(mapping.rest && mapping.rest[0] !== ",") { + base64VLQ.decode(mapping.rest, mapping); + var linePosition = mapping.value + currentLine; + currentLine = linePosition; + } else { + var linePosition = currentLine; + } + + if(mapping.rest && mapping.rest[0] === ",") { + mapping.rest = mapping.rest.substr(1); + } + + if(!ignore) { + addSource(line, sources ? sources[sourceIdx] : null, sourcesContent ? 
sourcesContent[sourceIdx] : null, linePosition) + return true; + } + } + function addCode(generatedCode) { + if(currentNode && currentNode instanceof CodeNode) { + currentNode.addGeneratedCode(generatedCode); + } else if(currentNode && currentNode instanceof SourceNode && !generatedCode.trim()) { + currentNode.generatedCode += generatedCode; + currentSourceNodeLine++; + } else { + currentNode = new CodeNode(generatedCode); + nodes.push(currentNode); + } + } + function addSource(generatedCode, source, originalSource, linePosition) { + if(currentNode && currentNode instanceof SourceNode && + currentNode.source === source && + currentSourceNodeLine === linePosition + ) { + currentNode.generatedCode += generatedCode; + currentSourceNodeLine++; + } else { + currentNode = new SourceNode(generatedCode, source, originalSource, linePosition); + currentSourceNodeLine = linePosition + 1; + nodes.push(currentNode); + } + } +}; diff --git a/node_modules/source-list-map/lib/helpers.js b/node_modules/source-list-map/lib/helpers.js new file mode 100644 index 0000000..ff39705 --- /dev/null +++ b/node_modules/source-list-map/lib/helpers.js @@ -0,0 +1,13 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +exports.getNumberOfLines = function getNumberOfLines(str) { + var nr = -1; + var idx = -1; + do { + nr++ + idx = str.indexOf("\n", idx + 1); + } while(idx >= 0); + return nr; +}; diff --git a/node_modules/source-list-map/lib/index.js b/node_modules/source-list-map/lib/index.js new file mode 100644 index 0000000..7828f52 --- /dev/null +++ b/node_modules/source-list-map/lib/index.js @@ -0,0 +1,5 @@ +exports.SourceListMap = require("./SourceListMap"); +exports.SourceNode = require("./SourceNode"); +exports.CodeNode = require("./CodeNode"); +exports.MappingsContext = require("./MappingsContext"); +exports.fromStringWithSourceMap = require("./fromStringWithSourceMap"); diff --git a/node_modules/source-list-map/package.json b/node_modules/source-list-map/package.json new file mode 100644 index 0000000..bdab9af --- /dev/null +++ b/node_modules/source-list-map/package.json @@ -0,0 +1,26 @@ +{ + "name": "source-list-map", + "version": "0.1.6", + "description": "Fast line to line SourceMap generator.", + "author": "Tobias Koppers @sokra", + "main": "lib/index.js", + "scripts": { + "test": "mocha -R spec" + }, + "repository": { + "type": "git", + "url": "https://github.com/webpack/source-list-map.git" + }, + "keywords": [ + "source-map" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/webpack/source-list-map/issues" + }, + "homepage": "https://github.com/webpack/source-list-map", + "devDependencies": { + "mocha": "^2.2.1", + "should": "^5.2.0" + } +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/babel-source.js b/node_modules/source-list-map/test/fixtures/from-to-tests/babel-source.js new file mode 100644 index 0000000..50a17f0 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/babel-source.js @@ -0,0 +1,94 @@ +// Expression bodies +var odds = evens.map(v => v + 1); +var nums = evens.map((v, i) => v + i); + +// Statement bodies +nums.forEach(v => { + if (v % 5 === 0) + fives.push(v); +}); + +// Lexical this +var bob = { + _name: "Bob", + _friends: [], + printFriends() { + this._friends.forEach(f => + console.log(this._name + " knows " + f)); + } +} + +class SkinnedMesh extends THREE.Mesh { + constructor(geometry, materials) { + super(geometry, materials); + + this.idMatrix = 
SkinnedMesh.defaultMatrix(); + this.bones = []; + this.boneMatrices = []; + //... + } + update(camera) { + //... + super.update(); + } + static defaultMatrix() { + return new THREE.Matrix4(); + } +} + +var obj = { + // __proto__ + __proto__: theProtoObj, + // Shorthand for ‘handler: handler’ + handler, + // Methods + toString() { + // Super calls + return "d " + super.toString(); + }, + // Computed (dynamic) property names + [ "prop_" + (() => 42)() ]: 42 +}; + +// Basic literal string creation +`In JavaScript "\n" is a line-feed.` + +// Multiline strings +`In JavaScript this is + not legal.` + +// Interpolate variable bindings +var name = "Bob", time = "today"; +`Hello ${name}, how are you ${time}?` + +// Construct an HTTP request prefix is used to interpret the replacements and construction +GET`http://foo.org/bar?a=${a}&b=${b} + Content-Type: application/json + X-Credentials: ${credentials} + { "foo": ${foo}, + "bar": ${bar}}`(myOnReadyStateChangeHandler); + +// list matching +var [a, , b] = [1,2,3]; + +// object matching +var { op: a, lhs: { op: b }, rhs: c } + = getASTNode() + +// object matching shorthand +// binds `op`, `lhs` and `rhs` in scope +var {op, lhs, rhs} = getASTNode() + +// Can be used in parameter position +function g({name: x}) { + console.log(x); +} +g({name: 5}) + +// Fail-soft destructuring +var [a] = []; +a === undefined; + +// Fail-soft destructuring with defaults +var [a = 1] = []; +a === 1; diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/babel.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.expected.map new file mode 100644 index 0000000..b44d522 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "babel.generated.js", + "sources": [ + "babel-source.js" + ], + "sourcesContent": [ + "// Expression bodies\r\nvar odds = evens.map(v => v + 1);\r\nvar nums = evens.map((v, i) => v + i);\r\n\r\n// Statement bodies\r\nnums.forEach(v => {\r\n if (v % 5 === 0)\r\n fives.push(v);\r\n});\r\n\r\n// Lexical this\r\nvar bob = {\r\n _name: \"Bob\",\r\n _friends: [],\r\n printFriends() {\r\n this._friends.forEach(f =>\r\n console.log(this._name + \" knows \" + f));\r\n }\r\n}\r\n\r\nclass SkinnedMesh extends THREE.Mesh {\r\n constructor(geometry, materials) {\r\n super(geometry, materials);\r\n\r\n this.idMatrix = SkinnedMesh.defaultMatrix();\r\n this.bones = [];\r\n this.boneMatrices = [];\r\n //...\r\n }\r\n update(camera) {\r\n //...\r\n super.update();\r\n }\r\n static defaultMatrix() {\r\n return new THREE.Matrix4();\r\n }\r\n}\r\n\r\nvar obj = {\r\n // __proto__\r\n __proto__: theProtoObj,\r\n // Shorthand for ‘handler: handler’\r\n handler,\r\n // Methods\r\n toString() {\r\n // Super calls\r\n return \"d \" + super.toString();\r\n },\r\n // Computed (dynamic) property names\r\n [ \"prop_\" + (() => 42)() ]: 42\r\n};\r\n\r\n// Basic literal string creation\r\n`In JavaScript \"\\n\" is a line-feed.`\r\n\r\n// Multiline strings\r\n`In JavaScript this is\r\n not legal.`\r\n\r\n// Interpolate variable bindings\r\nvar name = \"Bob\", time = \"today\";\r\n`Hello ${name}, how are you ${time}?`\r\n\r\n// Construct an HTTP request prefix is used to interpret the replacements and construction\r\nGET`http://foo.org/bar?a=${a}&b=${b}\r\n Content-Type: application/json\r\n X-Credentials: ${credentials}\r\n { \"foo\": ${foo},\r\n \"bar\": ${bar}}`(myOnReadyStateChangeHandler);\r\n\r\n// list matching\r\nvar [a, , b] = [1,2,3];\r\n\r\n// object 
matching\r\nvar { op: a, lhs: { op: b }, rhs: c }\r\n = getASTNode()\r\n\r\n// object matching shorthand\r\n// binds `op`, `lhs` and `rhs` in scope\r\nvar {op, lhs, rhs} = getASTNode()\r\n\r\n// Can be used in parameter position\r\nfunction g({name: x}) {\r\n console.log(x);\r\n}\r\ng({name: 5})\r\n\r\n// Fail-soft destructuring\r\nvar [a] = [];\r\na === undefined;\r\n\r\n// Fail-soft destructuring with defaults\r\nvar [a = 1] = [];\r\na === 1;\r\n" + ], + "mappings": ";;;;;;;;;;;;;;;;;AACA;AAAA;AAAA;AACA;AAAA;AAAA;AACA;;AAEA;AACA;AAEA;AACA;;AAEA;AACA;AACA;AACA;;;AACA;AACA;AAAA;AACA;AACA;AACA;AACA;AACA;AADA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AATA;AACA;AADA;;AASA;;AAEA;AACA;;;AACA;AACA;AACA;;;AAfA;AAAA;AACA;AAiBA;;AAEA;;AAEA;;AAEA;;AAEA;AACA;AAEA;AAAA;AACA;;AAGA;AACA;;AAMA;AAAA;AACA;AACA;;AAEA;AACA;;AAMA;AAAA;AAAA;AACA;;;AAGA;AACA;AAFA;AAAA;AAAA;AACA;;;;AAIA;AACA;AADA;AAAA;AAAA;AACA;;AAEA;AAAA;AACA;AAAA;AACA;AACA;AACA;;AAEA;AAAA;AACA;AAAA;AACA;;AAEA;;AAAA;AACA;AAAA;AACA;;;;;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/babel.generated.js b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.generated.js new file mode 100644 index 0000000..7094c6a --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.generated.js @@ -0,0 +1,144 @@ +"use strict"; + +var _obj; + +var _defineProperty = function (obj, key, value) { return Object.defineProperty(obj, key, { value: value, enumerable: key == null || typeof Symbol == "undefined" || key.constructor !== Symbol, configurable: true, writable: true }); }; + +var _taggedTemplateLiteral = function (strings, raw) { return Object.freeze(Object.defineProperties(strings, { raw: { value: Object.freeze(raw) } })); }; + +var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }; + +var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); + +var _get = function get(object, property, receiver) { var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { return get(parent, property, receiver); } } else if ("value" in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } }; + +var _inherits = function (subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) subClass.__proto__ = superClass; }; + +// Expression bodies +var odds = evens.map(function (v) { + return v + 1; +}); +var nums = evens.map(function (v, i) { + return v + i; +}); + +// Statement bodies +nums.forEach(function 
(v) { + if (v % 5 === 0) fives.push(v); +}); + +// Lexical this +var bob = { + _name: "Bob", + _friends: [], + printFriends: function printFriends() { + var _this4 = this; + + this._friends.forEach(function (f) { + return console.log(_this4._name + " knows " + f); + }); + } +}; + +var SkinnedMesh = (function (_THREE$Mesh) { + function SkinnedMesh(geometry, materials) { + _classCallCheck(this, SkinnedMesh); + + _get(Object.getPrototypeOf(SkinnedMesh.prototype), "constructor", this).call(this, geometry, materials); + + this.idMatrix = SkinnedMesh.defaultMatrix(); + this.bones = []; + this.boneMatrices = []; + //... + } + + _inherits(SkinnedMesh, _THREE$Mesh); + + _createClass(SkinnedMesh, [{ + key: "update", + value: function update(camera) { + //... + _get(Object.getPrototypeOf(SkinnedMesh.prototype), "update", this).call(this); + } + }], [{ + key: "defaultMatrix", + value: function defaultMatrix() { + return new THREE.Matrix4(); + } + }]); + + return SkinnedMesh; +})(THREE.Mesh); + +var obj = _obj = _defineProperty({ + // __proto__ + __proto__: theProtoObj, + // Shorthand for ‘handler: handler’ + handler: handler, + // Methods + toString: function toString() { + // Super calls + return "d " + _get(Object.getPrototypeOf(_obj), "toString", this).call(this); + } }, "prop_" + (function () { + return 42; +})(), 42); + +// Basic literal string creation +"In JavaScript \"\n\" is a line-feed."(_taggedTemplateLiteral(["In JavaScript this is\n not legal."], ["In JavaScript this is\r\n not legal."])); + +// Interpolate variable bindings +var name = "Bob", + time = "today"; +"Hello " + name + ", how are you " + time + "?"; + +// Construct an HTTP request prefix is used to interpret the replacements and construction +GET(_taggedTemplateLiteral(["http://foo.org/bar?a=", "&b=", "\n Content-Type: application/json\n X-Credentials: ", "\n { \"foo\": ", ",\n \"bar\": ", "}"], ["http://foo.org/bar?a=", "&b=", "\r\n Content-Type: application/json\r\n X-Credentials: ", "\r\n { \"foo\": ", ",\r\n \"bar\": ", "}"]), a, b, credentials, foo, bar)(myOnReadyStateChangeHandler); + +// list matching +var _ref = [1, 2, 3]; +var a = _ref[0]; +var b = _ref[2]; + +// object matching + +var _getASTNode = getASTNode(); + +var a = _getASTNode.op; +var b = _getASTNode.lhs.op; +var c = _getASTNode.rhs; + +// object matching shorthand +// binds `op`, `lhs` and `rhs` in scope + +var _getASTNode2 = getASTNode(); + +var op = _getASTNode2.op; +var lhs = _getASTNode2.lhs; +var rhs = _getASTNode2.rhs; + +// Can be used in parameter position +function g(_ref2) { + var x = _ref2.name; + + console.log(x); +} +g({ name: 5 }); + +// Fail-soft destructuring +var _ref3 = []; +var a = _ref3[0]; + +a === undefined; + +// Fail-soft destructuring with defaults +var _ref4 = []; +var _ref4$0 = _ref4[0]; +var a = _ref4$0 === undefined ? 
1 : _ref4$0; + +a === 1; + +// Computed (dynamic) property names + +// Multiline strings + +//# sourceMappingURL=babel.generated.js.map \ No newline at end of file diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/babel.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.input.map new file mode 100644 index 0000000..400e2a8 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/babel.input.map @@ -0,0 +1 @@ +{"version":3,"sources":["babel-source.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;AACA,IAAI,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,UAAA,CAAC;SAAI,CAAC,GAAG,CAAC;CAAA,CAAC,CAAC;AACjC,IAAI,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,UAAC,CAAC,EAAE,CAAC;SAAK,CAAC,GAAG,CAAC;CAAA,CAAC,CAAC;;;AAGtC,IAAI,CAAC,OAAO,CAAC,UAAA,CAAC,EAAI;AAChB,MAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EACb,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;CACjB,CAAC,CAAC;;;AAGH,IAAI,GAAG,GAAG;AACR,OAAK,EAAE,KAAK;AACZ,UAAQ,EAAE,EAAE;AACZ,cAAY,EAAA,wBAAG;;;AACb,QAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAA,CAAC;aACrB,OAAO,CAAC,GAAG,CAAC,OAAK,KAAK,GAAG,SAAS,GAAG,CAAC,CAAC;KAAA,CAAC,CAAC;GAC5C;CACF,CAAA;;IAEK,WAAW;AACJ,WADP,WAAW,CACH,QAAQ,EAAE,SAAS,EAAE;0BAD7B,WAAW;;AAEb,+BAFE,WAAW,6CAEP,QAAQ,EAAE,SAAS,EAAE;;AAE3B,QAAI,CAAC,QAAQ,GAAG,WAAW,CAAC,aAAa,EAAE,CAAC;AAC5C,QAAI,CAAC,KAAK,GAAG,EAAE,CAAC;AAChB,QAAI,CAAC,YAAY,GAAG,EAAE,CAAC;;GAExB;;YARG,WAAW;;eAAX,WAAW;;WAST,gBAAC,MAAM,EAAE;;AAEb,iCAXE,WAAW,wCAWE;KAChB;;;WACmB,yBAAG;AACrB,aAAO,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;KAC5B;;;SAfG,WAAW;GAAS,KAAK,CAAC,IAAI;;AAkBpC,IAAI,GAAG;;AAEH,aAAW,WAAW;;AAEtB,WAAA,OAAO;;AAEP,YAAQ,oBAAG;;AAEV,WAAO,IAAI,iEAAmB,CAAC;GAC/B,IAEC,OAAO,GAAG,CAAC;SAAM,EAAE;EAAA,EAAG,EAAI,EAAE,CACjC,CAAC;;;AAGF,iJAIY;;;AAGZ,IAAI,IAAI,GAAG,KAAK;IAAE,IAAI,GAAG,OAAO,CAAC;AACjC,WAAS,IAAI,sBAAiB,IAAI,OAAG;;;AAGrC,GAAG,+TAAwB,CAAC,EAAM,CAAC,EAEd,WAAW,EACjB,GAAG,EACH,GAAG,EAAI,2BAA2B,CAAC,CAAC;;;WAGpC,CAAC,CAAC,EAAC,CAAC,EAAC,CAAC,CAAC;IAAjB,CAAC;IAAI,CAAC;;;;kBAIF,UAAU,EAAE;;IADX,CAAC,eAAL,EAAE;IAAgB,CAAC,eAAZ,GAAG,CAAI,EAAE;IAAY,CAAC,eAAN,GAAG;;;;;mBAKX,UAAU,EAAE;;IAA5B,EAAE,gBAAF,EAAE;IAAE,GAAG,gBAAH,GAAG;IAAE,GAAG,gBAAH,GAAG;;;AAGjB,SAAS,CAAC,QAAY;MAAJ,CAAC,SAAP,IAAI;;AACd,SAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;CAChB;AACD,CAAC,CAAC,EAAC,IAAI,EAAE,CAAC,EAAC,CAAC,CAAA;;;YAGF,EAAE;IAAP,CAAC;;AACN,CAAC,KAAK,SAAS,CAAC;;;YAGF,EAAE;;IAAX,CAAC,2BAAG,CAAC;;AACV,CAAC,KAAK,CAAC,CAAC","file":"babel.generated.js","sourcesContent":["// Expression bodies\r\nvar odds = evens.map(v => v + 1);\r\nvar nums = evens.map((v, i) => v + i);\r\n\r\n// Statement bodies\r\nnums.forEach(v => {\r\n if (v % 5 === 0)\r\n fives.push(v);\r\n});\r\n\r\n// Lexical this\r\nvar bob = {\r\n _name: \"Bob\",\r\n _friends: [],\r\n printFriends() {\r\n this._friends.forEach(f =>\r\n console.log(this._name + \" knows \" + f));\r\n }\r\n}\r\n\r\nclass SkinnedMesh extends THREE.Mesh {\r\n constructor(geometry, materials) {\r\n super(geometry, materials);\r\n\r\n this.idMatrix = SkinnedMesh.defaultMatrix();\r\n this.bones = [];\r\n this.boneMatrices = [];\r\n //...\r\n }\r\n update(camera) {\r\n //...\r\n super.update();\r\n }\r\n static defaultMatrix() {\r\n return new THREE.Matrix4();\r\n }\r\n}\r\n\r\nvar obj = {\r\n // __proto__\r\n __proto__: theProtoObj,\r\n // Shorthand for ‘handler: handler’\r\n handler,\r\n // Methods\r\n toString() {\r\n // Super calls\r\n return \"d \" + super.toString();\r\n },\r\n // Computed (dynamic) property names\r\n [ \"prop_\" + (() => 42)() ]: 42\r\n};\r\n\r\n// Basic literal string creation\r\n`In JavaScript \"\\n\" is a line-feed.`\r\n\r\n// Multiline strings\r\n`In 
JavaScript this is\r\n not legal.`\r\n\r\n// Interpolate variable bindings\r\nvar name = \"Bob\", time = \"today\";\r\n`Hello ${name}, how are you ${time}?`\r\n\r\n// Construct an HTTP request prefix is used to interpret the replacements and construction\r\nGET`http://foo.org/bar?a=${a}&b=${b}\r\n Content-Type: application/json\r\n X-Credentials: ${credentials}\r\n { \"foo\": ${foo},\r\n \"bar\": ${bar}}`(myOnReadyStateChangeHandler);\r\n\r\n// list matching\r\nvar [a, , b] = [1,2,3];\r\n\r\n// object matching\r\nvar { op: a, lhs: { op: b }, rhs: c }\r\n = getASTNode()\r\n\r\n// object matching shorthand\r\n// binds `op`, `lhs` and `rhs` in scope\r\nvar {op, lhs, rhs} = getASTNode()\r\n\r\n// Can be used in parameter position\r\nfunction g({name: x}) {\r\n console.log(x);\r\n}\r\ng({name: 5})\r\n\r\n// Fail-soft destructuring\r\nvar [a] = [];\r\na === undefined;\r\n\r\n// Fail-soft destructuring with defaults\r\nvar [a = 1] = [];\r\na === 1;\r\n"]} \ No newline at end of file diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.expected.map new file mode 100644 index 0000000..46d14c1 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "empty-lines.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\n\nWorld\n\n" + ], + "mappings": "AAAA;AACA;AACA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.generated.txt new file mode 100644 index 0000000..5bc1171 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.generated.txt @@ -0,0 +1,4 @@ + Hello + + World + diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.input.map new file mode 100644 index 0000000..cf6ebf9 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/empty-lines.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "empty-lines.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\n\nWorld\n\n" + ], + "mappings": "CAAA;;AAEA;;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.expected.map new file mode 100644 index 0000000..d01fa1c --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.expected.map @@ -0,0 +1,13 @@ +{ + "version": 3, + "file": "file-offset.generated.txt", + "sources": [ + "hello.txt", + "world.txt" + ], + "sourcesContent": [ + "\nHello\nWorld\nTest", + "Hello\nWorld\nWith\nOffset" + ], + "mappings": "AACA;ACAA;AAEA" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.generated.txt new file mode 100644 index 0000000..d9d7674 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.generated.txt @@ -0,0 +1,3 @@ + Hello + World + Offset \ No newline at end of file diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.input.map new file mode 
100644 index 0000000..a470630 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/file-offset.input.map @@ -0,0 +1,13 @@ +{ + "version": 3, + "file": "file-offset.generated.txt", + "sources": [ + "hello.txt", + "world.txt" + ], + "sourcesContent": [ + "\nHello\nWorld\nTest", + "Hello\nWorld\nWith\nOffset" + ], + "mappings": "CACA;CCAA;CAEA" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.expected.map new file mode 100644 index 0000000..770bf28 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "hello-world1.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\n" + ], + "mappings": "AAAA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.generated.txt new file mode 100644 index 0000000..40bf8fd --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.generated.txt @@ -0,0 +1,2 @@ + Hello + World diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.input.map new file mode 100644 index 0000000..8e84db0 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world1.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "hello-world1.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\n" + ], + "mappings": "CAAA;CACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.expected.map new file mode 100644 index 0000000..831cf53 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "hello-world2.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld" + ], + "mappings": "AAAA;AACA" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.generated.txt new file mode 100644 index 0000000..fa25855 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.generated.txt @@ -0,0 +1,2 @@ + Hello + World \ No newline at end of file diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.input.map new file mode 100644 index 0000000..401d5ef --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world2.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "hello-world2.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld" + ], + "mappings": "CAAA;CACA" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.expected.map new file mode 100644 index 0000000..347431d --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": 
"hello-world3.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\n" + ], + "mappings": "AAAA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.generated.txt new file mode 100644 index 0000000..40bf8fd --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.generated.txt @@ -0,0 +1,2 @@ + Hello + World diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.input.map new file mode 100644 index 0000000..7915d45 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/hello-world3.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "hello-world3.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\n" + ], + "mappings": "A,CAAA;A,CACA;A" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.expected.map new file mode 100644 index 0000000..a3452cb --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "mismatched-mappings.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "function foobar() {\n return; // foo\n}" + ], + "mappings": "AAAA;AACA;AADA" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.generated.txt new file mode 100644 index 0000000..5e0d7c2 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.generated.txt @@ -0,0 +1,3 @@ +function foobar() { + return; // foo +} \ No newline at end of file diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.input.map new file mode 100644 index 0000000..502c5bc --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/mismatched-mappings.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "mismatched-mappings.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "function foobar() {\n return; // foo\n}" + ], + "mappings": "AAAA,SAAS,MAAT,GAAmB;AACjB;CADF;AAAmB" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.expected.map new file mode 100644 index 0000000..f456a01 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.expected.map @@ -0,0 +1,13 @@ +{ + "version": 3, + "file": "multiple-file.generated.txt", + "sources": [ + "hello.txt", + "world.txt" + ], + "sourcesContent": [ + "Hello\n", + "World\n" + ], + "mappings": "AAAA;ACAA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.generated.txt new file mode 100644 index 0000000..40bf8fd --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.generated.txt @@ -0,0 +1,2 @@ + Hello + World diff --git 
a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.input.map new file mode 100644 index 0000000..e813fac --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-file.input.map @@ -0,0 +1,13 @@ +{ + "version": 3, + "file": "multiple-file.generated.txt", + "sources": [ + "hello.txt", + "world.txt" + ], + "sourcesContent": [ + "Hello\n", + "World\n" + ], + "mappings": "CAAA;CCAA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.expected.map new file mode 100644 index 0000000..7837443 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.expected.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "multiple-mappings-per-line.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\nTest\n" + ], + "mappings": "AAAA;AACA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.generated.txt new file mode 100644 index 0000000..54f51d9 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.generated.txt @@ -0,0 +1,3 @@ + Hello World Test + World Test + Test diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.input.map new file mode 100644 index 0000000..634a612 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/multiple-mappings-per-line.input.map @@ -0,0 +1,11 @@ +{ + "version": 3, + "file": "multiple-mappings-per-line.generated.txt", + "sources": [ + "hello-world.txt" + ], + "sourcesContent": [ + "Hello\nWorld\nTest\n" + ], + "mappings": "CAAA,MAC,,MAC;,CAD,MAC;CA," +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.expected.map new file mode 100644 index 0000000..6e55d0b --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.expected.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "file": "no-source-contents.generated.txt", + "sources": [ + "hello-world.txt" + ], + "mappings": "AAAA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.generated.txt new file mode 100644 index 0000000..40bf8fd --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.generated.txt @@ -0,0 +1,2 @@ + Hello + World diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.input.map new file mode 100644 index 0000000..67018d2 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/no-source-contents.input.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "file": "no-source-contents.generated.txt", + "sources": [ + "hello-world.txt" + ], + "mappings": "CAAA;CACA;" +} diff --git 
a/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.expected.map b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.expected.map new file mode 100644 index 0000000..dc09234 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.expected.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "file": "null-source.generated.txt", + "sources": [ + null + ], + "mappings": "AAAA;AACA;" +} diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.generated.txt b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.generated.txt new file mode 100644 index 0000000..40bf8fd --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.generated.txt @@ -0,0 +1,2 @@ + Hello + World diff --git a/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.input.map b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.input.map new file mode 100644 index 0000000..f321324 --- /dev/null +++ b/node_modules/source-list-map/test/fixtures/from-to-tests/null-source.input.map @@ -0,0 +1,5 @@ +{ + "version": 3, + "file": "null-source.generated.txt", + "mappings": "CAAA;CACA;" +} diff --git a/node_modules/source-list-map/test/fromStringWithSourceMapTest.js b/node_modules/source-list-map/test/fromStringWithSourceMapTest.js new file mode 100644 index 0000000..b59a197 --- /dev/null +++ b/node_modules/source-list-map/test/fromStringWithSourceMapTest.js @@ -0,0 +1,40 @@ +var should = require("should"); +var fs = require("fs"); +var path = require("path"); +var SourceListMap = require("../").SourceListMap; +var fromStringWithSourceMap = require("../").fromStringWithSourceMap; + +describe("fromStringWithSourceMap", function() { + fs.readdirSync(path.resolve(__dirname, "fixtures/from-to-tests")).filter(function(name) { + return /\.input\.map$/.test(name); + }).forEach(function(name) { + it("should parse and generate " + name, function() { + var MAP = JSON.parse(fs.readFileSync(path.resolve(__dirname, "fixtures/from-to-tests/" + name), "utf-8")); + var GENERATED_CODE = fs.readFileSync(path.resolve(__dirname, "fixtures/from-to-tests/" + MAP.file), "utf-8"); + var EXPECTED_MAP = JSON.parse(fs.readFileSync(path.resolve(__dirname, "fixtures/from-to-tests/" + + name.replace(/\.input\.map$/, ".expected.map")), "utf-8")); + var slm = fromStringWithSourceMap(GENERATED_CODE, MAP); + var result = slm.toStringWithSourceMap({ + file: MAP.file + }); + if(result.map.mappings !== EXPECTED_MAP.mappings) { + fs.writeFileSync(path.resolve(__dirname, "fixtures/from-to-tests/" + + name.replace(/\.input\.map$/, ".output.map")), JSON.stringify(result.map, null, 2), "utf-8"); + } + JSON.parse(JSON.stringify(result.map)).should.be.eql(EXPECTED_MAP); + if(result.source !== GENERATED_CODE) { + fs.writeFileSync(path.resolve(__dirname, "fixtures/from-to-tests/" + + path.basename(MAP.file, path.extname(MAP.file)) + ".output" + path.extname(MAP.file)), result.source, "utf-8"); + } + result.source.should.be.eql(GENERATED_CODE); + + slm = fromStringWithSourceMap(GENERATED_CODE, EXPECTED_MAP); + result = slm.toStringWithSourceMap({ + file: MAP.file + }); + result.source.should.be.eql(GENERATED_CODE); + JSON.parse(JSON.stringify(result.map)).should.be.eql(EXPECTED_MAP); + }); + + }); +}); diff --git a/node_modules/source-map-support/.npmignore b/node_modules/source-map-support/.npmignore new file mode 100644 index 0000000..d91de11 --- /dev/null +++ b/node_modules/source-map-support/.npmignore @@ -0,0 +1 @@ 
+browserify-test/ diff --git a/node_modules/source-map-support/.travis.yml b/node_modules/source-map-support/.travis.yml new file mode 100644 index 0000000..2a2f1c6 --- /dev/null +++ b/node_modules/source-map-support/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - 'stable' + - '0.12' + - '0.10' diff --git a/node_modules/source-map-support/LICENSE.md b/node_modules/source-map-support/LICENSE.md new file mode 100644 index 0000000..6247ca9 --- /dev/null +++ b/node_modules/source-map-support/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Evan Wallace + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/source-map-support/README.md b/node_modules/source-map-support/README.md new file mode 100644 index 0000000..c21cfeb --- /dev/null +++ b/node_modules/source-map-support/README.md @@ -0,0 +1,251 @@ +# Source Map Support +[![Build Status](https://travis-ci.org/evanw/node-source-map-support.svg?branch=master)](https://travis-ci.org/evanw/node-source-map-support) + +This module provides source map support for stack traces in node via the [V8 stack trace API](http://code.google.com/p/v8/wiki/JavaScriptStackTraceApi). It uses the [source-map](https://github.com/mozilla/source-map) module to replace the paths and line numbers of source-mapped files with their original paths and line numbers. The output mimics node's stack trace format with the goal of making every compile-to-JS language more of a first-class citizen. Source maps are completely general (not specific to any one language) so you can use source maps with multiple compile-to-JS languages in the same node process. + +## Installation and Usage + +#### Node support + +``` +$ npm install source-map-support +``` + +Source maps can be generated using libraries such as [source-map-index-generator](https://github.com/twolfson/source-map-index-generator). Once you have a valid source map, insert the following line at the top of your compiled code: + +```js +require('source-map-support').install(); +``` + +And place a source mapping comment somewhere in the file (usually done automatically or with an option by your transpiler): + +``` +//# sourceMappingURL=path/to/source.map +``` + +If multiple sourceMappingURL comments exist in one file, the last sourceMappingURL comment will be +respected (e.g. if a file mentions the comment in code, or went through multiple transpilers). +The path should either be absolute or relative to the compiled file. 
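For readers who want a feel for what `install()` does with the V8 stack trace API mentioned above, here is a deliberately simplified sketch. It is not the module's actual implementation, and `loadSourceMapFor` is a hypothetical helper standing in for the module's real source-map retrieval logic.

```js
// Simplified illustration only -- not source-map-support's real code.
// V8 lets a program override Error.prepareStackTrace and build the `stack`
// string itself from the structured call sites it is handed.
var SourceMapConsumer = require('source-map').SourceMapConsumer;

Error.prepareStackTrace = function(error, callSites) {
  var frames = callSites.map(function(frame) {
    // Hypothetical helper: would load the map referenced by the file's
    // //# sourceMappingURL comment, returning null when there is none.
    var rawMap = loadSourceMapFor(frame.getFileName());
    if (!rawMap) return '    at ' + frame;
    var pos = new SourceMapConsumer(rawMap).originalPositionFor({
      line: frame.getLineNumber(),         // V8 lines are 1-based
      column: frame.getColumnNumber() - 1  // source-map columns are 0-based
    });
    if (!pos.source) return '    at ' + frame;
    return '    at ' + pos.source + ':' + pos.line + ':' + (pos.column + 1);
  });
  return String(error) + '\n' + frames.join('\n');
};
```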
+
+It is also possible to install the source map support directly by
+requiring the `register` module, which can be handy with ES6:
+
+```js
+import 'source-map-support/register'
+
+// Instead of:
+import sourceMapSupport from 'source-map-support'
+sourceMapSupport.install()
+```
+Note: if you're using babel-register, it includes source-map-support already.
+
+It is also very useful with Mocha:
+
+```
+$ mocha --require source-map-support/register tests/
+```
+
+#### Browser support
+
+This library also works in Chrome. While the DevTools console already supports source maps, the V8 engine doesn't and `Error.prototype.stack` will be incorrect without this library. Everything will just work if you deploy your source files using [browserify](http://browserify.org/). Just make sure to pass the `--debug` flag to the browserify command so your source maps are included in the bundled code.
+
+This library also works if you use another build process or just include the source files directly. In this case, include the file `browser-source-map-support.js` in your page and call `sourceMapSupport.install()`. It contains the whole library already bundled for the browser using browserify.
+
+```html
+
+
+```
+
+This library also works if you use AMD (Asynchronous Module Definition), which is used in tools like [RequireJS](http://requirejs.org/). Just list `browser-source-map-support` as a dependency:
+
+```html
+
+```
+
+## Options
+
+This module installs two things: a change to the `stack` property on `Error` objects and a handler for uncaught exceptions that mimics node's default exception handler (the handler can be seen in the demos below). You may want to disable the handler if you have your own uncaught exception handler. This can be done by passing an argument to the installer:
+
+```js
+require('source-map-support').install({
+  handleUncaughtExceptions: false
+});
+```
+
+This module loads source maps from the filesystem by default. You can provide alternate loading behavior through a callback as shown below. For example, [Meteor](https://github.com/meteor) keeps all source maps cached in memory to avoid disk access.
+
+```js
+require('source-map-support').install({
+  retrieveSourceMap: function(source) {
+    if (source === 'compiled.js') {
+      return {
+        url: 'original.js',
+        map: fs.readFileSync('compiled.js.map', 'utf8')
+      };
+    }
+    return null;
+  }
+});
+```
+
+The module will by default assume a browser environment if XMLHttpRequest and window are defined. If either of these do not exist it will instead assume a node environment.
+In some rare cases, e.g. when running a browser emulation and where both variables are also set, you can explicitly specify the environment to be either 'browser' or 'node'.
+
+```js
+require('source-map-support').install({
+  environment: 'node'
+});
+```
+
+To support files with inline source maps, the `hookRequire` option can be specified, which will monitor all source files for inline source maps.
+
+
+```js
+require('source-map-support').install({
+  hookRequire: true
+});
+```
+
+This monkey patches the `require` module loading chain, so it is not enabled by default and is not recommended for any sort of production usage.
+
+## Demos
+
+#### Basic Demo
+
+original.js:
+
+```js
+throw new Error('test'); // This is the original code
+```
+
+compiled.js:
+
+```js
+require('source-map-support').install();
+
+throw new Error('test'); // This is the compiled code
+// The next line defines the sourceMapping.
+//# sourceMappingURL=compiled.js.map
+```
+
+compiled.js.map:
+
+```json
+{
+  "version": 3,
+  "file": "compiled.js",
+  "sources": ["original.js"],
+  "names": [],
+  "mappings": ";;;AAAA,MAAM,IAAI"
+}
+```
+
+Run compiled.js using node (notice how the stack trace uses original.js instead of compiled.js):
+
+```
+$ node compiled.js
+
+original.js:1
+throw new Error('test'); // This is the original code
+ ^
+Error: test
+ at Object.<anonymous> (original.js:1:7)
+ at Module._compile (module.js:456:26)
+ at Object.Module._extensions..js (module.js:474:10)
+ at Module.load (module.js:356:32)
+ at Function.Module._load (module.js:312:12)
+ at Function.Module.runMain (module.js:497:10)
+ at startup (node.js:119:16)
+ at node.js:901:3
+```
+
+#### TypeScript Demo
+
+demo.ts:
+
+```typescript
+declare function require(name: string);
+require('source-map-support').install();
+class Foo {
+  constructor() { this.bar(); }
+  bar() { throw new Error('this is a demo'); }
+}
+new Foo();
+```
+
+Compile and run the file using the TypeScript compiler from the terminal:
+
+```
+$ npm install source-map-support typescript
+$ node_modules/typescript/bin/tsc -sourcemap demo.ts
+$ node demo.js
+
+demo.ts:5
+ bar() { throw new Error('this is a demo'); }
+ ^
+Error: this is a demo
+ at Foo.bar (demo.ts:5:17)
+ at new Foo (demo.ts:4:24)
+ at Object.<anonymous> (demo.ts:7:1)
+ at Module._compile (module.js:456:26)
+ at Object.Module._extensions..js (module.js:474:10)
+ at Module.load (module.js:356:32)
+ at Function.Module._load (module.js:312:12)
+ at Function.Module.runMain (module.js:497:10)
+ at startup (node.js:119:16)
+ at node.js:901:3
+```
+
+#### CoffeeScript Demo
+
+demo.coffee:
+
+```coffee
+require('source-map-support').install()
+foo = ->
+  bar = -> throw new Error 'this is a demo'
+  bar()
+foo()
+```
+
+Compile and run the file using the CoffeeScript compiler from the terminal:
+
+```sh
+$ npm install source-map-support coffee-script
+$ node_modules/coffee-script/bin/coffee --map --compile demo.coffee
+$ node demo.js
+
+demo.coffee:3
+ bar = -> throw new Error 'this is a demo'
+ ^
+Error: this is a demo
+ at bar (demo.coffee:3:22)
+ at foo (demo.coffee:4:3)
+ at Object.<anonymous> (demo.coffee:5:1)
+ at Object.<anonymous> (demo.coffee:1:1)
+ at Module._compile (module.js:456:26)
+ at Object.Module._extensions..js (module.js:474:10)
+ at Module.load (module.js:356:32)
+ at Function.Module._load (module.js:312:12)
+ at Function.Module.runMain (module.js:497:10)
+ at startup (node.js:119:16)
+```
+
+## Tests
+
+This repo contains both automated tests for node and manual tests for the browser. The automated tests can be run using mocha (type `mocha` in the root directory). To run the manual tests:
+
+* Build the tests using `build.js`
+* Launch the HTTP server (`npm run serve-tests`) and visit
+  * http://127.0.0.1:1336/amd-test
+  * http://127.0.0.1:1336/browser-test
+  * http://127.0.0.1:1336/browserify-test - **Currently not working** due to a bug with browserify (see [pull request #66](https://github.com/evanw/node-source-map-support/pull/66) for details).
+* For `header-test`, run `server.js` inside that directory and visit http://127.0.0.1:1337/
+
+## License
+
+This code is available under the [MIT license](http://opensource.org/licenses/MIT).
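The Options section of the README above shows each `install()` flag on its own; in practice they can be combined in a single call. The snippet below is a sketch only: the option names come from the README, but `bundle.js` and `bundle.js.map` are hypothetical file names used purely for illustration.

```js
// Combined use of the documented install() options (illustrative sketch).
var fs = require('fs');

require('source-map-support').install({
  environment: 'node',              // skip the browser/node auto-detection
  handleUncaughtExceptions: false,  // keep your own uncaught-exception handler
  retrieveSourceMap: function(source) {
    if (source === 'bundle.js') {   // hypothetical compiled bundle name
      return {
        url: 'bundle.js',
        map: fs.readFileSync('bundle.js.map', 'utf8')
      };
    }
    return null;                    // fall back to the default filesystem lookup
  }
});
```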
diff --git a/node_modules/source-map-support/amd-test/browser-source-map-support.js b/node_modules/source-map-support/amd-test/browser-source-map-support.js new file mode 100644 index 0000000..06cb933 --- /dev/null +++ b/node_modules/source-map-support/amd-test/browser-source-map-support.js @@ -0,0 +1,89 @@ +/* + * Support for source maps in V8 stack traces + * https://github.com/evanw/node-source-map-support + */ +(this.define||function(K,O){this.sourceMapSupport=O()})("browser-source-map-support",function(K){(function g(r,m,e){function p(h,d){if(!m[h]){if(!r[h]){var l="function"==typeof require&&require;if(!d&&l)return l(h,!0);if(v)return v(h,!0);throw Error("Cannot find module '"+h+"'");}l=m[h]={exports:{}};r[h][0].call(l.exports,function(d){var c=r[h][1][d];return p(c?c:d)},l,l.exports,g,r,m,e)}return m[h].exports}for(var v="function"==typeof require&&require,A=0;A=a))return x?(x=f[n],n+1=c)){var b;a?(n+2>>0)): +(n+1>>0);return b}}function h(f,n,a,b){b||(q("boolean"===typeof a,"missing or invalid endian"),q(void 0!==n&&null!==n,"missing offset"),q(n+1=f.length))return f=v(f,n,a,!0),f&32768?-1*(65535-f+1):f}function d(f,n,a,b){b||(q("boolean"===typeof a,"missing or invalid endian"),q(void 0!==n&&null!==n,"missing offset"),q(n+3= +f.length))return f=A(f,n,a,!0),f&2147483648?-1*(4294967295-f+1):f}function l(f,n,a,b){b||(q("boolean"===typeof a,"missing or invalid endian"),q(n+3=d))for(c=0,d=Math.min(d-a,2);c>>8*(b?c:1-c)}function a(f,n,a,b,c){c||(q(void 0!==n&&null!==n,"missing value"),q("boolean"===typeof b,"missing or invalid endian"),q(void 0!==a&&null!==a,"missing offset"),q(a+3=d))for(c=0,d=Math.min(d-a,4);c>> +8*(b?c:3-c)&255}function b(f,n,a,b,d){d||(q(void 0!==n&&null!==n,"missing value"),q("boolean"===typeof b,"missing or invalid endian"),q(void 0!==a&&null!==a,"missing offset"),q(a+1=f.length||(0<=n?c(f,n,a,b,d):c(f,65535+n+1,a,b,d))}function z(f,n,b,c,d){d||(q(void 0!==n&&null!==n,"missing value"),q("boolean"===typeof c,"missing or invalid endian"),q(void 0!==b&&null!==b,"missing offset"),q(b+3=f.length||(0<=n?a(f,n,b,c,d):a(f,4294967295+n+1,b,c,d))}function D(f,n,a,b,c){c||(q(void 0!==n&&null!==n,"missing value"),q("boolean"===typeof b,"missing or invalid endian"),q(void 0!==a&&null!==a,"missing offset"),q(a+3=f.length||H.write(f,n,a,b,23,4)}function B(f,n,a,b,c){c||(q(void 0!==n&&null!==n,"missing value"),q("boolean"===typeof b,"missing or invalid endian"), +q(void 0!==a&&null!==a,"missing offset"),q(a+7=f.length||H.write(f,n,a,b,52,8)}function E(f,a,b){if("number"!==typeof f)return b;f=~~f;if(f>=a)return a;if(0<=f)return f;f+=a;return 0<=f?f:0}function w(f){f=~~Math.ceil(+f);return 0>f?0:f}function y(f){return(Array.isArray||function(f){return"[object Array]"===Object.prototype.toString.call(f)})(f)}function G(f){return 16>f?"0"+f.toString(16):f.toString(16)} +function t(f){for(var a=[],b=0;b=c)a.push(f.charCodeAt(b));else{var d=b;55296<=c&&57343>=c&&b++;c=encodeURIComponent(f.slice(d,b+1)).substr(1).split("%");for(d=0;d=a.length||d>=f.length);d++)a[d+b]=f[d];return d}function I(f){try{return decodeURIComponent(f)}catch(a){return String.fromCharCode(65533)}} +function L(f,a){q("number"===typeof f,"cannot write a non-number as a number");q(0<=f,"specified a negative value for writing an unsigned value");q(f<=a,"value is larger than maximum value for type");q(Math.floor(f)===f,"value has a fractional component")}function M(f,a,b){q("number"===typeof f,"cannot write a non-number as a number");q(f<=a,"value larger than maximum allowed value");q(f>=b,"value smaller than minimum 
allowed value");q(Math.floor(f)===f,"value has a fractional component")}function N(f, +a,b){q("number"===typeof f,"cannot write a non-number as a number");q(f<=a,"value larger than maximum allowed value");q(f>=b,"value smaller than minimum allowed value")}function q(f,a){if(!f)throw Error(a||"Failed assertion");}var J=g("base64-js"),H=g("ieee754");m.Buffer=e;m.SlowBuffer=e;m.INSPECT_MAX_BYTES=50;e.poolSize=8192;e._useTypedArrays=function(){try{var f=new ArrayBuffer(0),a=new Uint8Array(f);a.foo=function(){return 42};return 42===a.foo()&&"function"===typeof a.subarray}catch(b){return!1}}(); +e.isEncoding=function(f){switch(String(f).toLowerCase()){case "hex":case "utf8":case "utf-8":case "ascii":case "binary":case "base64":case "raw":case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":return!0;default:return!1}};e.isBuffer=function(f){return!(null===f||void 0===f||!f._isBuffer)};e.byteLength=function(f,a){var b;f+="";switch(a||"utf8"){case "hex":b=f.length/2;break;case "utf8":case "utf-8":b=t(f).length;break;case "ascii":case "binary":case "raw":b=f.length;break;case "base64":b=J.toByteArray(f).length; +break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":b=2*f.length;break;default:throw Error("Unknown encoding");}return b};e.concat=function(f,a){q(y(f),"Usage: Buffer.concat(list, [totalLength])\nlist should be an Array.");if(0===f.length)return new e(0);if(1===f.length)return f[0];var b;if("number"!==typeof a)for(b=a=0;bd&&(b=d)):b=d;c=String(c||"utf8").toLowerCase();switch(c){case "hex":a=Number(a)||0;c=this.length-a;b?(b=Number(b),b>c&&(b=c)):b=c;c=f.length;q(0===c%2,"Invalid hex string");b>c/2&&(b=c/2);for(c=0;c>8,l%=256,d.push(l),d.push(c);f=e._charsWritten=C(d,this,a,b);break;default:throw Error("Unknown encoding");}return f};e.prototype.toString=function(f,a,b){f=String(f||"utf8").toLowerCase();a=Number(a)||0;b=void 0!==b?Number(b):b=this.length;if(b===a)return""; +switch(f){case "hex":f=a;a=this.length;if(!f||0>f)f=0;if(!b||0>b||b>a)b=a;for(a="";f=this[c]?(f+=I(a)+String.fromCharCode(this[c]),a=""):a+="%"+this[c].toString(16);b=f+I(a);break;case "ascii":b=p(this,a,b);break;case "binary":b=p(this,a,b);break;case "base64":f=a;b=0===f&&b===this.length?J.fromByteArray(this):J.fromByteArray(this.slice(f,b));break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":b= +this.slice(a,b);f="";for(a=0;a=c,"sourceEnd < sourceStart"),q(0<=b&&bthis.length&&(d=this.length),a.length-bd||!e._useTypedArrays)for(var l=0;l=this.length))return this[a]};e.prototype.readUInt16LE=function(a,b){return v(this,a,!0,b)};e.prototype.readUInt16BE=function(a,b){return v(this,a,!1,b)};e.prototype.readUInt32LE=function(a,b){return A(this, +a,!0,b)};e.prototype.readUInt32BE=function(a,b){return A(this,a,!1,b)};e.prototype.readInt8=function(a,b){b||(q(void 0!==a&&null!==a,"missing offset"),q(a=this.length))return this[a]&128?-1*(255-this[a]+1):this[a]};e.prototype.readInt16LE=function(a,b){return h(this,a,!0,b)};e.prototype.readInt16BE=function(a,b){return h(this,a,!1,b)};e.prototype.readInt32LE=function(a,b){return d(this,a,!0,b)};e.prototype.readInt32BE=function(a,b){return d(this, +a,!1,b)};e.prototype.readFloatLE=function(a,b){return l(this,a,!0,b)};e.prototype.readFloatBE=function(a,b){return l(this,a,!1,b)};e.prototype.readDoubleLE=function(a,b){return k(this,a,!0,b)};e.prototype.readDoubleBE=function(a,b){return k(this,a,!1,b)};e.prototype.writeUInt8=function(a,b,c){c||(q(void 0!==a&&null!==a,"missing value"),q(void 0!==b&&null!==b,"missing 
offset"),q(b=this.length||(this[b]=a)};e.prototype.writeUInt16LE=function(a, +b,d){c(this,a,b,!0,d)};e.prototype.writeUInt16BE=function(a,b,d){c(this,a,b,!1,d)};e.prototype.writeUInt32LE=function(b,c,d){a(this,b,c,!0,d)};e.prototype.writeUInt32BE=function(b,c,d){a(this,b,c,!1,d)};e.prototype.writeInt8=function(a,b,c){c||(q(void 0!==a&&null!==a,"missing value"),q(void 0!==b&&null!==b,"missing offset"),q(b=this.length||(0<=a?this.writeUInt8(a,b,c):this.writeUInt8(255+a+1,b,c))};e.prototype.writeInt16LE=function(a, +c,d){b(this,a,c,!0,d)};e.prototype.writeInt16BE=function(a,c,d){b(this,a,c,!1,d)};e.prototype.writeInt32LE=function(a,b,c){z(this,a,b,!0,c)};e.prototype.writeInt32BE=function(a,b,c){z(this,a,b,!1,c)};e.prototype.writeFloatLE=function(a,b,c){D(this,a,b,!0,c)};e.prototype.writeFloatBE=function(a,b,c){D(this,a,b,!1,c)};e.prototype.writeDoubleLE=function(a,b,c){B(this,a,b,!0,c)};e.prototype.writeDoubleBE=function(a,b,c){B(this,a,b,!1,c)};e.prototype.fill=function(a,b,c){a||(a=0);b||(b=0);c||(c=this.length); +"string"===typeof a&&(a=a.charCodeAt(0));q("number"===typeof a&&!isNaN(a),"value is not a number");q(c>=b,"end < start");if(c!==b&&0!==this.length)for(q(0<=b&&b"};e.prototype.toArrayBuffer=function(){if("undefined"!==typeof Uint8Array){if(e._useTypedArrays)return(new e(this)).buffer; +for(var a=new Uint8Array(this.length),b=0,c=a.length;be)return-1;if(58>e)return e-48+52;if(91>e)return e-65;if(123>e)return e-97+26}var v="undefined"!== +typeof Uint8Array?Uint8Array:Array;e.toByteArray=function(e){function h(c){a[b++]=c}var d,l,k,c,a;if(0>16),h((k&65280)>>8),h(k&255);2===c?(k=p(e.charAt(d))<<2|p(e.charAt(d+1))>>4,h(k&255)):1===c&&(k= +p(e.charAt(d))<<10|p(e.charAt(d+1))<<4|p(e.charAt(d+2))>>2,h(k>>8&255),h(k&255));return a};e.fromByteArray=function(e){var h,d=e.length%3,l="",k,c;h=0;for(c=e.length-d;h>18&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>12&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>6&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k& +63),l+=k;switch(d){case 1:k=e[e.length-1];l+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>2);l+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k<<4&63);l+="==";break;case 2:k=(e[e.length-2]<<8)+e[e.length-1],l+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>10),l+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>4&63),l+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k<< +2&63),l+="="}return l}})("undefined"===typeof m?this.base64js={}:m)},{}],5:[function(g,r,m){m.read=function(e,p,v,g,h){var d;d=8*h-g-1;var l=(1<>1,c=-7;h=v?h-1:0;var a=v?-1:1,b=e[p+h];h+=a;v=b&(1<<-c)-1;b>>=-c;for(c+=d;0>=-c;for(c+=g;0>1,b= +23===h?Math.pow(2,-24)-Math.pow(2,-77):0;d=g?0:d-1;var z=g?1:-1,D=0>p||0===p&&0>1/p?1:0;p=Math.abs(p);isNaN(p)||Infinity===p?(p=isNaN(p)?1:0,g=c):(g=Math.floor(Math.log(p)/Math.LN2),1>p*(l=Math.pow(2,-g))&&(g--,l*=2),p=1<=g+a?p+b/l:p+b*Math.pow(2,1-a),2<=p*l&&(g++,l/=2),g+a>=c?(p=0,g=c):1<=g+a?(p=(p*l-1)*Math.pow(2,h),g+=a):(p=p*Math.pow(2,a-1)*Math.pow(2,h),g=0));for(;8<=h;e[v+d]=p&255,d+=z,p/=256,h-=8);g=g<c?[]:a.slice(b, +c-b+1)}d=m.resolve(d).substr(1);e=m.resolve(e).substr(1);for(var c=k(d.split("/")),a=k(e.split("/")),b=Math.min(c.length,a.length),h=b,g=0;ge&&(e=d.length+e);return 
d.substr(e,k)}}).call(this,g("node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))},{"node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":6}],8:[function(g,r,m){m.SourceMapGenerator=g("./source-map/source-map-generator").SourceMapGenerator; +m.SourceMapConsumer=g("./source-map/source-map-consumer").SourceMapConsumer;m.SourceNode=g("./source-map/source-node").SourceNode},{"./source-map/source-map-consumer":13,"./source-map/source-map-generator":14,"./source-map/source-node":15}],9:[function(g,r,m){if("function"!==typeof e)var e=g("amdefine")(r,g);e(function(e,g,m){function h(){this._array=[];this._set={}}var d=e("./util");h.fromArray=function(d,e){for(var c=new h,a=0,b=d.length;ad?(-d<<1)+1:(d<<1)+0;do d=k&31,k>>>=5,0=k)throw Error("Expected more digits in base 64 VLQ value.");g=h.decode(d.charAt(e++));b=!!(g&32);g&=31;c+=g<>1;return{value:1===(c&1)?-k:k,rest:d.slice(e)}}})}, +{"./base64":11,amdefine:17}],11:[function(g,r,m){if("function"!==typeof e)var e=g("amdefine")(r,g);e(function(e,g,m){var h={},d={};"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").forEach(function(e,k){h[e]=k;d[k]=e});g.encode=function(e){if(e in d)return d[e];throw new TypeError("Must be between 0 and 63: "+e);};g.decode=function(d){if(d in h)return h[d];throw new TypeError("Not a valid base 64 digit: "+d);}})},{amdefine:17}],12:[function(g,r,m){if("function"!==typeof e)var e= +g("amdefine")(r,g);e(function(e,g,m){function h(d,e,k,c,a){var b=Math.floor((e-d)/2)+d,g=a(k,c[b],!0);return 0===g?c[b]:0d?null:c[d]}g.search=function(d,e,k){return 0=a[c])throw new TypeError("Line must be greater than or equal to 1, got "+a[c]);if(0>a[d])throw new TypeError("Column must be greater than or equal to 0, got "+a[d]);return l.search(a,b,e)};h.prototype.originalPositionFor=function(a){a={generatedLine:d.getArg(a,"line"),generatedColumn:d.getArg(a,"column")};if(a=this._findMapping(a,this._generatedMappings,"generatedLine","generatedColumn", +d.compareByGeneratedPositions)){var b=d.getArg(a,"source",null);b&&this.sourceRoot&&(b=d.join(this.sourceRoot,b));return{source:b,line:d.getArg(a,"originalLine",null),column:d.getArg(a,"originalColumn",null),name:d.getArg(a,"name",null)}}return{source:null,line:null,column:null,name:null}};h.prototype.sourceContentFor=function(a){if(!this.sourcesContent)return null;this.sourceRoot&&(a=d.relative(this.sourceRoot,a));if(this._sources.has(a))return this.sourcesContent[this._sources.indexOf(a)];var b; +if(this.sourceRoot&&(b=d.urlParse(this.sourceRoot))){var c=a.replace(/^file:\/\//,"");if("file"==b.scheme&&this._sources.has(c))return this.sourcesContent[this._sources.indexOf(c)];if((!b.path||"/"==b.path)&&this._sources.has("/"+a))return this.sourcesContent[this._sources.indexOf("/"+a)]}throw Error('"'+a+'" is not in the SourceMap.');};h.prototype.generatedPositionFor=function(a){a={source:d.getArg(a,"source"),originalLine:d.getArg(a,"line"),originalColumn:d.getArg(a,"column")};this.sourceRoot&& +(a.source=d.relative(this.sourceRoot,a.source));return(a=this._findMapping(a,this._originalMappings,"originalLine","originalColumn",d.compareByOriginalPositions))?{line:d.getArg(a,"generatedLine",null),column:d.getArg(a,"generatedColumn",null)}:{line:null,column:null}};h.GENERATED_ORDER=1;h.ORIGINAL_ORDER=2;h.prototype.eachMapping=function(a,b,c){b=b||null;switch(c||h.GENERATED_ORDER){case h.GENERATED_ORDER:c=this._generatedMappings;break;case 
h.ORIGINAL_ORDER:c=this._originalMappings;break;default:throw Error("Unknown order of iteration."); +}var e=this.sourceRoot;c.map(function(a){var b=a.source;b&&e&&(b=d.join(e,b));return{source:b,generatedLine:a.generatedLine,generatedColumn:a.generatedColumn,originalLine:a.originalLine,originalColumn:a.originalColumn,name:a.name}}).forEach(a,b)};g.SourceMapConsumer=h})},{"./array-set":9,"./base64-vlq":10,"./binary-search":12,"./util":16,amdefine:17}],14:[function(g,r,m){if("function"!==typeof e)var e=g("amdefine")(r,g);e(function(e,g,m){function h(c){this._file=l.getArg(c,"file");this._sourceRoot= +l.getArg(c,"sourceRoot",null);this._sources=new k;this._names=new k;this._mappings=[];this._sourcesContents=null}var d=e("./base64-vlq"),l=e("./util"),k=e("./array-set").ArraySet;h.prototype._version=3;h.fromSourceMap=function(c){var a=c.sourceRoot,b=new h({file:c.file,sourceRoot:a});c.eachMapping(function(c){var d={generated:{line:c.generatedLine,column:c.generatedColumn}};c.source&&(d.source=c.source,a&&(d.source=l.relative(a,d.source)),d.original={line:c.originalLine,column:c.originalColumn},c.name&& +(d.name=c.name));b.addMapping(d)});c.sources.forEach(function(a){var d=c.sourceContentFor(a);d&&b.setSourceContent(a,d)});return b};h.prototype.addMapping=function(c){var a=l.getArg(c,"generated"),b=l.getArg(c,"original",null),d=l.getArg(c,"source",null);c=l.getArg(c,"name",null);this._validateMapping(a,b,d,c);d&&!this._sources.has(d)&&this._sources.add(d);c&&!this._names.has(c)&&this._names.add(c);this._mappings.push({generatedLine:a.line,generatedColumn:a.column,originalLine:null!=b&&b.line,originalColumn:null!= +b&&b.column,source:d,name:c})};h.prototype.setSourceContent=function(c,a){var b=c;this._sourceRoot&&(b=l.relative(this._sourceRoot,b));null!==a?(this._sourcesContents||(this._sourcesContents={}),this._sourcesContents[l.toSetString(b)]=a):(delete this._sourcesContents[l.toSetString(b)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))};h.prototype.applySourceMap=function(c,a){a||(a=c.file);var b=this._sourceRoot;b&&(a=l.relative(b,a));var d=new k,e=new k;this._mappings.forEach(function(k){if(k.source=== +a&&k.originalLine){var h=c.originalPositionFor({line:k.originalLine,column:k.originalColumn});null!==h.source&&(k.source=b?l.relative(b,h.source):h.source,k.originalLine=h.line,k.originalColumn=h.column,null!==h.name&&null!==k.name&&(k.name=h.name))}(h=k.source)&&!d.has(h)&&d.add(h);(k=k.name)&&!e.has(k)&&e.add(k)},this);this._sources=d;this._names=e;c.sources.forEach(function(a){var d=c.sourceContentFor(a);d&&(b&&(a=l.relative(b,a)),this.setSourceContent(a,d))},this)};h.prototype._validateMapping= +function(c,a,b,d){if(!(c&&"line"in c&&"column"in c&&0d)-(c",a=this.getLineNumber(),null!=a&&(b+=":"+a,(a=this.getColumnNumber())&& +(b+=":"+a)));a="";var c=this.getFunctionName(),d=!0,e=this.isConstructor();if(this.isToplevel()||e)e?a+="new "+(c||""):c?a+=c:(a+=b,d=!1);else{var e=this.getTypeName(),g=this.getMethodName();c?(e&&0!=c.indexOf(e)&&(a+=e+"."),a+=c,g&&c.indexOf("."+g)!=c.length-g.length-1&&(a+=" [as "+g+"]")):a+=e+"."+(g||"")}d&&(a+=" ("+b+")");return a}function a(a){var b={};Object.getOwnPropertyNames(Object.getPrototypeOf(a)).forEach(function(c){b[c]=/^(?:is|get)/.test(c)?function(){return a[c].call(a)}: +a[c]});b.toString=c;return b}function b(b){var c=b.getFileName()||b.getScriptNameOrSourceURL();if(c){var d=b.getLineNumber(),e=b.getColumnNumber()-1;1!==d||r()||b.isEval()||(e-=62);var 
g=l({source:c,line:d,column:e});b=a(b);b.getFileName=function(){return g.source};b.getLineNumber=function(){return g.line};b.getColumnNumber=function(){return g.column+1};b.getScriptNameOrSourceURL=function(){return g.source};return b}var h=b.isEval()&&b.getEvalOrigin();h&&(h=k(h),b=a(b),b.getEvalOrigin=function(){return h}); +return b}function z(a,c){t&&(F={},C={});return a+c.map(function(a){return"\n at "+b(a)}).join("")}function D(a){var b=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(a.stack);if(b){a=b[1];var c=+b[2],b=+b[3],d=F[a];!d&&y.existsSync(a)&&(d=y.readFileSync(a,"utf8"));if(d&&(d=d.split(/(?:\r\n|\r|\n)/)[c-1]))return a+":"+c+"\n"+d+"\n"+Array(b).join(" ")+"^"}return null}function B(){var a=e.emit;e.emit=function(b){if("uncaughtException"===b){var c=arguments[1]&&arguments[1].stack,d=0 +Make sure to run build.js. +This test should say either "Test failed" or "Test passed": +

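The stray text just above ("Make sure to run build.js. This test should say either ...") is what survives of amd-test/index.html after its markup was lost; the page itself is not reproduced in this dump. As a hedged sketch only, the RequireJS wiring that would let the test's 'browser-source-map-support' dependency resolve probably looks roughly like the following; the baseUrl and paths values are assumptions, not taken from this diff:

// Sketch of the AMD test harness configuration (assumed, not from this diff).
// script.js (added below) does define(['browser-source-map-support'], ...),
// so RequireJS needs a path mapping to the browserified bundle one level up.
requirejs.config({
  baseUrl: '.',
  paths: {
    'browser-source-map-support': '../browser-source-map-support'
  }
});

// Kick off the compiled test module; it installs source map support and then
// checks that a thrown stack trace points back at script.coffee.
requirejs(['script']);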
+ + diff --git a/node_modules/source-map-support/amd-test/require.js b/node_modules/source-map-support/amd-test/require.js new file mode 100644 index 0000000..ee9999f --- /dev/null +++ b/node_modules/source-map-support/amd-test/require.js @@ -0,0 +1,36 @@ +/* + RequireJS 2.1.9 Copyright (c) 2010-2012, The Dojo Foundation All Rights Reserved. + Available via the MIT or new BSD license. + see: http://github.com/jrburke/requirejs for details +*/ +var requirejs,require,define; +(function(Z){function H(b){return"[object Function]"===L.call(b)}function I(b){return"[object Array]"===L.call(b)}function y(b,c){if(b){var e;for(e=0;ethis.depCount&&!this.defined){if(H(m)){if(this.events.error&&this.map.isDefine||j.onError!==aa)try{d=i.execCb(c,m,b,d)}catch(e){a=e}else d=i.execCb(c,m,b,d);this.map.isDefine&&((b=this.module)&&void 0!==b.exports&&b.exports!== +this.exports?d=b.exports:void 0===d&&this.usingExports&&(d=this.exports));if(a)return a.requireMap=this.map,a.requireModules=this.map.isDefine?[this.map.id]:null,a.requireType=this.map.isDefine?"define":"require",v(this.error=a)}else d=m;this.exports=d;if(this.map.isDefine&&!this.ignore&&(r[c]=d,j.onResourceLoad))j.onResourceLoad(i,this.map,this.depMaps);x(c);this.defined=!0}this.defining=!1;this.defined&&!this.defineEmitted&&(this.defineEmitted=!0,this.emit("defined",this.exports),this.defineEmitComplete= +!0)}}else this.fetch()}},callPlugin:function(){var a=this.map,b=a.id,e=n(a.prefix);this.depMaps.push(e);s(e,"defined",u(this,function(d){var m,e;e=this.map.name;var g=this.map.parentMap?this.map.parentMap.name:null,h=i.makeRequire(a.parentMap,{enableBuildCallback:!0});if(this.map.unnormalized){if(d.normalize&&(e=d.normalize(e,function(a){return c(a,g,!0)})||""),d=n(a.prefix+"!"+e,this.map.parentMap),s(d,"defined",u(this,function(a){this.init([],function(){return a},null,{enabled:!0,ignore:!0})})), +e=l(p,d.id)){this.depMaps.push(d);if(this.events.error)e.on("error",u(this,function(a){this.emit("error",a)}));e.enable()}}else m=u(this,function(a){this.init([],function(){return a},null,{enabled:!0})}),m.error=u(this,function(a){this.inited=!0;this.error=a;a.requireModules=[b];F(p,function(a){0===a.map.id.indexOf(b+"_unnormalized")&&x(a.map.id)});v(a)}),m.fromText=u(this,function(d,c){var e=a.name,g=n(e),B=O;c&&(d=c);B&&(O=!1);q(g);t(k.config,b)&&(k.config[e]=k.config[b]);try{j.exec(d)}catch(ca){return v(A("fromtexteval", +"fromText eval for "+b+" failed: "+ca,ca,[b]))}B&&(O=!0);this.depMaps.push(g);i.completeLoad(e);h([e],m)}),d.load(a.name,h,m,k)}));i.enable(e,this);this.pluginMaps[e.id]=e},enable:function(){T[this.map.id]=this;this.enabling=this.enabled=!0;y(this.depMaps,u(this,function(a,b){var c,d;if("string"===typeof a){a=n(a,this.map.isDefine?this.map:this.map.parentMap,!1,!this.skipMap);this.depMaps[b]=a;if(c=l(N,a.id)){this.depExports[b]=c(this);return}this.depCount+=1;s(a,"defined",u(this,function(a){this.defineDep(b, +a);this.check()}));this.errback&&s(a,"error",u(this,this.errback))}c=a.id;d=p[c];!t(N,c)&&(d&&!d.enabled)&&i.enable(a,this)}));F(this.pluginMaps,u(this,function(a){var b=l(p,a.id);b&&!b.enabled&&i.enable(a,this)}));this.enabling=!1;this.check()},on:function(a,b){var c=this.events[a];c||(c=this.events[a]=[]);c.push(b)},emit:function(a,b){y(this.events[a],function(a){a(b)});"error"===a&&delete this.events[a]}};i={config:k,contextName:b,registry:p,defined:r,urlFetched:S,defQueue:G,Module:X,makeModuleMap:n, 
+nextTick:j.nextTick,onError:v,configure:function(a){a.baseUrl&&"/"!==a.baseUrl.charAt(a.baseUrl.length-1)&&(a.baseUrl+="/");var b=k.pkgs,c=k.shim,d={paths:!0,config:!0,map:!0};F(a,function(a,b){d[b]?"map"===b?(k.map||(k.map={}),Q(k[b],a,!0,!0)):Q(k[b],a,!0):k[b]=a});a.shim&&(F(a.shim,function(a,b){I(a)&&(a={deps:a});if((a.exports||a.init)&&!a.exportsFn)a.exportsFn=i.makeShimExports(a);c[b]=a}),k.shim=c);a.packages&&(y(a.packages,function(a){a="string"===typeof a?{name:a}:a;b[a.name]={name:a.name, +location:a.location||a.name,main:(a.main||"main").replace(ja,"").replace(ea,"")}}),k.pkgs=b);F(p,function(a,b){!a.inited&&!a.map.unnormalized&&(a.map=n(b))});if(a.deps||a.callback)i.require(a.deps||[],a.callback)},makeShimExports:function(a){return function(){var b;a.init&&(b=a.init.apply(Z,arguments));return b||a.exports&&ba(a.exports)}},makeRequire:function(a,f){function h(d,c,e){var g,k;f.enableBuildCallback&&(c&&H(c))&&(c.__requireJsBuild=!0);if("string"===typeof d){if(H(c))return v(A("requireargs", +"Invalid require call"),e);if(a&&t(N,d))return N[d](p[a.id]);if(j.get)return j.get(i,d,a,h);g=n(d,a,!1,!0);g=g.id;return!t(r,g)?v(A("notloaded",'Module name "'+g+'" has not been loaded yet for context: '+b+(a?"":". Use require([])"))):r[g]}K();i.nextTick(function(){K();k=q(n(null,a));k.skipMap=f.skipMap;k.init(d,c,e,{enabled:!0});C()});return h}f=f||{};Q(h,{isBrowser:z,toUrl:function(b){var f,e=b.lastIndexOf("."),g=b.split("/")[0];if(-1!==e&&(!("."===g||".."===g)||1h.attachEvent.toString().indexOf("[native code"))&&!W?(O=!0,h.attachEvent("onreadystatechange",b.onScriptLoad)):(h.addEventListener("load",b.onScriptLoad,!1),h.addEventListener("error", +b.onScriptError,!1)),h.src=e,K=h,C?x.insertBefore(h,C):x.appendChild(h),K=null,h;if(da)try{importScripts(e),b.completeLoad(c)}catch(l){b.onError(A("importscripts","importScripts failed for "+c+" at "+e,l,[c]))}};z&&!s.skipDataMain&&M(document.getElementsByTagName("script"),function(b){x||(x=b.parentNode);if(J=b.getAttribute("data-main"))return q=J,s.baseUrl||(D=q.split("/"),q=D.pop(),fa=D.length?D.join("/")+"/":"./",s.baseUrl=fa),q=q.replace(ea,""),j.jsExtRegExp.test(q)&&(q=J),s.deps=s.deps?s.deps.concat(q): +[q],!0});define=function(b,c,e){var h,j;"string"!==typeof b&&(e=c,c=b,b=null);I(c)||(e=c,c=null);!c&&H(e)&&(c=[],e.length&&(e.toString().replace(la,"").replace(ma,function(b,e){c.push(e)}),c=(1===e.length?["require"]:["require","exports","module"]).concat(c)));if(O){if(!(h=K))P&&"interactive"===P.readyState||M(document.getElementsByTagName("script"),function(b){if("interactive"===b.readyState)return P=b}),h=P;h&&(b||(b=h.getAttribute("data-requiremodule")),j=E[h.getAttribute("data-requirecontext")])}(j? 
+j.defQueue:R).push([b,c,e])};define.amd={jQuery:!0};j.exec=function(b){return eval(b)};j(s)}})(this); diff --git a/node_modules/source-map-support/amd-test/script.coffee b/node_modules/source-map-support/amd-test/script.coffee new file mode 100644 index 0000000..fe28f65 --- /dev/null +++ b/node_modules/source-map-support/amd-test/script.coffee @@ -0,0 +1,13 @@ +define ['browser-source-map-support'], (sourceMapSupport) -> + sourceMapSupport.install() + + foo = -> throw new Error 'foo' + + try + foo() + catch e + if /\bscript\.coffee\b/.test e.stack + document.body.appendChild document.createTextNode 'Test passed' + else + document.body.appendChild document.createTextNode 'Test failed' + console.log e.stack diff --git a/node_modules/source-map-support/amd-test/script.js b/node_modules/source-map-support/amd-test/script.js new file mode 100644 index 0000000..7cfd359 --- /dev/null +++ b/node_modules/source-map-support/amd-test/script.js @@ -0,0 +1,24 @@ +// Generated by CoffeeScript 1.7.1 +(function() { + define(['browser-source-map-support'], function(sourceMapSupport) { + var e, foo; + sourceMapSupport.install(); + foo = function() { + throw new Error('foo'); + }; + try { + return foo(); + } catch (_error) { + e = _error; + if (/\bscript\.coffee\b/.test(e.stack)) { + return document.body.appendChild(document.createTextNode('Test passed')); + } else { + document.body.appendChild(document.createTextNode('Test failed')); + return console.log(e.stack); + } + } + }); + +}).call(this); + +//# sourceMappingURL=script.map diff --git a/node_modules/source-map-support/amd-test/script.map b/node_modules/source-map-support/amd-test/script.map new file mode 100644 index 0000000..27b4257 --- /dev/null +++ b/node_modules/source-map-support/amd-test/script.map @@ -0,0 +1,10 @@ +{ + "version": 3, + "file": "script.js", + "sourceRoot": "..", + "sources": [ + "amd-test/script.coffee" + ], + "names": [], + "mappings": ";AAAA;AAAA,EAAA,MAAA,CAAO,CAAC,4BAAD,CAAP,EAAuC,SAAC,gBAAD,GAAA;AACrC,QAAA,MAAA;AAAA,IAAA,gBAAgB,CAAC,OAAjB,CAAA,CAAA,CAAA;AAAA,IAEA,GAAA,GAAM,SAAA,GAAA;AAAG,YAAU,IAAA,KAAA,CAAM,KAAN,CAAV,CAAH;IAAA,CAFN,CAAA;AAIA;aACE,GAAA,CAAA,EADF;KAAA,cAAA;AAGE,MADI,UACJ,CAAA;AAAA,MAAA,IAAG,oBAAoB,CAAC,IAArB,CAA0B,CAAC,CAAC,KAA5B,CAAH;eACE,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,EADF;OAAA,MAAA;AAGE,QAAA,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,CAAA,CAAA;eACA,OAAO,CAAC,GAAR,CAAY,CAAC,CAAC,KAAd,EAJF;OAHF;KALqC;EAAA,CAAvC,CAAA,CAAA;AAAA" +} \ No newline at end of file diff --git a/node_modules/source-map-support/browser-source-map-support.js b/node_modules/source-map-support/browser-source-map-support.js new file mode 100644 index 0000000..acb55fa --- /dev/null +++ b/node_modules/source-map-support/browser-source-map-support.js @@ -0,0 +1,89 @@ +/* + * Support for source maps in V8 stack traces + * https://github.com/evanw/node-source-map-support + */ +(this.define||function(K,N){this.sourceMapSupport=N()})("browser-source-map-support",function(K){(function n(w,t,e){function r(g,b){if(!t[g]){if(!w[g]){var f="function"==typeof require&&require;if(!b&&f)return f(g,!0);if(l)return l(g,!0);throw Error("Cannot find module '"+g+"'");}f=t[g]={exports:{}};w[g][0].call(f.exports,function(b){var a=w[g][1][b];return r(a?a:b)},f,f.exports,n,w,t,e)}return t[g].exports}for(var l="function"==typeof require&&require,m=0;me)return-1;if(58>e)return e-48+52;if(91>e)return e-65;if(123>e)return e-97+26}var l="undefined"!==typeof 
Uint8Array?Uint8Array:Array;e.toByteArray=function(e){function g(a){c[d++]=a}var b,f,k,a,c;if(0>18&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>12&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>6&63)+"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k&63),f+=k;switch(b){case 1:k=e[e.length-1];f+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>2);f+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k<< +4&63);f+="==";break;case 2:k=(e[e.length-2]<<8)+e[e.length-1],f+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>10),f+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k>>4&63),f+="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".charAt(k<<2&63),f+="="}return f}})("undefined"===typeof t?this.base64js={}:t)},{}],4:[function(n,w,t){},{}],5:[function(n,w,t){function e(h,q,c){if(!(this instanceof e))return new e(h,q,c);var a=typeof h; +if("base64"===q&&"string"===a)for(h=h.trim?h.trim():h.replace(/^\s+|\s+$/g,"");0!==h.length%4;)h+="=";var d;if("number"===a)d=F(h);else if("string"===a)d=e.byteLength(h,q);else if("object"===a)d=F(h.length);else throw Error("First argument needs to be a number, array or string.");var b;e._useTypedArrays?b=e._augment(new Uint8Array(d)):(b=this,b.length=d,b._isBuffer=!0);if(e._useTypedArrays&&"number"===typeof h.byteLength)b._set(h);else{var f=h;if(E(f)||e.isBuffer(f)||f&&"object"===typeof f&&"number"=== +typeof f.length)for(q=0;q=a))return c?(c=h[q],q+ +1=a)){var d;c?(q+2>>0)):(q+1>>0);return d}}function g(h,q,a,c){c||(p("boolean"===typeof a,"missing or invalid endian"), +p(void 0!==q&&null!==q,"missing offset"),p(q+1=h.length))return h=l(h,q,a,!0),h&32768?-1*(65535-h+1):h}function b(h,q,a,c){c||(p("boolean"===typeof a,"missing or invalid endian"),p(void 0!==q&&null!==q,"missing offset"),p(q+3=h.length))return h=m(h,q,a,!0),h&2147483648?-1*(4294967295-h+1):h}function f(h,q,a,c){c||(p("boolean"===typeof a,"missing or invalid endian"),p(q+3=f))for(b=0,f=Math.min(f-c,2);b>>8* +(d?b:1-b)}function c(h,a,c,d,b){b||(p(void 0!==a&&null!==a,"missing value"),p("boolean"===typeof d,"missing or invalid endian"),p(void 0!==c&&null!==c,"missing offset"),p(c+3=f))for(b=0,f=Math.min(f-c,4);b>>8*(d?b:3-b)&255}function d(h,c,d,b,f){f||(p(void 0!==c&&null!==c,"missing value"),p("boolean"===typeof b,"missing or invalid endian"),p(void 0!==d&&null!==d,"missing offset"),p(d+1=h.length||(0<=c?a(h,c,d,b,f):a(h,65535+c+1,d,b,f))}function y(h,a,d,b,f){f||(p(void 0!==a&&null!==a,"missing value"),p("boolean"===typeof b,"missing or invalid endian"),p(void 0!==d&&null!==d,"missing offset"),p(d+3=h.length||(0<=a?c(h,a,d,b,f):c(h,4294967295+a+1,d,b,f))}function x(h,a,c,d,b){b||(p(void 0!==a&&null!==a,"missing value"),p("boolean"===typeof d, +"missing or invalid endian"),p(void 0!==c&&null!==c,"missing offset"),p(c+3=h.length||I.write(h,a,c,d,23,4)}function A(h,c,a,d,b){b||(p(void 0!==c&&null!==c,"missing value"),p("boolean"===typeof d,"missing or invalid endian"),p(void 0!==a&&null!==a,"missing offset"),p(a+7=h.length||I.write(h, +c,a,d,52,8)}function B(h,a,c){if("number"!==typeof h)return c;h=~~h;if(h>=a)return a;if(0<=h)return h;h+=a;return 0<=h?h:0}function F(h){h=~~Math.ceil(+h);return 0>h?0:h}function E(h){return(Array.isArray||function(h){return"[object Array]"===Object.prototype.toString.call(h)})(h)}function G(h){return 16>h?"0"+h.toString(16):h.toString(16)}function 
u(h){for(var a=[],c=0;c=d)a.push(h.charCodeAt(c));else{var b=c;55296<=d&&57343>=d&&c++;d=encodeURIComponent(h.slice(b, +c+1)).substr(1).split("%");for(b=0;b=c.length||b>=h.length);b++)c[b+a]=h[b];return b}function D(h){try{return decodeURIComponent(h)}catch(c){return String.fromCharCode(65533)}}function H(h,c){p("number"===typeof h,"cannot write a non-number as a number");p(0<=h,"specified a negative value for writing an unsigned value");p(h<= +c,"value is larger than maximum value for type");p(Math.floor(h)===h,"value has a fractional component")}function L(h,c,a){p("number"===typeof h,"cannot write a non-number as a number");p(h<=c,"value larger than maximum allowed value");p(h>=a,"value smaller than minimum allowed value");p(Math.floor(h)===h,"value has a fractional component")}function M(h,c,a){p("number"===typeof h,"cannot write a non-number as a number");p(h<=c,"value larger than maximum allowed value");p(h>=a,"value smaller than minimum allowed value")} +function p(h,c){if(!h)throw Error(c||"Failed assertion");}var J=n("base64-js"),I=n("ieee754");t.Buffer=e;t.SlowBuffer=e;t.INSPECT_MAX_BYTES=50;e.poolSize=8192;e._useTypedArrays=function(){try{var h=new ArrayBuffer(0),c=new Uint8Array(h);c.foo=function(){return 42};return 42===c.foo()&&"function"===typeof c.subarray}catch(a){return!1}}();e.isEncoding=function(h){switch(String(h).toLowerCase()){case "hex":case "utf8":case "utf-8":case "ascii":case "binary":case "base64":case "raw":case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":return!0; +default:return!1}};e.isBuffer=function(h){return!(null===h||void 0===h||!h._isBuffer)};e.byteLength=function(h,c){var a;h+="";switch(c||"utf8"){case "hex":a=h.length/2;break;case "utf8":case "utf-8":a=u(h).length;break;case "ascii":case "binary":case "raw":a=h.length;break;case "base64":a=J.toByteArray(h).length;break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":a=2*h.length;break;default:throw Error("Unknown encoding");}return a};e.concat=function(h,c){p(E(h),"Usage: Buffer.concat(list, [totalLength])\nlist should be an Array."); +if(0===h.length)return new e(0);if(1===h.length)return h[0];var a;if("number"!==typeof c)for(a=c=0;ab&&(a=b)):a=b;d=String(d||"utf8").toLowerCase();switch(d){case "hex":c=Number(c)||0;d=this.length-c;a?(a=Number(a),a>d&&(a=d)):a=d;d= +h.length;p(0===d%2,"Invalid hex string");a>d/2&&(a=d/2);for(d=0;d>8,f%=256,b.push(f),b.push(d);h=e._charsWritten=z(b,this,c,a);break;default:throw Error("Unknown encoding");}return h};e.prototype.toString=function(a,c,d){a=String(a||"utf8").toLowerCase();c=Number(c)||0;d=void 0!==d?Number(d):d=this.length;if(d===c)return"";switch(a){case "hex":a=this.length;if(!c||0>c)c=0;if(!d||0>d||d>a)d=a;for(a="";c=this[c]?(a+=D(b)+String.fromCharCode(this[c]), +b=""):b+="%"+this[c].toString(16);d=a+D(b);break;case "ascii":d=r(this,c,d);break;case "binary":d=r(this,c,d);break;case "base64":d=0===c&&d===this.length?J.fromByteArray(this):J.fromByteArray(this.slice(c,d));break;case "ucs2":case "ucs-2":case "utf16le":case "utf-16le":d=this.slice(c,d);c="";for(a=0;a=d,"sourceEnd < sourceStart"),p(0<=c&&cthis.length&&(b=this.length),a.length-cb||!e._useTypedArrays)for(var f=0;f=this.length))return this[a]};e.prototype.readUInt16LE=function(a,c){return l(this,a,!0,c)};e.prototype.readUInt16BE=function(a,c){return l(this,a,!1,c)};e.prototype.readUInt32LE=function(a,c){return m(this,a,!0,c)};e.prototype.readUInt32BE=function(a,c){return m(this,a,!1,c)};e.prototype.readInt8=function(a,c){c||(p(void 0!==a&&null!==a,"missing offset"),p(a= 
+this.length))return this[a]&128?-1*(255-this[a]+1):this[a]};e.prototype.readInt16LE=function(a,c){return g(this,a,!0,c)};e.prototype.readInt16BE=function(a,c){return g(this,a,!1,c)};e.prototype.readInt32LE=function(a,c){return b(this,a,!0,c)};e.prototype.readInt32BE=function(a,c){return b(this,a,!1,c)};e.prototype.readFloatLE=function(a,c){return f(this,a,!0,c)};e.prototype.readFloatBE=function(a,c){return f(this,a,!1,c)};e.prototype.readDoubleLE=function(a,c){return k(this,a,!0,c)};e.prototype.readDoubleBE= +function(a,c){return k(this,a,!1,c)};e.prototype.writeUInt8=function(a,c,d){d||(p(void 0!==a&&null!==a,"missing value"),p(void 0!==c&&null!==c,"missing offset"),p(c=this.length||(this[c]=a)};e.prototype.writeUInt16LE=function(c,d,b){a(this,c,d,!0,b)};e.prototype.writeUInt16BE=function(c,d,b){a(this,c,d,!1,b)};e.prototype.writeUInt32LE=function(a,d,b){c(this,a,d,!0,b)};e.prototype.writeUInt32BE=function(a,d,b){c(this,a,d,!1,b)};e.prototype.writeInt8= +function(a,c,d){d||(p(void 0!==a&&null!==a,"missing value"),p(void 0!==c&&null!==c,"missing offset"),p(c=this.length||(0<=a?this.writeUInt8(a,c,d):this.writeUInt8(255+a+1,c,d))};e.prototype.writeInt16LE=function(a,c,b){d(this,a,c,!0,b)};e.prototype.writeInt16BE=function(a,c,b){d(this,a,c,!1,b)};e.prototype.writeInt32LE=function(a,c,d){y(this,a,c,!0,d)};e.prototype.writeInt32BE=function(a,c,d){y(this,a,c,!1,d)};e.prototype.writeFloatLE= +function(a,c,d){x(this,a,c,!0,d)};e.prototype.writeFloatBE=function(a,c,d){x(this,a,c,!1,d)};e.prototype.writeDoubleLE=function(a,c,d){A(this,a,c,!0,d)};e.prototype.writeDoubleBE=function(a,c,d){A(this,a,c,!1,d)};e.prototype.fill=function(a,c,d){a||(a=0);c||(c=0);d||(d=this.length);"string"===typeof a&&(a=a.charCodeAt(0));p("number"===typeof a&&!isNaN(a),"value is not a number");p(d>=c,"end < start");if(d!==c&&0!==this.length)for(p(0<=c&&c"};e.prototype.toArrayBuffer=function(){if("undefined"!==typeof Uint8Array){if(e._useTypedArrays)return(new e(this)).buffer;for(var a=new Uint8Array(this.length),c=0,d=a.length;c>1,a=-7;g=l?g-1:0;var c=l?-1:1,d=e[r+g];g+=c;l=d&(1<<-a)-1;d>>=-a;for(a+=b;0>=-a;for(a+=m;0>1,d=23===g?Math.pow(2,-24)-Math.pow(2,-77):0;b=m?0:b-1;var y=m?1:-1,x=0>r||0===r&&0>1/r?1:0;r=Math.abs(r);isNaN(r)||Infinity===r?(r=isNaN(r)?1:0,m=a):(m=Math.floor(Math.log(r)/Math.LN2),1>r*(f=Math.pow(2,-m))&&(m--,f*=2),r=1<=m+c?r+d/f:r+d*Math.pow(2,1-c),2<=r*f&&(m++,f/=2),m+c>=a?(r=0,m=a):1<=m+c?(r=(r*f-1)*Math.pow(2,g),m+=c):(r=r*Math.pow(2,c-1)*Math.pow(2,g),m=0));for(;8<=g;e[l+b]=r&255,b+=y,r/=256,g-=8);m=m<d?[]:a.slice(c,d-c+1)}b=t.resolve(b).substr(1);f=t.resolve(f).substr(1);for(var a=e(b.split("/")),c=e(f.split("/")),d=Math.min(a.length,c.length),g=d,x=0;xf&&(f=b.length+f);return b.substr(f,e)}}).call(this,n("node_modules/process/browser.js"))}, +{"node_modules/process/browser.js":8}],8:[function(n,w,t){function e(){}n=w.exports={};n.nextTick=function(){if("undefined"!==typeof window&&window.setImmediate)return function(e){return window.setImmediate(e)};if("undefined"!==typeof window&&window.postMessage&&window.addEventListener){var e=[];window.addEventListener("message",function(l){var m=l.source;m!==window&&null!==m||"process-tick"!==l.data||(l.stopPropagation(),0b?(-b<<1)+1:(b<<1)+0;do b=k&31,k>>>=5,0=k)throw Error("Expected more digits in base 64 VLQ value.");y=g.decode(b.charAt(e++));d=!!(y&32);y&=31;a+=y<>1;return{value:1===(a&1)?-k:k,rest:b.slice(e)}}})},{"./base64":12,amdefine:2}],12:[function(n,w,t){if("function"!==typeof e)var e=n("amdefine")(w,n);e(function(e,l,m){var 
g={},b={};"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("").forEach(function(e, +k){g[e]=k;b[k]=e});l.encode=function(e){if(e in b)return b[e];throw new TypeError("Must be between 0 and 63: "+e);};l.decode=function(b){if(b in g)return g[b];throw new TypeError("Not a valid base 64 digit: "+b);}})},{amdefine:2}],13:[function(n,w,t){if("function"!==typeof e)var e=n("amdefine")(w,n);e(function(e,l,m){function g(b,e,k,a,c){var d=Math.floor((e-b)/2)+b,y=c(k,a[d],!0);return 0===y?a[d]:0b?null:a[b]}l.search=function(b,e,k){return 0=a[b])throw new TypeError("Line must be greater than or equal to 1, got "+a[b]);if(0>a[e])throw new TypeError("Column must be greater than or equal to 0, got "+ +a[e]);return f.search(a,d,g)};g.prototype.originalPositionFor=function(a){a={generatedLine:b.getArg(a,"line"),generatedColumn:b.getArg(a,"column")};if(a=this._findMapping(a,this._generatedMappings,"generatedLine","generatedColumn",b.compareByGeneratedPositions)){var d=b.getArg(a,"source",null);d&&this.sourceRoot&&(d=b.join(this.sourceRoot,d));return{source:d,line:b.getArg(a,"originalLine",null),column:b.getArg(a,"originalColumn",null),name:b.getArg(a,"name",null)}}return{source:null,line:null,column:null, +name:null}};g.prototype.sourceContentFor=function(a){if(!this.sourcesContent)return null;this.sourceRoot&&(a=b.relative(this.sourceRoot,a));if(this._sources.has(a))return this.sourcesContent[this._sources.indexOf(a)];var d;if(this.sourceRoot&&(d=b.urlParse(this.sourceRoot))){var e=a.replace(/^file:\/\//,"");if("file"==d.scheme&&this._sources.has(e))return this.sourcesContent[this._sources.indexOf(e)];if((!d.path||"/"==d.path)&&this._sources.has("/"+a))return this.sourcesContent[this._sources.indexOf("/"+ +a)]}throw Error('"'+a+'" is not in the SourceMap.');};g.prototype.generatedPositionFor=function(a){a={source:b.getArg(a,"source"),originalLine:b.getArg(a,"line"),originalColumn:b.getArg(a,"column")};this.sourceRoot&&(a.source=b.relative(this.sourceRoot,a.source));return(a=this._findMapping(a,this._originalMappings,"originalLine","originalColumn",b.compareByOriginalPositions))?{line:b.getArg(a,"generatedLine",null),column:b.getArg(a,"generatedColumn",null)}:{line:null,column:null}};g.GENERATED_ORDER= +1;g.ORIGINAL_ORDER=2;g.prototype.eachMapping=function(a,d,e){d=d||null;switch(e||g.GENERATED_ORDER){case g.GENERATED_ORDER:e=this._generatedMappings;break;case g.ORIGINAL_ORDER:e=this._originalMappings;break;default:throw Error("Unknown order of iteration.");}var f=this.sourceRoot;e.map(function(a){var c=a.source;c&&f&&(c=b.join(f,c));return{source:c,generatedLine:a.generatedLine,generatedColumn:a.generatedColumn,originalLine:a.originalLine,originalColumn:a.originalColumn,name:a.name}}).forEach(a, +d)};l.SourceMapConsumer=g})},{"./array-set":10,"./base64-vlq":11,"./binary-search":13,"./util":17,amdefine:2}],15:[function(n,w,t){if("function"!==typeof e)var e=n("amdefine")(w,n);e(function(e,l,m){function g(a){this._file=f.getArg(a,"file");this._sourceRoot=f.getArg(a,"sourceRoot",null);this._sources=new k;this._names=new k;this._mappings=[];this._sourcesContents=null}var b=e("./base64-vlq"),f=e("./util"),k=e("./array-set").ArraySet;g.prototype._version=3;g.fromSourceMap=function(a){var c=a.sourceRoot, +d=new g({file:a.file,sourceRoot:c});a.eachMapping(function(a){var 
b={generated:{line:a.generatedLine,column:a.generatedColumn}};a.source&&(b.source=a.source,c&&(b.source=f.relative(c,b.source)),b.original={line:a.originalLine,column:a.originalColumn},a.name&&(b.name=a.name));d.addMapping(b)});a.sources.forEach(function(c){var b=a.sourceContentFor(c);b&&d.setSourceContent(c,b)});return d};g.prototype.addMapping=function(a){var c=f.getArg(a,"generated"),d=f.getArg(a,"original",null),b=f.getArg(a,"source", +null);a=f.getArg(a,"name",null);this._validateMapping(c,d,b,a);b&&!this._sources.has(b)&&this._sources.add(b);a&&!this._names.has(a)&&this._names.add(a);this._mappings.push({generatedLine:c.line,generatedColumn:c.column,originalLine:null!=d&&d.line,originalColumn:null!=d&&d.column,source:b,name:a})};g.prototype.setSourceContent=function(a,c){var d=a;this._sourceRoot&&(d=f.relative(this._sourceRoot,d));null!==c?(this._sourcesContents||(this._sourcesContents={}),this._sourcesContents[f.toSetString(d)]= +c):(delete this._sourcesContents[f.toSetString(d)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))};g.prototype.applySourceMap=function(a,c){c||(c=a.file);var d=this._sourceRoot;d&&(c=f.relative(d,c));var b=new k,e=new k;this._mappings.forEach(function(g){if(g.source===c&&g.originalLine){var k=a.originalPositionFor({line:g.originalLine,column:g.originalColumn});null!==k.source&&(g.source=d?f.relative(d,k.source):k.source,g.originalLine=k.line,g.originalColumn=k.column, +null!==k.name&&null!==g.name&&(g.name=k.name))}(k=g.source)&&!b.has(k)&&b.add(k);(g=g.name)&&!e.has(g)&&e.add(g)},this);this._sources=b;this._names=e;a.sources.forEach(function(c){var b=a.sourceContentFor(c);b&&(d&&(c=f.relative(d,c)),this.setSourceContent(c,b))},this)};g.prototype._validateMapping=function(a,c,d,b){if(!(a&&"line"in a&&"column"in a&&0f)-(e",a=this.getLineNumber(),null!=a&&(b+=":"+a,(a=this.getColumnNumber())&&(b+=":"+a)));a="";var c=this.getFunctionName(),d=!0,e=this.isConstructor();if(this.isToplevel()||e)e?a+="new "+(c||""):c?a+=c:(a+=b,d=!1);else{var e=this.getTypeName(),f=this.getMethodName();c?(e&&0!=c.indexOf(e)&&(a+=e+"."),a+=c,f&&c.indexOf("."+ +f)!=c.length-f.length-1&&(a+=" [as "+f+"]")):a+=e+"."+(f||"")}d&&(a+=" ("+b+")");return a}function c(b){var c={};Object.getOwnPropertyNames(Object.getPrototypeOf(b)).forEach(function(a){c[a]=/^(?:is|get)/.test(a)?function(){return b[a].call(b)}:b[a]});c.toString=a;return c}function d(a){var b=a.getFileName()||a.getScriptNameOrSourceURL();if(b){var d=a.getLineNumber(),e=a.getColumnNumber()-1;1!==d||l()||a.isEval()||(e-=62);var g=f({source:b,line:d,column:e});a=c(a);a.getFileName=function(){return g.source}; +a.getLineNumber=function(){return g.line};a.getColumnNumber=function(){return g.column+1};a.getScriptNameOrSourceURL=function(){return g.source};return a}var m=a.isEval()&&a.getEvalOrigin();m&&(m=k(m),a=c(a),a.getEvalOrigin=function(){return m});return a}function w(a,b){u&&(z={},D={});return a+b.map(function(a){return"\n at "+d(a)}).join("")}function x(a){var b=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(a.stack);if(b){a=b[1];var c=+b[2],b=+b[3],d=z[a];!d&&E.existsSync(a)&&(d=E.readFileSync(a,"utf8")); +if(d&&(d=d.split(/(?:\r\n|\r|\n)/)[c-1]))return a+":"+c+"\n"+d+"\n"+Array(b).join(" ")+"^"}return null}function A(){var a=e.emit;e.emit=function(b){if("uncaughtException"===b){var c=arguments[1]&&arguments[1].stack,d=0 +Make sure to run build.js. +This test should say either "Test failed" or "Test passed": +

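The library's Node entry point (package.json, register.js and source-map-support.js) is added a little further down in this diff. For reference, here is a hedged sketch of how it is typically consumed; the option names come from the install() implementation shown below, while the in-memory map cache and the idea of an in-process transpiler are illustrative assumptions, not code from this package:

// Simplest form: the one-line register hook added in this diff
// (node_modules/source-map-support/register.js) does exactly this internally.
require('source-map-support').install();

// install() also accepts options, e.g. when an in-process transpiler keeps
// source maps in memory instead of on disk. The cache below is hypothetical.
var sourceMapSupport = require('source-map-support');
var inMemoryMaps = {}; // hypothetical: generated filename -> raw source map JSON

sourceMapSupport.install({
  // Let the test framework (or the host) deal with uncaught exceptions.
  handleUncaughtExceptions: false,
  // Called with a generated source filename; return {url, map} or null to
  // fall back to the default file/URL based lookup.
  retrieveSourceMap: function(source) {
    if (inMemoryMaps[source]) {
      return { url: source, map: inMemoryMaps[source] };
    }
    return null;
  }
});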
+ + diff --git a/node_modules/source-map-support/browser-test/script.coffee b/node_modules/source-map-support/browser-test/script.coffee new file mode 100644 index 0000000..5721068 --- /dev/null +++ b/node_modules/source-map-support/browser-test/script.coffee @@ -0,0 +1,12 @@ +sourceMapSupport.install() + +foo = -> throw new Error 'foo' + +try + foo() +catch e + if /\bscript\.coffee\b/.test e.stack + document.body.appendChild document.createTextNode 'Test passed' + else + document.body.appendChild document.createTextNode 'Test failed' + console.log e.stack diff --git a/node_modules/source-map-support/browser-test/script.js b/node_modules/source-map-support/browser-test/script.js new file mode 100644 index 0000000..e16d414 --- /dev/null +++ b/node_modules/source-map-support/browser-test/script.js @@ -0,0 +1,25 @@ +// Generated by CoffeeScript 1.7.1 +(function() { + var e, foo; + + sourceMapSupport.install(); + + foo = function() { + throw new Error('foo'); + }; + + try { + foo(); + } catch (_error) { + e = _error; + if (/\bscript\.coffee\b/.test(e.stack)) { + document.body.appendChild(document.createTextNode('Test passed')); + } else { + document.body.appendChild(document.createTextNode('Test failed')); + console.log(e.stack); + } + } + +}).call(this); + +//# sourceMappingURL=script.map diff --git a/node_modules/source-map-support/browser-test/script.map b/node_modules/source-map-support/browser-test/script.map new file mode 100644 index 0000000..127a2b6 --- /dev/null +++ b/node_modules/source-map-support/browser-test/script.map @@ -0,0 +1,10 @@ +{ + "version": 3, + "file": "script.js", + "sourceRoot": "..", + "sources": [ + "browser-test/script.coffee" + ], + "names": [], + "mappings": ";AAAA;AAAA,MAAA,MAAA;;AAAA,EAAA,gBAAgB,CAAC,OAAjB,CAAA,CAAA,CAAA;;AAAA,EAEA,GAAA,GAAM,SAAA,GAAA;AAAG,UAAU,IAAA,KAAA,CAAM,KAAN,CAAV,CAAH;EAAA,CAFN,CAAA;;AAIA;AACE,IAAA,GAAA,CAAA,CAAA,CADF;GAAA,cAAA;AAGE,IADI,UACJ,CAAA;AAAA,IAAA,IAAG,oBAAoB,CAAC,IAArB,CAA0B,CAAC,CAAC,KAA5B,CAAH;AACE,MAAA,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,CAAA,CADF;KAAA,MAAA;AAGE,MAAA,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,CAAA,CAAA;AAAA,MACA,OAAO,CAAC,GAAR,CAAY,CAAC,CAAC,KAAd,CADA,CAHF;KAHF;GAJA;AAAA" +} \ No newline at end of file diff --git a/node_modules/source-map-support/build.js b/node_modules/source-map-support/build.js new file mode 100755 index 0000000..fdbf145 --- /dev/null +++ b/node_modules/source-map-support/build.js @@ -0,0 +1,73 @@ +#!/usr/bin/env node + +var fs = require('fs'); +var path = require('path'); +var querystring = require('querystring'); +var child_process = require('child_process'); + +var browserify = path.join('node_modules', '.bin', 'browserify'); +var coffee = path.join('node_modules', '.bin', 'coffee'); + +function run(command, callback) { + console.log(command); + child_process.exec(command, callback); +} + +// Use browserify to package up source-map-support.js +fs.writeFileSync('.temp.js', 'sourceMapSupport = require("./source-map-support");'); +run(browserify + ' .temp.js', function(error, stdout) { + if (error) throw error; + + // Wrap the code so it works both as a normal + diff --git a/node_modules/source-map-support/header-test/script.coffee b/node_modules/source-map-support/header-test/script.coffee new file mode 100644 index 0000000..5721068 --- /dev/null +++ b/node_modules/source-map-support/header-test/script.coffee @@ -0,0 +1,12 @@ +sourceMapSupport.install() + +foo = -> throw new Error 'foo' + +try + foo() +catch e + if 
/\bscript\.coffee\b/.test e.stack + document.body.appendChild document.createTextNode 'Test passed' + else + document.body.appendChild document.createTextNode 'Test failed' + console.log e.stack diff --git a/node_modules/source-map-support/header-test/script.js b/node_modules/source-map-support/header-test/script.js new file mode 100644 index 0000000..012db6b --- /dev/null +++ b/node_modules/source-map-support/header-test/script.js @@ -0,0 +1,25 @@ +// Generated by CoffeeScript 1.7.1 +(function() { + var e, foo; + + sourceMapSupport.install(); + + foo = function() { + throw new Error('foo'); + }; + + try { + foo(); + } catch (_error) { + e = _error; + if (/\bscript\.coffee\b/.test(e.stack)) { + document.body.appendChild(document.createTextNode('Test passed')); + } else { + document.body.appendChild(document.createTextNode('Test failed')); + console.log(e.stack); + } + } + +}).call(this); + + diff --git a/node_modules/source-map-support/header-test/script.map b/node_modules/source-map-support/header-test/script.map new file mode 100644 index 0000000..0e890f8 --- /dev/null +++ b/node_modules/source-map-support/header-test/script.map @@ -0,0 +1,10 @@ +{ + "version": 3, + "file": "script.js", + "sourceRoot": "..", + "sources": [ + "header-test/script.coffee" + ], + "names": [], + "mappings": ";AAAA;AAAA,MAAA,MAAA;;AAAA,EAAA,gBAAgB,CAAC,OAAjB,CAAA,CAAA,CAAA;;AAAA,EAEA,GAAA,GAAM,SAAA,GAAA;AAAG,UAAU,IAAA,KAAA,CAAM,KAAN,CAAV,CAAH;EAAA,CAFN,CAAA;;AAIA;AACE,IAAA,GAAA,CAAA,CAAA,CADF;GAAA,cAAA;AAGE,IADI,UACJ,CAAA;AAAA,IAAA,IAAG,oBAAoB,CAAC,IAArB,CAA0B,CAAC,CAAC,KAA5B,CAAH;AACE,MAAA,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,CAAA,CADF;KAAA,MAAA;AAGE,MAAA,QAAQ,CAAC,IAAI,CAAC,WAAd,CAA0B,QAAQ,CAAC,cAAT,CAAwB,aAAxB,CAA1B,CAAA,CAAA;AAAA,MACA,OAAO,CAAC,GAAR,CAAY,CAAC,CAAC,KAAd,CADA,CAHF;KAHF;GAJA;AAAA" +} \ No newline at end of file diff --git a/node_modules/source-map-support/header-test/server.js b/node_modules/source-map-support/header-test/server.js new file mode 100644 index 0000000..ecc0cdf --- /dev/null +++ b/node_modules/source-map-support/header-test/server.js @@ -0,0 +1,45 @@ +var fs = require('fs'); +var http = require('http'); + +http.createServer(function(req, res) { + switch (req.url) { + case '/': + case '/index.html': { + res.writeHead(200, { 'Content-Type': 'text/html' }); + res.end(fs.readFileSync('index.html', 'utf8')); + break; + } + + case '/browser-source-map-support.js': { + res.writeHead(200, { 'Content-Type': 'text/javascript' }); + res.end(fs.readFileSync('../browser-source-map-support.js', 'utf8')); + break; + } + + case '/script.js': { + res.writeHead(200, { 'Content-Type': 'text/javascript', 'SourceMap': 'script-source-map.map' }); + res.end(fs.readFileSync('script.js', 'utf8')); + break; + } + + case '/script-source-map.map': { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(fs.readFileSync('script.map', 'utf8')); + break; + } + + case '/header-test/script.coffee': { + res.writeHead(200, { 'Content-Type': 'text/x-coffeescript' }); + res.end(fs.readFileSync('script.coffee', 'utf8')); + break; + } + + default: { + res.writeHead(404, { 'Content-Type': 'text/html' }); + res.end('404 not found'); + break; + } + } +}).listen(1337, '127.0.0.1'); + +console.log('Server running at http://127.0.0.1:1337/'); diff --git a/node_modules/source-map-support/package.json b/node_modules/source-map-support/package.json new file mode 100644 index 0000000..53abcf4 --- /dev/null +++ b/node_modules/source-map-support/package.json @@ -0,0 +1,28 @@ +{ + 
"name": "source-map-support", + "description": "Fixes stack traces for files with source maps", + "version": "0.4.5", + "main": "./source-map-support.js", + "scripts": { + "build": "node build.js", + "serve-tests": "http-server -p 1336", + "test": "mocha" + }, + "dependencies": { + "source-map": "^0.5.3" + }, + "devDependencies": { + "browserify": "3.44.2", + "coffee-script": "1.7.1", + "http-server": "^0.8.5", + "mocha": "1.18.2" + }, + "repository": { + "type": "git", + "url": "https://github.com/evanw/node-source-map-support" + }, + "bugs": { + "url": "https://github.com/evanw/node-source-map-support/issues" + }, + "license": "MIT" +} diff --git a/node_modules/source-map-support/register.js b/node_modules/source-map-support/register.js new file mode 100644 index 0000000..4f68e67 --- /dev/null +++ b/node_modules/source-map-support/register.js @@ -0,0 +1 @@ +require('./').install(); diff --git a/node_modules/source-map-support/source-map-support.js b/node_modules/source-map-support/source-map-support.js new file mode 100644 index 0000000..13cdf0c --- /dev/null +++ b/node_modules/source-map-support/source-map-support.js @@ -0,0 +1,506 @@ +var SourceMapConsumer = require('source-map').SourceMapConsumer; +var path = require('path'); +var fs = require('fs'); + +// Only install once if called multiple times +var errorFormatterInstalled = false; +var uncaughtShimInstalled = false; + +// If true, the caches are reset before a stack trace formatting operation +var emptyCacheBetweenOperations = false; + +// Supports {browser, node, auto} +var environment = "auto"; + +// Maps a file path to a string containing the file contents +var fileContentsCache = {}; + +// Maps a file path to a source map for that file +var sourceMapCache = {}; + +// Regex for detecting source maps +var reSourceMap = /^data:application\/json[^,]+base64,/; + +// Priority list of retrieve handlers +var retrieveFileHandlers = []; +var retrieveMapHandlers = []; + +function isInBrowser() { + if (environment === "browser") + return true; + if (environment === "node") + return false; + return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer")); +} + +function hasGlobalProcessEventEmitter() { + return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function')); +} + +function handlerExec(list) { + return function(arg) { + for (var i = 0; i < list.length; i++) { + var ret = list[i](arg); + if (ret) { + return ret; + } + } + return null; + }; +} + +var retrieveFile = handlerExec(retrieveFileHandlers); + +retrieveFileHandlers.push(function(path) { + // Trim the path to make sure there is no extra whitespace. + path = path.trim(); + if (path in fileContentsCache) { + return fileContentsCache[path]; + } + + try { + // Use SJAX if we are in the browser + if (isInBrowser()) { + var xhr = new XMLHttpRequest(); + xhr.open('GET', path, false); + xhr.send(null); + var contents = null + if (xhr.readyState === 4 && xhr.status === 200) { + contents = xhr.responseText + } + } + + // Otherwise, use the filesystem + else { + var contents = fs.readFileSync(path, 'utf8'); + } + } catch (e) { + var contents = null; + } + + return fileContentsCache[path] = contents; +}); + +// Support URLs relative to a directory, but be careful about a protocol prefix +// in case we are in the browser (i.e. 
directories may start with "http://") +function supportRelativeURL(file, url) { + if (!file) return url; + var dir = path.dirname(file); + var match = /^\w+:\/\/[^\/]*/.exec(dir); + var protocol = match ? match[0] : ''; + return protocol + path.resolve(dir.slice(protocol.length), url); +} + +function retrieveSourceMapURL(source) { + var fileData; + + if (isInBrowser()) { + var xhr = new XMLHttpRequest(); + xhr.open('GET', source, false); + xhr.send(null); + fileData = xhr.readyState === 4 ? xhr.responseText : null; + + // Support providing a sourceMappingURL via the SourceMap header + var sourceMapHeader = xhr.getResponseHeader("SourceMap") || + xhr.getResponseHeader("X-SourceMap"); + if (sourceMapHeader) { + return sourceMapHeader; + } + } + + // Get the URL of the source map + fileData = retrieveFile(source); + // //# sourceMappingURL=foo.js.map /*# sourceMappingURL=foo.js.map */ + var re = /(?:\/\/[@#][ \t]+sourceMappingURL=([^\s'"]+?)[ \t]*$)|(?:\/\*[@#][ \t]+sourceMappingURL=([^\*]+?)[ \t]*(?:\*\/)[ \t]*$)/mg; + // Keep executing the search to find the *last* sourceMappingURL to avoid + // picking up sourceMappingURLs from comments, strings, etc. + var lastMatch, match; + while (match = re.exec(fileData)) lastMatch = match; + if (!lastMatch) return null; + return lastMatch[1]; +}; + +// Can be overridden by the retrieveSourceMap option to install. Takes a +// generated source filename; returns a {map, optional url} object, or null if +// there is no source map. The map field may be either a string or the parsed +// JSON object (ie, it must be a valid argument to the SourceMapConsumer +// constructor). +var retrieveSourceMap = handlerExec(retrieveMapHandlers); +retrieveMapHandlers.push(function(source) { + var sourceMappingURL = retrieveSourceMapURL(source); + if (!sourceMappingURL) return null; + + // Read the contents of the source map + var sourceMapData; + if (reSourceMap.test(sourceMappingURL)) { + // Support source map URL as a data url + var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1); + sourceMapData = new Buffer(rawData, "base64").toString(); + sourceMappingURL = source; + } else { + // Support source map URLs relative to the source URL + sourceMappingURL = supportRelativeURL(source, sourceMappingURL); + sourceMapData = retrieveFile(sourceMappingURL); + } + + if (!sourceMapData) { + return null; + } + + return { + url: sourceMappingURL, + map: sourceMapData + }; +}); + +function mapSourcePosition(position) { + var sourceMap = sourceMapCache[position.source]; + if (!sourceMap) { + // Call the (overrideable) retrieveSourceMap function to get the source map. + var urlAndMap = retrieveSourceMap(position.source); + if (urlAndMap) { + sourceMap = sourceMapCache[position.source] = { + url: urlAndMap.url, + map: new SourceMapConsumer(urlAndMap.map) + }; + + // Load all sources stored inline with the source map into the file cache + // to pretend like they are already loaded. They may not exist on disk. 
+ if (sourceMap.map.sourcesContent) { + sourceMap.map.sources.forEach(function(source, i) { + var contents = sourceMap.map.sourcesContent[i]; + if (contents) { + var url = supportRelativeURL(sourceMap.url, source); + fileContentsCache[url] = contents; + } + }); + } + } else { + sourceMap = sourceMapCache[position.source] = { + url: null, + map: null + }; + } + } + + // Resolve the source URL relative to the URL of the source map + if (sourceMap && sourceMap.map) { + var originalPosition = sourceMap.map.originalPositionFor(position); + + // Only return the original position if a matching line was found. If no + // matching line is found then we return position instead, which will cause + // the stack trace to print the path and line for the compiled file. It is + // better to give a precise location in the compiled file than a vague + // location in the original file. + if (originalPosition.source !== null) { + originalPosition.source = supportRelativeURL( + sourceMap.url, originalPosition.source); + return originalPosition; + } + } + + return position; +} + +// Parses code generated by FormatEvalOrigin(), a function inside V8: +// https://code.google.com/p/v8/source/browse/trunk/src/messages.js +function mapEvalOrigin(origin) { + // Most eval() calls are in this format + var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin); + if (match) { + var position = mapSourcePosition({ + source: match[2], + line: +match[3], + column: match[4] - 1 + }); + return 'eval at ' + match[1] + ' (' + position.source + ':' + + position.line + ':' + (position.column + 1) + ')'; + } + + // Parse nested eval() calls using recursion + match = /^eval at ([^(]+) \((.+)\)$/.exec(origin); + if (match) { + return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')'; + } + + // Make sure we still return useful information if we didn't find anything + return origin; +} + +// This is copied almost verbatim from the V8 source code at +// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The +// implementation of wrapCallSite() used to just forward to the actual source +// code of CallSite.prototype.toString but unfortunately a new release of V8 +// did something to the prototype chain and broke the shim. The only fix I +// could find was copy/paste. +function CallSiteToString() { + var fileName; + var fileLocation = ""; + if (this.isNative()) { + fileLocation = "native"; + } else { + fileName = this.getScriptNameOrSourceURL(); + if (!fileName && this.isEval()) { + fileLocation = this.getEvalOrigin(); + fileLocation += ", "; // Expecting source position to follow. + } + + if (fileName) { + fileLocation += fileName; + } else { + // Source code does not originate from a file and is not native, but we + // can still get the source position inside the source string, e.g. in + // an eval string. 
+ fileLocation += ""; + } + var lineNumber = this.getLineNumber(); + if (lineNumber != null) { + fileLocation += ":" + lineNumber; + var columnNumber = this.getColumnNumber(); + if (columnNumber) { + fileLocation += ":" + columnNumber; + } + } + } + + var line = ""; + var functionName = this.getFunctionName(); + var addSuffix = true; + var isConstructor = this.isConstructor(); + var isMethodCall = !(this.isToplevel() || isConstructor); + if (isMethodCall) { + var typeName = this.getTypeName(); + // Fixes shim to be backward compatable with Node v0 to v4 + if (typeName === "[object Object]") { + typeName = "null"; + } + var methodName = this.getMethodName(); + if (functionName) { + if (typeName && functionName.indexOf(typeName) != 0) { + line += typeName + "."; + } + line += functionName; + if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) { + line += " [as " + methodName + "]"; + } + } else { + line += typeName + "." + (methodName || ""); + } + } else if (isConstructor) { + line += "new " + (functionName || ""); + } else if (functionName) { + line += functionName; + } else { + line += fileLocation; + addSuffix = false; + } + if (addSuffix) { + line += " (" + fileLocation + ")"; + } + return line; +} + +function cloneCallSite(frame) { + var object = {}; + Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) { + object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name]; + }); + object.toString = CallSiteToString; + return object; +} + +function wrapCallSite(frame) { + if(frame.isNative()) { + return frame; + } + + // Most call sites will return the source file from getFileName(), but code + // passed to eval() ending in "//# sourceURL=..." will return the source file + // from getScriptNameOrSourceURL() instead + var source = frame.getFileName() || frame.getScriptNameOrSourceURL(); + if (source) { + var line = frame.getLineNumber(); + var column = frame.getColumnNumber() - 1; + + // Fix position in Node where some (internal) code is prepended. 
+ // See https://github.com/evanw/node-source-map-support/issues/36 + if (line === 1 && !isInBrowser() && !frame.isEval()) { + column -= 62; + } + + var position = mapSourcePosition({ + source: source, + line: line, + column: column + }); + frame = cloneCallSite(frame); + frame.getFileName = function() { return position.source; }; + frame.getLineNumber = function() { return position.line; }; + frame.getColumnNumber = function() { return position.column + 1; }; + frame.getScriptNameOrSourceURL = function() { return position.source; }; + return frame; + } + + // Code called using eval() needs special handling + var origin = frame.isEval() && frame.getEvalOrigin(); + if (origin) { + origin = mapEvalOrigin(origin); + frame = cloneCallSite(frame); + frame.getEvalOrigin = function() { return origin; }; + return frame; + } + + // If we get here then we were unable to change the source position + return frame; +} + +// This function is part of the V8 stack trace API, for more info see: +// http://code.google.com/p/v8/wiki/JavaScriptStackTraceApi +function prepareStackTrace(error, stack) { + if (emptyCacheBetweenOperations) { + fileContentsCache = {}; + sourceMapCache = {}; + } + + return error + stack.map(function(frame) { + return '\n at ' + wrapCallSite(frame); + }).join(''); +} + +// Generate position and snippet of original source with pointer +function getErrorSource(error) { + var match = /\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack); + if (match) { + var source = match[1]; + var line = +match[2]; + var column = +match[3]; + + // Support the inline sourceContents inside the source map + var contents = fileContentsCache[source]; + + // Support files on disk + if (!contents && fs.existsSync(source)) { + contents = fs.readFileSync(source, 'utf8'); + } + + // Format the line from the original source code like node does + if (contents) { + var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1]; + if (code) { + return source + ':' + line + '\n' + code + '\n' + + new Array(column).join(' ') + '^'; + } + } + } + return null; +} + +function printErrorAndExit (error) { + var source = getErrorSource(error); + + if (source) { + console.error(); + console.error(source); + } + + console.error(error.stack); + process.exit(1); +} + +function shimEmitUncaughtException () { + var origEmit = process.emit; + + process.emit = function (type) { + if (type === 'uncaughtException') { + var hasStack = (arguments[1] && arguments[1].stack); + var hasListeners = (this.listeners(type).length > 0); + + if (hasStack && !hasListeners) { + return printErrorAndExit(arguments[1]); + } + } + + return origEmit.apply(this, arguments); + }; +} + +exports.wrapCallSite = wrapCallSite; +exports.getErrorSource = getErrorSource; +exports.mapSourcePosition = mapSourcePosition; +exports.retrieveSourceMap = retrieveSourceMap; + +exports.install = function(options) { + options = options || {}; + + if (options.environment) { + environment = options.environment; + if (["node", "browser", "auto"].indexOf(environment) === -1) { + throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}") + } + } + + // Allow sources to be found by methods other than reading the files + // directly from disk. + if (options.retrieveFile) { + if (options.overrideRetrieveFile) { + retrieveFileHandlers.length = 0; + } + + retrieveFileHandlers.unshift(options.retrieveFile); + } + + // Allow source maps to be found by methods other than reading the files + // directly from disk. 
+ if (options.retrieveSourceMap) { + if (options.overrideRetrieveSourceMap) { + retrieveMapHandlers.length = 0; + } + + retrieveMapHandlers.unshift(options.retrieveSourceMap); + } + + // Support runtime transpilers that include inline source maps + if (options.hookRequire && !isInBrowser()) { + var Module = require('module'); + var $compile = Module.prototype._compile; + + if (!$compile.__sourceMapSupport) { + Module.prototype._compile = function(content, filename) { + fileContentsCache[filename] = content; + sourceMapCache[filename] = undefined; + return $compile.call(this, content, filename); + }; + + Module.prototype._compile.__sourceMapSupport = true; + } + } + + // Configure options + if (!emptyCacheBetweenOperations) { + emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ? + options.emptyCacheBetweenOperations : false; + } + + // Install the error reformatter + if (!errorFormatterInstalled) { + errorFormatterInstalled = true; + Error.prepareStackTrace = prepareStackTrace; + } + + if (!uncaughtShimInstalled) { + var installHandler = 'handleUncaughtExceptions' in options ? + options.handleUncaughtExceptions : true; + + // Provide the option to not install the uncaught exception handler. This is + // to support other uncaught exception handlers (in test frameworks, for + // example). If this handler is not installed and there are no other uncaught + // exception handlers, uncaught exceptions will be caught by node's built-in + // exception handler and the process will still be terminated. However, the + // generated JavaScript code will be shown above the stack trace instead of + // the original source code. + if (installHandler && hasGlobalProcessEventEmitter()) { + uncaughtShimInstalled = true; + shimEmitUncaughtException(); + } + } +}; diff --git a/node_modules/source-map-support/test.js b/node_modules/source-map-support/test.js new file mode 100644 index 0000000..583dedd --- /dev/null +++ b/node_modules/source-map-support/test.js @@ -0,0 +1,581 @@ +require('./source-map-support').install({ + emptyCacheBetweenOperations: true // Needed to be able to test for failure +}); + +var SourceMapGenerator = require('source-map').SourceMapGenerator; +var child_process = require('child_process'); +var assert = require('assert'); +var fs = require('fs'); + +function compareLines(actual, expected) { + assert(actual.length >= expected.length, 'got ' + actual.length + ' lines but expected at least ' + expected.length + ' lines'); + for (var i = 0; i < expected.length; i++) { + // Some tests are regular expressions because the output format changed slightly between node v0.9.2 and v0.9.3 + if (expected[i] instanceof RegExp) { + assert(expected[i].test(actual[i]), JSON.stringify(actual[i]) + ' does not match ' + expected[i]); + } else { + assert.equal(actual[i], expected[i]); + } + } +} + +function createEmptySourceMap() { + return new SourceMapGenerator({ + file: '.generated.js', + sourceRoot: '.' 
+ }); +} + +function createSourceMapWithGap() { + var sourceMap = createEmptySourceMap(); + sourceMap.addMapping({ + generated: { line: 100, column: 0 }, + original: { line: 100, column: 0 }, + source: '.original.js' + }); + return sourceMap; +} + +function createSingleLineSourceMap() { + var sourceMap = createEmptySourceMap(); + sourceMap.addMapping({ + generated: { line: 1, column: 0 }, + original: { line: 1, column: 0 }, + source: '.original.js' + }); + return sourceMap; +} + +function createSecondLineSourceMap() { + var sourceMap = createEmptySourceMap(); + sourceMap.addMapping({ + generated: { line: 2, column: 0 }, + original: { line: 1, column: 0 }, + source: '.original.js' + }); + return sourceMap; +} + +function createMultiLineSourceMap() { + var sourceMap = createEmptySourceMap(); + for (var i = 1; i <= 100; i++) { + sourceMap.addMapping({ + generated: { line: i, column: 0 }, + original: { line: 1000 + i, column: 99 + i }, + source: 'line' + i + '.js' + }); + } + return sourceMap; +} + +function createMultiLineSourceMapWithSourcesContent() { + var sourceMap = createEmptySourceMap(); + var original = new Array(1001).join('\n'); + for (var i = 1; i <= 100; i++) { + sourceMap.addMapping({ + generated: { line: i, column: 0 }, + original: { line: 1000 + i, column: 4 }, + source: 'original.js' + }); + original += ' line ' + i + '\n'; + } + sourceMap.setSourceContent('original.js', original); + return sourceMap; +} + +function compareStackTrace(sourceMap, source, expected) { + // Check once with a separate source map + fs.writeFileSync('.generated.js.map', sourceMap); + fs.writeFileSync('.generated.js', 'exports.test = function() {' + + source.join('\n') + '};//@ sourceMappingURL=.generated.js.map'); + try { + delete require.cache[require.resolve('./.generated')]; + require('./.generated').test(); + } catch (e) { + compareLines(e.stack.split('\n'), expected); + } + fs.unlinkSync('.generated.js'); + fs.unlinkSync('.generated.js.map'); + + // Check again with an inline source map (in a data URL) + fs.writeFileSync('.generated.js', 'exports.test = function() {' + + source.join('\n') + '};//@ sourceMappingURL=data:application/json;base64,' + + new Buffer(sourceMap.toString()).toString('base64')); + try { + delete require.cache[require.resolve('./.generated')]; + require('./.generated').test(); + } catch (e) { + compareLines(e.stack.split('\n'), expected); + } + fs.unlinkSync('.generated.js'); +} + +function compareStdout(done, sourceMap, source, expected) { + fs.writeFileSync('.original.js', 'this is the original code'); + fs.writeFileSync('.generated.js.map', sourceMap); + fs.writeFileSync('.generated.js', source.join('\n') + + '//@ sourceMappingURL=.generated.js.map'); + child_process.exec('node ./.generated', function(error, stdout, stderr) { + try { + compareLines( + (stdout + stderr) + .trim() + .split('\n') + .filter(function (line) { return line !== '' }), // Empty lines are not relevant. 
+ expected + ); + } catch (e) { + return done(e); + } + fs.unlinkSync('.generated.js'); + fs.unlinkSync('.generated.js.map'); + fs.unlinkSync('.original.js'); + done(); + }); +} + +it('normal throw', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'throw new Error("test");' + ], [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]); +}); + +it('throw inside function', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'function foo() {', + ' throw new Error("test");', + '}', + 'foo();' + ], [ + 'Error: test', + /^ at foo \((?:.*\/)?line2\.js:1002:102\)$/, + /^ at Object\.exports\.test \((?:.*\/)?line4\.js:1004:104\)$/ + ]); +}); + +it('throw inside function inside function', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'function foo() {', + ' function bar() {', + ' throw new Error("test");', + ' }', + ' bar();', + '}', + 'foo();' + ], [ + 'Error: test', + /^ at bar \((?:.*\/)?line3\.js:1003:103\)$/, + /^ at foo \((?:.*\/)?line5\.js:1005:105\)$/, + /^ at Object\.exports\.test \((?:.*\/)?line7\.js:1007:107\)$/ + ]); +}); + +it('eval', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'eval("throw new Error(\'test\')");' + ], [ + 'Error: test', + + // Before Node 4, `Object.eval`, after just `eval`. + /^ at (?:Object\.)?eval \(eval at \((?:.*\/)?line1\.js:1001:101\)/, + + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]); +}); + +it('eval inside eval', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'eval("eval(\'throw new Error(\\"test\\")\')");' + ], [ + 'Error: test', + /^ at (?:Object\.)?eval \(eval at \(eval at \((?:.*\/)?line1\.js:1001:101\)/, + /^ at (?:Object\.)?eval \(eval at \((?:.*\/)?line1\.js:1001:101\)/, + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]); +}); + +it('eval inside function', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'function foo() {', + ' eval("throw new Error(\'test\')");', + '}', + 'foo();' + ], [ + 'Error: test', + /^ at eval \(eval at foo \((?:.*\/)?line2\.js:1002:102\)/, + /^ at foo \((?:.*\/)?line2\.js:1002:102\)/, + /^ at Object\.exports\.test \((?:.*\/)?line4\.js:1004:104\)$/ + ]); +}); + +it('eval with sourceURL', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'eval("throw new Error(\'test\')//@ sourceURL=sourceURL.js");' + ], [ + 'Error: test', + /^ at (?:Object\.)?eval \(sourceURL\.js:1:7\)$/, + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]); +}); + +it('eval with sourceURL inside eval', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'eval("eval(\'throw new Error(\\"test\\")//@ sourceURL=sourceURL.js\')");' + ], [ + 'Error: test', + /^ at (?:Object\.)?eval \(sourceURL\.js:1:7\)$/, + /^ at (?:Object\.)?eval \(eval at \((?:.*\/)?line1\.js:1001:101\)/, + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]); +}); + +it('native function', function() { + compareStackTrace(createSingleLineSourceMap(), [ + '[1].map(function(x) { throw new Error(x); });' + ], [ + 'Error: 1', + /\/.original\.js/, + /at Array\.map \(native\)/ + ]); +}); + +it('function constructor', function() { + compareStackTrace(createMultiLineSourceMap(), [ + 'throw new Function(")");' + ], [ + 'SyntaxError: Unexpected token )', + ]); +}); + +it('throw with empty source map', function() { + compareStackTrace(createEmptySourceMap(), [ + 'throw new Error("test");' + ], [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?.generated.js:1:34\)$/ + 
]); +}); + +it('throw in Timeout with empty source map', function(done) { + compareStdout(done, createEmptySourceMap(), [ + 'require("./source-map-support").install();', + 'setTimeout(function () {', + ' throw new Error("this is the error")', + '})' + ], [ + /\/.generated.js:3$/, + ' throw new Error("this is the error")', + /^ \^$/, + 'Error: this is the error', + /^ at ((null)|(Timeout))\._onTimeout \((?:.*\/)?.generated.js:3:11\)$/ + ]); +}); + +it('throw with source map with gap', function() { + compareStackTrace(createSourceMapWithGap(), [ + 'throw new Error("test");' + ], [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?.generated.js:1:34\)$/ + ]); +}); + +it('sourcesContent with data URL', function() { + compareStackTrace(createMultiLineSourceMapWithSourcesContent(), [ + 'throw new Error("test");' + ], [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?original.js:1001:5\)$/ + ]); +}); + +it('finds the last sourceMappingURL', function() { + compareStackTrace(createMultiLineSourceMapWithSourcesContent(), [ + '//# sourceMappingURL=missing.map.js', // NB: compareStackTrace adds another source mapping. + 'throw new Error("test");' + ], [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?original.js:1002:5\)$/ + ]); +}); + +it('default options', function(done) { + compareStdout(done, createSecondLineSourceMap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install();', + 'process.nextTick(foo);', + 'process.nextTick(function() { process.exit(1); });' + ], [ + /\/.original\.js:1$/, + 'this is the original code', + '^', + 'Error: this is the error', + /^ at foo \((?:.*\/)?.original\.js:1:1\)$/ + ]); +}); + +it('handleUncaughtExceptions is true', function(done) { + compareStdout(done, createSecondLineSourceMap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install({ handleUncaughtExceptions: true });', + 'process.nextTick(foo);' + ], [ + /\/.original\.js:1$/, + 'this is the original code', + '^', + 'Error: this is the error', + /^ at foo \((?:.*\/)?.original\.js:1:1\)$/ + ]); +}); + +it('handleUncaughtExceptions is false', function(done) { + compareStdout(done, createSecondLineSourceMap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install({ handleUncaughtExceptions: false });', + 'process.nextTick(foo);' + ], [ + /\/.generated.js:2$/, + 'function foo() { throw new Error("this is the error"); }', + + // Before Node 4, the arrow points on the `new`, after on the + // `throw`. 
+ /^ (?: )?\^$/, + + 'Error: this is the error', + /^ at foo \((?:.*\/)?.original\.js:1:1\)$/ + ]); +}); + +it('default options with empty source map', function(done) { + compareStdout(done, createEmptySourceMap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install();', + 'process.nextTick(foo);' + ], [ + /\/.generated.js:2$/, + 'function foo() { throw new Error("this is the error"); }', + /^ (?: )?\^$/, + 'Error: this is the error', + /^ at foo \((?:.*\/)?.generated.js:2:24\)$/ + ]); +}); + +it('default options with source map with gap', function(done) { + compareStdout(done, createSourceMapWithGap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install();', + 'process.nextTick(foo);' + ], [ + /\/.generated.js:2$/, + 'function foo() { throw new Error("this is the error"); }', + /^ (?: )?\^$/, + 'Error: this is the error', + /^ at foo \((?:.*\/)?.generated.js:2:24\)$/ + ]); +}); + +it('specifically requested error source', function(done) { + compareStdout(done, createSecondLineSourceMap(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'var sms = require("./source-map-support");', + 'sms.install({ handleUncaughtExceptions: false });', + 'process.on("uncaughtException", function (e) { console.log("SRC:" + sms.getErrorSource(e)); });', + 'process.nextTick(foo);' + ], [ + /^SRC:.*\/.original.js:1$/, + 'this is the original code', + '^' + ]); +}); + +it('sourcesContent', function(done) { + compareStdout(done, createMultiLineSourceMapWithSourcesContent(), [ + '', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install();', + 'process.nextTick(foo);', + 'process.nextTick(function() { process.exit(1); });' + ], [ + /\/original\.js:1002$/, + ' line 2', + ' ^', + 'Error: this is the error', + /^ at foo \((?:.*\/)?original\.js:1002:5\)$/ + ]); +}); + +it('missing source maps should also be cached', function(done) { + compareStdout(done, createSingleLineSourceMap(), [ + '', + 'var count = 0;', + 'function foo() {', + ' console.log(new Error("this is the error").stack.split("\\n").slice(0, 2).join("\\n"));', + '}', + 'require("./source-map-support").install({', + ' overrideRetrieveSourceMap: true,', + ' retrieveSourceMap: function(name) {', + ' if (/\\.generated.js$/.test(name)) count++;', + ' return null;', + ' }', + '});', + 'process.nextTick(foo);', + 'process.nextTick(foo);', + 'process.nextTick(function() { console.log(count); });', + ], [ + 'Error: this is the error', + /^ at foo \((?:.*\/)?.generated.js:4:15\)$/, + 'Error: this is the error', + /^ at foo \((?:.*\/)?.generated.js:4:15\)$/, + '1', // The retrieval should only be attempted once + ]); +}); + +it('should consult all retrieve source map providers', function(done) { + compareStdout(done, createSingleLineSourceMap(), [ + '', + 'var count = 0;', + 'function foo() {', + ' console.log(new Error("this is the error").stack.split("\\n").slice(0, 2).join("\\n"));', + '}', + 'require("./source-map-support").install({', + ' retrieveSourceMap: function(name) {', + ' if (/\\.generated.js$/.test(name)) count++;', + ' return undefined;', + ' }', + '});', + 'require("./source-map-support").install({', + ' retrieveSourceMap: function(name) {', + ' if (/\\.generated.js$/.test(name)) {', + ' count++;', + ' return ' + JSON.stringify({url: '.original.js', map: createMultiLineSourceMapWithSourcesContent().toJSON()}) + ';', + ' }', + ' }', + '});', + 
'process.nextTick(foo);', + 'process.nextTick(foo);', + 'process.nextTick(function() { console.log(count); });', + ], [ + 'Error: this is the error', + /^ at foo \((?:.*\/)?original.js:1004:5\)$/, + 'Error: this is the error', + /^ at foo \((?:.*\/)?original.js:1004:5\)$/, + '1', // The retrieval should only be attempted once + ]); +}); + +it('should allow for runtime inline source maps', function(done) { + var sourceMap = createMultiLineSourceMapWithSourcesContent(); + + fs.writeFileSync('.generated.jss', 'foo'); + + compareStdout(function(err) { + fs.unlinkSync('.generated.jss'); + done(err); + }, createSingleLineSourceMap(), [ + 'require("./source-map-support").install({', + ' hookRequire: true', + '});', + 'require.extensions[".jss"] = function(module, filename) {', + ' module._compile(', + JSON.stringify([ + '', + 'var count = 0;', + 'function foo() {', + ' console.log(new Error("this is the error").stack.split("\\n").slice(0, 2).join("\\n"));', + '}', + 'process.nextTick(foo);', + 'process.nextTick(foo);', + 'process.nextTick(function() { console.log(count); });', + '//@ sourceMappingURL=data:application/json;charset=utf8;base64,' + new Buffer(sourceMap.toString()).toString('base64') + ].join('\n')), + ', filename);', + '};', + 'require("./.generated.jss");', + ], [ + 'Error: this is the error', + /^ at foo \(.*\/original.js:1004:5\)$/, + 'Error: this is the error', + /^ at foo \(.*\/original.js:1004:5\)$/, + '0', // The retrieval should only be attempted once + ]); +}); + +/* The following test duplicates some of the code in + * `compareStackTrace` but appends a charset to the + * source mapping url. + */ +it('finds source maps with charset specified', function() { + var sourceMap = createMultiLineSourceMap() + var source = [ 'throw new Error("test");' ]; + var expected = [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]; + + fs.writeFileSync('.generated.js', 'exports.test = function() {' + + source.join('\n') + '};//@ sourceMappingURL=data:application/json;charset=utf8;base64,' + + new Buffer(sourceMap.toString()).toString('base64')); + try { + delete require.cache[require.resolve('./.generated')]; + require('./.generated').test(); + } catch (e) { + compareLines(e.stack.split('\n'), expected); + } + fs.unlinkSync('.generated.js'); +}); + +/* The following test duplicates some of the code in + * `compareStackTrace` but appends some code and a + * comment to the source mapping url. 
+ */ +it('allows code/comments after sourceMappingURL', function() { + var sourceMap = createMultiLineSourceMap() + var source = [ 'throw new Error("test");' ]; + var expected = [ + 'Error: test', + /^ at Object\.exports\.test \((?:.*\/)?line1\.js:1001:101\)$/ + ]; + + fs.writeFileSync('.generated.js', 'exports.test = function() {' + + source.join('\n') + '};//# sourceMappingURL=data:application/json;base64,' + + new Buffer(sourceMap.toString()).toString('base64') + + '\n// Some comment below the sourceMappingURL\nvar foo = 0;'); + try { + delete require.cache[require.resolve('./.generated')]; + require('./.generated').test(); + } catch (e) { + compareLines(e.stack.split('\n'), expected); + } + fs.unlinkSync('.generated.js'); +}); + +it('handleUncaughtExceptions is true with existing listener', function(done) { + var source = [ + 'process.on("uncaughtException", function() { /* Silent */ });', + 'function foo() { throw new Error("this is the error"); }', + 'require("./source-map-support").install();', + 'process.nextTick(foo);', + '//@ sourceMappingURL=.generated.js.map' + ]; + + fs.writeFileSync('.original.js', 'this is the original code'); + fs.writeFileSync('.generated.js.map', createSingleLineSourceMap()); + fs.writeFileSync('.generated.js', source.join('\n')); + + child_process.exec('node ./.generated', function(error, stdout, stderr) { + fs.unlinkSync('.generated.js'); + fs.unlinkSync('.generated.js.map'); + fs.unlinkSync('.original.js'); + assert.equal((stdout + stderr).trim(), ''); + done(); + }); +}); diff --git a/node_modules/source-map/CHANGELOG.md b/node_modules/source-map/CHANGELOG.md new file mode 100644 index 0000000..3a8c066 --- /dev/null +++ b/node_modules/source-map/CHANGELOG.md @@ -0,0 +1,301 @@ +# Change Log + +## 0.5.6 + +* Fix for regression when people were using numbers as names in source maps. See + #236. + +## 0.5.5 + +* Fix "regression" of unsupported, implementation behavior that half the world + happens to have come to depend on. See #235. + +* Fix regression involving function hoisting in SpiderMonkey. See #233. + +## 0.5.4 + +* Large performance improvements to source-map serialization. See #228 and #229. + +## 0.5.3 + +* Do not include unnecessary distribution files. See + commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86. + +## 0.5.2 + +* Include browser distributions of the library in package.json's `files`. See + issue #212. + +## 0.5.1 + +* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See + ff05274becc9e6e1295ed60f3ea090d31d843379. + +## 0.5.0 + +* Node 0.8 is no longer supported. + +* Use webpack instead of dryice for bundling. + +* Big speedups serializing source maps. See pull request #203. + +* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that + explicitly start with the source root. See issue #199. + +## 0.4.4 + +* Fix an issue where using a `SourceMapGenerator` after having created a + `SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See + issue #191. + +* Fix an issue with where `SourceMapGenerator` would mistakenly consider + different mappings as duplicates of each other and avoid generating them. See + issue #192. + +## 0.4.3 + +* A very large number of performance improvements, particularly when parsing + source maps. Collectively about 75% of time shaved off of the source map + parsing benchmark! + +* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy + searching in the presence of a column option. See issue #177. 
+ +* Fix a bug with joining a source and its source root when the source is above + the root. See issue #182. + +* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to + determine when all sources' contents are inlined into the source map. See + issue #190. + +## 0.4.2 + +* Add an `.npmignore` file so that the benchmarks aren't pulled down by + dependent projects. Issue #169. + +* Add an optional `column` argument to + `SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines + with no mappings. Issues #172 and #173. + +## 0.4.1 + +* Fix accidentally defining a global variable. #170. + +## 0.4.0 + +* The default direction for fuzzy searching was changed back to its original + direction. See #164. + +* There is now a `bias` option you can supply to `SourceMapConsumer` to control + the fuzzy searching direction. See #167. + +* About an 8% speed up in parsing source maps. See #159. + +* Added a benchmark for parsing and generating source maps. + +## 0.3.0 + +* Change the default direction that searching for positions fuzzes when there is + not an exact match. See #154. + +* Support for environments using json2.js for JSON serialization. See #156. + +## 0.2.0 + +* Support for consuming "indexed" source maps which do not have any remote + sections. See pull request #127. This introduces a minor backwards + incompatibility if you are monkey patching `SourceMapConsumer.prototype` + methods. + +## 0.1.43 + +* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue + #148 for some discussion and issues #150, #151, and #152 for implementations. + +## 0.1.42 + +* Fix an issue where `SourceNode`s from different versions of the source-map + library couldn't be used in conjunction with each other. See issue #142. + +## 0.1.41 + +* Fix a bug with getting the source content of relative sources with a "./" + prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768). + +* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the + column span of each mapping. + +* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find + all generated positions associated with a given original source and line. + +## 0.1.40 + +* Performance improvements for parsing source maps in SourceMapConsumer. + +## 0.1.39 + +* Fix a bug where setting a source's contents to null before any source content + had been set before threw a TypeError. See issue #131. + +## 0.1.38 + +* Fix a bug where finding relative paths from an empty path were creating + absolute paths. See issue #129. + +## 0.1.37 + +* Fix a bug where if the source root was an empty string, relative source paths + would turn into absolute source paths. Issue #124. + +## 0.1.36 + +* Allow the `names` mapping property to be an empty string. Issue #121. + +## 0.1.35 + +* A third optional parameter was added to `SourceNode.fromStringWithSourceMap` + to specify a path that relative sources in the second parameter should be + relative to. Issue #105. + +* If no file property is given to a `SourceMapGenerator`, then the resulting + source map will no longer have a `null` file property. The property will + simply not exist. Issue #104. + +* Fixed a bug where consecutive newlines were ignored in `SourceNode`s. + Issue #116. + +## 0.1.34 + +* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103. + +* Fix bug involving source contents and the + `SourceMapGenerator.prototype.applySourceMap`. Issue #100. 
+ +## 0.1.33 + +* Fix some edge cases surrounding path joining and URL resolution. + +* Add a third parameter for relative path to + `SourceMapGenerator.prototype.applySourceMap`. + +* Fix issues with mappings and EOLs. + +## 0.1.32 + +* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns + (issue 92). + +* Fixed test runner to actually report number of failed tests as its process + exit code. + +* Fixed a typo when reporting bad mappings (issue 87). + +## 0.1.31 + +* Delay parsing the mappings in SourceMapConsumer until queried for a source + location. + +* Support Sass source maps (which at the time of writing deviate from the spec + in small ways) in SourceMapConsumer. + +## 0.1.30 + +* Do not join source root with a source, when the source is a data URI. + +* Extend the test runner to allow running single specific test files at a time. + +* Performance improvements in `SourceNode.prototype.walk` and + `SourceMapConsumer.prototype.eachMapping`. + +* Source map browser builds will now work inside Workers. + +* Better error messages when attempting to add an invalid mapping to a + `SourceMapGenerator`. + +## 0.1.29 + +* Allow duplicate entries in the `names` and `sources` arrays of source maps + (usually from TypeScript) we are parsing. Fixes github issue 72. + +## 0.1.28 + +* Skip duplicate mappings when creating source maps from SourceNode; github + issue 75. + +## 0.1.27 + +* Don't throw an error when the `file` property is missing in SourceMapConsumer, + we don't use it anyway. + +## 0.1.26 + +* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70. + +## 0.1.25 + +* Make compatible with browserify + +## 0.1.24 + +* Fix issue with absolute paths and `file://` URIs. See + https://bugzilla.mozilla.org/show_bug.cgi?id=885597 + +## 0.1.23 + +* Fix issue with absolute paths and sourcesContent, github issue 64. + +## 0.1.22 + +* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21. + +## 0.1.21 + +* Fixed handling of sources that start with a slash so that they are relative to + the source root's host. + +## 0.1.20 + +* Fixed github issue #43: absolute URLs aren't joined with the source root + anymore. + +## 0.1.19 + +* Using Travis CI to run tests. + +## 0.1.18 + +* Fixed a bug in the handling of sourceRoot. + +## 0.1.17 + +* Added SourceNode.fromStringWithSourceMap. + +## 0.1.16 + +* Added missing documentation. + +* Fixed the generating of empty mappings in SourceNode. + +## 0.1.15 + +* Added SourceMapGenerator.applySourceMap. + +## 0.1.14 + +* The sourceRoot is now handled consistently. + +## 0.1.13 + +* Added SourceMapGenerator.fromSourceMap. + +## 0.1.12 + +* SourceNode now generates empty mappings too. + +## 0.1.11 + +* Added name support to SourceNode. + +## 0.1.10 + +* Added sourcesContent support to the customer and generator. diff --git a/node_modules/source-map/LICENSE b/node_modules/source-map/LICENSE new file mode 100644 index 0000000..ed1b7cf --- /dev/null +++ b/node_modules/source-map/LICENSE @@ -0,0 +1,28 @@ + +Copyright (c) 2009-2011, Mozilla Foundation and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the names of the Mozilla Foundation nor the names of project + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/source-map/README.md b/node_modules/source-map/README.md new file mode 100644 index 0000000..99d3861 --- /dev/null +++ b/node_modules/source-map/README.md @@ -0,0 +1,729 @@ +# Source Map + +[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map) + +[![NPM](https://nodei.co/npm/source-map.png?downloads=true&downloadRank=true)](https://www.npmjs.com/package/source-map) + +This is a library to generate and consume the source map format +[described here][format]. + +[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit + +## Use with Node + + $ npm install source-map + +## Use on the Web + + + +-------------------------------------------------------------------------------- + + + + + +## Table of Contents + +- [Examples](#examples) + - [Consuming a source map](#consuming-a-source-map) + - [Generating a source map](#generating-a-source-map) + - [With SourceNode (high level API)](#with-sourcenode-high-level-api) + - [With SourceMapGenerator (low level API)](#with-sourcemapgenerator-low-level-api) +- [API](#api) + - [SourceMapConsumer](#sourcemapconsumer) + - [new SourceMapConsumer(rawSourceMap)](#new-sourcemapconsumerrawsourcemap) + - [SourceMapConsumer.prototype.computeColumnSpans()](#sourcemapconsumerprototypecomputecolumnspans) + - [SourceMapConsumer.prototype.originalPositionFor(generatedPosition)](#sourcemapconsumerprototypeoriginalpositionforgeneratedposition) + - [SourceMapConsumer.prototype.generatedPositionFor(originalPosition)](#sourcemapconsumerprototypegeneratedpositionfororiginalposition) + - [SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)](#sourcemapconsumerprototypeallgeneratedpositionsfororiginalposition) + - [SourceMapConsumer.prototype.hasContentsOfAllSources()](#sourcemapconsumerprototypehascontentsofallsources) + - [SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])](#sourcemapconsumerprototypesourcecontentforsource-returnnullonmissing) + - [SourceMapConsumer.prototype.eachMapping(callback, context, order)](#sourcemapconsumerprototypeeachmappingcallback-context-order) + - [SourceMapGenerator](#sourcemapgenerator) + - [new SourceMapGenerator([startOfSourceMap])](#new-sourcemapgeneratorstartofsourcemap) + - 
[SourceMapGenerator.fromSourceMap(sourceMapConsumer)](#sourcemapgeneratorfromsourcemapsourcemapconsumer) + - [SourceMapGenerator.prototype.addMapping(mapping)](#sourcemapgeneratorprototypeaddmappingmapping) + - [SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)](#sourcemapgeneratorprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])](#sourcemapgeneratorprototypeapplysourcemapsourcemapconsumer-sourcefile-sourcemappath) + - [SourceMapGenerator.prototype.toString()](#sourcemapgeneratorprototypetostring) + - [SourceNode](#sourcenode) + - [new SourceNode([line, column, source[, chunk[, name]]])](#new-sourcenodeline-column-source-chunk-name) + - [SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])](#sourcenodefromstringwithsourcemapcode-sourcemapconsumer-relativepath) + - [SourceNode.prototype.add(chunk)](#sourcenodeprototypeaddchunk) + - [SourceNode.prototype.prepend(chunk)](#sourcenodeprototypeprependchunk) + - [SourceNode.prototype.setSourceContent(sourceFile, sourceContent)](#sourcenodeprototypesetsourcecontentsourcefile-sourcecontent) + - [SourceNode.prototype.walk(fn)](#sourcenodeprototypewalkfn) + - [SourceNode.prototype.walkSourceContents(fn)](#sourcenodeprototypewalksourcecontentsfn) + - [SourceNode.prototype.join(sep)](#sourcenodeprototypejoinsep) + - [SourceNode.prototype.replaceRight(pattern, replacement)](#sourcenodeprototypereplacerightpattern-replacement) + - [SourceNode.prototype.toString()](#sourcenodeprototypetostring) + - [SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])](#sourcenodeprototypetostringwithsourcemapstartofsourcemap) + + + +## Examples + +### Consuming a source map + +```js +var rawSourceMap = { + version: 3, + file: 'min.js', + names: ['bar', 'baz', 'n'], + sources: ['one.js', 'two.js'], + sourceRoot: 'http://example.com/www/js/', + mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA' +}; + +var smc = new SourceMapConsumer(rawSourceMap); + +console.log(smc.sources); +// [ 'http://example.com/www/js/one.js', +// 'http://example.com/www/js/two.js' ] + +console.log(smc.originalPositionFor({ + line: 2, + column: 28 +})); +// { source: 'http://example.com/www/js/two.js', +// line: 2, +// column: 10, +// name: 'n' } + +console.log(smc.generatedPositionFor({ + source: 'http://example.com/www/js/two.js', + line: 2, + column: 10 +})); +// { line: 2, column: 28 } + +smc.eachMapping(function (m) { + // ... +}); +``` + +### Generating a source map + +In depth guide: +[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/) + +#### With SourceNode (high level API) + +```js +function compile(ast) { + switch (ast.type) { + case 'BinaryExpression': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + [compile(ast.left), " + ", compile(ast.right)] + ); + case 'Literal': + return new SourceNode( + ast.location.line, + ast.location.column, + ast.location.source, + String(ast.value) + ); + // ... 
+ default: + throw new Error("Bad AST"); + } +} + +var ast = parse("40 + 2", "add.js"); +console.log(compile(ast).toStringWithSourceMap({ + file: 'add.js' +})); +// { code: '40 + 2', +// map: [object SourceMapGenerator] } +``` + +#### With SourceMapGenerator (low level API) + +```js +var map = new SourceMapGenerator({ + file: "source-mapped.js" +}); + +map.addMapping({ + generated: { + line: 10, + column: 35 + }, + source: "foo.js", + original: { + line: 33, + column: 2 + }, + name: "christopher" +}); + +console.log(map.toString()); +// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}' +``` + +## API + +Get a reference to the module: + +```js +// Node.js +var sourceMap = require('source-map'); + +// Browser builds +var sourceMap = window.sourceMap; + +// Inside Firefox +const sourceMap = require("devtools/toolkit/sourcemap/source-map.js"); +``` + +### SourceMapConsumer + +A SourceMapConsumer instance represents a parsed source map which we can query +for information about the original file positions by giving it a file position +in the generated source. + +#### new SourceMapConsumer(rawSourceMap) + +The only parameter is the raw source map (either as a string which can be +`JSON.parse`'d, or an object). According to the spec, source maps have the +following attributes: + +* `version`: Which version of the source map spec this map is following. + +* `sources`: An array of URLs to the original source files. + +* `names`: An array of identifiers which can be referenced by individual + mappings. + +* `sourceRoot`: Optional. The URL root from which all sources are relative. + +* `sourcesContent`: Optional. An array of contents of the original source files. + +* `mappings`: A string of base64 VLQs which contain the actual mappings. + +* `file`: Optional. The generated filename this source map is associated with. + +```js +var consumer = new sourceMap.SourceMapConsumer(rawSourceMapJsonData); +``` + +#### SourceMapConsumer.prototype.computeColumnSpans() + +Compute the last column for each generated mapping. The last column is +inclusive. + +```js +// Before: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1 }, +// { line: 2, +// column: 10 }, +// { line: 2, +// column: 20 } ] + +consumer.computeColumnSpans(); + +// After: +consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" }) +// [ { line: 2, +// column: 1, +// lastColumn: 9 }, +// { line: 2, +// column: 10, +// lastColumn: 19 }, +// { line: 2, +// column: 20, +// lastColumn: Infinity } ] + +``` + +#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) + +Returns the original source, line, and column information for the generated +source's line and column positions provided. The only argument is an object with +the following properties: + +* `line`: The line number in the generated source. + +* `column`: The column number in the generated source. + +* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or + `SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest + element that is smaller than or greater than the one we are searching for, + respectively, if the exact element cannot be found. Defaults to + `SourceMapConsumer.GREATEST_LOWER_BOUND`. + +and an object is returned with the following properties: + +* `source`: The original source file, or null if this information is not + available. 
+
+* `line`: The line number in the original source, or null if this information is
+ not available.
+
+* `column`: The column number in the original source, or null if this
+ information is not available.
+
+* `name`: The original identifier, or null if this information is not available.
+
+```js
+consumer.originalPositionFor({ line: 2, column: 10 })
+// { source: 'foo.coffee',
+// line: 2,
+// column: 2,
+// name: null }
+
+consumer.originalPositionFor({ line: 99999999999999999, column: 999999999999999 })
+// { source: null,
+// line: null,
+// column: null,
+// name: null }
+```
+
+#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
+
+Returns the generated line and column information for the original source,
+line, and column positions provided. The only argument is an object with
+the following properties:
+
+* `source`: The filename of the original source.
+
+* `line`: The line number in the original source.
+
+* `column`: The column number in the original source.
+
+and an object is returned with the following properties:
+
+* `line`: The line number in the generated source, or null.
+
+* `column`: The column number in the generated source, or null.
+
+```js
+consumer.generatedPositionFor({ source: "example.js", line: 2, column: 10 })
+// { line: 1,
+// column: 56 }
+```
+
+#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
+
+Returns all generated line and column information for the original source, line,
+and column provided. If no column is provided, returns all mappings
+corresponding to either the line we are searching for or the next closest line
+that has any mappings. Otherwise, returns all mappings corresponding to the
+given line and either the column we are searching for or the next closest column
+that has any offsets.
+
+The only argument is an object with the following properties:
+
+* `source`: The filename of the original source.
+
+* `line`: The line number in the original source.
+
+* `column`: Optional. The column number in the original source.
+
+and an array of objects is returned, each with the following properties:
+
+* `line`: The line number in the generated source, or null.
+
+* `column`: The column number in the generated source, or null.
+
+```js
+consumer.allGeneratedPositionsFor({ line: 2, source: "foo.coffee" })
+// [ { line: 2,
+// column: 1 },
+// { line: 2,
+// column: 10 },
+// { line: 2,
+// column: 20 } ]
+```
+
+#### SourceMapConsumer.prototype.hasContentsOfAllSources()
+
+Return true if we have the embedded source content for every source listed in
+the source map, false otherwise.
+
+In other words, if this method returns `true`, then
+`consumer.sourceContentFor(s)` will succeed for every source `s` in
+`consumer.sources`.
+
+```js
+// ...
+if (consumer.hasContentsOfAllSources()) {
+ consumerReadyCallback(consumer);
+} else {
+ fetchSources(consumer, consumerReadyCallback);
+}
+// ...
+```
+
+#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
+
+Returns the original source content for the source provided. The only
+argument is the URL of the original source file.
+
+If the source content for the given source is not found, then an error is
+thrown. Optionally, pass `true` as the second param to have `null` returned
+instead.
+
+```js
+consumer.sources
+// [ "my-cool-lib.clj" ]
+
+consumer.sourceContentFor("my-cool-lib.clj")
+// "..."
+ +consumer.sourceContentFor("this is not in the source map"); +// Error: "this is not in the source map" is not in the source map + +consumer.sourceContentFor("this is not in the source map", true); +// null +``` + +#### SourceMapConsumer.prototype.eachMapping(callback, context, order) + +Iterate over each mapping between an original source/line/column and a +generated line/column in this source map. + +* `callback`: The function that is called with each mapping. Mappings have the + form `{ source, generatedLine, generatedColumn, originalLine, originalColumn, + name }` + +* `context`: Optional. If specified, this object will be the value of `this` + every time that `callback` is called. + +* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or + `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over + the mappings sorted by the generated file's line/column order or the + original's source/line/column order, respectively. Defaults to + `SourceMapConsumer.GENERATED_ORDER`. + +```js +consumer.eachMapping(function (m) { console.log(m); }) +// ... +// { source: 'illmatic.js', +// generatedLine: 1, +// generatedColumn: 0, +// originalLine: 1, +// originalColumn: 0, +// name: null } +// { source: 'illmatic.js', +// generatedLine: 2, +// generatedColumn: 0, +// originalLine: 2, +// originalColumn: 0, +// name: null } +// ... +``` +### SourceMapGenerator + +An instance of the SourceMapGenerator represents a source map which is being +built incrementally. + +#### new SourceMapGenerator([startOfSourceMap]) + +You may pass an object with the following properties: + +* `file`: The filename of the generated source that this source map is + associated with. + +* `sourceRoot`: A root for all relative URLs in this source map. + +* `skipValidation`: Optional. When `true`, disables validation of mappings as + they are added. This can improve performance but should be used with + discretion, as a last resort. Even then, one should avoid using this flag when + running tests, if possible. + +```js +var generator = new sourceMap.SourceMapGenerator({ + file: "my-generated-javascript-file.js", + sourceRoot: "http://example.com/app/js/" +}); +``` + +#### SourceMapGenerator.fromSourceMap(sourceMapConsumer) + +Creates a new `SourceMapGenerator` from an existing `SourceMapConsumer` instance. + +* `sourceMapConsumer` The SourceMap. + +```js +var generator = sourceMap.SourceMapGenerator.fromSourceMap(consumer); +``` + +#### SourceMapGenerator.prototype.addMapping(mapping) + +Add a single mapping from original source line and column to the generated +source's line and column for this source map being created. The mapping object +should have the following properties: + +* `generated`: An object with the generated line and column positions. + +* `original`: An object with the original line and column positions. + +* `source`: The original source file (relative to the sourceRoot). + +* `name`: An optional original token name for this mapping. + +```js +generator.addMapping({ + source: "module-one.scm", + original: { line: 128, column: 0 }, + generated: { line: 3, column: 456 } +}) +``` + +#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for an original source file. + +* `sourceFile` the URL of the original source file. + +* `sourceContent` the content of the source file. 
+ +```js +generator.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]]) + +Applies a SourceMap for a source file to the SourceMap. +Each mapping to the supplied source file is rewritten using the +supplied SourceMap. Note: The resolution for the resulting mappings +is the minimum of this map and the supplied map. + +* `sourceMapConsumer`: The SourceMap to be applied. + +* `sourceFile`: Optional. The filename of the source file. + If omitted, sourceMapConsumer.file will be used, if it exists. + Otherwise an error will be thrown. + +* `sourceMapPath`: Optional. The dirname of the path to the SourceMap + to be applied. If relative, it is relative to the SourceMap. + + This parameter is needed when the two SourceMaps aren't in the same + directory, and the SourceMap to be applied contains relative source + paths. If so, those relative source paths need to be rewritten + relative to the SourceMap. + + If omitted, it is assumed that both SourceMaps are in the same directory, + thus not needing any rewriting. (Supplying `'.'` has the same effect.) + +#### SourceMapGenerator.prototype.toString() + +Renders the source map being generated to a string. + +```js +generator.toString() +// '{"version":3,"sources":["module-one.scm"],"names":[],"mappings":"...snip...","file":"my-generated-javascript-file.js","sourceRoot":"http://example.com/app/js/"}' +``` + +### SourceNode + +SourceNodes provide a way to abstract over interpolating and/or concatenating +snippets of generated JavaScript source code, while maintaining the line and +column information associated between those snippets and the original source +code. This is useful as the final intermediate representation a compiler might +use before outputting the generated JS and source map. + +#### new SourceNode([line, column, source[, chunk[, name]]]) + +* `line`: The original line number associated with this source node, or null if + it isn't associated with an original line. + +* `column`: The original column number associated with this source node, or null + if it isn't associated with an original column. + +* `source`: The original source's filename; null if no filename is provided. + +* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see + below. + +* `name`: Optional. The original identifier. + +```js +var node = new SourceNode(1, 2, "a.cpp", [ + new SourceNode(3, 4, "b.cpp", "extern int status;\n"), + new SourceNode(5, 6, "c.cpp", "std::string* make_string(size_t n);\n"), + new SourceNode(7, 8, "d.cpp", "int main(int argc, char** argv) {}\n"), +]); +``` + +#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath]) + +Creates a SourceNode from generated code and a SourceMapConsumer. + +* `code`: The generated code + +* `sourceMapConsumer` The SourceMap for the generated code + +* `relativePath` The optional path that relative sources in `sourceMapConsumer` + should be relative to. + +```js +var consumer = new SourceMapConsumer(fs.readFileSync("path/to/my-file.js.map")); +var node = SourceNode.fromStringWithSourceMap(fs.readFileSync("path/to/my-file.js"), + consumer); +``` + +#### SourceNode.prototype.add(chunk) + +Add a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. 
+ +```js +node.add(" + "); +node.add(otherNode); +node.add([leftHandOperandNode, " + ", rightHandOperandNode]); +``` + +#### SourceNode.prototype.prepend(chunk) + +Prepend a chunk of generated JS to this source node. + +* `chunk`: A string snippet of generated JS code, another instance of + `SourceNode`, or an array where each member is one of those things. + +```js +node.prepend("/** Build Id: f783haef86324gf **/\n\n"); +``` + +#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent) + +Set the source content for a source file. This will be added to the +`SourceMap` in the `sourcesContent` field. + +* `sourceFile`: The filename of the source file + +* `sourceContent`: The content of the source file + +```js +node.setSourceContent("module-one.scm", + fs.readFileSync("path/to/module-one.scm")) +``` + +#### SourceNode.prototype.walk(fn) + +Walk over the tree of JS snippets in this node and its children. The walking +function is called once for each snippet of JS and is passed that snippet and +the its original associated source's line/column location. + +* `fn`: The traversal function. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.walk(function (code, loc) { console.log("WALK:", code, loc); }) +// WALK: uno { source: 'b.js', line: 3, column: 4, name: null } +// WALK: dos { source: 'a.js', line: 1, column: 2, name: null } +// WALK: tres { source: 'a.js', line: 1, column: 2, name: null } +// WALK: quatro { source: 'c.js', line: 5, column: 6, name: null } +``` + +#### SourceNode.prototype.walkSourceContents(fn) + +Walk over the tree of SourceNodes. The walking function is called for each +source file content and is passed the filename and source content. + +* `fn`: The traversal function. + +```js +var a = new SourceNode(1, 2, "a.js", "generated from a"); +a.setSourceContent("a.js", "original a"); +var b = new SourceNode(1, 2, "b.js", "generated from b"); +b.setSourceContent("b.js", "original b"); +var c = new SourceNode(1, 2, "c.js", "generated from c"); +c.setSourceContent("c.js", "original c"); + +var node = new SourceNode(null, null, null, [a, b, c]); +node.walkSourceContents(function (source, contents) { console.log("WALK:", source, ":", contents); }) +// WALK: a.js : original a +// WALK: b.js : original b +// WALK: c.js : original c +``` + +#### SourceNode.prototype.join(sep) + +Like `Array.prototype.join` except for SourceNodes. Inserts the separator +between each of this source node's children. + +* `sep`: The separator. + +```js +var lhs = new SourceNode(1, 2, "a.rs", "my_copy"); +var operand = new SourceNode(3, 4, "a.rs", "="); +var rhs = new SourceNode(5, 6, "a.rs", "orig.clone()"); + +var node = new SourceNode(null, null, null, [ lhs, operand, rhs ]); +var joinedNode = node.join(" "); +``` + +#### SourceNode.prototype.replaceRight(pattern, replacement) + +Call `String.prototype.replace` on the very right-most source snippet. Useful +for trimming white space from the end of a source node, etc. + +* `pattern`: The pattern to replace. + +* `replacement`: The thing to replace the pattern with. + +```js +// Trim trailing white space. +node.replaceRight(/\s*$/, ""); +``` + +#### SourceNode.prototype.toString() + +Return the string representation of this source node. Walks over the tree and +concatenates all the various snippets together to one string. 
+ +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toString() +// 'unodostresquatro' +``` + +#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap]) + +Returns the string representation of this tree of source nodes, plus a +SourceMapGenerator which contains all the mappings between the generated and +original sources. + +The arguments are the same as those to `new SourceMapGenerator`. + +```js +var node = new SourceNode(1, 2, "a.js", [ + new SourceNode(3, 4, "b.js", "uno"), + "dos", + [ + "tres", + new SourceNode(5, 6, "c.js", "quatro") + ] +]); + +node.toStringWithSourceMap({ file: "my-output-file.js" }) +// { code: 'unodostresquatro', +// map: [object SourceMapGenerator] } +``` diff --git a/node_modules/source-map/dist/source-map.debug.js b/node_modules/source-map/dist/source-map.debug.js new file mode 100644 index 0000000..a3bcda1 --- /dev/null +++ b/node_modules/source-map/dist/source-map.debug.js @@ -0,0 +1,3056 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; +/******/ +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.loaded = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; +/******/ +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; +/******/ +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; +/******/ +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }, +/* 1 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. 
+ */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. + */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. 
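+     // Illustrative note (not in the upstream source): with sourceRoot
+     // 'http://example.com/src' and sourceFile
+     // 'http://example.com/src/app.min.js', the call below reduces
+     // sourceFile to 'app.min.js'.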
+ if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
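+    * (Illustrative note, not from the upstream docs: deltas, not absolute
+    * values, are written. Two mappings on the same generated line at
+    * columns 0 and 7, with no original source attached, serialize to the
+    * two segments "A" and "O" separated by a comma, i.e. "A,O".)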
+ */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. + */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }, +/* 2 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. + // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. + */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
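+       // Illustrative trace (not part of the upstream source): encoding 16
+       // gives toVLQSigned(16) === 32. The first pass emits the low five
+       // bits (00000) with this continuation bit set (digit 32 -> 'g'); the
+       // second pass emits the remaining bit (digit 1 -> 'B'), so
+       // base64VLQ.encode(16) === 'gB'.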
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }, +/* 3 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }, +/* 4 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
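+ * (Illustrative, not from the upstream docs: getArg({ file: 'a.js' }, 'file')
+ * returns 'a.js'; getArg({}, 'file', null) returns null; getArg({}, 'file')
+ * throws because no default value was supplied.)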
+ */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consequtive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
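+ * (Illustrative, not from the upstream docs: join('a/b', 'c') yields 'a/b/c',
+ * while join('http://example.com/a', '/b') yields 'http://example.com/b',
+ * because the absolute path replaces the URL's path portion.)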
+ */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + +/***/ }, +/* 5 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + + /** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = util.toSetString(aStr); + var isDuplicate = has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + this._set[sStr] = idx; + } + }; + + /** + * Is the given string a member of this set? + * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? + * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. 
+ */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }, +/* 6 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }, +/* 7 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? 
new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. + + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. 
Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. 
+ while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. 
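+     // (Illustrative, not in the upstream source: './foo/../bar.js'
+     // normalizes to 'bar.js' here.)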
+ .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. + * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. 
+ */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. 
+ */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). + if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. 
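+ * (Illustrative usage, not from the upstream docs; the positions are made up:
+ * consumer.originalPositionFor({ line: 2, column: 10 }) might return
+ * { source: 'foo.js', line: 1, column: 4, name: 'bar' }, or all-null fields
+ * when no mapping covers that generated position.)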
+ */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. 
+ * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. 
+ * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. 
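+ // As a rough illustration (values are made up): a section whose "offset"
+ // is {line: 100, column: 10} is stored with a 1-based generatedOffset of
+ // line 101, column 11. A query for generated line 150 therefore lands in
+ // that section and is re-based to line 150 - (101 - 1) = 50 before being
+ // handed to the section's own consumer; the column is only shifted when
+ // the query sits on the section's first generated line.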
+ var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
+ */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }, +/* 8 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. 
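+ // For example, with a numeric comparator and the haystack [2, 4, 8], a
+ // search for 6 never hits an exact match: GREATEST_LOWER_BOUND settles on
+ // the index of 4, while LEAST_UPPER_BOUND settles on the index of 8 (and
+ // returns -1 when the needle is past the last element, since no upper
+ // bound exists).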
+ if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }, +/* 9 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. + + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. 
+ * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }, +/* 10 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! 
+ var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are removed from this array, by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var shiftNextLine = function() { + var lineContents = remainingLines.shift(); + // The last line of a file might not have a newline. + var newLine = remainingLines.shift() || ""; + return lineContents + newLine; + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[0]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[0] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. 
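+ // For instance (illustrative values): if the first mapping starts at
+ // generated line 3, column 4, then lines 1 and 2 are appended as plain
+ // unmapped strings, and the first 4 characters of line 3 are appended
+ // unmapped before the mapped chunk for that line begins.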
+ while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[0]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[0] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLines.length > 0) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. 
+ * + * @param aSep The separator. + */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. 
+ */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ } +/******/ ]) +}); +; +//# sourceMappingURL=data:application/json;base64,{"version":3,"sources":["webpack:///webpack/universalModuleDefinition","webpack:///webpack/bootstrap 
e1344ff22ac06f945ee4","webpack:///./source-map.js","webpack:///./lib/source-map-generator.js","webpack:///./lib/base64-vlq.js","webpack:///./lib/base64.js","webpack:///./lib/util.js","webpack:///./lib/array-set.js","webpack:///./lib/mapping-list.js","webpack:///./lib/source-map-consumer.js","webpack:///./lib/binary-search.js","webpack:///./lib/quick-sort.js","webpack:///./lib/source-node.js"],"names":[],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA,uBAAe;AACf;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACPA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA,MAAK;AACL;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,2CAA0C,SAAS;AACnD;AACA;;AAEA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;ACnZA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4DAA2D;AAC3D,qBAAoB;AACpB;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,IAAG;;AAEH;AACA;AACA;;;;;;;AC3IA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,iBAAgB;AAChB,iBAAgB;;AAEhB,oBAAmB;AACnB,qBAAoB;;AAEpB,iBAAgB;AAChB,iBAAgB;;AAEhB,iBAAgB;AAC
hB,kBAAiB;;AAEjB;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;AClEA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,+CAA8C,QAAQ;AACtD;AACA;AACA;AACA,MAAK;AACL;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,4BAA2B,QAAQ;AACnC;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;;;;;;AChaA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,uCAAsC,SAAS;AAC/C;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;ACvGA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iBAAgB;AAChB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AC9EA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA,oBAAmB;AACnB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;;AAEX;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,4BAA2B,MAAM;AACjC;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA,uDAAsD,YAAY;AAClE;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA,oCAAmC;AACnC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA,0BAAyB,cAAc;AACvC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,wBAAuB,wCAAwC;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,mBAAmB,EAAE;AACpE;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kBAAiB,oBAAoB;AACrC;AACA;AACA;AACA;AACA;AACA,8BAA6B,MAAM;AACnC;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA,uDAAsD;AACtD;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AA
CA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA,IAAG;AACH;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C,sBAAqB,+CAA+C;AACpE;AACA;AACA;AACA;AACA;AACA,EAAC;;AAED;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA,QAAO;AACP;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,2BAA2B;AAC9C;AACA;AACA,sBAAqB,4BAA4B;AACjD;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;;;;;;;ACzjCA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;;;;;;AC9GA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA,YAAW,OAAO;AAClB;AACA,YAAW,OAAO;AAClB;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,oBAAmB,OAAO;AAC1B;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA,YAAW,MAAM;AACjB;AACA,YAAW,SAAS;AACpB;AACA;AACA;AACA;AACA;;;;;;;ACjHA,iBAAgB,oBAAoB;AACpC;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;;AAEL;;AAEA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,kCAAiC,QAAQ;AACzC;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,8CAA6C,SAAS;AACtD;AACA;AACA;AACA;AACA;AACA;AACA,qBAAoB;AACpB;AACA;AACA
,uCAAsC;AACtC;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gBAAe,WAAW;AAC1B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,gDAA+C,SAAS;AACxD;AACA;AACA;AACA;;AAEA;AACA,0CAAyC,SAAS;AAClD;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAG;AACH;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,YAAW;AACX;AACA;AACA;AACA,YAAW;AACX;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA;AACA,MAAK;AACL;AACA;AACA;AACA;AACA;AACA,QAAO;AACP;AACA;AACA;AACA,6CAA4C,cAAc;AAC1D;AACA;AACA;AACA;AACA;AACA;AACA;AACA,UAAS;AACT;AACA;AACA;AACA;AACA;AACA,cAAa;AACb;AACA;AACA;AACA,cAAa;AACb;AACA,YAAW;AACX;AACA,QAAO;AACP;AACA;AACA;AACA,IAAG;AACH;AACA;AACA,IAAG;;AAEH,WAAU;AACV;;AAEA","file":"source-map.debug.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"sourceMap\"] = factory();\n\telse\n\t\troot[\"sourceMap\"] = factory();\n})(this, function() {\nreturn \n\n\n/** WEBPACK FOOTER **\n ** webpack/universalModuleDefinition\n **/"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n/** WEBPACK FOOTER **\n ** webpack/bootstrap e1344ff22ac06f945ee4\n **/","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./source-map.js\n ** module id = 0\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. 
See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n *   - file: The filename of the generated source.\n *   - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n  if (!aArgs) {\n    aArgs = {};\n  }\n  this._file = util.getArg(aArgs, 'file', null);\n  this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n  this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n  this._mappings = new MappingList();\n  this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n  function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n    var sourceRoot = aSourceMapConsumer.sourceRoot;\n    var generator = new SourceMapGenerator({\n      file: aSourceMapConsumer.file,\n      sourceRoot: sourceRoot\n    });\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      var newMapping = {\n        generated: {\n          line: mapping.generatedLine,\n          column: mapping.generatedColumn\n        }\n      };\n\n      if (mapping.source != null) {\n        newMapping.source = mapping.source;\n        if (sourceRoot != null) {\n          newMapping.source = util.relative(sourceRoot, newMapping.source);\n        }\n\n        newMapping.original = {\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        };\n\n        if (mapping.name != null) {\n          newMapping.name = mapping.name;\n        }\n      }\n\n      generator.addMapping(newMapping);\n    });\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        generator.setSourceContent(sourceFile, content);\n      }\n    });\n    return generator;\n  };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n *   - generated: An object with the generated line and column positions.\n *   - original: An object with the original line and column positions.\n *   - source: The original source file (relative to the sourceRoot).\n *   - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n  function SourceMapGenerator_addMapping(aArgs) {\n    var generated = util.getArg(aArgs, 'generated');\n    var original = util.getArg(aArgs, 'original', null);\n    var source = util.getArg(aArgs, 'source', null);\n    var name = util.getArg(aArgs, 'name', null);\n\n    if (!this._skipValidation) {\n      this._validateMapping(generated, original, source, name);\n    }\n\n    if (source != null) {\n      source = String(source);\n      if (!this._sources.has(source)) {\n        this._sources.add(source);\n      }\n    }\n\n    if (name != null) {\n      name = String(name);\n      if (!this._names.has(name)) {\n        this._names.add(name);\n      }\n    }\n\n    this._mappings.add({\n      generatedLine: generated.line,\n      generatedColumn: generated.column,\n      originalLine: original != null && original.line,\n      originalColumn: original != null && original.column,\n      source: source,\n      name: name\n    });\n  };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n  function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n    var source = aSourceFile;\n    if (this._sourceRoot != null) {\n      source = util.relative(this._sourceRoot, source);\n    }\n\n    if (aSourceContent != null) {\n      // Add the source content to the _sourcesContents map.\n      // Create a new _sourcesContents map if the property is null.\n      if (!this._sourcesContents) {\n        this._sourcesContents = Object.create(null);\n      }\n      this._sourcesContents[util.toSetString(source)] = aSourceContent;\n    } else if (this._sourcesContents) {\n      // Remove the source file from the _sourcesContents map.\n      // If the _sourcesContents map is empty, set the property to null.\n      delete this._sourcesContents[util.toSetString(source)];\n      if (Object.keys(this._sourcesContents).length === 0) {\n        this._sourcesContents = null;\n      }\n    }\n  };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n *        If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n *        to be applied. If relative, it is relative to the SourceMapConsumer.\n *        This parameter is needed when the two source maps aren't in the same\n *        directory, and the source map to be applied contains relative source\n *        paths. 
If so, those relative source paths need to be rewritten\n *        relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n  function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n    var sourceFile = aSourceFile;\n    // If aSourceFile is omitted, we will use the file property of the SourceMap\n    if (aSourceFile == null) {\n      if (aSourceMapConsumer.file == null) {\n        throw new Error(\n          'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n          'or the source map\\'s \"file\" property. Both were omitted.'\n        );\n      }\n      sourceFile = aSourceMapConsumer.file;\n    }\n    var sourceRoot = this._sourceRoot;\n    // Make \"sourceFile\" relative if an absolute Url is passed.\n    if (sourceRoot != null) {\n      sourceFile = util.relative(sourceRoot, sourceFile);\n    }\n    // Applying the SourceMap can add and remove items from the sources and\n    // the names array.\n    var newSources = new ArraySet();\n    var newNames = new ArraySet();\n\n    // Find mappings for the \"sourceFile\"\n    this._mappings.unsortedForEach(function (mapping) {\n      if (mapping.source === sourceFile && mapping.originalLine != null) {\n        // Check if it can be mapped by the source map, then update the mapping.\n        var original = aSourceMapConsumer.originalPositionFor({\n          line: mapping.originalLine,\n          column: mapping.originalColumn\n        });\n        if (original.source != null) {\n          // Copy mapping\n          mapping.source = original.source;\n          if (aSourceMapPath != null) {\n            mapping.source = util.join(aSourceMapPath, mapping.source)\n          }\n          if (sourceRoot != null) {\n            mapping.source = util.relative(sourceRoot, mapping.source);\n          }\n          mapping.originalLine = original.line;\n          mapping.originalColumn = original.column;\n          if (original.name != null) {\n            mapping.name = original.name;\n          }\n        }\n      }\n\n      var source = mapping.source;\n      if (source != null && !newSources.has(source)) {\n        newSources.add(source);\n      }\n\n      var name = mapping.name;\n      if (name != null && !newNames.has(name)) {\n        newNames.add(name);\n      }\n\n    }, this);\n    this._sources = newSources;\n    this._names = newNames;\n\n    // Copy sourcesContents of applied map.\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aSourceMapPath != null) {\n          sourceFile = util.join(aSourceMapPath, sourceFile);\n        }\n        if (sourceRoot != null) {\n          sourceFile = util.relative(sourceRoot, sourceFile);\n        }\n        this.setSourceContent(sourceFile, content);\n      }\n    }, this);\n  };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n *   1. Just the generated position.\n *   2. The Generated position, original position, and original source.\n *   3. 
Generated and original position, original source, as well as a name\n *      token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n  function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n                                              aName) {\n    if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n        && aGenerated.line > 0 && aGenerated.column >= 0\n        && !aOriginal && !aSource && !aName) {\n      // Case 1.\n      return;\n    }\n    else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n             && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n             && aGenerated.line > 0 && aGenerated.column >= 0\n             && aOriginal.line > 0 && aOriginal.column >= 0\n             && aSource) {\n      // Cases 2 and 3.\n      return;\n    }\n    else {\n      throw new Error('Invalid mapping: ' + JSON.stringify({\n        generated: aGenerated,\n        source: aSource,\n        original: aOriginal,\n        name: aName\n      }));\n    }\n  };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n  function SourceMapGenerator_serializeMappings() {\n    var previousGeneratedColumn = 0;\n    var previousGeneratedLine = 1;\n    var previousOriginalColumn = 0;\n    var previousOriginalLine = 0;\n    var previousName = 0;\n    var previousSource = 0;\n    var result = '';\n    var next;\n    var mapping;\n    var nameIdx;\n    var sourceIdx;\n\n    var mappings = this._mappings.toArray();\n    for (var i = 0, len = mappings.length; i < len; i++) {\n      mapping = mappings[i];\n      next = ''\n\n      if (mapping.generatedLine !== previousGeneratedLine) {\n        previousGeneratedColumn = 0;\n        while (mapping.generatedLine !== previousGeneratedLine) {\n          next += ';';\n          previousGeneratedLine++;\n        }\n      }\n      else {\n        if (i > 0) {\n          if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n            continue;\n          }\n          next += ',';\n        }\n      }\n\n      next += base64VLQ.encode(mapping.generatedColumn\n                                 - previousGeneratedColumn);\n      previousGeneratedColumn = mapping.generatedColumn;\n\n      if (mapping.source != null) {\n        sourceIdx = this._sources.indexOf(mapping.source);\n        next += base64VLQ.encode(sourceIdx - previousSource);\n        previousSource = sourceIdx;\n\n        // lines are stored 0-based in SourceMap spec version 3\n        next += base64VLQ.encode(mapping.originalLine - 1\n                                   - previousOriginalLine);\n        previousOriginalLine = mapping.originalLine - 1;\n\n        next += base64VLQ.encode(mapping.originalColumn\n                                   - previousOriginalColumn);\n        previousOriginalColumn = mapping.originalColumn;\n\n        if (mapping.name != null) {\n          nameIdx = this._names.indexOf(mapping.name);\n          next += base64VLQ.encode(nameIdx - previousName);\n          previousName = nameIdx;\n        }\n      }\n\n      result += next;\n    }\n\n    return result;\n  };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n  function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n    return aSources.map(function (source) {\n     
 if (!this._sourcesContents) {\n        return null;\n      }\n      if (aSourceRoot != null) {\n        source = util.relative(aSourceRoot, source);\n      }\n      var key = util.toSetString(source);\n      return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n        ? this._sourcesContents[key]\n        : null;\n    }, this);\n  };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n  function SourceMapGenerator_toJSON() {\n    var map = {\n      version: this._version,\n      sources: this._sources.toArray(),\n      names: this._names.toArray(),\n      mappings: this._serializeMappings()\n    };\n    if (this._file != null) {\n      map.file = this._file;\n    }\n    if (this._sourceRoot != null) {\n      map.sourceRoot = this._sourceRoot;\n    }\n    if (this._sourcesContents) {\n      map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n    }\n\n    return map;\n  };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n  function SourceMapGenerator_toString() {\n    return JSON.stringify(this.toJSON());\n  };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-map-generator.js\n ** module id = 1\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n *  * Redistributions of source code must retain the above copyright\n *    notice, this list of conditions and the following disclaimer.\n *  * Redistributions in binary form must reproduce the above\n *    copyright notice, this list of conditions and the following\n *    disclaimer in the documentation and/or other materials provided\n *    with the distribution.\n *  * Neither the name of Google Inc. nor the names of its\n *    contributors may be used to endorse or promote products derived\n *    from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. 
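// Illustrative sketch (assumed usage, not part of the bundled sources): externalizing a
// SourceMapGenerator via the toJSON()/toString() methods shown above. addMapping() and
// setSourceContent() are the generator methods defined earlier in this bundle.
var gen = new SourceMapGenerator({ file: 'out.js', sourceRoot: '' });
gen.addMapping({
  generated: { line: 1, column: 0 },
  original: { line: 1, column: 0 },
  source: 'foo.js',
  name: 'src'
});
gen.setSourceContent('foo.js', 'var src = 42;');
var map = gen.toJSON();
// map.version === 3, map.sources -> ['foo.js'], map.file -> 'out.js',
// and sourcesContent is present because setSourceContent() was called.
var serialized = gen.toString(); // same as JSON.stringify(gen.toJSON())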
For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n//   Continuation\n//   |    Sign\n//   |    |\n//   V    V\n//   101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n *   2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n  return aValue < 0\n    ? ((-aValue) << 1) + 1\n    : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit.  For example, as decimals:\n *   2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n *   4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n  var isNegative = (aValue & 1) === 1;\n  var shifted = aValue >> 1;\n  return isNegative\n    ? -shifted\n    : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n  var encoded = \"\";\n  var digit;\n\n  var vlq = toVLQSigned(aValue);\n\n  do {\n    digit = vlq & VLQ_BASE_MASK;\n    vlq >>>= VLQ_BASE_SHIFT;\n    if (vlq > 0) {\n      // There are still more digits in this value, so we must make sure the\n      // continuation bit is marked.\n      digit |= VLQ_CONTINUATION_BIT;\n    }\n    encoded += base64.encode(digit);\n  } while (vlq > 0);\n\n  return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n  var strLen = aStr.length;\n  var result = 0;\n  var shift = 0;\n  var continuation, digit;\n\n  do {\n    if (aIndex >= strLen) {\n      throw new Error(\"Expected more digits in base 64 VLQ value.\");\n    }\n\n    digit = base64.decode(aStr.charCodeAt(aIndex++));\n    if (digit === -1) {\n      throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n    }\n\n    continuation = !!(digit & VLQ_CONTINUATION_BIT);\n    digit &= VLQ_BASE_MASK;\n    result = result + (digit << shift);\n    shift += VLQ_BASE_SHIFT;\n  } while (continuation);\n\n  aOutParam.value = fromVLQSigned(result);\n  aOutParam.rest = aIndex;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/base64-vlq.js\n ** module id = 2\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n  if (0 <= number && number < intToCharMap.length) {\n    return intToCharMap[number];\n  }\n  throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. 
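// Illustrative sketch (not part of the bundled sources): behaviour of the base64 VLQ
// encoder/decoder above. toVLQSigned moves the sign into the least significant bit, then the
// value is emitted in 5-bit groups, lowest group first, with bit 6 as the continuation flag.
var base64VLQ = require('./base64-vlq'); // assumed relative path, matching the module above
base64VLQ.encode(0);   // -> 'A'  (0  -> 0, single digit)
base64VLQ.encode(1);   // -> 'C'  (1  -> 2, single digit)
base64VLQ.encode(-1);  // -> 'D'  (-1 -> 3, single digit)
base64VLQ.encode(16);  // -> 'gB' (16 -> 32: low digit 0 with continuation bit set, then 1)
var out = {};
base64VLQ.decode('gB', 0, out);
// out.value -> 16, out.rest -> 2 (index just past the consumed digits)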
Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n  var bigA = 65;     // 'A'\n  var bigZ = 90;     // 'Z'\n\n  var littleA = 97;  // 'a'\n  var littleZ = 122; // 'z'\n\n  var zero = 48;     // '0'\n  var nine = 57;     // '9'\n\n  var plus = 43;     // '+'\n  var slash = 47;    // '/'\n\n  var littleOffset = 26;\n  var numberOffset = 52;\n\n  // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n  if (bigA <= charCode && charCode <= bigZ) {\n    return (charCode - bigA);\n  }\n\n  // 26 - 51: abcdefghijklmnopqrstuvwxyz\n  if (littleA <= charCode && charCode <= littleZ) {\n    return (charCode - littleA + littleOffset);\n  }\n\n  // 52 - 61: 0123456789\n  if (zero <= charCode && charCode <= nine) {\n    return (charCode - zero + numberOffset);\n  }\n\n  // 62: +\n  if (charCode == plus) {\n    return 62;\n  }\n\n  // 63: /\n  if (charCode == slash) {\n    return 63;\n  }\n\n  // Invalid base64 digit.\n  return -1;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/base64.js\n ** module id = 3\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n  if (aName in aArgs) {\n    return aArgs[aName];\n  } else if (arguments.length === 3) {\n    return aDefaultValue;\n  } else {\n    throw new Error('\"' + aName + '\" is a required argument.');\n  }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n  var match = aUrl.match(urlRegexp);\n  if (!match) {\n    return null;\n  }\n  return {\n    scheme: match[1],\n    auth: match[2],\n    host: match[3],\n    port: match[4],\n    path: match[5]\n  };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n  var url = '';\n  if (aParsedUrl.scheme) {\n    url += aParsedUrl.scheme + ':';\n  }\n  url += '//';\n  if (aParsedUrl.auth) {\n    url += aParsedUrl.auth + '@';\n  }\n  if (aParsedUrl.host) {\n    url += aParsedUrl.host;\n  }\n  if (aParsedUrl.port) {\n    url += \":\" + aParsedUrl.port\n  }\n  if (aParsedUrl.path) {\n    url += aParsedUrl.path;\n  }\n  return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consequtive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '<dir>/..' 
parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n  var path = aPath;\n  var url = urlParse(aPath);\n  if (url) {\n    if (!url.path) {\n      return aPath;\n    }\n    path = url.path;\n  }\n  var isAbsolute = exports.isAbsolute(path);\n\n  var parts = path.split(/\\/+/);\n  for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n    part = parts[i];\n    if (part === '.') {\n      parts.splice(i, 1);\n    } else if (part === '..') {\n      up++;\n    } else if (up > 0) {\n      if (part === '') {\n        // The first part is blank if the path is absolute. Trying to go\n        // above the root is a no-op. Therefore we can remove all '..' parts\n        // directly after the root.\n        parts.splice(i + 1, up);\n        up = 0;\n      } else {\n        parts.splice(i, 2);\n        up--;\n      }\n    }\n  }\n  path = parts.join('/');\n\n  if (path === '') {\n    path = isAbsolute ? '/' : '.';\n  }\n\n  if (url) {\n    url.path = path;\n    return urlGenerate(url);\n  }\n  return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n *   scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n *   first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n *   is updated with the result and aRoot is returned. Otherwise the result\n *   is returned.\n *   - If aPath is absolute, the result is aPath.\n *   - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n  if (aPath === \"\") {\n    aPath = \".\";\n  }\n  var aPathUrl = urlParse(aPath);\n  var aRootUrl = urlParse(aRoot);\n  if (aRootUrl) {\n    aRoot = aRootUrl.path || '/';\n  }\n\n  // `join(foo, '//www.example.org')`\n  if (aPathUrl && !aPathUrl.scheme) {\n    if (aRootUrl) {\n      aPathUrl.scheme = aRootUrl.scheme;\n    }\n    return urlGenerate(aPathUrl);\n  }\n\n  if (aPathUrl || aPath.match(dataUrlRegexp)) {\n    return aPath;\n  }\n\n  // `join('http://', 'www.example.com')`\n  if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n    aRootUrl.host = aPath;\n    return urlGenerate(aRootUrl);\n  }\n\n  var joined = aPath.charAt(0) === '/'\n    ? aPath\n    : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n  if (aRootUrl) {\n    aRootUrl.path = joined;\n    return urlGenerate(aRootUrl);\n  }\n  return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n  return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n  if (aRoot === \"\") {\n    aRoot = \".\";\n  }\n\n  aRoot = aRoot.replace(/\\/$/, '');\n\n  // It is possible for the path to be above the root. In this case, simply\n  // checking whether the root is a prefix of the path won't work. 
Instead, we\n  // need to remove components from the root one by one, until either we find\n  // a prefix that fits, or we run out of components to remove.\n  var level = 0;\n  while (aPath.indexOf(aRoot + '/') !== 0) {\n    var index = aRoot.lastIndexOf(\"/\");\n    if (index < 0) {\n      return aPath;\n    }\n\n    // If the only part of the root that is left is the scheme (i.e. http://,\n    // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n    // have exhausted all components, so the path is not relative to the root.\n    aRoot = aRoot.slice(0, index);\n    if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n      return aPath;\n    }\n\n    ++level;\n  }\n\n  // Make sure we add a \"../\" for each component we removed from the root.\n  return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n  var obj = Object.create(null);\n  return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n  return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return '$' + aStr;\n  }\n\n  return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n  if (isProtoString(aStr)) {\n    return aStr.slice(1);\n  }\n\n  return aStr;\n}\nexports.fromSetString = supportsNullProto ? identity : fromSetString;\n\nfunction isProtoString(s) {\n  if (!s) {\n    return false;\n  }\n\n  var length = s.length;\n\n  if (length < 9 /* \"__proto__\".length */) {\n    return false;\n  }\n\n  if (s.charCodeAt(length - 1) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 2) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n      s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n      s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n      s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n      s.charCodeAt(length - 8) !== 95  /* '_' */ ||\n      s.charCodeAt(length - 9) !== 95  /* '_' */) {\n    return false;\n  }\n\n  for (var i = length - 10; i >= 0; i--) {\n    if (s.charCodeAt(i) !== 36 /* '$' */) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. 
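// Illustrative sketch (not part of the bundled sources): behaviour of the path helpers above.
var util = require('./util'); // assumed relative path, matching the module above
util.normalize('/a/b/../c');                    // -> '/a/c' ('.' and '<dir>/..' parts collapsed)
util.join('/the/root', 'path/to/file');         // -> '/the/root/path/to/file'
util.join('http://example.com', '/file.js');    // -> 'http://example.com/file.js'
util.relative('/the/root', '/the/root/one.js'); // -> 'one.js'
util.relative('/the/root', '/other/two.js');    // -> '/other/two.js' (path is not under the root)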
Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n  var cmp = mappingA.source - mappingB.source;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0 || onlyCompareOriginal) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return mappingA.name - mappingB.name;\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0 || onlyCompareGenerated) {\n    return cmp;\n  }\n\n  cmp = mappingA.source - mappingB.source;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return mappingA.name - mappingB.name;\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n  if (aStr1 === aStr2) {\n    return 0;\n  }\n\n  if (aStr1 > aStr2) {\n    return 1;\n  }\n\n  return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n  var cmp = mappingA.generatedLine - mappingB.generatedLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = strcmp(mappingA.source, mappingB.source);\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalLine - mappingB.originalLine;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  cmp = mappingA.originalColumn - mappingB.originalColumn;\n  if (cmp !== 0) {\n    return cmp;\n  }\n\n  return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/util.js\n ** module id = 4\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). 
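// Illustrative sketch (not part of the bundled sources): the comparators above order mappings
// either by original position first or by generated position first, so the two sort orders used
// by the consumer can be produced from the same mapping objects.
var util = require('./util');
var a = { source: 0, originalLine: 1, originalColumn: 0, generatedLine: 2, generatedColumn: 0, name: 0 };
var b = { source: 0, originalLine: 1, originalColumn: 5, generatedLine: 1, generatedColumn: 0, name: 0 };
util.compareByOriginalPositions(a, b) < 0;          // true: original column 0 sorts before column 5
util.compareByGeneratedPositionsDeflated(a, b) > 0; // true: generated line 2 sorts after line 1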
Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n  this._array = [];\n  this._set = Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n  var set = new ArraySet();\n  for (var i = 0, len = aArray.length; i < len; i++) {\n    set.add(aArray[i], aAllowDuplicates);\n  }\n  return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n  return Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n  var sStr = util.toSetString(aStr);\n  var isDuplicate = has.call(this._set, sStr);\n  var idx = this._array.length;\n  if (!isDuplicate || aAllowDuplicates) {\n    this._array.push(aStr);\n  }\n  if (!isDuplicate) {\n    this._set[sStr] = idx;\n  }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n  var sStr = util.toSetString(aStr);\n  return has.call(this._set, sStr);\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n  var sStr = util.toSetString(aStr);\n  if (has.call(this._set, sStr)) {\n    return this._set[sStr];\n  }\n  throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n  if (aIdx >= 0 && aIdx < this._array.length) {\n    return this._array[aIdx];\n  }\n  throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n  return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/array-set.js\n ** module id = 5\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n  // Optimized for most common case\n  var lineA = mappingA.generatedLine;\n  var lineB = mappingB.generatedLine;\n  var columnA = mappingA.generatedColumn;\n  var columnB = mappingB.generatedColumn;\n  return lineB > lineA || lineB == lineA && columnB >= columnA ||\n         util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. 
It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n  this._array = [];\n  this._sorted = true;\n  // Serves as infimum\n  this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n  function MappingList_forEach(aCallback, aThisArg) {\n    this._array.forEach(aCallback, aThisArg);\n  };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n  if (generatedPositionAfter(this._last, aMapping)) {\n    this._last = aMapping;\n    this._array.push(aMapping);\n  } else {\n    this._sorted = false;\n    this._array.push(aMapping);\n  }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n  if (!this._sorted) {\n    this._array.sort(util.compareByGeneratedPositionsInflated);\n    this._sorted = true;\n  }\n  return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/mapping-list.js\n ** module id = 6\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  return sourceMap.sections != null\n    ? new IndexedSourceMapConsumer(sourceMap)\n    : new BasicSourceMapConsumer(sourceMap);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n  return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n//     {\n//       generatedLine: The line number in the generated code,\n//       generatedColumn: The column number in the generated code,\n//       source: The path to the original source file that generated this\n//               chunk of code,\n//       originalLine: The line number in the original source that\n//                     corresponds to this chunk of generated code,\n//       originalColumn: The column number in the original source that\n//                       corresponds to this chunk of generated code,\n//       name: The name of the original symbol which generated this chunk of\n//             code.\n//     }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n  get: function () {\n    if (!this.__generatedMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__generatedMappings;\n  }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n  get: function () {\n    if (!this.__originalMappings) {\n      this._parseMappings(this._mappings, this.sourceRoot);\n    }\n\n    return this.__originalMappings;\n  }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n  function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n    var c = aStr.charAt(index);\n    return c === \";\" || c === \",\";\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    throw new Error(\"Subclasses must implement _parseMappings\");\n  };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n *        The function that is called with each mapping.\n * @param Object aContext\n *        Optional. If specified, this object will be the value of `this` every\n *        time that `aCallback` is called.\n * @param aOrder\n *        Either `SourceMapConsumer.GENERATED_ORDER` or\n *        `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n *        iterate over the mappings sorted by the generated file's line/column\n *        order or the original's source/line/column order, respectively. 
Defaults to\n *        `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n  function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n    var context = aContext || null;\n    var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n    var mappings;\n    switch (order) {\n    case SourceMapConsumer.GENERATED_ORDER:\n      mappings = this._generatedMappings;\n      break;\n    case SourceMapConsumer.ORIGINAL_ORDER:\n      mappings = this._originalMappings;\n      break;\n    default:\n      throw new Error(\"Unknown order of iteration.\");\n    }\n\n    var sourceRoot = this.sourceRoot;\n    mappings.map(function (mapping) {\n      var source = mapping.source === null ? null : this._sources.at(mapping.source);\n      if (source != null && sourceRoot != null) {\n        source = util.join(sourceRoot, source);\n      }\n      return {\n        source: source,\n        generatedLine: mapping.generatedLine,\n        generatedColumn: mapping.generatedColumn,\n        originalLine: mapping.originalLine,\n        originalColumn: mapping.originalColumn,\n        name: mapping.name === null ? null : this._names.at(mapping.name)\n      };\n    }, this).forEach(aCallback, context);\n  };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: Optional. the column number in the original source.\n *\n * and an array of objects is returned, each with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n  function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n    var line = util.getArg(aArgs, 'line');\n\n    // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n    // returns the index of the closest mapping less than the needle. 
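// Illustrative sketch (assumed usage, not part of the bundled sources): iterating a consumer's
// mappings with eachMapping() as implemented above, here in original-source order. `consumer`
// is assumed to be any SourceMapConsumer instance.
consumer.eachMapping(function (m) {
  // Each callback argument carries source/originalLine/originalColumn plus the generated position.
  console.log(m.source + ':' + m.originalLine + ':' + m.originalColumn +
              ' -> ' + m.generatedLine + ':' + m.generatedColumn +
              (m.name ? ' (' + m.name + ')' : ''));
}, null, SourceMapConsumer.ORIGINAL_ORDER);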
By\n    // setting needle.originalColumn to 0, we thus find the last mapping for\n    // the given line, provided such a mapping exists.\n    var needle = {\n      source: util.getArg(aArgs, 'source'),\n      originalLine: line,\n      originalColumn: util.getArg(aArgs, 'column', 0)\n    };\n\n    if (this.sourceRoot != null) {\n      needle.source = util.relative(this.sourceRoot, needle.source);\n    }\n    if (!this._sources.has(needle.source)) {\n      return [];\n    }\n    needle.source = this._sources.indexOf(needle.source);\n\n    var mappings = [];\n\n    var index = this._findMapping(needle,\n                                  this._originalMappings,\n                                  \"originalLine\",\n                                  \"originalColumn\",\n                                  util.compareByOriginalPositions,\n                                  binarySearch.LEAST_UPPER_BOUND);\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (aArgs.column === undefined) {\n        var originalLine = mapping.originalLine;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we found. Since\n        // mappings are sorted, this is guaranteed to find all mappings for\n        // the line we found.\n        while (mapping && mapping.originalLine === originalLine) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      } else {\n        var originalColumn = mapping.originalColumn;\n\n        // Iterate until either we run out of mappings, or we run into\n        // a mapping for a different line than the one we were searching for.\n        // Since mappings are sorted, this is guaranteed to find all mappings for\n        // the line we are searching for.\n        while (mapping &&\n               mapping.originalLine === line &&\n               mapping.originalColumn == originalColumn) {\n          mappings.push({\n            line: util.getArg(mapping, 'generatedLine', null),\n            column: util.getArg(mapping, 'generatedColumn', null),\n            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n          });\n\n          mapping = this._originalMappings[++index];\n        }\n      }\n    }\n\n    return mappings;\n  };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The only parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - sources: An array of URLs to the original source files.\n *   - names: An array of identifiers which can be referrenced by individual mappings.\n *   - sourceRoot: Optional. The URL root from which all sources are relative.\n *   - sourcesContent: Optional. An array of contents of the original source files.\n *   - mappings: A string of base64 VLQs which contain the actual mappings.\n *   - file: Optional. 
The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n *     {\n *       version : 3,\n *       file: \"out.js\",\n *       sourceRoot : \"\",\n *       sources: [\"foo.js\", \"bar.js\"],\n *       names: [\"src\", \"maps\", \"are\", \"fun\"],\n *       mappings: \"AA,AB;;ABCDE;\"\n *     }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sources = util.getArg(sourceMap, 'sources');\n  // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n  // requires the array) to play nice here.\n  var names = util.getArg(sourceMap, 'names', []);\n  var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n  var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n  var mappings = util.getArg(sourceMap, 'mappings');\n  var file = util.getArg(sourceMap, 'file', null);\n\n  // Once again, Sass deviates from the spec and supplies the version as a\n  // string rather than a number, so we use loose equality checking here.\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  sources = sources\n    .map(String)\n    // Some source maps produce relative source paths like \"./foo.js\" instead of\n    // \"foo.js\".  Normalize these first so that future comparisons will succeed.\n    // See bugzil.la/1090768.\n    .map(util.normalize)\n    // Always ensure that absolute sources are internally stored relative to\n    // the source root, if the source root is absolute. Not doing this would\n    // be particularly problematic when the source root is a prefix of the\n    // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n    .map(function (source) {\n      return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n        ? util.relative(sourceRoot, source)\n        : source;\n    });\n\n  // Pass `true` below to allow duplicate names and sources. While source maps\n  // are intended to be compressed and deduplicated, the TypeScript compiler\n  // sometimes generates source maps with duplicates in them. 
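// Illustrative sketch (not part of the bundled sources): constructing a consumer from a raw
// map like the spec example in the comment above. A leading ")]}'" XSSI guard is stripped, and
// either a JSON string or an already-parsed object is accepted.
var rawMap = {
  version: 3,
  file: 'out.js',
  sourceRoot: '',
  sources: ['foo.js', 'bar.js'],
  names: ['src', 'maps', 'are', 'fun'],
  mappings: 'AA,AB;;ABCDE;'
};
var consumer = new SourceMapConsumer(")]}'\n" + JSON.stringify(rawMap));
// The map has no "sections" field, so this is a BasicSourceMapConsumer.
consumer.sources; // -> ['foo.js', 'bar.js'] (mappings are only parsed lazily, on first query)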
See Github issue\n  // #72 and bugzil.la/889492.\n  this._names = ArraySet.fromArray(names.map(String), true);\n  this._sources = ArraySet.fromArray(sources, true);\n\n  this.sourceRoot = sourceRoot;\n  this.sourcesContent = sourcesContent;\n  this._mappings = mappings;\n  this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n *        The source map that will be consumed.\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n  function SourceMapConsumer_fromSourceMap(aSourceMap) {\n    var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n    var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n    var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n    smc.sourceRoot = aSourceMap._sourceRoot;\n    smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n                                                            smc.sourceRoot);\n    smc.file = aSourceMap._file;\n\n    // Because we are modifying the entries (by converting string sources and\n    // names to indices into the sources and names ArraySets), we have to make\n    // a copy of the entry or else bad things happen. Shared mutable state\n    // strikes again! See github issue #191.\n\n    var generatedMappings = aSourceMap._mappings.toArray().slice();\n    var destGeneratedMappings = smc.__generatedMappings = [];\n    var destOriginalMappings = smc.__originalMappings = [];\n\n    for (var i = 0, length = generatedMappings.length; i < length; i++) {\n      var srcMapping = generatedMappings[i];\n      var destMapping = new Mapping;\n      destMapping.generatedLine = srcMapping.generatedLine;\n      destMapping.generatedColumn = srcMapping.generatedColumn;\n\n      if (srcMapping.source) {\n        destMapping.source = sources.indexOf(srcMapping.source);\n        destMapping.originalLine = srcMapping.originalLine;\n        destMapping.originalColumn = srcMapping.originalColumn;\n\n        if (srcMapping.name) {\n          destMapping.name = names.indexOf(srcMapping.name);\n        }\n\n        destOriginalMappings.push(destMapping);\n      }\n\n      destGeneratedMappings.push(destMapping);\n    }\n\n    quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n    return smc;\n  };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    return this._sources.toArray().map(function (s) {\n      return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n    }, this);\n  }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n  this.generatedLine = 0;\n  this.generatedColumn = 0;\n  this.source = null;\n  this.originalLine = null;\n  this.originalColumn = null;\n  this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n  function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    var generatedLine = 1;\n    var previousGeneratedColumn = 0;\n    var previousOriginalLine = 0;\n    var previousOriginalColumn = 0;\n    var previousSource = 0;\n    var previousName = 0;\n    var length = aStr.length;\n    var index = 0;\n    var cachedSegments = {};\n    var temp = {};\n    var originalMappings = [];\n    var generatedMappings = [];\n    var mapping, str, segment, end, value;\n\n    while (index < length) {\n      if (aStr.charAt(index) === ';') {\n        generatedLine++;\n        index++;\n        previousGeneratedColumn = 0;\n      }\n      else if (aStr.charAt(index) === ',') {\n        index++;\n      }\n      else {\n        mapping = new Mapping();\n        mapping.generatedLine = generatedLine;\n\n        // Because each offset is encoded relative to the previous one,\n        // many segments often have the same encoding. We can exploit this\n        // fact by caching the parsed variable length fields of each segment,\n        // allowing us to avoid a second parse if we encounter the same\n        // segment again.\n        for (end = index; end < length; end++) {\n          if (this._charIsMappingSeparator(aStr, end)) {\n            break;\n          }\n        }\n        str = aStr.slice(index, end);\n\n        segment = cachedSegments[str];\n        if (segment) {\n          index += str.length;\n        } else {\n          segment = [];\n          while (index < end) {\n            base64VLQ.decode(aStr, index, temp);\n            value = temp.value;\n            index = temp.rest;\n            segment.push(value);\n          }\n\n          if (segment.length === 2) {\n            throw new Error('Found a source, but no line and column');\n          }\n\n          if (segment.length === 3) {\n            throw new Error('Found a source and line, but no column');\n          }\n\n          cachedSegments[str] = segment;\n        }\n\n        // Generated column.\n        mapping.generatedColumn = previousGeneratedColumn + segment[0];\n        previousGeneratedColumn = mapping.generatedColumn;\n\n        if (segment.length > 1) {\n          // Original source.\n          mapping.source = previousSource + segment[1];\n          previousSource += segment[1];\n\n          // Original line.\n          mapping.originalLine = previousOriginalLine + segment[2];\n          previousOriginalLine = mapping.originalLine;\n          // Lines are stored 0-based\n          mapping.originalLine += 1;\n\n          // Original column.\n          mapping.originalColumn = previousOriginalColumn + segment[3];\n          previousOriginalColumn = mapping.originalColumn;\n\n          if (segment.length > 4) {\n            // Original name.\n            mapping.name = previousName + segment[4];\n            previousName += segment[4];\n          }\n        }\n\n        generatedMappings.push(mapping);\n        if (typeof mapping.originalLine === 'number') {\n    
      originalMappings.push(mapping);\n        }\n      }\n    }\n\n    quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n    this.__generatedMappings = generatedMappings;\n\n    quickSort(originalMappings, util.compareByOriginalPositions);\n    this.__originalMappings = originalMappings;\n  };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n  function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n                                         aColumnName, aComparator, aBias) {\n    // To return the position we are searching for, we must first find the\n    // mapping for the given position and then return the opposite position it\n    // points to. Because the mappings are sorted, we can use binary search to\n    // find the best mapping.\n\n    if (aNeedle[aLineName] <= 0) {\n      throw new TypeError('Line must be greater than or equal to 1, got '\n                          + aNeedle[aLineName]);\n    }\n    if (aNeedle[aColumnName] < 0) {\n      throw new TypeError('Column must be greater than or equal to 0, got '\n                          + aNeedle[aColumnName]);\n    }\n\n    return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n  };\n\n/**\n * Compute the last column for each generated mapping. The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n  function SourceMapConsumer_computeColumnSpans() {\n    for (var index = 0; index < this._generatedMappings.length; ++index) {\n      var mapping = this._generatedMappings[index];\n\n      // Mappings do not contain a field for the last generated columnt. We\n      // can come up with an optimistic estimate, however, by assuming that\n      // mappings are contiguous (i.e. given two consecutive mappings, the\n      // first mapping ends where the second one starts).\n      if (index + 1 < this._generatedMappings.length) {\n        var nextMapping = this._generatedMappings[index + 1];\n\n        if (mapping.generatedLine === nextMapping.generatedLine) {\n          mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n          continue;\n        }\n      }\n\n      // The last mapping for each line spans the entire line.\n      mapping.lastGeneratedColumn = Infinity;\n    }\n  };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.\n *   - column: The column number in the generated source.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.\n *   - column: The column number in the original source, or null.\n *   - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n  function SourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._generatedMappings,\n      \"generatedLine\",\n      \"generatedColumn\",\n      util.compareByGeneratedPositionsDeflated,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._generatedMappings[index];\n\n      if (mapping.generatedLine === needle.generatedLine) {\n        var source = util.getArg(mapping, 'source', null);\n        if (source !== null) {\n          source = this._sources.at(source);\n          if (this.sourceRoot != null) {\n            source = util.join(this.sourceRoot, source);\n          }\n        }\n        var name = util.getArg(mapping, 'name', null);\n        if (name !== null) {\n          name = this._names.at(name);\n        }\n        return {\n          source: source,\n          line: util.getArg(mapping, 'originalLine', null),\n          column: util.getArg(mapping, 'originalColumn', null),\n          name: name\n        };\n      }\n    }\n\n    return {\n      source: null,\n      line: null,\n      column: null,\n      name: null\n    };\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function BasicSourceMapConsumer_hasContentsOfAllSources() {\n    if (!this.sourcesContent) {\n      return false;\n    }\n    return this.sourcesContent.length >= this._sources.size() &&\n      !this.sourcesContent.some(function (sc) { return sc == null; });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n  function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    if (!this.sourcesContent) {\n      return null;\n    }\n\n    if (this.sourceRoot != null) {\n      aSource = util.relative(this.sourceRoot, aSource);\n    }\n\n    if (this._sources.has(aSource)) {\n      return this.sourcesContent[this._sources.indexOf(aSource)];\n    }\n\n    var url;\n    if (this.sourceRoot != null\n        && (url = util.urlParse(this.sourceRoot))) {\n      // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n      // many users. We can help them out when they expect file:// URIs to\n      // behave like it would if they were running a local HTTP server. 
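// Illustrative sketch (assumed usage, not part of the bundled sources): querying the original
// position for a generated location with originalPositionFor() as implemented above. `consumer`
// is assumed to be a BasicSourceMapConsumer instance.
var pos = consumer.originalPositionFor({
  line: 2,                                    // 1-based generated line
  column: 10,                                 // 0-based generated column
  bias: SourceMapConsumer.LEAST_UPPER_BOUND   // default is GREATEST_LOWER_BOUND
});
// pos -> { source: ..., line: ..., column: ..., name: ... }, or all nulls when the generated
// line has no mapping that satisfies the requested bias.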
See\n      // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n      var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n      if (url.scheme == \"file\"\n          && this._sources.has(fileUriAbsPath)) {\n        return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n      }\n\n      if ((!url.path || url.path == \"/\")\n          && this._sources.has(\"/\" + aSource)) {\n        return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n      }\n    }\n\n    // This function is used recursively from\n    // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we\n    // don't want to throw if we can't find the source - we just want to\n    // return null, so we provide a flag to exit gracefully.\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: The column number in the original source.\n *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n  function SourceMapConsumer_generatedPositionFor(aArgs) {\n    var source = util.getArg(aArgs, 'source');\n    if (this.sourceRoot != null) {\n      source = util.relative(this.sourceRoot, source);\n    }\n    if (!this._sources.has(source)) {\n      return {\n        line: null,\n        column: null,\n        lastColumn: null\n      };\n    }\n    source = this._sources.indexOf(source);\n\n    var needle = {\n      source: source,\n      originalLine: util.getArg(aArgs, 'line'),\n      originalColumn: util.getArg(aArgs, 'column')\n    };\n\n    var index = this._findMapping(\n      needle,\n      this._originalMappings,\n      \"originalLine\",\n      \"originalColumn\",\n      util.compareByOriginalPositions,\n      util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n    );\n\n    if (index >= 0) {\n      var mapping = this._originalMappings[index];\n\n      if (mapping.source === needle.source) {\n        return {\n          line: util.getArg(mapping, 'generatedLine', null),\n          column: util.getArg(mapping, 'generatedColumn', null),\n          lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n        };\n      }\n    }\n\n    return {\n      line: null,\n      column: null,\n      lastColumn: null\n    };\n  };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. 
ones with a \"sections\" field) as\n * input.\n *\n * The only parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n *   - version: Which version of the source map spec this map is following.\n *   - file: Optional. The generated file this source map is associated with.\n *   - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n *   - offset: The offset into the original specified at which this section\n *       begins to apply, defined as an object with a \"line\" and \"column\"\n *       field.\n *   - map: A source map definition. This source map could also be indexed,\n *       but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n *  {\n *    version : 3,\n *    file: \"app.js\",\n *    sections: [{\n *      offset: {line:100, column:10},\n *      map: {\n *        version : 3,\n *        file: \"section.js\",\n *        sources: [\"foo.js\", \"bar.js\"],\n *        names: [\"src\", \"maps\", \"are\", \"fun\"],\n *        mappings: \"AAAA,E;;ABCDE;\"\n *      }\n *    }],\n *  }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap) {\n  var sourceMap = aSourceMap;\n  if (typeof aSourceMap === 'string') {\n    sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n  }\n\n  var version = util.getArg(sourceMap, 'version');\n  var sections = util.getArg(sourceMap, 'sections');\n\n  if (version != this._version) {\n    throw new Error('Unsupported version: ' + version);\n  }\n\n  this._sources = new ArraySet();\n  this._names = new ArraySet();\n\n  var lastOffset = {\n    line: -1,\n    column: 0\n  };\n  this._sections = sections.map(function (s) {\n    if (s.url) {\n      // The url field will require support for asynchronicity.\n      // See https://github.com/mozilla/source-map/issues/16\n      throw new Error('Support for url field in sections not implemented.');\n    }\n    var offset = util.getArg(s, 'offset');\n    var offsetLine = util.getArg(offset, 'line');\n    var offsetColumn = util.getArg(offset, 'column');\n\n    if (offsetLine < lastOffset.line ||\n        (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n      throw new Error('Section offsets must be ordered and non-overlapping.');\n    }\n    lastOffset = offset;\n\n    return {\n      generatedOffset: {\n        // The offset fields are 0-based, but we use 1-based indices when\n        // encoding/decoding from VLQ.\n        generatedLine: offsetLine + 1,\n        generatedColumn: offsetColumn + 1\n      },\n      consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n    }\n  });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n  get: function () {\n    var 
sources = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n        sources.push(this._sections[i].consumer.sources[j]);\n      }\n    }\n    return sources;\n  }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n *   - line: The line number in the generated source.\n *   - column: The column number in the generated source.\n *\n * and an object is returned with the following properties:\n *\n *   - source: The original source file, or null.\n *   - line: The line number in the original source, or null.\n *   - column: The column number in the original source, or null.\n *   - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n  function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n    var needle = {\n      generatedLine: util.getArg(aArgs, 'line'),\n      generatedColumn: util.getArg(aArgs, 'column')\n    };\n\n    // Find the section containing the generated position we're trying to map\n    // to an original position.\n    var sectionIndex = binarySearch.search(needle, this._sections,\n      function(needle, section) {\n        var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n        if (cmp) {\n          return cmp;\n        }\n\n        return (needle.generatedColumn -\n                section.generatedOffset.generatedColumn);\n      });\n    var section = this._sections[sectionIndex];\n\n    if (!section) {\n      return {\n        source: null,\n        line: null,\n        column: null,\n        name: null\n      };\n    }\n\n    return section.consumer.originalPositionFor({\n      line: needle.generatedLine -\n        (section.generatedOffset.generatedLine - 1),\n      column: needle.generatedColumn -\n        (section.generatedOffset.generatedLine === needle.generatedLine\n         ? section.generatedOffset.generatedColumn - 1\n         : 0),\n      bias: aArgs.bias\n    });\n  };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n  function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n    return this._sections.every(function (s) {\n      return s.consumer.hasContentsOfAllSources();\n    });\n  };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n  function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      var content = section.consumer.sourceContentFor(aSource, true);\n      if (content) {\n        return content;\n      }\n    }\n    if (nullOnMissing) {\n      return null;\n    }\n    else {\n      throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n    }\n  };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. 
The only argument is an object with\n * the following properties:\n *\n *   - source: The filename of the original source.\n *   - line: The line number in the original source.\n *   - column: The column number in the original source.\n *\n * and an object is returned with the following properties:\n *\n *   - line: The line number in the generated source, or null.\n *   - column: The column number in the generated source, or null.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n  function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n\n      // Only consider this section if the requested source is in the list of\n      // sources of the consumer.\n      if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n        continue;\n      }\n      var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n      if (generatedPosition) {\n        var ret = {\n          line: generatedPosition.line +\n            (section.generatedOffset.generatedLine - 1),\n          column: generatedPosition.column +\n            (section.generatedOffset.generatedLine === generatedPosition.line\n             ? section.generatedOffset.generatedColumn - 1\n             : 0)\n        };\n        return ret;\n      }\n    }\n\n    return {\n      line: null,\n      column: null\n    };\n  };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n  function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n    this.__generatedMappings = [];\n    this.__originalMappings = [];\n    for (var i = 0; i < this._sections.length; i++) {\n      var section = this._sections[i];\n      var sectionMappings = section.consumer._generatedMappings;\n      for (var j = 0; j < sectionMappings.length; j++) {\n        var mapping = sectionMappings[j];\n\n        var source = section.consumer._sources.at(mapping.source);\n        if (section.consumer.sourceRoot !== null) {\n          source = util.join(section.consumer.sourceRoot, source);\n        }\n        this._sources.add(source);\n        source = this._sources.indexOf(source);\n\n        var name = section.consumer._names.at(mapping.name);\n        this._names.add(name);\n        name = this._names.indexOf(name);\n\n        // The mappings coming from the consumer for the section have\n        // generated positions relative to the start of the section, so we\n        // need to offset them to be relative to the start of the concatenated\n        // generated file.\n        var adjustedMapping = {\n          source: source,\n          generatedLine: mapping.generatedLine +\n            (section.generatedOffset.generatedLine - 1),\n          generatedColumn: mapping.generatedColumn +\n            (section.generatedOffset.generatedLine === mapping.generatedLine\n            ? 
section.generatedOffset.generatedColumn - 1\n            : 0),\n          originalLine: mapping.originalLine,\n          originalColumn: mapping.originalColumn,\n          name: name\n        };\n\n        this.__generatedMappings.push(adjustedMapping);\n        if (typeof adjustedMapping.originalLine === 'number') {\n          this.__originalMappings.push(adjustedMapping);\n        }\n      }\n    }\n\n    quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n    quickSort(this.__originalMappings, util.compareByOriginalPositions);\n  };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-map-consumer.js\n ** module id = 7\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n  // This function terminates when one of the following is true:\n  //\n  //   1. We find the exact element we are looking for.\n  //\n  //   2. We did not find the exact element, but we can return the index of\n  //      the next-closest element.\n  //\n  //   3. We did not find the exact element, and there is no next-closest\n  //      element than the one we are searching for, so we return -1.\n  var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n  var cmp = aCompare(aNeedle, aHaystack[mid], true);\n  if (cmp === 0) {\n    // Found the element we are looking for.\n    return mid;\n  }\n  else if (cmp > 0) {\n    // Our needle is greater than aHaystack[mid].\n    if (aHigh - mid > 1) {\n      // The element is in the upper half.\n      return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // The exact needle element was not found in this haystack. Determine if\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return aHigh < aHaystack.length ? aHigh : -1;\n    } else {\n      return mid;\n    }\n  }\n  else {\n    // Our needle is less than aHaystack[mid].\n    if (mid - aLow > 1) {\n      // The element is in the lower half.\n      return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n    }\n\n    // we are in termination case (3) or (2) and return the appropriate thing.\n    if (aBias == exports.LEAST_UPPER_BOUND) {\n      return mid;\n    } else {\n      return aLow < 0 ? -1 : aLow;\n    }\n  }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. 
This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n *     array and returns -1, 0, or 1 depending on whether the needle is less\n *     than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n *     closest element that is smaller than or greater than the one we are\n *     searching for, respectively, if the exact element cannot be found.\n *     Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n  if (aHaystack.length === 0) {\n    return -1;\n  }\n\n  var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n                              aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n  if (index < 0) {\n    return -1;\n  }\n\n  // We have found either the exact element, or the next-closest element than\n  // the one we are searching for. However, there may be more than one such\n  // element. Make sure we always return the smallest of these.\n  while (index - 1 >= 0) {\n    if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n      break;\n    }\n    --index;\n  }\n\n  return index;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/binary-search.js\n ** module id = 8\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n *        The array.\n * @param {Number} x\n *        The index of the first item.\n * @param {Number} y\n *        The index of the second item.\n */\nfunction swap(ary, x, y) {\n  var temp = ary[x];\n  ary[x] = ary[y];\n  ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. 
high` inclusive.\n *\n * @param {Number} low\n *        The lower bound on the range.\n * @param {Number} high\n *        The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n  return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n * @param {Number} p\n *        Start index of the array\n * @param {Number} r\n *        End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n  // If our lower bound is less than our upper bound, we (1) partition the\n  // array into two pieces and (2) recurse on each half. If it is not, this is\n  // the empty array and our base case.\n\n  if (p < r) {\n    // (1) Partitioning.\n    //\n    // The partitioning chooses a pivot between `p` and `r` and moves all\n    // elements that are less than or equal to the pivot to the before it, and\n    // all the elements that are greater than it after it. The effect is that\n    // once partition is done, the pivot is in the exact place it will be when\n    // the array is put in sorted order, and it will not need to be moved\n    // again. This runs in O(n) time.\n\n    // Always choose a random pivot so that an input array which is reverse\n    // sorted does not cause O(n^2) running time.\n    var pivotIndex = randomIntInRange(p, r);\n    var i = p - 1;\n\n    swap(ary, pivotIndex, r);\n    var pivot = ary[r];\n\n    // Immediately after `j` is incremented in this loop, the following hold\n    // true:\n    //\n    //   * Every element in `ary[p .. i]` is less than or equal to the pivot.\n    //\n    //   * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n    for (var j = p; j < r; j++) {\n      if (comparator(ary[j], pivot) <= 0) {\n        i += 1;\n        swap(ary, i, j);\n      }\n    }\n\n    swap(ary, i + 1, j);\n    var q = i + 1;\n\n    // (2) Recurse on each half.\n\n    doQuickSort(ary, comparator, p, q - 1);\n    doQuickSort(ary, comparator, q + 1, r);\n  }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n *        An array to sort.\n * @param {function} comparator\n *        Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n  doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/quick-sort.js\n ** module id = 9\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. 
This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n *        generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n  this.children = [];\n  this.sourceContents = {};\n  this.line = aLine == null ? null : aLine;\n  this.column = aColumn == null ? null : aColumn;\n  this.source = aSource == null ? null : aSource;\n  this.name = aName == null ? null : aName;\n  this[isSourceNode] = true;\n  if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. The path that relative sources in the\n *        SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n  function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n    // The SourceNode we want to fill with the generated code\n    // and the SourceMap\n    var node = new SourceNode();\n\n    // All even indices of this array are one line of the generated code,\n    // while all odd indices are the newlines between two adjacent lines\n    // (since `REGEX_NEWLINE` captures its match).\n    // Processed fragments are removed from this array, by calling `shiftNextLine`.\n    var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n    var shiftNextLine = function() {\n      var lineContents = remainingLines.shift();\n      // The last line of a file might not have a newline.\n      var newLine = remainingLines.shift() || \"\";\n      return lineContents + newLine;\n    };\n\n    // We need to remember the position of \"remainingLines\"\n    var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n    // The generate SourceNodes we need a code range.\n    // To extract it current and last mapping is used.\n    // Here we store the last mapping.\n    var lastMapping = null;\n\n    aSourceMapConsumer.eachMapping(function (mapping) {\n      if (lastMapping !== null) {\n        // We add the code from \"lastMapping\" to \"mapping\":\n        // First check if there is a new line in between.\n        if (lastGeneratedLine < mapping.generatedLine) {\n          // Associate first line with \"lastMapping\"\n          addMappingWithCode(lastMapping, shiftNextLine());\n          lastGeneratedLine++;\n          lastGeneratedColumn = 0;\n          // The remaining code is added without mapping\n        } else {\n          // There is no new line in between.\n          // Associate the code between \"lastGeneratedColumn\" and\n          // \"mapping.generatedColumn\" with \"lastMapping\"\n          var nextLine = remainingLines[0];\n          var code = nextLine.substr(0, mapping.generatedColumn -\n                                        lastGeneratedColumn);\n          remainingLines[0] = nextLine.substr(mapping.generatedColumn -\n                                              lastGeneratedColumn);\n          
lastGeneratedColumn = mapping.generatedColumn;\n          addMappingWithCode(lastMapping, code);\n          // No more remaining code, continue\n          lastMapping = mapping;\n          return;\n        }\n      }\n      // We add the generated code until the first mapping\n      // to the SourceNode without any mapping.\n      // Each line is added as separate string.\n      while (lastGeneratedLine < mapping.generatedLine) {\n        node.add(shiftNextLine());\n        lastGeneratedLine++;\n      }\n      if (lastGeneratedColumn < mapping.generatedColumn) {\n        var nextLine = remainingLines[0];\n        node.add(nextLine.substr(0, mapping.generatedColumn));\n        remainingLines[0] = nextLine.substr(mapping.generatedColumn);\n        lastGeneratedColumn = mapping.generatedColumn;\n      }\n      lastMapping = mapping;\n    }, this);\n    // We have processed all mappings.\n    if (remainingLines.length > 0) {\n      if (lastMapping) {\n        // Associate the remaining code in the current line with \"lastMapping\"\n        addMappingWithCode(lastMapping, shiftNextLine());\n      }\n      // and add the remaining lines without any mapping\n      node.add(remainingLines.join(\"\"));\n    }\n\n    // Copy sourcesContent into SourceNode\n    aSourceMapConsumer.sources.forEach(function (sourceFile) {\n      var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n      if (content != null) {\n        if (aRelativePath != null) {\n          sourceFile = util.join(aRelativePath, sourceFile);\n        }\n        node.setSourceContent(sourceFile, content);\n      }\n    });\n\n    return node;\n\n    function addMappingWithCode(mapping, code) {\n      if (mapping === null || mapping.source === undefined) {\n        node.add(code);\n      } else {\n        var source = aRelativePath\n          ? util.join(aRelativePath, mapping.source)\n          : mapping.source;\n        node.add(new SourceNode(mapping.originalLine,\n                                mapping.originalColumn,\n                                source,\n                                code,\n                                mapping.name));\n      }\n    }\n  };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n  if (Array.isArray(aChunk)) {\n    aChunk.forEach(function (chunk) {\n      this.add(chunk);\n    }, this);\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    if (aChunk) {\n      this.children.push(aChunk);\n    }\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n *        SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n  if (Array.isArray(aChunk)) {\n    for (var i = aChunk.length-1; i >= 0; i--) {\n      this.prepend(aChunk[i]);\n    }\n  }\n  else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n    this.children.unshift(aChunk);\n  }\n  else {\n    throw new TypeError(\n      \"Expected a SourceNode, string, or an array of SourceNodes and strings. 
Got \" + aChunk\n    );\n  }\n  return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n  var chunk;\n  for (var i = 0, len = this.children.length; i < len; i++) {\n    chunk = this.children[i];\n    if (chunk[isSourceNode]) {\n      chunk.walk(aFn);\n    }\n    else {\n      if (chunk !== '') {\n        aFn(chunk, { source: this.source,\n                     line: this.line,\n                     column: this.column,\n                     name: this.name });\n      }\n    }\n  }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n  var newChildren;\n  var i;\n  var len = this.children.length;\n  if (len > 0) {\n    newChildren = [];\n    for (i = 0; i < len-1; i++) {\n      newChildren.push(this.children[i]);\n      newChildren.push(aSep);\n    }\n    newChildren.push(this.children[i]);\n    this.children = newChildren;\n  }\n  return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n  var lastChild = this.children[this.children.length - 1];\n  if (lastChild[isSourceNode]) {\n    lastChild.replaceRight(aPattern, aReplacement);\n  }\n  else if (typeof lastChild === 'string') {\n    this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n  }\n  else {\n    this.children.push(''.replace(aPattern, aReplacement));\n  }\n  return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n  function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n    this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n  };\n\n/**\n * Walk over the tree of SourceNodes. The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n  function SourceNode_walkSourceContents(aFn) {\n    for (var i = 0, len = this.children.length; i < len; i++) {\n      if (this.children[i][isSourceNode]) {\n        this.children[i].walkSourceContents(aFn);\n      }\n    }\n\n    var sources = Object.keys(this.sourceContents);\n    for (var i = 0, len = sources.length; i < len; i++) {\n      aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n    }\n  };\n\n/**\n * Return the string representation of this source node. 
Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n  var str = \"\";\n  this.walk(function (chunk) {\n    str += chunk;\n  });\n  return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n  var generated = {\n    code: \"\",\n    line: 1,\n    column: 0\n  };\n  var map = new SourceMapGenerator(aArgs);\n  var sourceMappingActive = false;\n  var lastOriginalSource = null;\n  var lastOriginalLine = null;\n  var lastOriginalColumn = null;\n  var lastOriginalName = null;\n  this.walk(function (chunk, original) {\n    generated.code += chunk;\n    if (original.source !== null\n        && original.line !== null\n        && original.column !== null) {\n      if(lastOriginalSource !== original.source\n         || lastOriginalLine !== original.line\n         || lastOriginalColumn !== original.column\n         || lastOriginalName !== original.name) {\n        map.addMapping({\n          source: original.source,\n          original: {\n            line: original.line,\n            column: original.column\n          },\n          generated: {\n            line: generated.line,\n            column: generated.column\n          },\n          name: original.name\n        });\n      }\n      lastOriginalSource = original.source;\n      lastOriginalLine = original.line;\n      lastOriginalColumn = original.column;\n      lastOriginalName = original.name;\n      sourceMappingActive = true;\n    } else if (sourceMappingActive) {\n      map.addMapping({\n        generated: {\n          line: generated.line,\n          column: generated.column\n        }\n      });\n      lastOriginalSource = null;\n      sourceMappingActive = false;\n    }\n    for (var idx = 0, length = chunk.length; idx < length; idx++) {\n      if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n        generated.line++;\n        generated.column = 0;\n        // Mappings end at eol\n        if (idx + 1 === length) {\n          lastOriginalSource = null;\n          sourceMappingActive = false;\n        } else if (sourceMappingActive) {\n          map.addMapping({\n            source: original.source,\n            original: {\n              line: original.line,\n              column: original.column\n            },\n            generated: {\n              line: generated.line,\n              column: generated.column\n            },\n            name: original.name\n          });\n        }\n      } else {\n        generated.column++;\n      }\n    }\n  });\n  this.walkSourceContents(function (sourceFile, sourceContent) {\n    map.setSourceContent(sourceFile, sourceContent);\n  });\n\n  return { code: generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-node.js\n ** module id = 10\n ** module chunks = 0\n **/"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.js b/node_modules/source-map/dist/source-map.js new file mode 100644 index 0000000..61c0d8d --- /dev/null +++ b/node_modules/source-map/dist/source-map.js @@ -0,0 +1,3055 @@ +(function webpackUniversalModuleDefinition(root, factory) { + if(typeof exports === 'object' && typeof module === 'object') + module.exports = factory(); + else if(typeof define === 'function' && define.amd) + define([], factory); + 
else if(typeof exports === 'object') + exports["sourceMap"] = factory(); + else + root["sourceMap"] = factory(); +})(this, function() { +return /******/ (function(modules) { // webpackBootstrap +/******/ // The module cache +/******/ var installedModules = {}; + +/******/ // The require function +/******/ function __webpack_require__(moduleId) { + +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) +/******/ return installedModules[moduleId].exports; + +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ exports: {}, +/******/ id: moduleId, +/******/ loaded: false +/******/ }; + +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); + +/******/ // Flag the module as loaded +/******/ module.loaded = true; + +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } + + +/******/ // expose the modules object (__webpack_modules__) +/******/ __webpack_require__.m = modules; + +/******/ // expose the module cache +/******/ __webpack_require__.c = installedModules; + +/******/ // __webpack_public_path__ +/******/ __webpack_require__.p = ""; + +/******/ // Load entry module and return exports +/******/ return __webpack_require__(0); +/******/ }) +/************************************************************************/ +/******/ ([ +/* 0 */ +/***/ function(module, exports, __webpack_require__) { + + /* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ + exports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + exports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer; + exports.SourceNode = __webpack_require__(10).SourceNode; + + +/***/ }, +/* 1 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var base64VLQ = __webpack_require__(2); + var util = __webpack_require__(4); + var ArraySet = __webpack_require__(5).ArraySet; + var MappingList = __webpack_require__(6).MappingList; + + /** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ + function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + SourceMapGenerator.prototype._version = 3; + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. 
+ */ + SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ + SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + + /** + * Set the source content for a source file. + */ + SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. 
Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. 
+ * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. + */ + SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + + SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + + /** + * Externalize the source map. 
+ */ + SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + + /** + * Render the source map being generated to a string. + */ + SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + + exports.SourceMapGenerator = SourceMapGenerator; + + +/***/ }, +/* 2 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + var base64 = __webpack_require__(3); + + // A single base 64 digit can contain 6 bits of data. For the base 64 variable + // length quantities we use in the source map spec, the first bit is the sign, + // the next four bits are the actual value, and the 6th bit is the + // continuation bit. The continuation bit tells us whether there are more + // digits in this value following this digit. 
+ // + // Continuation + // | Sign + // | | + // V V + // 101011 + + var VLQ_BASE_SHIFT = 5; + + // binary: 100000 + var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + + // binary: 011111 + var VLQ_BASE_MASK = VLQ_BASE - 1; + + // binary: 100000 + var VLQ_CONTINUATION_BIT = VLQ_BASE; + + /** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ + function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; + } + + /** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ + function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; + } + + /** + * Returns the base 64 VLQ encoded value. + */ + exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; + }; + + /** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. + */ + exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; + }; + + +/***/ }, +/* 3 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + + /** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ + exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); + }; + + /** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. 
+ */ + exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; + }; + + +/***/ }, +/* 4 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + /** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. + */ + function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } + } + exports.getArg = getArg; + + var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; + var dataUrlRegexp = /^data:.+\,.+$/; + + function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; + } + exports.urlParse = urlParse; + + function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; + } + exports.urlGenerate = urlGenerate; + + /** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consequtive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ + function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. 
+ parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; + } + exports.normalize = normalize; + + /** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. + */ + function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; + } + exports.join = join; + + exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); + }; + + /** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ + function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); + } + exports.relative = relative; + + var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); + }()); + + function identity (s) { + return s; + } + + /** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. 
+ * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ + function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; + } + exports.toSetString = supportsNullProto ? identity : toSetString; + + function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; + } + exports.fromSetString = supportsNullProto ? identity : fromSetString; + + function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; + } + + /** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ + function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByOriginalPositions = compareByOriginalPositions; + + /** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. 
+ */ + function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; + } + exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + + function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; + } + + /** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. + */ + function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); + } + exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + + +/***/ }, +/* 5 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var has = Object.prototype.hasOwnProperty; + + /** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ + function ArraySet() { + this._array = []; + this._set = Object.create(null); + } + + /** + * Static method for creating ArraySet instances from an existing array. + */ + ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; + }; + + /** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ + ArraySet.prototype.size = function ArraySet_size() { + return Object.getOwnPropertyNames(this._set).length; + }; + + /** + * Add the given string to this set. + * + * @param String aStr + */ + ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = util.toSetString(aStr); + var isDuplicate = has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + this._set[sStr] = idx; + } + }; + + /** + * Is the given string a member of this set? 
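+     *
+     * For example, for a set built with ArraySet.fromArray(['foo', 'bar']),
+     * has('foo') is true and has('baz') is false (the names are illustrative).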
+ * + * @param String aStr + */ + ArraySet.prototype.has = function ArraySet_has(aStr) { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); + }; + + /** + * What is the index of the given string in the array? + * + * @param String aStr + */ + ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + throw new Error('"' + aStr + '" is not in the set.'); + }; + + /** + * What is the element at the given index? + * + * @param Number aIdx + */ + ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); + }; + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); + }; + + exports.ArraySet = ArraySet; + + +/***/ }, +/* 6 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + + /** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ + function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; + } + + /** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ + function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + + /** + * Add the given source mapping. + * + * @param Object aMapping + */ + MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + }; + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. 
+ */ + MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + }; + + exports.MappingList = MappingList; + + +/***/ }, +/* 7 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var util = __webpack_require__(4); + var binarySearch = __webpack_require__(8); + var ArraySet = __webpack_require__(5).ArraySet; + var base64VLQ = __webpack_require__(2); + var quickSort = __webpack_require__(9).quickSort; + + function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); + } + + SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); + } + + /** + * The version of the source mapping spec that we are consuming. + */ + SourceMapConsumer.prototype._version = 3; + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. 
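+  //
+  // As an illustration, an entry describing generated line 2, column 10 that
+  // came from line 5, column 4 of "foo.js" has the shape:
+  //
+  //     { generatedLine: 2, generatedColumn: 10,
+  //       source: 'foo.js', originalLine: 5, originalColumn: 4, name: 'bar' }
+  //
+  // (BasicSourceMapConsumer keeps `source` and `name` deflated as indices
+  // into its _sources and _names sets until a mapping is handed out, e.g.
+  // by `eachMapping`.)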
+ + SourceMapConsumer.prototype.__generatedMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } + }); + + SourceMapConsumer.prototype.__originalMappings = null; + Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } + }); + + SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + + SourceMapConsumer.GENERATED_ORDER = 1; + SourceMapConsumer.ORIGINAL_ORDER = 2; + + SourceMapConsumer.GREATEST_LOWER_BOUND = 1; + SourceMapConsumer.LEAST_UPPER_BOUND = 2; + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ + SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. 
+ * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + + exports.SourceMapConsumer = SourceMapConsumer; + + /** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. 
The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ + function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; + } + + BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @returns BasicSourceMapConsumer + */ + BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + + /** + * The version of the source mapping spec that we are consuming. + */ + BasicSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } + }); + + /** + * Provide the JIT with a nice shape / hidden class. + */ + function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. 
We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). 
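+        // For example, two mappings on the same generated line starting at
+        // columns 5 and 12 give the first a lastGeneratedColumn of 11.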
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
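+   *
+   * For example, `sourceContentFor('foo.js', true)` yields the recorded
+   * contents of foo.js (a hypothetical source name), or null when they are
+   * missing; without the second argument an unknown source throws instead.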
+ */ + BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. 
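+   *
+   * Illustrative call (hypothetical names and positions): for a map in which
+   * foo.js line 2, column 0 maps to generated line 5, column 10,
+   *
+   *     consumer.generatedPositionFor({ source: 'foo.js', line: 2, column: 0 })
+   *
+   * returns { line: 5, column: 10, lastColumn: null }; lastColumn is only
+   * populated once computeColumnSpans() has been called.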
+ */ + BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + + exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + + /** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ + function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. 
+ // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); + } + + IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); + IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + + /** + * The version of the source mapping spec that we are consuming. + */ + IndexedSourceMapConsumer.prototype._version = 3; + + /** + * The list of original sources. + */ + Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + }); + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ + IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + + /** + * Returns the original source content. 
The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ + IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ + IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + + exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + + +/***/ }, +/* 8 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + exports.GREATEST_LOWER_BOUND = 1; + exports.LEAST_UPPER_BOUND = 2; + + /** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ + function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } + } + + /** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. 
+ * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ + exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; + }; + + +/***/ }, +/* 9 */ +/***/ function(module, exports) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + // It turns out that some (most?) JavaScript engines don't self-host + // `Array.prototype.sort`. This makes sense because C++ will likely remain + // faster than JS when doing raw CPU-intensive sorting. However, when using a + // custom comparator function, calling back and forth between the VM's C++ and + // JIT'd JS is rather slow *and* loses JIT type information, resulting in + // worse generated code for the comparator function than would be optimal. In + // fact, when sorting with a comparator, these costs outweigh the benefits of + // sorting in C++. By using our own JS-implemented Quick Sort (below), we get + // a ~3500ms mean speed-up in `bench/bench.html`. + + /** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ + function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; + } + + /** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ + function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); + } + + /** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ + function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. 
The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } + } + + /** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ + exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); + }; + + +/***/ }, +/* 10 */ +/***/ function(module, exports, __webpack_require__) { + + /* -*- Mode: js; js-indent-level: 2; -*- */ + /* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + + var SourceMapGenerator = __webpack_require__(1).SourceMapGenerator; + var util = __webpack_require__(4); + + // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other + // operating systems these days (capturing the result). + var REGEX_NEWLINE = /(\r?\n)/; + + // Newline character code for charCodeAt() comparisons + var NEWLINE_CODE = 10; + + // Private symbol for identifying `SourceNode`s when multiple versions of + // the source-map library are loaded. This MUST NOT CHANGE across + // versions! + var isSourceNode = "$$$isSourceNode$$$"; + + /** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ + function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. 
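+   *
+   * A typical use from outside the library (a sketch; `code` and `map` are
+   * assumed to hold a generated file and its raw source map):
+   *
+   *     var node = SourceNode.fromStringWithSourceMap(
+   *       code, new SourceMapConsumer(map));
+   *     var output = node.toStringWithSourceMap({ file: 'bundle.js' });
+   *     // output.code is the regenerated source, output.map its generator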
+ */ + SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are removed from this array, by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var shiftNextLine = function() { + var lineContents = remainingLines.shift(); + // The last line of a file might not have a newline. + var newLine = remainingLines.shift() || ""; + return lineContents + newLine; + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[0]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[0] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[0]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[0] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLines.length > 0) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? 
util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + + /** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + }; + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + }; + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ + SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + }; + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; + }; + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. 
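+	 * For example (illustrative only; assumes a `node` built elsewhere):
+	 *
+	 *     node.setSourceContent("one.js", "ONE.foo = function () {};");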
+ * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ + SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; + }; + + /** + * Returns the string representation of this source node along with a source + * map. + */ + SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; + }; + + exports.SourceNode = SourceNode; + + +/***/ } +/******/ ]) +}); +; \ No newline at end of file diff --git 
a/node_modules/source-map/dist/source-map.min.js b/node_modules/source-map/dist/source-map.min.js new file mode 100644 index 0000000..e03b6d5 --- /dev/null +++ b/node_modules/source-map/dist/source-map.min.js @@ -0,0 +1,2 @@ +!function(e,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.sourceMap=n():e.sourceMap=n()}(this,function(){return function(e){function n(t){if(r[t])return r[t].exports;var o=r[t]={exports:{},id:t,loaded:!1};return e[t].call(o.exports,o,o.exports,n),o.loaded=!0,o.exports}var r={};return n.m=e,n.c=r,n.p="",n(0)}([function(e,n,r){n.SourceMapGenerator=r(1).SourceMapGenerator,n.SourceMapConsumer=r(7).SourceMapConsumer,n.SourceNode=r(10).SourceNode},function(e,n,r){function t(e){e||(e={}),this._file=i.getArg(e,"file",null),this._sourceRoot=i.getArg(e,"sourceRoot",null),this._skipValidation=i.getArg(e,"skipValidation",!1),this._sources=new s,this._names=new s,this._mappings=new a,this._sourcesContents=null}var o=r(2),i=r(4),s=r(5).ArraySet,a=r(6).MappingList;t.prototype._version=3,t.fromSourceMap=function(e){var n=e.sourceRoot,r=new t({file:e.file,sourceRoot:n});return e.eachMapping(function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};null!=e.source&&(t.source=e.source,null!=n&&(t.source=i.relative(n,t.source)),t.original={line:e.originalLine,column:e.originalColumn},null!=e.name&&(t.name=e.name)),r.addMapping(t)}),e.sources.forEach(function(n){var t=e.sourceContentFor(n);null!=t&&r.setSourceContent(n,t)}),r},t.prototype.addMapping=function(e){var n=i.getArg(e,"generated"),r=i.getArg(e,"original",null),t=i.getArg(e,"source",null),o=i.getArg(e,"name",null);this._skipValidation||this._validateMapping(n,r,t,o),null!=t&&(t=String(t),this._sources.has(t)||this._sources.add(t)),null!=o&&(o=String(o),this._names.has(o)||this._names.add(o)),this._mappings.add({generatedLine:n.line,generatedColumn:n.column,originalLine:null!=r&&r.line,originalColumn:null!=r&&r.column,source:t,name:o})},t.prototype.setSourceContent=function(e,n){var r=e;null!=this._sourceRoot&&(r=i.relative(this._sourceRoot,r)),null!=n?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[i.toSetString(r)]=n):this._sourcesContents&&(delete this._sourcesContents[i.toSetString(r)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))},t.prototype.applySourceMap=function(e,n,r){var t=n;if(null==n){if(null==e.file)throw new Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. 
Both were omitted.');t=e.file}var o=this._sourceRoot;null!=o&&(t=i.relative(o,t));var a=new s,u=new s;this._mappings.unsortedForEach(function(n){if(n.source===t&&null!=n.originalLine){var s=e.originalPositionFor({line:n.originalLine,column:n.originalColumn});null!=s.source&&(n.source=s.source,null!=r&&(n.source=i.join(r,n.source)),null!=o&&(n.source=i.relative(o,n.source)),n.originalLine=s.line,n.originalColumn=s.column,null!=s.name&&(n.name=s.name))}var l=n.source;null==l||a.has(l)||a.add(l);var c=n.name;null==c||u.has(c)||u.add(c)},this),this._sources=a,this._names=u,e.sources.forEach(function(n){var t=e.sourceContentFor(n);null!=t&&(null!=r&&(n=i.join(r,n)),null!=o&&(n=i.relative(o,n)),this.setSourceContent(n,t))},this)},t.prototype._validateMapping=function(e,n,r,t){if((!(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0)||n||r||t)&&!(e&&"line"in e&&"column"in e&&n&&"line"in n&&"column"in n&&e.line>0&&e.column>=0&&n.line>0&&n.column>=0&&r))throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:r,original:n,name:t}))},t.prototype._serializeMappings=function(){for(var e,n,r,t,s=0,a=1,u=0,l=0,c=0,g=0,p="",h=this._mappings.toArray(),f=0,d=h.length;d>f;f++){if(n=h[f],e="",n.generatedLine!==a)for(s=0;n.generatedLine!==a;)e+=";",a++;else if(f>0){if(!i.compareByGeneratedPositionsInflated(n,h[f-1]))continue;e+=","}e+=o.encode(n.generatedColumn-s),s=n.generatedColumn,null!=n.source&&(t=this._sources.indexOf(n.source),e+=o.encode(t-g),g=t,e+=o.encode(n.originalLine-1-l),l=n.originalLine-1,e+=o.encode(n.originalColumn-u),u=n.originalColumn,null!=n.name&&(r=this._names.indexOf(n.name),e+=o.encode(r-c),c=r)),p+=e}return p},t.prototype._generateSourcesContent=function(e,n){return e.map(function(e){if(!this._sourcesContents)return null;null!=n&&(e=i.relative(n,e));var r=i.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,r)?this._sourcesContents[r]:null},this)},t.prototype.toJSON=function(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};return null!=this._file&&(e.file=this._file),null!=this._sourceRoot&&(e.sourceRoot=this._sourceRoot),this._sourcesContents&&(e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)),e},t.prototype.toString=function(){return JSON.stringify(this.toJSON())},n.SourceMapGenerator=t},function(e,n,r){function t(e){return 0>e?(-e<<1)+1:(e<<1)+0}function o(e){var n=1===(1&e),r=e>>1;return n?-r:r}var i=r(3),s=5,a=1<>>=s,o>0&&(n|=l),r+=i.encode(n);while(o>0);return r},n.decode=function(e,n,r){var t,a,c=e.length,g=0,p=0;do{if(n>=c)throw new Error("Expected more digits in base 64 VLQ value.");if(a=i.decode(e.charCodeAt(n++)),-1===a)throw new Error("Invalid base64 digit: "+e.charAt(n-1));t=!!(a&l),a&=u,g+=a<=0&&e=n&&r>=e?e-n:e>=t&&o>=e?e-t+l:e>=i&&s>=e?e-i+c:e==a?62:e==u?63:-1}},function(e,n){function r(e,n,r){if(n in e)return e[n];if(3===arguments.length)return r;throw new Error('"'+n+'" is a required argument.')}function t(e){var n=e.match(m);return n?{scheme:n[1],auth:n[2],host:n[3],port:n[4],path:n[5]}:null}function o(e){var n="";return e.scheme&&(n+=e.scheme+":"),n+="//",e.auth&&(n+=e.auth+"@"),e.host&&(n+=e.host),e.port&&(n+=":"+e.port),e.path&&(n+=e.path),n}function i(e){var r=e,i=t(e);if(i){if(!i.path)return e;r=i.path}for(var s,a=n.isAbsolute(r),u=r.split(/\/+/),l=0,c=u.length-1;c>=0;c--)s=u[c],"."===s?u.splice(c,1):".."===s?l++:l>0&&(""===s?(u.splice(c+1,l),l=0):(u.splice(c,2),l--));return 
r=u.join("/"),""===r&&(r=a?"/":"."),i?(i.path=r,o(i)):r}function s(e,n){""===e&&(e="."),""===n&&(n=".");var r=t(n),s=t(e);if(s&&(e=s.path||"/"),r&&!r.scheme)return s&&(r.scheme=s.scheme),o(r);if(r||n.match(_))return n;if(s&&!s.host&&!s.path)return s.host=n,o(s);var a="/"===n.charAt(0)?n:i(e.replace(/\/+$/,"")+"/"+n);return s?(s.path=a,o(s)):a}function a(e,n){""===e&&(e="."),e=e.replace(/\/$/,"");for(var r=0;0!==n.indexOf(e+"/");){var t=e.lastIndexOf("/");if(0>t)return n;if(e=e.slice(0,t),e.match(/^([^\/]+:\/)?\/*$/))return n;++r}return Array(r+1).join("../")+n.substr(e.length+1)}function u(e){return e}function l(e){return g(e)?"$"+e:e}function c(e){return g(e)?e.slice(1):e}function g(e){if(!e)return!1;var n=e.length;if(9>n)return!1;if(95!==e.charCodeAt(n-1)||95!==e.charCodeAt(n-2)||111!==e.charCodeAt(n-3)||116!==e.charCodeAt(n-4)||111!==e.charCodeAt(n-5)||114!==e.charCodeAt(n-6)||112!==e.charCodeAt(n-7)||95!==e.charCodeAt(n-8)||95!==e.charCodeAt(n-9))return!1;for(var r=n-10;r>=0;r--)if(36!==e.charCodeAt(r))return!1;return!0}function p(e,n,r){var t=e.source-n.source;return 0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t||r?t:(t=e.generatedColumn-n.generatedColumn,0!==t?t:(t=e.generatedLine-n.generatedLine,0!==t?t:e.name-n.name))))}function h(e,n,r){var t=e.generatedLine-n.generatedLine;return 0!==t?t:(t=e.generatedColumn-n.generatedColumn,0!==t||r?t:(t=e.source-n.source,0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t?t:e.name-n.name))))}function f(e,n){return e===n?0:e>n?1:-1}function d(e,n){var r=e.generatedLine-n.generatedLine;return 0!==r?r:(r=e.generatedColumn-n.generatedColumn,0!==r?r:(r=f(e.source,n.source),0!==r?r:(r=e.originalLine-n.originalLine,0!==r?r:(r=e.originalColumn-n.originalColumn,0!==r?r:f(e.name,n.name)))))}n.getArg=r;var m=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/,_=/^data:.+\,.+$/;n.urlParse=t,n.urlGenerate=o,n.normalize=i,n.join=s,n.isAbsolute=function(e){return"/"===e.charAt(0)||!!e.match(m)},n.relative=a;var v=function(){var e=Object.create(null);return!("__proto__"in e)}();n.toSetString=v?u:l,n.fromSetString=v?u:c,n.compareByOriginalPositions=p,n.compareByGeneratedPositionsDeflated=h,n.compareByGeneratedPositionsInflated=d},function(e,n,r){function t(){this._array=[],this._set=Object.create(null)}var o=r(4),i=Object.prototype.hasOwnProperty;t.fromArray=function(e,n){for(var r=new t,o=0,i=e.length;i>o;o++)r.add(e[o],n);return r},t.prototype.size=function(){return Object.getOwnPropertyNames(this._set).length},t.prototype.add=function(e,n){var r=o.toSetString(e),t=i.call(this._set,r),s=this._array.length;(!t||n)&&this._array.push(e),t||(this._set[r]=s)},t.prototype.has=function(e){var n=o.toSetString(e);return i.call(this._set,n)},t.prototype.indexOf=function(e){var n=o.toSetString(e);if(i.call(this._set,n))return this._set[n];throw new Error('"'+e+'" is not in the set.')},t.prototype.at=function(e){if(e>=0&&er||t==r&&s>=o||i.compareByGeneratedPositionsInflated(e,n)<=0}function o(){this._array=[],this._sorted=!0,this._last={generatedLine:-1,generatedColumn:0}}var i=r(4);o.prototype.unsortedForEach=function(e,n){this._array.forEach(e,n)},o.prototype.add=function(e){t(this._last,e)?(this._last=e,this._array.push(e)):(this._sorted=!1,this._array.push(e))},o.prototype.toArray=function(){return this._sorted||(this._array.sort(i.compareByGeneratedPositionsInflated),this._sorted=!0),this._array},n.MappingList=o},function(e,n,r){function t(e){var 
n=e;return"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,""))),null!=n.sections?new s(n):new o(n)}function o(e){var n=e;"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,"")));var r=a.getArg(n,"version"),t=a.getArg(n,"sources"),o=a.getArg(n,"names",[]),i=a.getArg(n,"sourceRoot",null),s=a.getArg(n,"sourcesContent",null),u=a.getArg(n,"mappings"),c=a.getArg(n,"file",null);if(r!=this._version)throw new Error("Unsupported version: "+r);t=t.map(String).map(a.normalize).map(function(e){return i&&a.isAbsolute(i)&&a.isAbsolute(e)?a.relative(i,e):e}),this._names=l.fromArray(o.map(String),!0),this._sources=l.fromArray(t,!0),this.sourceRoot=i,this.sourcesContent=s,this._mappings=u,this.file=c}function i(){this.generatedLine=0,this.generatedColumn=0,this.source=null,this.originalLine=null,this.originalColumn=null,this.name=null}function s(e){var n=e;"string"==typeof e&&(n=JSON.parse(e.replace(/^\)\]\}'/,"")));var r=a.getArg(n,"version"),o=a.getArg(n,"sections");if(r!=this._version)throw new Error("Unsupported version: "+r);this._sources=new l,this._names=new l;var i={line:-1,column:0};this._sections=o.map(function(e){if(e.url)throw new Error("Support for url field in sections not implemented.");var n=a.getArg(e,"offset"),r=a.getArg(n,"line"),o=a.getArg(n,"column");if(r=0){var i=this._originalMappings[o];if(void 0===e.column)for(var s=i.originalLine;i&&i.originalLine===s;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o];else for(var l=i.originalColumn;i&&i.originalLine===n&&i.originalColumn==l;)t.push({line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}),i=this._originalMappings[++o]}return t},n.SourceMapConsumer=t,o.prototype=Object.create(t.prototype),o.prototype.consumer=t,o.fromSourceMap=function(e){var n=Object.create(o.prototype),r=n._names=l.fromArray(e._names.toArray(),!0),t=n._sources=l.fromArray(e._sources.toArray(),!0);n.sourceRoot=e._sourceRoot,n.sourcesContent=e._generateSourcesContent(n._sources.toArray(),n.sourceRoot),n.file=e._file;for(var s=e._mappings.toArray().slice(),u=n.__generatedMappings=[],c=n.__originalMappings=[],p=0,h=s.length;h>p;p++){var f=s[p],d=new i;d.generatedLine=f.generatedLine,d.generatedColumn=f.generatedColumn,f.source&&(d.source=t.indexOf(f.source),d.originalLine=f.originalLine,d.originalColumn=f.originalColumn,f.name&&(d.name=r.indexOf(f.name)),c.push(d)),u.push(d)}return g(n.__originalMappings,a.compareByOriginalPositions),n},o.prototype._version=3,Object.defineProperty(o.prototype,"sources",{get:function(){return this._sources.toArray().map(function(e){return null!=this.sourceRoot?a.join(this.sourceRoot,e):e},this)}}),o.prototype._parseMappings=function(e,n){for(var r,t,o,s,u,l=1,p=0,h=0,f=0,d=0,m=0,_=e.length,v=0,C={},y={},A=[],S=[];_>v;)if(";"===e.charAt(v))l++,v++,p=0;else if(","===e.charAt(v))v++;else{for(r=new i,r.generatedLine=l,s=v;_>s&&!this._charIsMappingSeparator(e,s);s++);if(t=e.slice(v,s),o=C[t])v+=t.length;else{for(o=[];s>v;)c.decode(e,v,y),u=y.value,v=y.rest,o.push(u);if(2===o.length)throw new Error("Found a source, but no line and column");if(3===o.length)throw new Error("Found a source and line, but no 
column");C[t]=o}r.generatedColumn=p+o[0],p=r.generatedColumn,o.length>1&&(r.source=d+o[1],d+=o[1],r.originalLine=h+o[2],h=r.originalLine,r.originalLine+=1,r.originalColumn=f+o[3],f=r.originalColumn,o.length>4&&(r.name=m+o[4],m+=o[4])),S.push(r),"number"==typeof r.originalLine&&A.push(r)}g(S,a.compareByGeneratedPositionsDeflated),this.__generatedMappings=S,g(A,a.compareByOriginalPositions),this.__originalMappings=A},o.prototype._findMapping=function(e,n,r,t,o,i){if(e[r]<=0)throw new TypeError("Line must be greater than or equal to 1, got "+e[r]);if(e[t]<0)throw new TypeError("Column must be greater than or equal to 0, got "+e[t]);return u.search(e,n,o,i)},o.prototype.computeColumnSpans=function(){for(var e=0;e=0){var o=this._generatedMappings[r];if(o.generatedLine===n.generatedLine){var i=a.getArg(o,"source",null);null!==i&&(i=this._sources.at(i),null!=this.sourceRoot&&(i=a.join(this.sourceRoot,i)));var s=a.getArg(o,"name",null);return null!==s&&(s=this._names.at(s)),{source:i,line:a.getArg(o,"originalLine",null),column:a.getArg(o,"originalColumn",null),name:s}}}return{source:null,line:null,column:null,name:null}},o.prototype.hasContentsOfAllSources=function(){return this.sourcesContent?this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some(function(e){return null==e}):!1},o.prototype.sourceContentFor=function(e,n){if(!this.sourcesContent)return null;if(null!=this.sourceRoot&&(e=a.relative(this.sourceRoot,e)),this._sources.has(e))return this.sourcesContent[this._sources.indexOf(e)];var r;if(null!=this.sourceRoot&&(r=a.urlParse(this.sourceRoot))){var t=e.replace(/^file:\/\//,"");if("file"==r.scheme&&this._sources.has(t))return this.sourcesContent[this._sources.indexOf(t)];if((!r.path||"/"==r.path)&&this._sources.has("/"+e))return this.sourcesContent[this._sources.indexOf("/"+e)]}if(n)return null;throw new Error('"'+e+'" is not in the SourceMap.')},o.prototype.generatedPositionFor=function(e){var n=a.getArg(e,"source");if(null!=this.sourceRoot&&(n=a.relative(this.sourceRoot,n)),!this._sources.has(n))return{line:null,column:null,lastColumn:null};n=this._sources.indexOf(n);var r={source:n,originalLine:a.getArg(e,"line"),originalColumn:a.getArg(e,"column")},o=this._findMapping(r,this._originalMappings,"originalLine","originalColumn",a.compareByOriginalPositions,a.getArg(e,"bias",t.GREATEST_LOWER_BOUND));if(o>=0){var i=this._originalMappings[o];if(i.source===r.source)return{line:a.getArg(i,"generatedLine",null),column:a.getArg(i,"generatedColumn",null),lastColumn:a.getArg(i,"lastGeneratedColumn",null)}}return{line:null,column:null,lastColumn:null}},n.BasicSourceMapConsumer=o,s.prototype=Object.create(t.prototype),s.prototype.constructor=t,s.prototype._version=3,Object.defineProperty(s.prototype,"sources",{get:function(){for(var e=[],n=0;n0?t-u>1?r(u,t,o,i,s,a):a==n.LEAST_UPPER_BOUND?t1?r(e,u,o,i,s,a):a==n.LEAST_UPPER_BOUND?u:0>e?-1:e}n.GREATEST_LOWER_BOUND=1,n.LEAST_UPPER_BOUND=2,n.search=function(e,t,o,i){if(0===t.length)return-1;var s=r(-1,t.length,e,t,o,i||n.GREATEST_LOWER_BOUND);if(0>s)return-1;for(;s-1>=0&&0===o(t[s],t[s-1],!0);)--s;return s}},function(e,n){function r(e,n,r){var t=e[n];e[n]=e[r],e[r]=t}function t(e,n){return Math.round(e+Math.random()*(n-e))}function o(e,n,i,s){if(s>i){var a=t(i,s),u=i-1;r(e,a,s);for(var l=e[s],c=i;s>c;c++)n(e[c],l)<=0&&(u+=1,r(e,u,c));r(e,u+1,c);var g=u+1;o(e,n,i,g-1),o(e,n,g+1,s)}}n.quickSort=function(e,n){o(e,n,0,e.length-1)}},function(e,n,r){function 
t(e,n,r,t,o){this.children=[],this.sourceContents={},this.line=null==e?null:e,this.column=null==n?null:n,this.source=null==r?null:r,this.name=null==o?null:o,this[u]=!0,null!=t&&this.add(t)}var o=r(1).SourceMapGenerator,i=r(4),s=/(\r?\n)/,a=10,u="$$$isSourceNode$$$";t.fromStringWithSourceMap=function(e,n,r){function o(e,n){if(null===e||void 0===e.source)a.add(n);else{var o=r?i.join(r,e.source):e.source;a.add(new t(e.originalLine,e.originalColumn,o,n,e.name))}}var a=new t,u=e.split(s),l=function(){var e=u.shift(),n=u.shift()||"";return e+n},c=1,g=0,p=null;return n.eachMapping(function(e){if(null!==p){if(!(c0&&(p&&o(p,l()),a.add(u.join(""))),n.sources.forEach(function(e){var t=n.sourceContentFor(e);null!=t&&(null!=r&&(e=i.join(r,e)),a.setSourceContent(e,t))}),a},t.prototype.add=function(e){if(Array.isArray(e))e.forEach(function(e){this.add(e)},this);else{if(!e[u]&&"string"!=typeof e)throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e);e&&this.children.push(e)}return this},t.prototype.prepend=function(e){if(Array.isArray(e))for(var n=e.length-1;n>=0;n--)this.prepend(e[n]);else{if(!e[u]&&"string"!=typeof e)throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e);this.children.unshift(e)}return this},t.prototype.walk=function(e){for(var n,r=0,t=this.children.length;t>r;r++)n=this.children[r],n[u]?n.walk(e):""!==n&&e(n,{source:this.source,line:this.line,column:this.column,name:this.name})},t.prototype.join=function(e){var n,r,t=this.children.length;if(t>0){for(n=[],r=0;t-1>r;r++)n.push(this.children[r]),n.push(e);n.push(this.children[r]),this.children=n}return this},t.prototype.replaceRight=function(e,n){var r=this.children[this.children.length-1];return r[u]?r.replaceRight(e,n):"string"==typeof r?this.children[this.children.length-1]=r.replace(e,n):this.children.push("".replace(e,n)),this},t.prototype.setSourceContent=function(e,n){this.sourceContents[i.toSetString(e)]=n},t.prototype.walkSourceContents=function(e){for(var n=0,r=this.children.length;r>n;n++)this.children[n][u]&&this.children[n].walkSourceContents(e);for(var t=Object.keys(this.sourceContents),n=0,r=t.length;r>n;n++)e(i.fromSetString(t[n]),this.sourceContents[t[n]])},t.prototype.toString=function(){var e="";return this.walk(function(n){e+=n}),e},t.prototype.toStringWithSourceMap=function(e){var n={code:"",line:1,column:0},r=new o(e),t=!1,i=null,s=null,u=null,l=null;return this.walk(function(e,o){n.code+=e,null!==o.source&&null!==o.line&&null!==o.column?((i!==o.source||s!==o.line||u!==o.column||l!==o.name)&&r.addMapping({source:o.source,original:{line:o.line,column:o.column},generated:{line:n.line,column:n.column},name:o.name}),i=o.source,s=o.line,u=o.column,l=o.name,t=!0):t&&(r.addMapping({generated:{line:n.line,column:n.column}}),i=null,t=!1);for(var c=0,g=e.length;g>c;c++)e.charCodeAt(c)===a?(n.line++,n.column=0,c+1===g?(i=null,t=!1):t&&r.addMapping({source:o.source,original:{line:o.line,column:o.column},generated:{line:n.line,column:n.column},name:o.name})):n.column++}),this.walkSourceContents(function(e,n){r.setSourceContent(e,n)}),{code:n.code,map:r}},n.SourceNode=t}])}); +//# sourceMappingURL=source-map.min.js.map \ No newline at end of file diff --git a/node_modules/source-map/dist/source-map.min.js.map b/node_modules/source-map/dist/source-map.min.js.map new file mode 100644 index 0000000..3275a32 --- /dev/null +++ b/node_modules/source-map/dist/source-map.min.js.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["webpack:///webpack/universalModuleDefinition","webpack:///source-map.min.js","webpack:///webpack/bootstrap 5d9a9f4df3bd553ed2f9","webpack:///./source-map.js","webpack:///./lib/source-map-generator.js","webpack:///./lib/base64-vlq.js","webpack:///./lib/base64.js","webpack:///./lib/util.js","webpack:///./lib/array-set.js","webpack:///./lib/mapping-list.js","webpack:///./lib/source-map-consumer.js","webpack:///./lib/binary-search.js","webpack:///./lib/quick-sort.js","webpack:///./lib/source-node.js"],"names":["root","factory","exports","module","define","amd","this","modules","__webpack_require__","moduleId","installedModules","id","loaded","call","m","c","p","SourceMapGenerator","SourceMapConsumer","SourceNode","aArgs","_file","util","getArg","_sourceRoot","_skipValidation","_sources","ArraySet","_names","_mappings","MappingList","_sourcesContents","base64VLQ","prototype","_version","fromSourceMap","aSourceMapConsumer","sourceRoot","generator","file","eachMapping","mapping","newMapping","generated","line","generatedLine","column","generatedColumn","source","relative","original","originalLine","originalColumn","name","addMapping","sources","forEach","sourceFile","content","sourceContentFor","setSourceContent","_validateMapping","String","has","add","aSourceFile","aSourceContent","Object","create","toSetString","keys","length","applySourceMap","aSourceMapPath","Error","newSources","newNames","unsortedForEach","originalPositionFor","join","aGenerated","aOriginal","aSource","aName","JSON","stringify","_serializeMappings","next","nameIdx","sourceIdx","previousGeneratedColumn","previousGeneratedLine","previousOriginalColumn","previousOriginalLine","previousName","previousSource","result","mappings","toArray","i","len","compareByGeneratedPositionsInflated","encode","indexOf","_generateSourcesContent","aSources","aSourceRoot","map","key","hasOwnProperty","toJSON","version","names","sourcesContent","toString","toVLQSigned","aValue","fromVLQSigned","isNegative","shifted","base64","VLQ_BASE_SHIFT","VLQ_BASE","VLQ_BASE_MASK","VLQ_CONTINUATION_BIT","digit","encoded","vlq","decode","aStr","aIndex","aOutParam","continuation","strLen","shift","charCodeAt","charAt","value","rest","intToCharMap","split","number","TypeError","charCode","bigA","bigZ","littleA","littleZ","zero","nine","plus","slash","littleOffset","numberOffset","aDefaultValue","arguments","urlParse","aUrl","match","urlRegexp","scheme","auth","host","port","path","urlGenerate","aParsedUrl","url","normalize","aPath","part","isAbsolute","parts","up","splice","aRoot","aPathUrl","aRootUrl","dataUrlRegexp","joined","replace","level","index","lastIndexOf","slice","Array","substr","identity","s","isProtoString","fromSetString","compareByOriginalPositions","mappingA","mappingB","onlyCompareOriginal","cmp","compareByGeneratedPositionsDeflated","onlyCompareGenerated","strcmp","aStr1","aStr2","supportsNullProto","obj","_array","_set","fromArray","aArray","aAllowDuplicates","set","size","getOwnPropertyNames","sStr","isDuplicate","idx","push","at","aIdx","generatedPositionAfter","lineA","lineB","columnA","columnB","_sorted","_last","aCallback","aThisArg","aMapping","sort","aSourceMap","sourceMap","parse","sections","IndexedSourceMapConsumer","BasicSourceMapConsumer","Mapping","lastOffset","_sections","offset","offsetLine","offsetColumn","generatedOffset","consumer","binarySearch","quickSort","__generatedMappings","defineProperty","get","_parseMappings","__originalMappings","_charIsMappingSeparator","GENERATED_ORDER","ORIGINAL_ORDER",
"GREATEST_LOWER_BOUND","LEAST_UPPER_BOUND","aContext","aOrder","context","order","_generatedMappings","_originalMappings","allGeneratedPositionsFor","needle","_findMapping","undefined","lastColumn","smc","generatedMappings","destGeneratedMappings","destOriginalMappings","srcMapping","destMapping","str","segment","end","cachedSegments","temp","originalMappings","aNeedle","aMappings","aLineName","aColumnName","aComparator","aBias","search","computeColumnSpans","nextMapping","lastGeneratedColumn","Infinity","hasContentsOfAllSources","some","sc","nullOnMissing","fileUriAbsPath","generatedPositionFor","constructor","j","sectionIndex","section","bias","every","generatedPosition","ret","sectionMappings","adjustedMapping","recursiveSearch","aLow","aHigh","aHaystack","aCompare","mid","Math","floor","swap","ary","x","y","randomIntInRange","low","high","round","random","doQuickSort","comparator","r","pivotIndex","pivot","q","aLine","aColumn","aChunks","children","sourceContents","isSourceNode","REGEX_NEWLINE","NEWLINE_CODE","fromStringWithSourceMap","aGeneratedCode","aRelativePath","addMappingWithCode","code","node","remainingLines","shiftNextLine","lineContents","newLine","lastGeneratedLine","lastMapping","nextLine","aChunk","isArray","chunk","prepend","unshift","walk","aFn","aSep","newChildren","replaceRight","aPattern","aReplacement","lastChild","walkSourceContents","toStringWithSourceMap","sourceMappingActive","lastOriginalSource","lastOriginalLine","lastOriginalColumn","lastOriginalName","sourceContent"],"mappings":"CAAA,SAAAA,EAAAC,GACA,gBAAAC,UAAA,gBAAAC,QACAA,OAAAD,QAAAD,IACA,kBAAAG,gBAAAC,IACAD,UAAAH,GACA,gBAAAC,SACAA,QAAA,UAAAD,IAEAD,EAAA,UAAAC,KACCK,KAAA,WACD,MCAgB,UAAUC,GCN1B,QAAAC,GAAAC,GAGA,GAAAC,EAAAD,GACA,MAAAC,GAAAD,GAAAP,OAGA,IAAAC,GAAAO,EAAAD,IACAP,WACAS,GAAAF,EACAG,QAAA,EAUA,OANAL,GAAAE,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAS,QAAA,EAGAT,EAAAD,QAvBA,GAAAQ,KAqCA,OATAF,GAAAM,EAAAP,EAGAC,EAAAO,EAAAL,EAGAF,EAAAQ,EAAA,GAGAR,EAAA,KDgBM,SAASL,EAAQD,EAASM,GEjDhCN,EAAAe,mBAAAT,EAAA,GAAAS,mBACAf,EAAAgB,kBAAAV,EAAA,GAAAU,kBACAhB,EAAAiB,WAAAX,EAAA,IAAAW,YF6DM,SAAShB,EAAQD,EAASM,GGhDhC,QAAAS,GAAAG,GACAA,IACAA,MAEAd,KAAAe,MAAAC,EAAAC,OAAAH,EAAA,aACAd,KAAAkB,YAAAF,EAAAC,OAAAH,EAAA,mBACAd,KAAAmB,gBAAAH,EAAAC,OAAAH,EAAA,qBACAd,KAAAoB,SAAA,GAAAC,GACArB,KAAAsB,OAAA,GAAAD,GACArB,KAAAuB,UAAA,GAAAC,GACAxB,KAAAyB,iBAAA,KAvBA,GAAAC,GAAAxB,EAAA,GACAc,EAAAd,EAAA,GACAmB,EAAAnB,EAAA,GAAAmB,SACAG,EAAAtB,EAAA,GAAAsB,WAuBAb,GAAAgB,UAAAC,SAAA,EAOAjB,EAAAkB,cACA,SAAAC,GACA,GAAAC,GAAAD,EAAAC,WACAC,EAAA,GAAArB,IACAsB,KAAAH,EAAAG,KACAF,cAkCA,OAhCAD,GAAAI,YAAA,SAAAC,GACA,GAAAC,IACAC,WACAC,KAAAH,EAAAI,cACAC,OAAAL,EAAAM,iBAIA,OAAAN,EAAAO,SACAN,EAAAM,OAAAP,EAAAO,OACA,MAAAX,IACAK,EAAAM,OAAA1B,EAAA2B,SAAAZ,EAAAK,EAAAM,SAGAN,EAAAQ,UACAN,KAAAH,EAAAU,aACAL,OAAAL,EAAAW,gBAGA,MAAAX,EAAAY,OACAX,EAAAW,KAAAZ,EAAAY,OAIAf,EAAAgB,WAAAZ,KAEAN,EAAAmB,QAAAC,QAAA,SAAAC,GACA,GAAAC,GAAAtB,EAAAuB,iBAAAF,EACA,OAAAC,GACApB,EAAAsB,iBAAAH,EAAAC,KAGApB,GAaArB,EAAAgB,UAAAqB,WACA,SAAAlC,GACA,GAAAuB,GAAArB,EAAAC,OAAAH,EAAA,aACA8B,EAAA5B,EAAAC,OAAAH,EAAA,iBACA4B,EAAA1B,EAAAC,OAAAH,EAAA,eACAiC,EAAA/B,EAAAC,OAAAH,EAAA,YAEAd,MAAAmB,iBACAnB,KAAAuD,iBAAAlB,EAAAO,EAAAF,EAAAK,GAGA,MAAAL,IACAA,EAAAc,OAAAd,GACA1C,KAAAoB,SAAAqC,IAAAf,IACA1C,KAAAoB,SAAAsC,IAAAhB,IAIA,MAAAK,IACAA,EAAAS,OAAAT,GACA/C,KAAAsB,OAAAmC,IAAAV,IACA/C,KAAAsB,OAAAoC,IAAAX,IAIA/C,KAAAuB,UAAAmC,KACAnB,cAAAF,EAAAC,KACAG,gBAAAJ,EAAAG,OACAK,aAAA,MAAAD,KAAAN,KACAQ,eAAA,MAAAF,KAAAJ,OACAE,SACAK,UAOApC,EAAAgB,UAAA2B,iBACA,SAAAK,EAAAC,GACA,GAAAlB,GAAAiB,CACA,OAAA3D,KAAAkB,cACAwB,EAAA1B,EAAA2
B,SAAA3C,KAAAkB,YAAAwB,IAGA,MAAAkB,GAGA5D,KAAAyB,mBACAzB,KAAAyB,iBAAAoC,OAAAC,OAAA,OAEA9D,KAAAyB,iBAAAT,EAAA+C,YAAArB,IAAAkB,GACK5D,KAAAyB,yBAGLzB,MAAAyB,iBAAAT,EAAA+C,YAAArB,IACA,IAAAmB,OAAAG,KAAAhE,KAAAyB,kBAAAwC,SACAjE,KAAAyB,iBAAA,QAqBAd,EAAAgB,UAAAuC,eACA,SAAApC,EAAA6B,EAAAQ,GACA,GAAAhB,GAAAQ,CAEA,UAAAA,EAAA,CACA,SAAA7B,EAAAG,KACA,SAAAmC,OACA,gJAIAjB,GAAArB,EAAAG,KAEA,GAAAF,GAAA/B,KAAAkB,WAEA,OAAAa,IACAoB,EAAAnC,EAAA2B,SAAAZ,EAAAoB,GAIA,IAAAkB,GAAA,GAAAhD,GACAiD,EAAA,GAAAjD,EAGArB,MAAAuB,UAAAgD,gBAAA,SAAApC,GACA,GAAAA,EAAAO,SAAAS,GAAA,MAAAhB,EAAAU,aAAA,CAEA,GAAAD,GAAAd,EAAA0C,qBACAlC,KAAAH,EAAAU,aACAL,OAAAL,EAAAW,gBAEA,OAAAF,EAAAF,SAEAP,EAAAO,OAAAE,EAAAF,OACA,MAAAyB,IACAhC,EAAAO,OAAA1B,EAAAyD,KAAAN,EAAAhC,EAAAO,SAEA,MAAAX,IACAI,EAAAO,OAAA1B,EAAA2B,SAAAZ,EAAAI,EAAAO,SAEAP,EAAAU,aAAAD,EAAAN,KACAH,EAAAW,eAAAF,EAAAJ,OACA,MAAAI,EAAAG,OACAZ,EAAAY,KAAAH,EAAAG,OAKA,GAAAL,GAAAP,EAAAO,MACA,OAAAA,GAAA2B,EAAAZ,IAAAf,IACA2B,EAAAX,IAAAhB,EAGA,IAAAK,GAAAZ,EAAAY,IACA,OAAAA,GAAAuB,EAAAb,IAAAV,IACAuB,EAAAZ,IAAAX,IAGK/C,MACLA,KAAAoB,SAAAiD,EACArE,KAAAsB,OAAAgD,EAGAxC,EAAAmB,QAAAC,QAAA,SAAAC,GACA,GAAAC,GAAAtB,EAAAuB,iBAAAF,EACA,OAAAC,IACA,MAAAe,IACAhB,EAAAnC,EAAAyD,KAAAN,EAAAhB,IAEA,MAAApB,IACAoB,EAAAnC,EAAA2B,SAAAZ,EAAAoB,IAEAnD,KAAAsD,iBAAAH,EAAAC,KAEKpD,OAcLW,EAAAgB,UAAA4B,iBACA,SAAAmB,EAAAC,EAAAC,EACAC,GACA,MAAAH,GAAA,QAAAA,IAAA,UAAAA,IACAA,EAAApC,KAAA,GAAAoC,EAAAlC,QAAA,IACAmC,GAAAC,GAAAC,MAIAH,GAAA,QAAAA,IAAA,UAAAA,IACAC,GAAA,QAAAA,IAAA,UAAAA,IACAD,EAAApC,KAAA,GAAAoC,EAAAlC,QAAA,GACAmC,EAAArC,KAAA,GAAAqC,EAAAnC,QAAA,GACAoC,GAKA,SAAAR,OAAA,oBAAAU,KAAAC,WACA1C,UAAAqC,EACAhC,OAAAkC,EACAhC,SAAA+B,EACA5B,KAAA8B,MASAlE,EAAAgB,UAAAqD,mBACA,WAcA,OANAC,GACA9C,EACA+C,EACAC,EAVAC,EAAA,EACAC,EAAA,EACAC,EAAA,EACAC,EAAA,EACAC,EAAA,EACAC,EAAA,EACAC,EAAA,GAMAC,EAAA3F,KAAAuB,UAAAqE,UACAC,EAAA,EAAAC,EAAAH,EAAA1B,OAA0C6B,EAAAD,EAASA,IAAA,CAInD,GAHA1D,EAAAwD,EAAAE,GACAZ,EAAA,GAEA9C,EAAAI,gBAAA8C,EAEA,IADAD,EAAA,EACAjD,EAAAI,gBAAA8C,GACAJ,GAAA,IACAI,QAIA,IAAAQ,EAAA,GACA,IAAA7E,EAAA+E,oCAAA5D,EAAAwD,EAAAE,EAAA,IACA,QAEAZ,IAAA,IAIAA,GAAAvD,EAAAsE,OAAA7D,EAAAM,gBACA2C,GACAA,EAAAjD,EAAAM,gBAEA,MAAAN,EAAAO,SACAyC,EAAAnF,KAAAoB,SAAA6E,QAAA9D,EAAAO,QACAuC,GAAAvD,EAAAsE,OAAAb,EAAAM,GACAA,EAAAN,EAGAF,GAAAvD,EAAAsE,OAAA7D,EAAAU,aAAA,EACA0C,GACAA,EAAApD,EAAAU,aAAA,EAEAoC,GAAAvD,EAAAsE,OAAA7D,EAAAW,eACAwC,GACAA,EAAAnD,EAAAW,eAEA,MAAAX,EAAAY,OACAmC,EAAAlF,KAAAsB,OAAA2E,QAAA9D,EAAAY,MACAkC,GAAAvD,EAAAsE,OAAAd,EAAAM,GACAA,EAAAN,IAIAQ,GAAAT,EAGA,MAAAS,IAGA/E,EAAAgB,UAAAuE,wBACA,SAAAC,EAAAC,GACA,MAAAD,GAAAE,IAAA,SAAA3D,GACA,IAAA1C,KAAAyB,iBACA,WAEA,OAAA2E,IACA1D,EAAA1B,EAAA2B,SAAAyD,EAAA1D,GAEA,IAAA4D,GAAAtF,EAAA+C,YAAArB,EACA,OAAAmB,QAAAlC,UAAA4E,eAAAhG,KAAAP,KAAAyB,iBAAA6E,GACAtG,KAAAyB,iBAAA6E,GACA,MACKtG,OAMLW,EAAAgB,UAAA6E,OACA,WACA,GAAAH,IACAI,QAAAzG,KAAA4B,SACAqB,QAAAjD,KAAAoB,SAAAwE,UACAc,MAAA1G,KAAAsB,OAAAsE,UACAD,SAAA3F,KAAAgF,qBAYA,OAVA,OAAAhF,KAAAe,QACAsF,EAAApE,KAAAjC,KAAAe,OAEA,MAAAf,KAAAkB,cACAmF,EAAAtE,WAAA/B,KAAAkB,aAEAlB,KAAAyB,mBACA4E,EAAAM,eAAA3G,KAAAkG,wBAAAG,EAAApD,QAAAoD,EAAAtE,aAGAsE,GAMA1F,EAAAgB,UAAAiF,SACA,WACA,MAAA9B,MAAAC,UAAA/E,KAAAwG,WAGA5G,EAAAe,sBH2EM,SAASd,EAAQD,EAASM,GI1ZhC,QAAA2G,GAAAC,GACA,SAAAA,IACAA,GAAA,MACAA,GAAA,KASA,QAAAC,GAAAD,GACA,GAAAE,GAAA,OAAAF,GACAG,EAAAH,GAAA,CACA,OAAAE,IACAC,EACAA,EAhDA,GAAAC,GAAAhH,EAAA,GAcAiH,EAAA,EAGAC,EAAA,GAAAD,EAGAE,EAAAD,EAAA,EAGAE,EAAAF,CA+BAxH,GAAAoG,OAAA,SAAAc,GACA,GACAS,GADAC,EAAA,GAGAC,EAAAZ,EAAAC,EAEA,GACAS,GAAAE,EAAAJ,EACAI,KAAAN,EACAM,EAAA,IAGAF,GAAAD,GAEAE,GAAAN,EAAAlB,OAAAuB,SACGE,EAAA,EAEH,OAAAD,IAOA5H,EAAA8H,OAAA,SAAAC,EAAAC,EAAAC,GACA,GAGAC,GAAAP,EAH
AQ,EAAAJ,EAAA1D,OACAyB,EAAA,EACAsC,EAAA,CAGA,IACA,GAAAJ,GAAAG,EACA,SAAA3D,OAAA,6CAIA,IADAmD,EAAAL,EAAAQ,OAAAC,EAAAM,WAAAL,MACA,KAAAL,EACA,SAAAnD,OAAA,yBAAAuD,EAAAO,OAAAN,EAAA,GAGAE,MAAAP,EAAAD,GACAC,GAAAF,EACA3B,GAAA6B,GAAAS,EACAA,GAAAb,QACGW,EAEHD,GAAAM,MAAApB,EAAArB,GACAmC,EAAAO,KAAAR,IJseM,SAAS/H,EAAQD,GKzmBvB,GAAAyI,GAAA,mEAAAC,MAAA,GAKA1I,GAAAoG,OAAA,SAAAuC,GACA,GAAAA,GAAA,GAAAA,EAAAF,EAAApE,OACA,MAAAoE,GAAAE,EAEA,UAAAC,WAAA,6BAAAD,IAOA3I,EAAA8H,OAAA,SAAAe,GACA,GAAAC,GAAA,GACAC,EAAA,GAEAC,EAAA,GACAC,EAAA,IAEAC,EAAA,GACAC,EAAA,GAEAC,EAAA,GACAC,EAAA,GAEAC,EAAA,GACAC,EAAA,EAGA,OAAAV,IAAAC,GAAAC,GAAAF,EACAA,EAAAC,EAIAD,GAAAG,GAAAC,GAAAJ,EACAA,EAAAG,EAAAM,EAIAT,GAAAK,GAAAC,GAAAN,EACAA,EAAAK,EAAAK,EAIAV,GAAAO,EACA,GAIAP,GAAAQ,EACA,GAIA,KLwnBM,SAASpJ,EAAQD,GMxqBvB,QAAAqB,GAAAH,EAAA+D,EAAAuE,GACA,GAAAvE,IAAA/D,GACA,MAAAA,GAAA+D,EACG,QAAAwE,UAAApF,OACH,MAAAmF,EAEA,UAAAhF,OAAA,IAAAS,EAAA,6BAQA,QAAAyE,GAAAC,GACA,GAAAC,GAAAD,EAAAC,MAAAC,EACA,OAAAD,IAIAE,OAAAF,EAAA,GACAG,KAAAH,EAAA,GACAI,KAAAJ,EAAA,GACAK,KAAAL,EAAA,GACAM,KAAAN,EAAA,IAPA,KAYA,QAAAO,GAAAC,GACA,GAAAC,GAAA,EAiBA,OAhBAD,GAAAN,SACAO,GAAAD,EAAAN,OAAA,KAEAO,GAAA,KACAD,EAAAL,OACAM,GAAAD,EAAAL,KAAA,KAEAK,EAAAJ,OACAK,GAAAD,EAAAJ,MAEAI,EAAAH,OACAI,GAAA,IAAAD,EAAAH,MAEAG,EAAAF,OACAG,GAAAD,EAAAF,MAEAG,EAeA,QAAAC,GAAAC,GACA,GAAAL,GAAAK,EACAF,EAAAX,EAAAa,EACA,IAAAF,EAAA,CACA,IAAAA,EAAAH,KACA,MAAAK,EAEAL,GAAAG,EAAAH,KAKA,OAAAM,GAHAC,EAAAzK,EAAAyK,WAAAP,GAEAQ,EAAAR,EAAAxB,MAAA,OACAiC,EAAA,EAAA1E,EAAAyE,EAAArG,OAAA,EAA8C4B,GAAA,EAAQA,IACtDuE,EAAAE,EAAAzE,GACA,MAAAuE,EACAE,EAAAE,OAAA3E,EAAA,GACK,OAAAuE,EACLG,IACKA,EAAA,IACL,KAAAH,GAIAE,EAAAE,OAAA3E,EAAA,EAAA0E,GACAA,EAAA,IAEAD,EAAAE,OAAA3E,EAAA,GACA0E,KAUA,OANAT,GAAAQ,EAAA7F,KAAA,KAEA,KAAAqF,IACAA,EAAAO,EAAA,SAGAJ,GACAA,EAAAH,OACAC,EAAAE,IAEAH,EAoBA,QAAArF,GAAAgG,EAAAN,GACA,KAAAM,IACAA,EAAA,KAEA,KAAAN,IACAA,EAAA,IAEA,IAAAO,GAAApB,EAAAa,GACAQ,EAAArB,EAAAmB,EAMA,IALAE,IACAF,EAAAE,EAAAb,MAAA,KAIAY,MAAAhB,OAIA,MAHAiB,KACAD,EAAAhB,OAAAiB,EAAAjB,QAEAK,EAAAW,EAGA,IAAAA,GAAAP,EAAAX,MAAAoB,GACA,MAAAT,EAIA,IAAAQ,MAAAf,OAAAe,EAAAb,KAEA,MADAa,GAAAf,KAAAO,EACAJ,EAAAY,EAGA,IAAAE,GAAA,MAAAV,EAAAjC,OAAA,GACAiC,EACAD,EAAAO,EAAAK,QAAA,eAAAX,EAEA,OAAAQ,IACAA,EAAAb,KAAAe,EACAd,EAAAY,IAEAE,EAcA,QAAAlI,GAAA8H,EAAAN,GACA,KAAAM,IACAA,EAAA,KAGAA,IAAAK,QAAA,SAOA,KADA,GAAAC,GAAA,EACA,IAAAZ,EAAAlE,QAAAwE,EAAA,OACA,GAAAO,GAAAP,EAAAQ,YAAA,IACA,MAAAD,EACA,MAAAb,EAOA,IADAM,IAAAS,MAAA,EAAAF,GACAP,EAAAjB,MAAA,qBACA,MAAAW,KAGAY,EAIA,MAAAI,OAAAJ,EAAA,GAAAtG,KAAA,OAAA0F,EAAAiB,OAAAX,EAAAxG,OAAA,GASA,QAAAoH,GAAAC,GACA,MAAAA,GAYA,QAAAvH,GAAA4D,GACA,MAAA4D,GAAA5D,GACA,IAAAA,EAGAA,EAIA,QAAA6D,GAAA7D,GACA,MAAA4D,GAAA5D,GACAA,EAAAuD,MAAA,GAGAvD,EAIA,QAAA4D,GAAAD,GACA,IAAAA,EACA,QAGA,IAAArH,GAAAqH,EAAArH,MAEA,MAAAA,EACA,QAGA,SAAAqH,EAAArD,WAAAhE,EAAA,IACA,KAAAqH,EAAArD,WAAAhE,EAAA,IACA,MAAAqH,EAAArD,WAAAhE,EAAA,IACA,MAAAqH,EAAArD,WAAAhE,EAAA,IACA,MAAAqH,EAAArD,WAAAhE,EAAA,IACA,MAAAqH,EAAArD,WAAAhE,EAAA,IACA,MAAAqH,EAAArD,WAAAhE,EAAA,IACA,KAAAqH,EAAArD,WAAAhE,EAAA,IACA,KAAAqH,EAAArD,WAAAhE,EAAA,GACA,QAGA,QAAA4B,GAAA5B,EAAA,GAA2B4B,GAAA,EAAQA,IACnC,QAAAyF,EAAArD,WAAApC,GACA,QAIA,UAWA,QAAA4F,GAAAC,EAAAC,EAAAC,GACA,GAAAC,GAAAH,EAAAhJ,OAAAiJ,EAAAjJ,MACA,YAAAmJ,EACAA,GAGAA,EAAAH,EAAA7I,aAAA8I,EAAA9I,aACA,IAAAgJ,EACAA,GAGAA,EAAAH,EAAA5I,eAAA6I,EAAA7I,eACA,IAAA+I,GAAAD,EACAC,GAGAA,EAAAH,EAAAjJ,gBAAAkJ,EAAAlJ,gBACA,IAAAoJ,EACAA,GAGAA,EAAAH,EAAAnJ,cAAAoJ,EAAApJ,cACA,IAAAsJ,EACAA,EAGAH,EAAA3I,KAAA4I,EAAA5I,SAaA,QAAA+I,GAAAJ,EAAAC,EAAAI,GACA,GAAAF,GAAAH,EAAAnJ,cAAAoJ,EAAApJ,aACA,YAAAsJ,EACAA,GAGAA,EAAAH,EAAAjJ,gBAAAkJ,EAAAlJ,gBACA,IAAAoJ,GAAAE,EACAF,GAGAA,
EAAAH,EAAAhJ,OAAAiJ,EAAAjJ,OACA,IAAAmJ,EACAA,GAGAA,EAAAH,EAAA7I,aAAA8I,EAAA9I,aACA,IAAAgJ,EACAA,GAGAA,EAAAH,EAAA5I,eAAA6I,EAAA7I,eACA,IAAA+I,EACAA,EAGAH,EAAA3I,KAAA4I,EAAA5I,SAIA,QAAAiJ,GAAAC,EAAAC,GACA,MAAAD,KAAAC,EACA,EAGAD,EAAAC,EACA,EAGA,GAOA,QAAAnG,GAAA2F,EAAAC,GACA,GAAAE,GAAAH,EAAAnJ,cAAAoJ,EAAApJ,aACA,YAAAsJ,EACAA,GAGAA,EAAAH,EAAAjJ,gBAAAkJ,EAAAlJ,gBACA,IAAAoJ,EACAA,GAGAA,EAAAG,EAAAN,EAAAhJ,OAAAiJ,EAAAjJ,QACA,IAAAmJ,EACAA,GAGAA,EAAAH,EAAA7I,aAAA8I,EAAA9I,aACA,IAAAgJ,EACAA,GAGAA,EAAAH,EAAA5I,eAAA6I,EAAA7I,eACA,IAAA+I,EACAA,EAGAG,EAAAN,EAAA3I,KAAA4I,EAAA5I,UApYAnD,EAAAqB,QAEA,IAAAwI,GAAA,iEACAmB,EAAA,eAeAhL,GAAA0J,WAsBA1J,EAAAmK,cAwDAnK,EAAAsK,YA2DAtK,EAAA6E,OAEA7E,EAAAyK,WAAA,SAAAF,GACA,YAAAA,EAAAjC,OAAA,MAAAiC,EAAAX,MAAAC,IAyCA7J,EAAA+C,UAEA,IAAAwJ,GAAA,WACA,GAAAC,GAAAvI,OAAAC,OAAA,KACA,sBAAAsI,MAuBAxM,GAAAmE,YAAAoI,EAAAd,EAAAtH,EASAnE,EAAA4L,cAAAW,EAAAd,EAAAG,EAsEA5L,EAAA6L,6BAuCA7L,EAAAkM,sCA8CAlM,EAAAmG,uCNgsBM,SAASlG,EAAQD,EAASM,GOhlChC,QAAAmB,KACArB,KAAAqM,UACArM,KAAAsM,KAAAzI,OAAAC,OAAA,MAXA,GAAA9C,GAAAd,EAAA,GACAuD,EAAAI,OAAAlC,UAAA4E,cAgBAlF,GAAAkL,UAAA,SAAAC,EAAAC,GAEA,OADAC,GAAA,GAAArL,GACAwE,EAAA,EAAAC,EAAA0G,EAAAvI,OAAsC6B,EAAAD,EAASA,IAC/C6G,EAAAhJ,IAAA8I,EAAA3G,GAAA4G,EAEA,OAAAC,IASArL,EAAAM,UAAAgL,KAAA,WACA,MAAA9I,QAAA+I,oBAAA5M,KAAAsM,MAAArI,QAQA5C,EAAAM,UAAA+B,IAAA,SAAAiE,EAAA8E,GACA,GAAAI,GAAA7L,EAAA+C,YAAA4D,GACAmF,EAAArJ,EAAAlD,KAAAP,KAAAsM,KAAAO,GACAE,EAAA/M,KAAAqM,OAAApI,SACA6I,GAAAL,IACAzM,KAAAqM,OAAAW,KAAArF,GAEAmF,IACA9M,KAAAsM,KAAAO,GAAAE,IASA1L,EAAAM,UAAA8B,IAAA,SAAAkE,GACA,GAAAkF,GAAA7L,EAAA+C,YAAA4D,EACA,OAAAlE,GAAAlD,KAAAP,KAAAsM,KAAAO,IAQAxL,EAAAM,UAAAsE,QAAA,SAAA0B,GACA,GAAAkF,GAAA7L,EAAA+C,YAAA4D,EACA,IAAAlE,EAAAlD,KAAAP,KAAAsM,KAAAO,GACA,MAAA7M,MAAAsM,KAAAO,EAEA,UAAAzI,OAAA,IAAAuD,EAAA,yBAQAtG,EAAAM,UAAAsL,GAAA,SAAAC,GACA,GAAAA,GAAA,GAAAA,EAAAlN,KAAAqM,OAAApI,OACA,MAAAjE,MAAAqM,OAAAa,EAEA,UAAA9I,OAAA,yBAAA8I,IAQA7L,EAAAM,UAAAiE,QAAA,WACA,MAAA5F,MAAAqM,OAAAnB,SAGAtL,EAAAyB,YPumCM,SAASxB,EAAQD,EAASM,GQjsChC,QAAAiN,GAAAzB,EAAAC,GAEA,GAAAyB,GAAA1B,EAAAnJ,cACA8K,EAAA1B,EAAApJ,cACA+K,EAAA5B,EAAAjJ,gBACA8K,EAAA5B,EAAAlJ,eACA,OAAA4K,GAAAD,GAAAC,GAAAD,GAAAG,GAAAD,GACAtM,EAAA+E,oCAAA2F,EAAAC,IAAA,EAQA,QAAAnK,KACAxB,KAAAqM,UACArM,KAAAwN,SAAA,EAEAxN,KAAAyN,OAAgBlL,cAAA,GAAAE,gBAAA,GAzBhB,GAAAzB,GAAAd,EAAA,EAkCAsB,GAAAG,UAAA4C,gBACA,SAAAmJ,EAAAC,GACA3N,KAAAqM,OAAAnJ,QAAAwK,EAAAC,IAQAnM,EAAAG,UAAA+B,IAAA,SAAAkK,GACAT,EAAAnN,KAAAyN,MAAAG,IACA5N,KAAAyN,MAAAG,EACA5N,KAAAqM,OAAAW,KAAAY,KAEA5N,KAAAwN,SAAA,EACAxN,KAAAqM,OAAAW,KAAAY,KAaApM,EAAAG,UAAAiE,QAAA,WAKA,MAJA5F,MAAAwN,UACAxN,KAAAqM,OAAAwB,KAAA7M,EAAA+E,qCACA/F,KAAAwN,SAAA,GAEAxN,KAAAqM,QAGAzM,EAAA4B,eRqtCM,SAAS3B,EAAQD,EAASM,GStxChC,QAAAU,GAAAkN,GACA,GAAAC,GAAAD,CAKA,OAJA,gBAAAA,KACAC,EAAAjJ,KAAAkJ,MAAAF,EAAAhD,QAAA,WAAsD,MAGtD,MAAAiD,EAAAE,SACA,GAAAC,GAAAH,GACA,GAAAI,GAAAJ,GAoQA,QAAAI,GAAAL,GACA,GAAAC,GAAAD,CACA,iBAAAA,KACAC,EAAAjJ,KAAAkJ,MAAAF,EAAAhD,QAAA,WAAsD,KAGtD,IAAArE,GAAAzF,EAAAC,OAAA8M,EAAA,WACA9K,EAAAjC,EAAAC,OAAA8M,EAAA,WAGArH,EAAA1F,EAAAC,OAAA8M,EAAA,YACAhM,EAAAf,EAAAC,OAAA8M,EAAA,mBACApH,EAAA3F,EAAAC,OAAA8M,EAAA,uBACApI,EAAA3E,EAAAC,OAAA8M,EAAA,YACA9L,EAAAjB,EAAAC,OAAA8M,EAAA,YAIA,IAAAtH,GAAAzG,KAAA4B,SACA,SAAAwC,OAAA,wBAAAqC,EAGAxD,KACAoD,IAAA7C,QAIA6C,IAAArF,EAAAkJ,WAKA7D,IAAA,SAAA3D,GACA,MAAAX,IAAAf,EAAAqJ,WAAAtI,IAAAf,EAAAqJ,WAAA3H,GACA1B,EAAA2B,SAAAZ,EAAAW,GACAA,IAOA1C,KAAAsB,OAAAD,EAAAkL,UAAA7F,EAAAL,IAAA7C,SAAA,GACAxD,KAAAoB,SAAAC,EAAAkL,UAAAtJ,GAAA,GAEAjD,KAAA+B,aACA/B,KAAA2G,iBACA3G,KAAAuB,UAAAoE,EACA3F,KAAAiC,OA8EA,QAAAmM,KACApO,KAAAuC,cAAA,EACAvC,KAAAyC,gBAAA,EACAzC,KAAA0C,OAAA,KACA1C,KAA
A6C,aAAA,KACA7C,KAAA8C,eAAA,KACA9C,KAAA+C,KAAA,KAyZA,QAAAmL,GAAAJ,GACA,GAAAC,GAAAD,CACA,iBAAAA,KACAC,EAAAjJ,KAAAkJ,MAAAF,EAAAhD,QAAA,WAAsD,KAGtD,IAAArE,GAAAzF,EAAAC,OAAA8M,EAAA,WACAE,EAAAjN,EAAAC,OAAA8M,EAAA,WAEA,IAAAtH,GAAAzG,KAAA4B,SACA,SAAAwC,OAAA,wBAAAqC,EAGAzG,MAAAoB,SAAA,GAAAC,GACArB,KAAAsB,OAAA,GAAAD,EAEA,IAAAgN,IACA/L,KAAA,GACAE,OAAA,EAEAxC,MAAAsO,UAAAL,EAAA5H,IAAA,SAAAiF,GACA,GAAAA,EAAArB,IAGA,SAAA7F,OAAA,qDAEA,IAAAmK,GAAAvN,EAAAC,OAAAqK,EAAA,UACAkD,EAAAxN,EAAAC,OAAAsN,EAAA,QACAE,EAAAzN,EAAAC,OAAAsN,EAAA,SAEA,IAAAC,EAAAH,EAAA/L,MACAkM,IAAAH,EAAA/L,MAAAmM,EAAAJ,EAAA7L,OACA,SAAA4B,OAAA,uDAIA,OAFAiK,GAAAE,GAGAG,iBAGAnM,cAAAiM,EAAA,EACA/L,gBAAAgM,EAAA,GAEAE,SAAA,GAAA/N,GAAAI,EAAAC,OAAAqK,EAAA,WA11BA,GAAAtK,GAAAd,EAAA,GACA0O,EAAA1O,EAAA,GACAmB,EAAAnB,EAAA,GAAAmB,SACAK,EAAAxB,EAAA,GACA2O,EAAA3O,EAAA,GAAA2O,SAaAjO,GAAAiB,cAAA,SAAAiM,GACA,MAAAK,GAAAtM,cAAAiM,IAMAlN,EAAAe,UAAAC,SAAA,EAgCAhB,EAAAe,UAAAmN,oBAAA,KACAjL,OAAAkL,eAAAnO,EAAAe,UAAA,sBACAqN,IAAA,WAKA,MAJAhP,MAAA8O,qBACA9O,KAAAiP,eAAAjP,KAAAuB,UAAAvB,KAAA+B,YAGA/B,KAAA8O,uBAIAlO,EAAAe,UAAAuN,mBAAA,KACArL,OAAAkL,eAAAnO,EAAAe,UAAA,qBACAqN,IAAA,WAKA,MAJAhP,MAAAkP,oBACAlP,KAAAiP,eAAAjP,KAAAuB,UAAAvB,KAAA+B,YAGA/B,KAAAkP,sBAIAtO,EAAAe,UAAAwN,wBACA,SAAAxH,EAAAqD,GACA,GAAAvK,GAAAkH,EAAAO,OAAA8C,EACA,aAAAvK,GAAmB,MAAAA,GAQnBG,EAAAe,UAAAsN,eACA,SAAAtH,EAAAvB,GACA,SAAAhC,OAAA,6CAGAxD,EAAAwO,gBAAA,EACAxO,EAAAyO,eAAA,EAEAzO,EAAA0O,qBAAA,EACA1O,EAAA2O,kBAAA,EAkBA3O,EAAAe,UAAAO,YACA,SAAAwL,EAAA8B,EAAAC,GACA,GAGA9J,GAHA+J,EAAAF,GAAA,KACAG,EAAAF,GAAA7O,EAAAwO,eAGA,QAAAO,GACA,IAAA/O,GAAAwO,gBACAzJ,EAAA3F,KAAA4P,kBACA,MACA,KAAAhP,GAAAyO,eACA1J,EAAA3F,KAAA6P,iBACA,MACA,SACA,SAAAzL,OAAA,+BAGA,GAAArC,GAAA/B,KAAA+B,UACA4D,GAAAU,IAAA,SAAAlE,GACA,GAAAO,GAAA,OAAAP,EAAAO,OAAA,KAAA1C,KAAAoB,SAAA6L,GAAA9K,EAAAO,OAIA,OAHA,OAAAA,GAAA,MAAAX,IACAW,EAAA1B,EAAAyD,KAAA1C,EAAAW,KAGAA,SACAH,cAAAJ,EAAAI,cACAE,gBAAAN,EAAAM,gBACAI,aAAAV,EAAAU,aACAC,eAAAX,EAAAW,eACAC,KAAA,OAAAZ,EAAAY,KAAA,KAAA/C,KAAAsB,OAAA2L,GAAA9K,EAAAY,QAEK/C,MAAAkD,QAAAwK,EAAAgC,IAsBL9O,EAAAe,UAAAmO,yBACA,SAAAhP,GACA,GAAAwB,GAAAtB,EAAAC,OAAAH,EAAA,QAMAiP,GACArN,OAAA1B,EAAAC,OAAAH,EAAA,UACA+B,aAAAP,EACAQ,eAAA9B,EAAAC,OAAAH,EAAA,YAMA,IAHA,MAAAd,KAAA+B,aACAgO,EAAArN,OAAA1B,EAAA2B,SAAA3C,KAAA+B,WAAAgO,EAAArN,UAEA1C,KAAAoB,SAAAqC,IAAAsM,EAAArN,QACA,QAEAqN,GAAArN,OAAA1C,KAAAoB,SAAA6E,QAAA8J,EAAArN,OAEA,IAAAiD,MAEAqF,EAAAhL,KAAAgQ,aAAAD,EACA/P,KAAA6P,kBACA,eACA,iBACA7O,EAAAyK,2BACAmD,EAAAW,kBACA,IAAAvE,GAAA,GACA,GAAA7I,GAAAnC,KAAA6P,kBAAA7E,EAEA,IAAAiF,SAAAnP,EAAA0B,OAOA,IANA,GAAAK,GAAAV,EAAAU,aAMAV,KAAAU,kBACA8C,EAAAqH,MACA1K,KAAAtB,EAAAC,OAAAkB,EAAA,sBACAK,OAAAxB,EAAAC,OAAAkB,EAAA,wBACA+N,WAAAlP,EAAAC,OAAAkB,EAAA,8BAGAA,EAAAnC,KAAA6P,oBAAA7E,OASA,KANA,GAAAlI,GAAAX,EAAAW,eAMAX,GACAA,EAAAU,eAAAP,GACAH,EAAAW,mBACA6C,EAAAqH,MACA1K,KAAAtB,EAAAC,OAAAkB,EAAA,sBACAK,OAAAxB,EAAAC,OAAAkB,EAAA,wBACA+N,WAAAlP,EAAAC,OAAAkB,EAAA,8BAGAA,EAAAnC,KAAA6P,oBAAA7E,GAKA,MAAArF,IAGA/F,EAAAgB,oBAmFAuN,EAAAxM,UAAAkC,OAAAC,OAAAlD,EAAAe,WACAwM,EAAAxM,UAAAgN,SAAA/N,EASAuN,EAAAtM,cACA,SAAAiM,GACA,GAAAqC,GAAAtM,OAAAC,OAAAqK,EAAAxM,WAEA+E,EAAAyJ,EAAA7O,OAAAD,EAAAkL,UAAAuB,EAAAxM,OAAAsE,WAAA,GACA3C,EAAAkN,EAAA/O,SAAAC,EAAAkL,UAAAuB,EAAA1M,SAAAwE,WAAA,EACAuK,GAAApO,WAAA+L,EAAA5M,YACAiP,EAAAxJ,eAAAmH,EAAA5H,wBAAAiK,EAAA/O,SAAAwE,UACAuK,EAAApO,YACAoO,EAAAlO,KAAA6L,EAAA/M,KAWA,QAJAqP,GAAAtC,EAAAvM,UAAAqE,UAAAsF,QACAmF,EAAAF,EAAArB,uBACAwB,EAAAH,EAAAjB,sBAEArJ,EAAA,EAAA5B,EAAAmM,EAAAnM,OAAsDA,EAAA4B,EAAYA,IAAA,CAClE,GAAA0K,GAAAH,EAAAvK,GACA2K,EAAA,GAAApC,EACAoC,GAAAjO,cAAAgO,EAAAhO,cACAiO,EAAA/N,gBAAA8N,EAAA9N,gBAEA8N,EAAA7N,SACA8N,EAAA9N,OAAAO
,EAAAgD,QAAAsK,EAAA7N,QACA8N,EAAA3N,aAAA0N,EAAA1N,aACA2N,EAAA1N,eAAAyN,EAAAzN,eAEAyN,EAAAxN,OACAyN,EAAAzN,KAAA2D,EAAAT,QAAAsK,EAAAxN,OAGAuN,EAAAtD,KAAAwD,IAGAH,EAAArD,KAAAwD,GAKA,MAFA3B,GAAAsB,EAAAjB,mBAAAlO,EAAAyK,4BAEA0E,GAMAhC,EAAAxM,UAAAC,SAAA,EAKAiC,OAAAkL,eAAAZ,EAAAxM,UAAA,WACAqN,IAAA,WACA,MAAAhP,MAAAoB,SAAAwE,UAAAS,IAAA,SAAAiF,GACA,aAAAtL,KAAA+B,WAAAf,EAAAyD,KAAAzE,KAAA+B,WAAAuJ,MACKtL,SAqBLmO,EAAAxM,UAAAsN,eACA,SAAAtH,EAAAvB,GAeA,IAdA,GAYAjE,GAAAsO,EAAAC,EAAAC,EAAAxI,EAZA5F,EAAA,EACA6C,EAAA,EACAG,EAAA,EACAD,EAAA,EACAG,EAAA,EACAD,EAAA,EACAvB,EAAA0D,EAAA1D,OACA+G,EAAA,EACA4F,KACAC,KACAC,KACAV,KAGAnM,EAAA+G,GACA,SAAArD,EAAAO,OAAA8C,GACAzI,IACAyI,IACA5F,EAAA,MAEA,UAAAuC,EAAAO,OAAA8C,GACAA,QAEA,CASA,IARA7I,EAAA,GAAAiM,GACAjM,EAAAI,gBAOAoO,EAAA3F,EAAyB/G,EAAA0M,IACzB3Q,KAAAmP,wBAAAxH,EAAAgJ,GADuCA,KAQvC,GAHAF,EAAA9I,EAAAuD,MAAAF,EAAA2F,GAEAD,EAAAE,EAAAH,GAEAzF,GAAAyF,EAAAxM,WACS,CAET,IADAyM,KACAC,EAAA3F,GACAtJ,EAAAgG,OAAAC,EAAAqD,EAAA6F,GACA1I,EAAA0I,EAAA1I,MACA6C,EAAA6F,EAAAzI,KACAsI,EAAA1D,KAAA7E,EAGA,QAAAuI,EAAAzM,OACA,SAAAG,OAAA,yCAGA,QAAAsM,EAAAzM,OACA,SAAAG,OAAA,yCAGAwM,GAAAH,GAAAC,EAIAvO,EAAAM,gBAAA2C,EAAAsL,EAAA,GACAtL,EAAAjD,EAAAM,gBAEAiO,EAAAzM,OAAA,IAEA9B,EAAAO,OAAA+C,EAAAiL,EAAA,GACAjL,GAAAiL,EAAA,GAGAvO,EAAAU,aAAA0C,EAAAmL,EAAA,GACAnL,EAAApD,EAAAU,aAEAV,EAAAU,cAAA,EAGAV,EAAAW,eAAAwC,EAAAoL,EAAA,GACApL,EAAAnD,EAAAW,eAEA4N,EAAAzM,OAAA,IAEA9B,EAAAY,KAAAyC,EAAAkL,EAAA,GACAlL,GAAAkL,EAAA,KAIAN,EAAApD,KAAA7K,GACA,gBAAAA,GAAAU,cACAiO,EAAA9D,KAAA7K,GAKA0M,EAAAuB,EAAApP,EAAA8K,qCACA9L,KAAA8O,oBAAAsB,EAEAvB,EAAAiC,EAAA9P,EAAAyK,4BACAzL,KAAAkP,mBAAA4B,GAOA3C,EAAAxM,UAAAqO,aACA,SAAAe,EAAAC,EAAAC,EACAC,EAAAC,EAAAC,GAMA,GAAAL,EAAAE,IAAA,EACA,SAAAzI,WAAA,gDACAuI,EAAAE,GAEA,IAAAF,EAAAG,GAAA,EACA,SAAA1I,WAAA,kDACAuI,EAAAG,GAGA,OAAAtC,GAAAyC,OAAAN,EAAAC,EAAAG,EAAAC,IAOAjD,EAAAxM,UAAA2P,mBACA,WACA,OAAAtG,GAAA,EAAuBA,EAAAhL,KAAA4P,mBAAA3L,SAAwC+G,EAAA,CAC/D,GAAA7I,GAAAnC,KAAA4P,mBAAA5E,EAMA,IAAAA,EAAA,EAAAhL,KAAA4P,mBAAA3L,OAAA,CACA,GAAAsN,GAAAvR,KAAA4P,mBAAA5E,EAAA,EAEA,IAAA7I,EAAAI,gBAAAgP,EAAAhP,cAAA,CACAJ,EAAAqP,oBAAAD,EAAA9O,gBAAA,CACA,WAKAN,EAAAqP,oBAAAC,MAwBAtD,EAAAxM,UAAA6C,oBACA,SAAA1D,GACA,GAAAiP,IACAxN,cAAAvB,EAAAC,OAAAH,EAAA,QACA2B,gBAAAzB,EAAAC,OAAAH,EAAA,WAGAkK,EAAAhL,KAAAgQ,aACAD,EACA/P,KAAA4P,mBACA,gBACA,kBACA5O,EAAA8K,oCACA9K,EAAAC,OAAAH,EAAA,OAAAF,EAAA0O,sBAGA,IAAAtE,GAAA,GACA,GAAA7I,GAAAnC,KAAA4P,mBAAA5E,EAEA,IAAA7I,EAAAI,gBAAAwN,EAAAxN,cAAA,CACA,GAAAG,GAAA1B,EAAAC,OAAAkB,EAAA,cACA,QAAAO,IACAA,EAAA1C,KAAAoB,SAAA6L,GAAAvK,GACA,MAAA1C,KAAA+B,aACAW,EAAA1B,EAAAyD,KAAAzE,KAAA+B,WAAAW,IAGA,IAAAK,GAAA/B,EAAAC,OAAAkB,EAAA,YAIA,OAHA,QAAAY,IACAA,EAAA/C,KAAAsB,OAAA2L,GAAAlK,KAGAL,SACAJ,KAAAtB,EAAAC,OAAAkB,EAAA,qBACAK,OAAAxB,EAAAC,OAAAkB,EAAA,uBACAY,SAKA,OACAL,OAAA,KACAJ,KAAA,KACAE,OAAA,KACAO,KAAA,OAQAoL,EAAAxM,UAAA+P,wBACA,WACA,MAAA1R,MAAA2G,eAGA3G,KAAA2G,eAAA1C,QAAAjE,KAAAoB,SAAAuL,SACA3M,KAAA2G,eAAAgL,KAAA,SAAAC,GAA+C,aAAAA,KAH/C,GAWAzD,EAAAxM,UAAA0B,iBACA,SAAAuB,EAAAiN,GACA,IAAA7R,KAAA2G,eACA,WAOA,IAJA,MAAA3G,KAAA+B,aACA6C,EAAA5D,EAAA2B,SAAA3C,KAAA+B,WAAA6C,IAGA5E,KAAAoB,SAAAqC,IAAAmB,GACA,MAAA5E,MAAA2G,eAAA3G,KAAAoB,SAAA6E,QAAArB,GAGA,IAAAqF,EACA,UAAAjK,KAAA+B,aACAkI,EAAAjJ,EAAAsI,SAAAtJ,KAAA+B,aAAA,CAKA,GAAA+P,GAAAlN,EAAAkG,QAAA,gBACA,YAAAb,EAAAP,QACA1J,KAAAoB,SAAAqC,IAAAqO,GACA,MAAA9R,MAAA2G,eAAA3G,KAAAoB,SAAA6E,QAAA6L,GAGA,MAAA7H,EAAAH,MAAA,KAAAG,EAAAH,OACA9J,KAAAoB,SAAAqC,IAAA,IAAAmB,GACA,MAAA5E,MAAA2G,eAAA3G,KAAAoB,SAAA6E,QAAA,IAAArB,IAQA,GAAAiN,EACA,WAGA,UAAAzN,OAAA,IAAAQ,EAAA,+BAuBAuJ,EAAAxM,UAAAoQ,qBACA,SAAAjR,GACA,GAAA4B,GAAA1B,EAAAC,OAAAH,EAAA,SAIA,IAHA,MAAAd,KAAA+B,aACAW,E
AAA1B,EAAA2B,SAAA3C,KAAA+B,WAAAW,KAEA1C,KAAAoB,SAAAqC,IAAAf,GACA,OACAJ,KAAA,KACAE,OAAA,KACA0N,WAAA,KAGAxN,GAAA1C,KAAAoB,SAAA6E,QAAAvD,EAEA,IAAAqN,IACArN,SACAG,aAAA7B,EAAAC,OAAAH,EAAA,QACAgC,eAAA9B,EAAAC,OAAAH,EAAA,WAGAkK,EAAAhL,KAAAgQ,aACAD,EACA/P,KAAA6P,kBACA,eACA,iBACA7O,EAAAyK,2BACAzK,EAAAC,OAAAH,EAAA,OAAAF,EAAA0O,sBAGA,IAAAtE,GAAA,GACA,GAAA7I,GAAAnC,KAAA6P,kBAAA7E,EAEA,IAAA7I,EAAAO,SAAAqN,EAAArN,OACA,OACAJ,KAAAtB,EAAAC,OAAAkB,EAAA,sBACAK,OAAAxB,EAAAC,OAAAkB,EAAA,wBACA+N,WAAAlP,EAAAC,OAAAkB,EAAA,6BAKA,OACAG,KAAA,KACAE,OAAA,KACA0N,WAAA,OAIAtQ,EAAAuO,yBA+FAD,EAAAvM,UAAAkC,OAAAC,OAAAlD,EAAAe,WACAuM,EAAAvM,UAAAqQ,YAAApR,EAKAsN,EAAAvM,UAAAC,SAAA,EAKAiC,OAAAkL,eAAAb,EAAAvM,UAAA,WACAqN,IAAA,WAEA,OADA/L,MACA4C,EAAA,EAAmBA,EAAA7F,KAAAsO,UAAArK,OAA2B4B,IAC9C,OAAAoM,GAAA,EAAqBA,EAAAjS,KAAAsO,UAAAzI,GAAA8I,SAAA1L,QAAAgB,OAA+CgO,IACpEhP,EAAA+J,KAAAhN,KAAAsO,UAAAzI,GAAA8I,SAAA1L,QAAAgP,GAGA,OAAAhP,MAmBAiL,EAAAvM,UAAA6C,oBACA,SAAA1D,GACA,GAAAiP,IACAxN,cAAAvB,EAAAC,OAAAH,EAAA,QACA2B,gBAAAzB,EAAAC,OAAAH,EAAA,WAKAoR,EAAAtD,EAAAyC,OAAAtB,EAAA/P,KAAAsO,UACA,SAAAyB,EAAAoC,GACA,GAAAtG,GAAAkE,EAAAxN,cAAA4P,EAAAzD,gBAAAnM,aACA,OAAAsJ,GACAA,EAGAkE,EAAAtN,gBACA0P,EAAAzD,gBAAAjM,kBAEA0P,EAAAnS,KAAAsO,UAAA4D,EAEA,OAAAC,GASAA,EAAAxD,SAAAnK,qBACAlC,KAAAyN,EAAAxN,eACA4P,EAAAzD,gBAAAnM,cAAA,GACAC,OAAAuN,EAAAtN,iBACA0P,EAAAzD,gBAAAnM,gBAAAwN,EAAAxN,cACA4P,EAAAzD,gBAAAjM,gBAAA,EACA,GACA2P,KAAAtR,EAAAsR,QAdA1P,OAAA,KACAJ,KAAA,KACAE,OAAA,KACAO,KAAA,OAmBAmL,EAAAvM,UAAA+P,wBACA,WACA,MAAA1R,MAAAsO,UAAA+D,MAAA,SAAA/G,GACA,MAAAA,GAAAqD,SAAA+C,6BASAxD,EAAAvM,UAAA0B,iBACA,SAAAuB,EAAAiN,GACA,OAAAhM,GAAA,EAAmBA,EAAA7F,KAAAsO,UAAArK,OAA2B4B,IAAA,CAC9C,GAAAsM,GAAAnS,KAAAsO,UAAAzI,GAEAzC,EAAA+O,EAAAxD,SAAAtL,iBAAAuB,GAAA,EACA,IAAAxB,EACA,MAAAA,GAGA,GAAAyO,EACA,WAGA,UAAAzN,OAAA,IAAAQ,EAAA,+BAkBAsJ,EAAAvM,UAAAoQ,qBACA,SAAAjR,GACA,OAAA+E,GAAA,EAAmBA,EAAA7F,KAAAsO,UAAArK,OAA2B4B,IAAA,CAC9C,GAAAsM,GAAAnS,KAAAsO,UAAAzI,EAIA,SAAAsM,EAAAxD,SAAA1L,QAAAgD,QAAAjF,EAAAC,OAAAH,EAAA,YAGA,GAAAwR,GAAAH,EAAAxD,SAAAoD,qBAAAjR,EACA,IAAAwR,EAAA,CACA,GAAAC,IACAjQ,KAAAgQ,EAAAhQ,MACA6P,EAAAzD,gBAAAnM,cAAA,GACAC,OAAA8P,EAAA9P,QACA2P,EAAAzD,gBAAAnM,gBAAA+P,EAAAhQ,KACA6P,EAAAzD,gBAAAjM,gBAAA,EACA,GAEA,OAAA8P,KAIA,OACAjQ,KAAA,KACAE,OAAA,OASA0L,EAAAvM,UAAAsN,eACA,SAAAtH,EAAAvB,GACApG,KAAA8O,uBACA9O,KAAAkP,qBACA,QAAArJ,GAAA,EAAmBA,EAAA7F,KAAAsO,UAAArK,OAA2B4B,IAG9C,OAFAsM,GAAAnS,KAAAsO,UAAAzI,GACA2M,EAAAL,EAAAxD,SAAAiB,mBACAqC,EAAA,EAAqBA,EAAAO,EAAAvO,OAA4BgO,IAAA,CACjD,GAAA9P,GAAAqQ,EAAAP,GAEAvP,EAAAyP,EAAAxD,SAAAvN,SAAA6L,GAAA9K,EAAAO,OACA,QAAAyP,EAAAxD,SAAA5M,aACAW,EAAA1B,EAAAyD,KAAA0N,EAAAxD,SAAA5M,WAAAW,IAEA1C,KAAAoB,SAAAsC,IAAAhB,GACAA,EAAA1C,KAAAoB,SAAA6E,QAAAvD,EAEA,IAAAK,GAAAoP,EAAAxD,SAAArN,OAAA2L,GAAA9K,EAAAY,KACA/C,MAAAsB,OAAAoC,IAAAX,GACAA,EAAA/C,KAAAsB,OAAA2E,QAAAlD,EAMA,IAAA0P,IACA/P,SACAH,cAAAJ,EAAAI,eACA4P,EAAAzD,gBAAAnM,cAAA,GACAE,gBAAAN,EAAAM,iBACA0P,EAAAzD,gBAAAnM,gBAAAJ,EAAAI,cACA4P,EAAAzD,gBAAAjM,gBAAA,EACA,GACAI,aAAAV,EAAAU,aACAC,eAAAX,EAAAW,eACAC,OAGA/C,MAAA8O,oBAAA9B,KAAAyF,GACA,gBAAAA,GAAA5P,cACA7C,KAAAkP,mBAAAlC,KAAAyF,GAKA5D,EAAA7O,KAAA8O,oBAAA9N,EAAA8K,qCACA+C,EAAA7O,KAAAkP,mBAAAlO,EAAAyK,6BAGA7L,EAAAsO,4BT0yCM,SAASrO,EAAQD,GU50EvB,QAAA8S,GAAAC,EAAAC,EAAA7B,EAAA8B,EAAAC,EAAA1B,GAUA,GAAA2B,GAAAC,KAAAC,OAAAL,EAAAD,GAAA,GAAAA,EACA9G,EAAAiH,EAAA/B,EAAA8B,EAAAE,IAAA,EACA,YAAAlH,EAEAkH,EAEAlH,EAAA,EAEA+G,EAAAG,EAAA,EAEAL,EAAAK,EAAAH,EAAA7B,EAAA8B,EAAAC,EAAA1B,GAKAA,GAAAxR,EAAA2P,kBACAqD,EAAAC,EAAA5O,OAAA2O,EAAA,GAEAG,EAKAA,EAAAJ,EAAA,EAEAD,EAAAC,EAAAI,EAAAhC,EAAA8B,EAAAC,EAAA1B,GAIAA,GAAAxR,EAAA2P,kBACAwD,EAEA,EAAAJ,EAAA,GAAAA,EA1DA/S,EAAA0
P,qBAAA,EACA1P,EAAA2P,kBAAA,EAgFA3P,EAAAyR,OAAA,SAAAN,EAAA8B,EAAAC,EAAA1B,GACA,OAAAyB,EAAA5O,OACA,QAGA,IAAA+G,GAAA0H,EAAA,GAAAG,EAAA5O,OAAA8M,EAAA8B,EACAC,EAAA1B,GAAAxR,EAAA0P,qBACA,MAAAtE,EACA,QAMA,MAAAA,EAAA,MACA,IAAA8H,EAAAD,EAAA7H,GAAA6H,EAAA7H,EAAA,UAGAA,CAGA,OAAAA,KV22EM,SAASnL,EAAQD,GW77EvB,QAAAsT,GAAAC,EAAAC,EAAAC,GACA,GAAAxC,GAAAsC,EAAAC,EACAD,GAAAC,GAAAD,EAAAE,GACAF,EAAAE,GAAAxC,EAWA,QAAAyC,GAAAC,EAAAC,GACA,MAAAR,MAAAS,MAAAF,EAAAP,KAAAU,UAAAF,EAAAD,IAeA,QAAAI,GAAAR,EAAAS,EAAAlT,EAAAmT,GAKA,GAAAA,EAAAnT,EAAA,CAYA,GAAAoT,GAAAR,EAAA5S,EAAAmT,GACAhO,EAAAnF,EAAA,CAEAwS,GAAAC,EAAAW,EAAAD,EASA,QARAE,GAAAZ,EAAAU,GAQA5B,EAAAvR,EAAmBmT,EAAA5B,EAAOA,IAC1B2B,EAAAT,EAAAlB,GAAA8B,IAAA,IACAlO,GAAA,EACAqN,EAAAC,EAAAtN,EAAAoM,GAIAiB,GAAAC,EAAAtN,EAAA,EAAAoM,EACA,IAAA+B,GAAAnO,EAAA,CAIA8N,GAAAR,EAAAS,EAAAlT,EAAAsT,EAAA,GACAL,EAAAR,EAAAS,EAAAI,EAAA,EAAAH,IAYAjU,EAAAiP,UAAA,SAAAsE,EAAAS,GACAD,EAAAR,EAAAS,EAAA,EAAAT,EAAAlP,OAAA,KXg+EM,SAASpE,EAAQD,EAASM,GY9iFhC,QAAAW,GAAAoT,EAAAC,EAAAtP,EAAAuP,EAAAtP,GACA7E,KAAAoU,YACApU,KAAAqU,kBACArU,KAAAsC,KAAA,MAAA2R,EAAA,KAAAA,EACAjU,KAAAwC,OAAA,MAAA0R,EAAA,KAAAA,EACAlU,KAAA0C,OAAA,MAAAkC,EAAA,KAAAA,EACA5E,KAAA+C,KAAA,MAAA8B,EAAA,KAAAA,EACA7E,KAAAsU,IAAA,EACA,MAAAH,GAAAnU,KAAA0D,IAAAyQ,GAnCA,GAAAxT,GAAAT,EAAA,GAAAS,mBACAK,EAAAd,EAAA,GAIAqU,EAAA,UAGAC,EAAA,GAKAF,EAAA,oBAiCAzT,GAAA4T,wBACA,SAAAC,EAAA5S,EAAA6S,GAyFA,QAAAC,GAAAzS,EAAA0S,GACA,UAAA1S,GAAA8N,SAAA9N,EAAAO,OACAoS,EAAApR,IAAAmR,OACO,CACP,GAAAnS,GAAAiS,EACA3T,EAAAyD,KAAAkQ,EAAAxS,EAAAO,QACAP,EAAAO,MACAoS,GAAApR,IAAA,GAAA7C,GAAAsB,EAAAU,aACAV,EAAAW,eACAJ,EACAmS,EACA1S,EAAAY,QAjGA,GAAA+R,GAAA,GAAAjU,GAMAkU,EAAAL,EAAApM,MAAAiM,GACAS,EAAA,WACA,GAAAC,GAAAF,EAAA/M,QAEAkN,EAAAH,EAAA/M,SAAA,EACA,OAAAiN,GAAAC,GAIAC,EAAA,EAAA3D,EAAA,EAKA4D,EAAA,IAgEA,OA9DAtT,GAAAI,YAAA,SAAAC,GACA,UAAAiT,EAAA,CAGA,KAAAD,EAAAhT,EAAAI,eAMS,CAIT,GAAA8S,GAAAN,EAAA,GACAF,EAAAQ,EAAAjK,OAAA,EAAAjJ,EAAAM,gBACA+O,EAOA,OANAuD,GAAA,GAAAM,EAAAjK,OAAAjJ,EAAAM,gBACA+O,GACAA,EAAArP,EAAAM,gBACAmS,EAAAQ,EAAAP,QAEAO,EAAAjT,GAhBAyS,EAAAQ,EAAAJ,KACAG,IACA3D,EAAA,EAqBA,KAAA2D,EAAAhT,EAAAI,eACAuS,EAAApR,IAAAsR,KACAG,GAEA,IAAA3D,EAAArP,EAAAM,gBAAA,CACA,GAAA4S,GAAAN,EAAA,EACAD,GAAApR,IAAA2R,EAAAjK,OAAA,EAAAjJ,EAAAM,kBACAsS,EAAA,GAAAM,EAAAjK,OAAAjJ,EAAAM,iBACA+O,EAAArP,EAAAM,gBAEA2S,EAAAjT,GACKnC,MAEL+U,EAAA9Q,OAAA,IACAmR,GAEAR,EAAAQ,EAAAJ,KAGAF,EAAApR,IAAAqR,EAAAtQ,KAAA,MAIA3C,EAAAmB,QAAAC,QAAA,SAAAC,GACA,GAAAC,GAAAtB,EAAAuB,iBAAAF,EACA,OAAAC,IACA,MAAAuR,IACAxR,EAAAnC,EAAAyD,KAAAkQ,EAAAxR,IAEA2R,EAAAxR,iBAAAH,EAAAC,MAIA0R,GAwBAjU,EAAAc,UAAA+B,IAAA,SAAA4R,GACA,GAAAnK,MAAAoK,QAAAD,GACAA,EAAApS,QAAA,SAAAsS,GACAxV,KAAA0D,IAAA8R,IACKxV,UAEL,KAAAsV,EAAAhB,IAAA,gBAAAgB,GAMA,SAAA9M,WACA,8EAAA8M,EANAA,IACAtV,KAAAoU,SAAApH,KAAAsI,GAQA,MAAAtV,OASAa,EAAAc,UAAA8T,QAAA,SAAAH,GACA,GAAAnK,MAAAoK,QAAAD,GACA,OAAAzP,GAAAyP,EAAArR,OAAA,EAAiC4B,GAAA,EAAQA,IACzC7F,KAAAyV,QAAAH,EAAAzP,QAGA,KAAAyP,EAAAhB,IAAA,gBAAAgB,GAIA,SAAA9M,WACA,8EAAA8M,EAJAtV,MAAAoU,SAAAsB,QAAAJ,GAOA,MAAAtV,OAUAa,EAAAc,UAAAgU,KAAA,SAAAC,GAEA,OADAJ,GACA3P,EAAA,EAAAC,EAAA9F,KAAAoU,SAAAnQ,OAA6C6B,EAAAD,EAASA,IACtD2P,EAAAxV,KAAAoU,SAAAvO,GACA2P,EAAAlB,GACAkB,EAAAG,KAAAC,GAGA,KAAAJ,GACAI,EAAAJ,GAAoB9S,OAAA1C,KAAA0C,OACpBJ,KAAAtC,KAAAsC,KACAE,OAAAxC,KAAAwC,OACAO,KAAA/C,KAAA+C,QAYAlC,EAAAc,UAAA8C,KAAA,SAAAoR,GACA,GAAAC,GACAjQ,EACAC,EAAA9F,KAAAoU,SAAAnQ,MACA,IAAA6B,EAAA,GAEA,IADAgQ,KACAjQ,EAAA,EAAeC,EAAA,EAAAD,EAAWA,IAC1BiQ,EAAA9I,KAAAhN,KAAAoU,SAAAvO,IACAiQ,EAAA9I,KAAA6I,EAEAC,GAAA9I,KAAAhN,KAAAoU,SAAAvO,IACA7F,KAAAoU,SAAA0B,EAEA,MAAA9V,OAUAa,EAAAc,UAAAoU,aAAA,SAAAC,EAAAC,GACA,GAAAC,GAAAlW,KAAAoU,SAAApU,KAAAoU,SAAAn
Q,OAAA,EAUA,OATAiS,GAAA5B,GACA4B,EAAAH,aAAAC,EAAAC,GAEA,gBAAAC,GACAlW,KAAAoU,SAAApU,KAAAoU,SAAAnQ,OAAA,GAAAiS,EAAApL,QAAAkL,EAAAC,GAGAjW,KAAAoU,SAAApH,KAAA,GAAAlC,QAAAkL,EAAAC,IAEAjW,MAUAa,EAAAc,UAAA2B,iBACA,SAAAK,EAAAC,GACA5D,KAAAqU,eAAArT,EAAA+C,YAAAJ,IAAAC,GASA/C,EAAAc,UAAAwU,mBACA,SAAAP,GACA,OAAA/P,GAAA,EAAAC,EAAA9F,KAAAoU,SAAAnQ,OAA+C6B,EAAAD,EAASA,IACxD7F,KAAAoU,SAAAvO,GAAAyO,IACAtU,KAAAoU,SAAAvO,GAAAsQ,mBAAAP,EAKA,QADA3S,GAAAY,OAAAG,KAAAhE,KAAAqU,gBACAxO,EAAA,EAAAC,EAAA7C,EAAAgB,OAAyC6B,EAAAD,EAASA,IAClD+P,EAAA5U,EAAAwK,cAAAvI,EAAA4C,IAAA7F,KAAAqU,eAAApR,EAAA4C,MAQAhF,EAAAc,UAAAiF,SAAA,WACA,GAAA6J,GAAA,EAIA,OAHAzQ,MAAA2V,KAAA,SAAAH,GACA/E,GAAA+E,IAEA/E,GAOA5P,EAAAc,UAAAyU,sBAAA,SAAAtV,GACA,GAAAuB,IACAwS,KAAA,GACAvS,KAAA,EACAE,OAAA,GAEA6D,EAAA,GAAA1F,GAAAG,GACAuV,GAAA,EACAC,EAAA,KACAC,EAAA,KACAC,EAAA,KACAC,EAAA,IAqEA,OApEAzW,MAAA2V,KAAA,SAAAH,EAAA5S,GACAP,EAAAwS,MAAAW,EACA,OAAA5S,EAAAF,QACA,OAAAE,EAAAN,MACA,OAAAM,EAAAJ,SACA8T,IAAA1T,EAAAF,QACA6T,IAAA3T,EAAAN,MACAkU,IAAA5T,EAAAJ,QACAiU,IAAA7T,EAAAG,OACAsD,EAAArD,YACAN,OAAAE,EAAAF,OACAE,UACAN,KAAAM,EAAAN,KACAE,OAAAI,EAAAJ,QAEAH,WACAC,KAAAD,EAAAC,KACAE,OAAAH,EAAAG,QAEAO,KAAAH,EAAAG,OAGAuT,EAAA1T,EAAAF,OACA6T,EAAA3T,EAAAN,KACAkU,EAAA5T,EAAAJ,OACAiU,EAAA7T,EAAAG,KACAsT,GAAA,GACKA,IACLhQ,EAAArD,YACAX,WACAC,KAAAD,EAAAC,KACAE,OAAAH,EAAAG,UAGA8T,EAAA,KACAD,GAAA,EAEA,QAAAtJ,GAAA,EAAA9I,EAAAuR,EAAAvR,OAA4CA,EAAA8I,EAAcA,IAC1DyI,EAAAvN,WAAA8E,KAAAyH,GACAnS,EAAAC,OACAD,EAAAG,OAAA,EAEAuK,EAAA,IAAA9I,GACAqS,EAAA,KACAD,GAAA,GACSA,GACThQ,EAAArD,YACAN,OAAAE,EAAAF,OACAE,UACAN,KAAAM,EAAAN,KACAE,OAAAI,EAAAJ,QAEAH,WACAC,KAAAD,EAAAC,KACAE,OAAAH,EAAAG,QAEAO,KAAAH,EAAAG,QAIAV,EAAAG,WAIAxC,KAAAmW,mBAAA,SAAAhT,EAAAuT,GACArQ,EAAA/C,iBAAAH,EAAAuT,MAGU7B,KAAAxS,EAAAwS,KAAAxO,QAGVzG,EAAAiB","file":"source-map.min.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"sourceMap\"] = factory();\n\telse\n\t\troot[\"sourceMap\"] = factory();\n})(this, function() {\nreturn \n\n\n/** WEBPACK FOOTER **\n ** webpack/universalModuleDefinition\n **/","(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"sourceMap\"] = factory();\n\telse\n\t\troot[\"sourceMap\"] = factory();\n})(this, function() {\nreturn /******/ (function(modules) { // webpackBootstrap\n/******/ \t// The module cache\n/******/ \tvar installedModules = {};\n/******/\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(installedModules[moduleId])\n/******/ \t\t\treturn installedModules[moduleId].exports;\n/******/\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = installedModules[moduleId] = {\n/******/ \t\t\texports: {},\n/******/ \t\t\tid: moduleId,\n/******/ \t\t\tloaded: false\n/******/ \t\t};\n/******/\n/******/ \t\t// Execute the module function\n/******/ \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n/******/\n/******/ \t\t// Flag the module as loaded\n/******/ \t\tmodule.loaded = true;\n/******/\n/******/ \t\t// 
Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/\n/******/\n/******/ \t// expose the modules object (__webpack_modules__)\n/******/ \t__webpack_require__.m = modules;\n/******/\n/******/ \t// expose the module cache\n/******/ \t__webpack_require__.c = installedModules;\n/******/\n/******/ \t// __webpack_public_path__\n/******/ \t__webpack_require__.p = \"\";\n/******/\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(0);\n/******/ })\n/************************************************************************/\n/******/ ([\n/* 0 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/*\n\t * Copyright 2009-2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE.txt or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\texports.SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;\n\texports.SourceMapConsumer = __webpack_require__(7).SourceMapConsumer;\n\texports.SourceNode = __webpack_require__(10).SourceNode;\n\n\n/***/ },\n/* 1 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar base64VLQ = __webpack_require__(2);\n\tvar util = __webpack_require__(4);\n\tvar ArraySet = __webpack_require__(5).ArraySet;\n\tvar MappingList = __webpack_require__(6).MappingList;\n\t\n\t/**\n\t * An instance of the SourceMapGenerator represents a source map which is\n\t * being built incrementally. You may pass an object with the following\n\t * properties:\n\t *\n\t * - file: The filename of the generated source.\n\t * - sourceRoot: A root for all relative URLs in this source map.\n\t */\n\tfunction SourceMapGenerator(aArgs) {\n\t if (!aArgs) {\n\t aArgs = {};\n\t }\n\t this._file = util.getArg(aArgs, 'file', null);\n\t this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n\t this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n\t this._sources = new ArraySet();\n\t this._names = new ArraySet();\n\t this._mappings = new MappingList();\n\t this._sourcesContents = null;\n\t}\n\t\n\tSourceMapGenerator.prototype._version = 3;\n\t\n\t/**\n\t * Creates a new SourceMapGenerator based on a SourceMapConsumer\n\t *\n\t * @param aSourceMapConsumer The SourceMap.\n\t */\n\tSourceMapGenerator.fromSourceMap =\n\t function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n\t var sourceRoot = aSourceMapConsumer.sourceRoot;\n\t var generator = new SourceMapGenerator({\n\t file: aSourceMapConsumer.file,\n\t sourceRoot: sourceRoot\n\t });\n\t aSourceMapConsumer.eachMapping(function (mapping) {\n\t var newMapping = {\n\t generated: {\n\t line: mapping.generatedLine,\n\t column: mapping.generatedColumn\n\t }\n\t };\n\t\n\t if (mapping.source != null) {\n\t newMapping.source = mapping.source;\n\t if (sourceRoot != null) {\n\t newMapping.source = util.relative(sourceRoot, newMapping.source);\n\t }\n\t\n\t newMapping.original = {\n\t line: mapping.originalLine,\n\t column: mapping.originalColumn\n\t };\n\t\n\t if (mapping.name != null) {\n\t newMapping.name = mapping.name;\n\t }\n\t }\n\t\n\t generator.addMapping(newMapping);\n\t });\n\t aSourceMapConsumer.sources.forEach(function (sourceFile) {\n\t var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n\t if (content != null) {\n\t 
generator.setSourceContent(sourceFile, content);\n\t }\n\t });\n\t return generator;\n\t };\n\t\n\t/**\n\t * Add a single mapping from original source line and column to the generated\n\t * source's line and column for this source map being created. The mapping\n\t * object should have the following properties:\n\t *\n\t * - generated: An object with the generated line and column positions.\n\t * - original: An object with the original line and column positions.\n\t * - source: The original source file (relative to the sourceRoot).\n\t * - name: An optional original token name for this mapping.\n\t */\n\tSourceMapGenerator.prototype.addMapping =\n\t function SourceMapGenerator_addMapping(aArgs) {\n\t var generated = util.getArg(aArgs, 'generated');\n\t var original = util.getArg(aArgs, 'original', null);\n\t var source = util.getArg(aArgs, 'source', null);\n\t var name = util.getArg(aArgs, 'name', null);\n\t\n\t if (!this._skipValidation) {\n\t this._validateMapping(generated, original, source, name);\n\t }\n\t\n\t if (source != null) {\n\t source = String(source);\n\t if (!this._sources.has(source)) {\n\t this._sources.add(source);\n\t }\n\t }\n\t\n\t if (name != null) {\n\t name = String(name);\n\t if (!this._names.has(name)) {\n\t this._names.add(name);\n\t }\n\t }\n\t\n\t this._mappings.add({\n\t generatedLine: generated.line,\n\t generatedColumn: generated.column,\n\t originalLine: original != null && original.line,\n\t originalColumn: original != null && original.column,\n\t source: source,\n\t name: name\n\t });\n\t };\n\t\n\t/**\n\t * Set the source content for a source file.\n\t */\n\tSourceMapGenerator.prototype.setSourceContent =\n\t function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n\t var source = aSourceFile;\n\t if (this._sourceRoot != null) {\n\t source = util.relative(this._sourceRoot, source);\n\t }\n\t\n\t if (aSourceContent != null) {\n\t // Add the source content to the _sourcesContents map.\n\t // Create a new _sourcesContents map if the property is null.\n\t if (!this._sourcesContents) {\n\t this._sourcesContents = Object.create(null);\n\t }\n\t this._sourcesContents[util.toSetString(source)] = aSourceContent;\n\t } else if (this._sourcesContents) {\n\t // Remove the source file from the _sourcesContents map.\n\t // If the _sourcesContents map is empty, set the property to null.\n\t delete this._sourcesContents[util.toSetString(source)];\n\t if (Object.keys(this._sourcesContents).length === 0) {\n\t this._sourcesContents = null;\n\t }\n\t }\n\t };\n\t\n\t/**\n\t * Applies the mappings of a sub-source-map for a specific source file to the\n\t * source map being generated. Each mapping to the supplied source file is\n\t * rewritten using the supplied source map. Note: The resolution for the\n\t * resulting mappings is the minimium of this map and the supplied map.\n\t *\n\t * @param aSourceMapConsumer The source map to be applied.\n\t * @param aSourceFile Optional. The filename of the source file.\n\t * If omitted, SourceMapConsumer's file property will be used.\n\t * @param aSourceMapPath Optional. The dirname of the path to the source map\n\t * to be applied. If relative, it is relative to the SourceMapConsumer.\n\t * This parameter is needed when the two source maps aren't in the same\n\t * directory, and the source map to be applied contains relative source\n\t * paths. 
If so, those relative source paths need to be rewritten\n\t * relative to the SourceMapGenerator.\n\t */\n\tSourceMapGenerator.prototype.applySourceMap =\n\t function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n\t var sourceFile = aSourceFile;\n\t // If aSourceFile is omitted, we will use the file property of the SourceMap\n\t if (aSourceFile == null) {\n\t if (aSourceMapConsumer.file == null) {\n\t throw new Error(\n\t 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n\t 'or the source map\\'s \"file\" property. Both were omitted.'\n\t );\n\t }\n\t sourceFile = aSourceMapConsumer.file;\n\t }\n\t var sourceRoot = this._sourceRoot;\n\t // Make \"sourceFile\" relative if an absolute Url is passed.\n\t if (sourceRoot != null) {\n\t sourceFile = util.relative(sourceRoot, sourceFile);\n\t }\n\t // Applying the SourceMap can add and remove items from the sources and\n\t // the names array.\n\t var newSources = new ArraySet();\n\t var newNames = new ArraySet();\n\t\n\t // Find mappings for the \"sourceFile\"\n\t this._mappings.unsortedForEach(function (mapping) {\n\t if (mapping.source === sourceFile && mapping.originalLine != null) {\n\t // Check if it can be mapped by the source map, then update the mapping.\n\t var original = aSourceMapConsumer.originalPositionFor({\n\t line: mapping.originalLine,\n\t column: mapping.originalColumn\n\t });\n\t if (original.source != null) {\n\t // Copy mapping\n\t mapping.source = original.source;\n\t if (aSourceMapPath != null) {\n\t mapping.source = util.join(aSourceMapPath, mapping.source)\n\t }\n\t if (sourceRoot != null) {\n\t mapping.source = util.relative(sourceRoot, mapping.source);\n\t }\n\t mapping.originalLine = original.line;\n\t mapping.originalColumn = original.column;\n\t if (original.name != null) {\n\t mapping.name = original.name;\n\t }\n\t }\n\t }\n\t\n\t var source = mapping.source;\n\t if (source != null && !newSources.has(source)) {\n\t newSources.add(source);\n\t }\n\t\n\t var name = mapping.name;\n\t if (name != null && !newNames.has(name)) {\n\t newNames.add(name);\n\t }\n\t\n\t }, this);\n\t this._sources = newSources;\n\t this._names = newNames;\n\t\n\t // Copy sourcesContents of applied map.\n\t aSourceMapConsumer.sources.forEach(function (sourceFile) {\n\t var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n\t if (content != null) {\n\t if (aSourceMapPath != null) {\n\t sourceFile = util.join(aSourceMapPath, sourceFile);\n\t }\n\t if (sourceRoot != null) {\n\t sourceFile = util.relative(sourceRoot, sourceFile);\n\t }\n\t this.setSourceContent(sourceFile, content);\n\t }\n\t }, this);\n\t };\n\t\n\t/**\n\t * A mapping can have one of the three levels of data:\n\t *\n\t * 1. Just the generated position.\n\t * 2. The Generated position, original position, and original source.\n\t * 3. 
Generated and original position, original source, as well as a name\n\t * token.\n\t *\n\t * To maintain consistency, we validate that any new mapping being added falls\n\t * in to one of these categories.\n\t */\n\tSourceMapGenerator.prototype._validateMapping =\n\t function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n\t aName) {\n\t if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n\t && aGenerated.line > 0 && aGenerated.column >= 0\n\t && !aOriginal && !aSource && !aName) {\n\t // Case 1.\n\t return;\n\t }\n\t else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n\t && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n\t && aGenerated.line > 0 && aGenerated.column >= 0\n\t && aOriginal.line > 0 && aOriginal.column >= 0\n\t && aSource) {\n\t // Cases 2 and 3.\n\t return;\n\t }\n\t else {\n\t throw new Error('Invalid mapping: ' + JSON.stringify({\n\t generated: aGenerated,\n\t source: aSource,\n\t original: aOriginal,\n\t name: aName\n\t }));\n\t }\n\t };\n\t\n\t/**\n\t * Serialize the accumulated mappings in to the stream of base 64 VLQs\n\t * specified by the source map format.\n\t */\n\tSourceMapGenerator.prototype._serializeMappings =\n\t function SourceMapGenerator_serializeMappings() {\n\t var previousGeneratedColumn = 0;\n\t var previousGeneratedLine = 1;\n\t var previousOriginalColumn = 0;\n\t var previousOriginalLine = 0;\n\t var previousName = 0;\n\t var previousSource = 0;\n\t var result = '';\n\t var next;\n\t var mapping;\n\t var nameIdx;\n\t var sourceIdx;\n\t\n\t var mappings = this._mappings.toArray();\n\t for (var i = 0, len = mappings.length; i < len; i++) {\n\t mapping = mappings[i];\n\t next = ''\n\t\n\t if (mapping.generatedLine !== previousGeneratedLine) {\n\t previousGeneratedColumn = 0;\n\t while (mapping.generatedLine !== previousGeneratedLine) {\n\t next += ';';\n\t previousGeneratedLine++;\n\t }\n\t }\n\t else {\n\t if (i > 0) {\n\t if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n\t continue;\n\t }\n\t next += ',';\n\t }\n\t }\n\t\n\t next += base64VLQ.encode(mapping.generatedColumn\n\t - previousGeneratedColumn);\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (mapping.source != null) {\n\t sourceIdx = this._sources.indexOf(mapping.source);\n\t next += base64VLQ.encode(sourceIdx - previousSource);\n\t previousSource = sourceIdx;\n\t\n\t // lines are stored 0-based in SourceMap spec version 3\n\t next += base64VLQ.encode(mapping.originalLine - 1\n\t - previousOriginalLine);\n\t previousOriginalLine = mapping.originalLine - 1;\n\t\n\t next += base64VLQ.encode(mapping.originalColumn\n\t - previousOriginalColumn);\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (mapping.name != null) {\n\t nameIdx = this._names.indexOf(mapping.name);\n\t next += base64VLQ.encode(nameIdx - previousName);\n\t previousName = nameIdx;\n\t }\n\t }\n\t\n\t result += next;\n\t }\n\t\n\t return result;\n\t };\n\t\n\tSourceMapGenerator.prototype._generateSourcesContent =\n\t function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n\t return aSources.map(function (source) {\n\t if (!this._sourcesContents) {\n\t return null;\n\t }\n\t if (aSourceRoot != null) {\n\t source = util.relative(aSourceRoot, source);\n\t }\n\t var key = util.toSetString(source);\n\t return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n\t ? 
this._sourcesContents[key]\n\t : null;\n\t }, this);\n\t };\n\t\n\t/**\n\t * Externalize the source map.\n\t */\n\tSourceMapGenerator.prototype.toJSON =\n\t function SourceMapGenerator_toJSON() {\n\t var map = {\n\t version: this._version,\n\t sources: this._sources.toArray(),\n\t names: this._names.toArray(),\n\t mappings: this._serializeMappings()\n\t };\n\t if (this._file != null) {\n\t map.file = this._file;\n\t }\n\t if (this._sourceRoot != null) {\n\t map.sourceRoot = this._sourceRoot;\n\t }\n\t if (this._sourcesContents) {\n\t map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n\t }\n\t\n\t return map;\n\t };\n\t\n\t/**\n\t * Render the source map being generated to a string.\n\t */\n\tSourceMapGenerator.prototype.toString =\n\t function SourceMapGenerator_toString() {\n\t return JSON.stringify(this.toJSON());\n\t };\n\t\n\texports.SourceMapGenerator = SourceMapGenerator;\n\n\n/***/ },\n/* 2 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t *\n\t * Based on the Base 64 VLQ implementation in Closure Compiler:\n\t * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n\t *\n\t * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n\t * Redistribution and use in source and binary forms, with or without\n\t * modification, are permitted provided that the following conditions are\n\t * met:\n\t *\n\t * * Redistributions of source code must retain the above copyright\n\t * notice, this list of conditions and the following disclaimer.\n\t * * Redistributions in binary form must reproduce the above\n\t * copyright notice, this list of conditions and the following\n\t * disclaimer in the documentation and/or other materials provided\n\t * with the distribution.\n\t * * Neither the name of Google Inc. nor the names of its\n\t * contributors may be used to endorse or promote products derived\n\t * from this software without specific prior written permission.\n\t *\n\t * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\t * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n\t * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n\t * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n\t * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n\t * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n\t * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n\t * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n\t * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n\t * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\t * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\t */\n\t\n\tvar base64 = __webpack_require__(3);\n\t\n\t// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n\t// length quantities we use in the source map spec, the first bit is the sign,\n\t// the next four bits are the actual value, and the 6th bit is the\n\t// continuation bit. 
The continuation bit tells us whether there are more\n\t// digits in this value following this digit.\n\t//\n\t// Continuation\n\t// | Sign\n\t// | |\n\t// V V\n\t// 101011\n\t\n\tvar VLQ_BASE_SHIFT = 5;\n\t\n\t// binary: 100000\n\tvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\t\n\t// binary: 011111\n\tvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\t\n\t// binary: 100000\n\tvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\t\n\t/**\n\t * Converts from a two-complement value to a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n\t * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n\t */\n\tfunction toVLQSigned(aValue) {\n\t return aValue < 0\n\t ? ((-aValue) << 1) + 1\n\t : (aValue << 1) + 0;\n\t}\n\t\n\t/**\n\t * Converts to a two-complement value from a value where the sign bit is\n\t * placed in the least significant bit. For example, as decimals:\n\t * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n\t * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n\t */\n\tfunction fromVLQSigned(aValue) {\n\t var isNegative = (aValue & 1) === 1;\n\t var shifted = aValue >> 1;\n\t return isNegative\n\t ? -shifted\n\t : shifted;\n\t}\n\t\n\t/**\n\t * Returns the base 64 VLQ encoded value.\n\t */\n\texports.encode = function base64VLQ_encode(aValue) {\n\t var encoded = \"\";\n\t var digit;\n\t\n\t var vlq = toVLQSigned(aValue);\n\t\n\t do {\n\t digit = vlq & VLQ_BASE_MASK;\n\t vlq >>>= VLQ_BASE_SHIFT;\n\t if (vlq > 0) {\n\t // There are still more digits in this value, so we must make sure the\n\t // continuation bit is marked.\n\t digit |= VLQ_CONTINUATION_BIT;\n\t }\n\t encoded += base64.encode(digit);\n\t } while (vlq > 0);\n\t\n\t return encoded;\n\t};\n\t\n\t/**\n\t * Decodes the next base 64 VLQ value from the given string and returns the\n\t * value and the rest of the string via the out parameter.\n\t */\n\texports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n\t var strLen = aStr.length;\n\t var result = 0;\n\t var shift = 0;\n\t var continuation, digit;\n\t\n\t do {\n\t if (aIndex >= strLen) {\n\t throw new Error(\"Expected more digits in base 64 VLQ value.\");\n\t }\n\t\n\t digit = base64.decode(aStr.charCodeAt(aIndex++));\n\t if (digit === -1) {\n\t throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n\t }\n\t\n\t continuation = !!(digit & VLQ_CONTINUATION_BIT);\n\t digit &= VLQ_BASE_MASK;\n\t result = result + (digit << shift);\n\t shift += VLQ_BASE_SHIFT;\n\t } while (continuation);\n\t\n\t aOutParam.value = fromVLQSigned(result);\n\t aOutParam.rest = aIndex;\n\t};\n\n\n/***/ },\n/* 3 */\n/***/ function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\t\n\t/**\n\t * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n\t */\n\texports.encode = function (number) {\n\t if (0 <= number && number < intToCharMap.length) {\n\t return intToCharMap[number];\n\t }\n\t throw new TypeError(\"Must be between 0 and 63: \" + number);\n\t};\n\t\n\t/**\n\t * Decode a single base 64 character code digit to an integer. 
Returns -1 on\n\t * failure.\n\t */\n\texports.decode = function (charCode) {\n\t var bigA = 65; // 'A'\n\t var bigZ = 90; // 'Z'\n\t\n\t var littleA = 97; // 'a'\n\t var littleZ = 122; // 'z'\n\t\n\t var zero = 48; // '0'\n\t var nine = 57; // '9'\n\t\n\t var plus = 43; // '+'\n\t var slash = 47; // '/'\n\t\n\t var littleOffset = 26;\n\t var numberOffset = 52;\n\t\n\t // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n\t if (bigA <= charCode && charCode <= bigZ) {\n\t return (charCode - bigA);\n\t }\n\t\n\t // 26 - 51: abcdefghijklmnopqrstuvwxyz\n\t if (littleA <= charCode && charCode <= littleZ) {\n\t return (charCode - littleA + littleOffset);\n\t }\n\t\n\t // 52 - 61: 0123456789\n\t if (zero <= charCode && charCode <= nine) {\n\t return (charCode - zero + numberOffset);\n\t }\n\t\n\t // 62: +\n\t if (charCode == plus) {\n\t return 62;\n\t }\n\t\n\t // 63: /\n\t if (charCode == slash) {\n\t return 63;\n\t }\n\t\n\t // Invalid base64 digit.\n\t return -1;\n\t};\n\n\n/***/ },\n/* 4 */\n/***/ function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t/**\n\t * This is a helper function for getting values from parameter/options\n\t * objects.\n\t *\n\t * @param args The object we are extracting values from\n\t * @param name The name of the property we are getting.\n\t * @param defaultValue An optional value to return if the property is missing\n\t * from the object. If this is not specified and the property is missing, an\n\t * error will be thrown.\n\t */\n\tfunction getArg(aArgs, aName, aDefaultValue) {\n\t if (aName in aArgs) {\n\t return aArgs[aName];\n\t } else if (arguments.length === 3) {\n\t return aDefaultValue;\n\t } else {\n\t throw new Error('\"' + aName + '\" is a required argument.');\n\t }\n\t}\n\texports.getArg = getArg;\n\t\n\tvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\n\tvar dataUrlRegexp = /^data:.+\\,.+$/;\n\t\n\tfunction urlParse(aUrl) {\n\t var match = aUrl.match(urlRegexp);\n\t if (!match) {\n\t return null;\n\t }\n\t return {\n\t scheme: match[1],\n\t auth: match[2],\n\t host: match[3],\n\t port: match[4],\n\t path: match[5]\n\t };\n\t}\n\texports.urlParse = urlParse;\n\t\n\tfunction urlGenerate(aParsedUrl) {\n\t var url = '';\n\t if (aParsedUrl.scheme) {\n\t url += aParsedUrl.scheme + ':';\n\t }\n\t url += '//';\n\t if (aParsedUrl.auth) {\n\t url += aParsedUrl.auth + '@';\n\t }\n\t if (aParsedUrl.host) {\n\t url += aParsedUrl.host;\n\t }\n\t if (aParsedUrl.port) {\n\t url += \":\" + aParsedUrl.port\n\t }\n\t if (aParsedUrl.path) {\n\t url += aParsedUrl.path;\n\t }\n\t return url;\n\t}\n\texports.urlGenerate = urlGenerate;\n\t\n\t/**\n\t * Normalizes a path, or the path portion of a URL:\n\t *\n\t * - Replaces consequtive slashes with one slash.\n\t * - Removes unnecessary '.' parts.\n\t * - Removes unnecessary '/..' 
parts.\n\t *\n\t * Based on code in the Node.js 'path' core module.\n\t *\n\t * @param aPath The path or url to normalize.\n\t */\n\tfunction normalize(aPath) {\n\t var path = aPath;\n\t var url = urlParse(aPath);\n\t if (url) {\n\t if (!url.path) {\n\t return aPath;\n\t }\n\t path = url.path;\n\t }\n\t var isAbsolute = exports.isAbsolute(path);\n\t\n\t var parts = path.split(/\\/+/);\n\t for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n\t part = parts[i];\n\t if (part === '.') {\n\t parts.splice(i, 1);\n\t } else if (part === '..') {\n\t up++;\n\t } else if (up > 0) {\n\t if (part === '') {\n\t // The first part is blank if the path is absolute. Trying to go\n\t // above the root is a no-op. Therefore we can remove all '..' parts\n\t // directly after the root.\n\t parts.splice(i + 1, up);\n\t up = 0;\n\t } else {\n\t parts.splice(i, 2);\n\t up--;\n\t }\n\t }\n\t }\n\t path = parts.join('/');\n\t\n\t if (path === '') {\n\t path = isAbsolute ? '/' : '.';\n\t }\n\t\n\t if (url) {\n\t url.path = path;\n\t return urlGenerate(url);\n\t }\n\t return path;\n\t}\n\texports.normalize = normalize;\n\t\n\t/**\n\t * Joins two paths/URLs.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be joined with the root.\n\t *\n\t * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n\t * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n\t * first.\n\t * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n\t * is updated with the result and aRoot is returned. Otherwise the result\n\t * is returned.\n\t * - If aPath is absolute, the result is aPath.\n\t * - Otherwise the two paths are joined with a slash.\n\t * - Joining for example 'http://' and 'www.example.com' is also supported.\n\t */\n\tfunction join(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t if (aPath === \"\") {\n\t aPath = \".\";\n\t }\n\t var aPathUrl = urlParse(aPath);\n\t var aRootUrl = urlParse(aRoot);\n\t if (aRootUrl) {\n\t aRoot = aRootUrl.path || '/';\n\t }\n\t\n\t // `join(foo, '//www.example.org')`\n\t if (aPathUrl && !aPathUrl.scheme) {\n\t if (aRootUrl) {\n\t aPathUrl.scheme = aRootUrl.scheme;\n\t }\n\t return urlGenerate(aPathUrl);\n\t }\n\t\n\t if (aPathUrl || aPath.match(dataUrlRegexp)) {\n\t return aPath;\n\t }\n\t\n\t // `join('http://', 'www.example.com')`\n\t if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n\t aRootUrl.host = aPath;\n\t return urlGenerate(aRootUrl);\n\t }\n\t\n\t var joined = aPath.charAt(0) === '/'\n\t ? aPath\n\t : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\t\n\t if (aRootUrl) {\n\t aRootUrl.path = joined;\n\t return urlGenerate(aRootUrl);\n\t }\n\t return joined;\n\t}\n\texports.join = join;\n\t\n\texports.isAbsolute = function (aPath) {\n\t return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n\t};\n\t\n\t/**\n\t * Make a path relative to a URL or another path.\n\t *\n\t * @param aRoot The root path or URL.\n\t * @param aPath The path or URL to be made relative to aRoot.\n\t */\n\tfunction relative(aRoot, aPath) {\n\t if (aRoot === \"\") {\n\t aRoot = \".\";\n\t }\n\t\n\t aRoot = aRoot.replace(/\\/$/, '');\n\t\n\t // It is possible for the path to be above the root. In this case, simply\n\t // checking whether the root is a prefix of the path won't work. 
Instead, we\n\t // need to remove components from the root one by one, until either we find\n\t // a prefix that fits, or we run out of components to remove.\n\t var level = 0;\n\t while (aPath.indexOf(aRoot + '/') !== 0) {\n\t var index = aRoot.lastIndexOf(\"/\");\n\t if (index < 0) {\n\t return aPath;\n\t }\n\t\n\t // If the only part of the root that is left is the scheme (i.e. http://,\n\t // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n\t // have exhausted all components, so the path is not relative to the root.\n\t aRoot = aRoot.slice(0, index);\n\t if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n\t return aPath;\n\t }\n\t\n\t ++level;\n\t }\n\t\n\t // Make sure we add a \"../\" for each component we removed from the root.\n\t return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n\t}\n\texports.relative = relative;\n\t\n\tvar supportsNullProto = (function () {\n\t var obj = Object.create(null);\n\t return !('__proto__' in obj);\n\t}());\n\t\n\tfunction identity (s) {\n\t return s;\n\t}\n\t\n\t/**\n\t * Because behavior goes wacky when you set `__proto__` on objects, we\n\t * have to prefix all the strings in our set with an arbitrary character.\n\t *\n\t * See https://github.com/mozilla/source-map/pull/31 and\n\t * https://github.com/mozilla/source-map/issues/30\n\t *\n\t * @param String aStr\n\t */\n\tfunction toSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return '$' + aStr;\n\t }\n\t\n\t return aStr;\n\t}\n\texports.toSetString = supportsNullProto ? identity : toSetString;\n\t\n\tfunction fromSetString(aStr) {\n\t if (isProtoString(aStr)) {\n\t return aStr.slice(1);\n\t }\n\t\n\t return aStr;\n\t}\n\texports.fromSetString = supportsNullProto ? identity : fromSetString;\n\t\n\tfunction isProtoString(s) {\n\t if (!s) {\n\t return false;\n\t }\n\t\n\t var length = s.length;\n\t\n\t if (length < 9 /* \"__proto__\".length */) {\n\t return false;\n\t }\n\t\n\t if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n\t s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n\t s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n\t s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n\t s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n\t s.charCodeAt(length - 9) !== 95 /* '_' */) {\n\t return false;\n\t }\n\t\n\t for (var i = length - 10; i >= 0; i--) {\n\t if (s.charCodeAt(i) !== 36 /* '$' */) {\n\t return false;\n\t }\n\t }\n\t\n\t return true;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings where the original positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same original source/line/column, but different generated\n\t * line and column the same. 
Useful when searching for a mapping with a\n\t * stubbed out mapping.\n\t */\n\tfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n\t var cmp = mappingA.source - mappingB.source;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0 || onlyCompareOriginal) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return mappingA.name - mappingB.name;\n\t}\n\texports.compareByOriginalPositions = compareByOriginalPositions;\n\t\n\t/**\n\t * Comparator between two mappings with deflated source and name indices where\n\t * the generated positions are compared.\n\t *\n\t * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n\t * mappings with the same generated line and column, but different\n\t * source/name/original line and column the same. Useful when searching for a\n\t * mapping with a stubbed out mapping.\n\t */\n\tfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0 || onlyCompareGenerated) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.source - mappingB.source;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return mappingA.name - mappingB.name;\n\t}\n\texports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\t\n\tfunction strcmp(aStr1, aStr2) {\n\t if (aStr1 === aStr2) {\n\t return 0;\n\t }\n\t\n\t if (aStr1 > aStr2) {\n\t return 1;\n\t }\n\t\n\t return -1;\n\t}\n\t\n\t/**\n\t * Comparator between two mappings with inflated source and name strings where\n\t * the generated positions are compared.\n\t */\n\tfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n\t var cmp = mappingA.generatedLine - mappingB.generatedLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = strcmp(mappingA.source, mappingB.source);\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalLine - mappingB.originalLine;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t cmp = mappingA.originalColumn - mappingB.originalColumn;\n\t if (cmp !== 0) {\n\t return cmp;\n\t }\n\t\n\t return strcmp(mappingA.name, mappingB.name);\n\t}\n\texports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n/***/ },\n/* 5 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar has = Object.prototype.hasOwnProperty;\n\t\n\t/**\n\t * A data structure which is a combination of an array and a set. 
Adding a new\n\t * member is O(1), testing for membership is O(1), and finding the index of an\n\t * element is O(1). Removing elements from the set is not supported. Only\n\t * strings are supported for membership.\n\t */\n\tfunction ArraySet() {\n\t this._array = [];\n\t this._set = Object.create(null);\n\t}\n\t\n\t/**\n\t * Static method for creating ArraySet instances from an existing array.\n\t */\n\tArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n\t var set = new ArraySet();\n\t for (var i = 0, len = aArray.length; i < len; i++) {\n\t set.add(aArray[i], aAllowDuplicates);\n\t }\n\t return set;\n\t};\n\t\n\t/**\n\t * Return how many unique items are in this ArraySet. If duplicates have been\n\t * added, than those do not count towards the size.\n\t *\n\t * @returns Number\n\t */\n\tArraySet.prototype.size = function ArraySet_size() {\n\t return Object.getOwnPropertyNames(this._set).length;\n\t};\n\t\n\t/**\n\t * Add the given string to this set.\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n\t var sStr = util.toSetString(aStr);\n\t var isDuplicate = has.call(this._set, sStr);\n\t var idx = this._array.length;\n\t if (!isDuplicate || aAllowDuplicates) {\n\t this._array.push(aStr);\n\t }\n\t if (!isDuplicate) {\n\t this._set[sStr] = idx;\n\t }\n\t};\n\t\n\t/**\n\t * Is the given string a member of this set?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.has = function ArraySet_has(aStr) {\n\t var sStr = util.toSetString(aStr);\n\t return has.call(this._set, sStr);\n\t};\n\t\n\t/**\n\t * What is the index of the given string in the array?\n\t *\n\t * @param String aStr\n\t */\n\tArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n\t var sStr = util.toSetString(aStr);\n\t if (has.call(this._set, sStr)) {\n\t return this._set[sStr];\n\t }\n\t throw new Error('\"' + aStr + '\" is not in the set.');\n\t};\n\t\n\t/**\n\t * What is the element at the given index?\n\t *\n\t * @param Number aIdx\n\t */\n\tArraySet.prototype.at = function ArraySet_at(aIdx) {\n\t if (aIdx >= 0 && aIdx < this._array.length) {\n\t return this._array[aIdx];\n\t }\n\t throw new Error('No element indexed by ' + aIdx);\n\t};\n\t\n\t/**\n\t * Returns the array representation of this set (which has the proper indices\n\t * indicated by indexOf). Note that this is a copy of the internal array used\n\t * for storing the members so that no one can mess with internal state.\n\t */\n\tArraySet.prototype.toArray = function ArraySet_toArray() {\n\t return this._array.slice();\n\t};\n\t\n\texports.ArraySet = ArraySet;\n\n\n/***/ },\n/* 6 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2014 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. 
See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\t\n\t/**\n\t * Determine whether mappingB is after mappingA with respect to generated\n\t * position.\n\t */\n\tfunction generatedPositionAfter(mappingA, mappingB) {\n\t // Optimized for most common case\n\t var lineA = mappingA.generatedLine;\n\t var lineB = mappingB.generatedLine;\n\t var columnA = mappingA.generatedColumn;\n\t var columnB = mappingB.generatedColumn;\n\t return lineB > lineA || lineB == lineA && columnB >= columnA ||\n\t util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n\t}\n\t\n\t/**\n\t * A data structure to provide a sorted view of accumulated mappings in a\n\t * performance conscious manner. It trades a neglibable overhead in general\n\t * case for a large speedup in case of mappings being added in order.\n\t */\n\tfunction MappingList() {\n\t this._array = [];\n\t this._sorted = true;\n\t // Serves as infimum\n\t this._last = {generatedLine: -1, generatedColumn: 0};\n\t}\n\t\n\t/**\n\t * Iterate through internal items. This method takes the same arguments that\n\t * `Array.prototype.forEach` takes.\n\t *\n\t * NOTE: The order of the mappings is NOT guaranteed.\n\t */\n\tMappingList.prototype.unsortedForEach =\n\t function MappingList_forEach(aCallback, aThisArg) {\n\t this._array.forEach(aCallback, aThisArg);\n\t };\n\t\n\t/**\n\t * Add the given source mapping.\n\t *\n\t * @param Object aMapping\n\t */\n\tMappingList.prototype.add = function MappingList_add(aMapping) {\n\t if (generatedPositionAfter(this._last, aMapping)) {\n\t this._last = aMapping;\n\t this._array.push(aMapping);\n\t } else {\n\t this._sorted = false;\n\t this._array.push(aMapping);\n\t }\n\t};\n\t\n\t/**\n\t * Returns the flat, sorted array of mappings. The mappings are sorted by\n\t * generated position.\n\t *\n\t * WARNING: This method returns internal data without copying, for\n\t * performance. The return value must NOT be mutated, and should be treated as\n\t * an immutable borrow. If you want to take ownership, you must make your own\n\t * copy.\n\t */\n\tMappingList.prototype.toArray = function MappingList_toArray() {\n\t if (!this._sorted) {\n\t this._array.sort(util.compareByGeneratedPositionsInflated);\n\t this._sorted = true;\n\t }\n\t return this._array;\n\t};\n\t\n\texports.MappingList = MappingList;\n\n\n/***/ },\n/* 7 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar util = __webpack_require__(4);\n\tvar binarySearch = __webpack_require__(8);\n\tvar ArraySet = __webpack_require__(5).ArraySet;\n\tvar base64VLQ = __webpack_require__(2);\n\tvar quickSort = __webpack_require__(9).quickSort;\n\t\n\tfunction SourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t return sourceMap.sections != null\n\t ? 
new IndexedSourceMapConsumer(sourceMap)\n\t : new BasicSourceMapConsumer(sourceMap);\n\t}\n\t\n\tSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n\t return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n\t}\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tSourceMapConsumer.prototype._version = 3;\n\t\n\t// `__generatedMappings` and `__originalMappings` are arrays that hold the\n\t// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n\t// are lazily instantiated, accessed via the `_generatedMappings` and\n\t// `_originalMappings` getters respectively, and we only parse the mappings\n\t// and create these arrays once queried for a source location. We jump through\n\t// these hoops because there can be many thousands of mappings, and parsing\n\t// them is expensive, so we only want to do it if we must.\n\t//\n\t// Each object in the arrays is of the form:\n\t//\n\t// {\n\t// generatedLine: The line number in the generated code,\n\t// generatedColumn: The column number in the generated code,\n\t// source: The path to the original source file that generated this\n\t// chunk of code,\n\t// originalLine: The line number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// originalColumn: The column number in the original source that\n\t// corresponds to this chunk of generated code,\n\t// name: The name of the original symbol which generated this chunk of\n\t// code.\n\t// }\n\t//\n\t// All properties except for `generatedLine` and `generatedColumn` can be\n\t// `null`.\n\t//\n\t// `_generatedMappings` is ordered by the generated positions.\n\t//\n\t// `_originalMappings` is ordered by the original positions.\n\t\n\tSourceMapConsumer.prototype.__generatedMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n\t get: function () {\n\t if (!this.__generatedMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__generatedMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype.__originalMappings = null;\n\tObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n\t get: function () {\n\t if (!this.__originalMappings) {\n\t this._parseMappings(this._mappings, this.sourceRoot);\n\t }\n\t\n\t return this.__originalMappings;\n\t }\n\t});\n\t\n\tSourceMapConsumer.prototype._charIsMappingSeparator =\n\t function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n\t var c = aStr.charAt(index);\n\t return c === \";\" || c === \",\";\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t throw new Error(\"Subclasses must implement _parseMappings\");\n\t };\n\t\n\tSourceMapConsumer.GENERATED_ORDER = 1;\n\tSourceMapConsumer.ORIGINAL_ORDER = 2;\n\t\n\tSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\n\tSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Iterate over each mapping between an original source/line/column and a\n\t * generated line/column in this source map.\n\t *\n\t * @param Function aCallback\n\t * The function that is called with each mapping.\n\t * @param Object aContext\n\t * Optional. 
If specified, this object will be the value of `this` every\n\t * time that `aCallback` is called.\n\t * @param aOrder\n\t * Either `SourceMapConsumer.GENERATED_ORDER` or\n\t * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n\t * iterate over the mappings sorted by the generated file's line/column\n\t * order or the original's source/line/column order, respectively. Defaults to\n\t * `SourceMapConsumer.GENERATED_ORDER`.\n\t */\n\tSourceMapConsumer.prototype.eachMapping =\n\t function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n\t var context = aContext || null;\n\t var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\t\n\t var mappings;\n\t switch (order) {\n\t case SourceMapConsumer.GENERATED_ORDER:\n\t mappings = this._generatedMappings;\n\t break;\n\t case SourceMapConsumer.ORIGINAL_ORDER:\n\t mappings = this._originalMappings;\n\t break;\n\t default:\n\t throw new Error(\"Unknown order of iteration.\");\n\t }\n\t\n\t var sourceRoot = this.sourceRoot;\n\t mappings.map(function (mapping) {\n\t var source = mapping.source === null ? null : this._sources.at(mapping.source);\n\t if (source != null && sourceRoot != null) {\n\t source = util.join(sourceRoot, source);\n\t }\n\t return {\n\t source: source,\n\t generatedLine: mapping.generatedLine,\n\t generatedColumn: mapping.generatedColumn,\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: mapping.name === null ? null : this._names.at(mapping.name)\n\t };\n\t }, this).forEach(aCallback, context);\n\t };\n\t\n\t/**\n\t * Returns all generated line and column information for the original source,\n\t * line, and column provided. If no column is provided, returns all mappings\n\t * corresponding to a either the line we are searching for or the next\n\t * closest line that has any mappings. Otherwise, returns all mappings\n\t * corresponding to the given line and either the column we are searching for\n\t * or the next closest column that has any offsets.\n\t *\n\t * The only argument is an object with the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: Optional. the column number in the original source.\n\t *\n\t * and an array of objects is returned, each with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tSourceMapConsumer.prototype.allGeneratedPositionsFor =\n\t function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n\t var line = util.getArg(aArgs, 'line');\n\t\n\t // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n\t // returns the index of the closest mapping less than the needle. 
By\n\t // setting needle.originalColumn to 0, we thus find the last mapping for\n\t // the given line, provided such a mapping exists.\n\t var needle = {\n\t source: util.getArg(aArgs, 'source'),\n\t originalLine: line,\n\t originalColumn: util.getArg(aArgs, 'column', 0)\n\t };\n\t\n\t if (this.sourceRoot != null) {\n\t needle.source = util.relative(this.sourceRoot, needle.source);\n\t }\n\t if (!this._sources.has(needle.source)) {\n\t return [];\n\t }\n\t needle.source = this._sources.indexOf(needle.source);\n\t\n\t var mappings = [];\n\t\n\t var index = this._findMapping(needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t binarySearch.LEAST_UPPER_BOUND);\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (aArgs.column === undefined) {\n\t var originalLine = mapping.originalLine;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we found. Since\n\t // mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we found.\n\t while (mapping && mapping.originalLine === originalLine) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t } else {\n\t var originalColumn = mapping.originalColumn;\n\t\n\t // Iterate until either we run out of mappings, or we run into\n\t // a mapping for a different line than the one we were searching for.\n\t // Since mappings are sorted, this is guaranteed to find all mappings for\n\t // the line we are searching for.\n\t while (mapping &&\n\t mapping.originalLine === line &&\n\t mapping.originalColumn == originalColumn) {\n\t mappings.push({\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t });\n\t\n\t mapping = this._originalMappings[++index];\n\t }\n\t }\n\t }\n\t\n\t return mappings;\n\t };\n\t\n\texports.SourceMapConsumer = SourceMapConsumer;\n\t\n\t/**\n\t * A BasicSourceMapConsumer instance represents a parsed source map which we can\n\t * query for information about the original file positions by giving it a file\n\t * position in the generated source.\n\t *\n\t * The only parameter is the raw source map (either as a JSON string, or\n\t * already parsed to an object). According to the spec, source maps have the\n\t * following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - sources: An array of URLs to the original source files.\n\t * - names: An array of identifiers which can be referrenced by individual mappings.\n\t * - sourceRoot: Optional. The URL root from which all sources are relative.\n\t * - sourcesContent: Optional. An array of contents of the original source files.\n\t * - mappings: A string of base64 VLQs which contain the actual mappings.\n\t * - file: Optional. 
The generated file this source map is associated with.\n\t *\n\t * Here is an example source map, taken from the source map spec[0]:\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"out.js\",\n\t * sourceRoot : \"\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AA,AB;;ABCDE;\"\n\t * }\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n\t */\n\tfunction BasicSourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sources = util.getArg(sourceMap, 'sources');\n\t // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n\t // requires the array) to play nice here.\n\t var names = util.getArg(sourceMap, 'names', []);\n\t var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n\t var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n\t var mappings = util.getArg(sourceMap, 'mappings');\n\t var file = util.getArg(sourceMap, 'file', null);\n\t\n\t // Once again, Sass deviates from the spec and supplies the version as a\n\t // string rather than a number, so we use loose equality checking here.\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t sources = sources\n\t .map(String)\n\t // Some source maps produce relative source paths like \"./foo.js\" instead of\n\t // \"foo.js\". Normalize these first so that future comparisons will succeed.\n\t // See bugzil.la/1090768.\n\t .map(util.normalize)\n\t // Always ensure that absolute sources are internally stored relative to\n\t // the source root, if the source root is absolute. Not doing this would\n\t // be particularly problematic when the source root is a prefix of the\n\t // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n\t .map(function (source) {\n\t return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n\t ? util.relative(sourceRoot, source)\n\t : source;\n\t });\n\t\n\t // Pass `true` below to allow duplicate names and sources. While source maps\n\t // are intended to be compressed and deduplicated, the TypeScript compiler\n\t // sometimes generates source maps with duplicates in them. 
See Github issue\n\t // #72 and bugzil.la/889492.\n\t this._names = ArraySet.fromArray(names.map(String), true);\n\t this._sources = ArraySet.fromArray(sources, true);\n\t\n\t this.sourceRoot = sourceRoot;\n\t this.sourcesContent = sourcesContent;\n\t this._mappings = mappings;\n\t this.file = file;\n\t}\n\t\n\tBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\t\n\t/**\n\t * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n\t *\n\t * @param SourceMapGenerator aSourceMap\n\t * The source map that will be consumed.\n\t * @returns BasicSourceMapConsumer\n\t */\n\tBasicSourceMapConsumer.fromSourceMap =\n\t function SourceMapConsumer_fromSourceMap(aSourceMap) {\n\t var smc = Object.create(BasicSourceMapConsumer.prototype);\n\t\n\t var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n\t var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n\t smc.sourceRoot = aSourceMap._sourceRoot;\n\t smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n\t smc.sourceRoot);\n\t smc.file = aSourceMap._file;\n\t\n\t // Because we are modifying the entries (by converting string sources and\n\t // names to indices into the sources and names ArraySets), we have to make\n\t // a copy of the entry or else bad things happen. Shared mutable state\n\t // strikes again! See github issue #191.\n\t\n\t var generatedMappings = aSourceMap._mappings.toArray().slice();\n\t var destGeneratedMappings = smc.__generatedMappings = [];\n\t var destOriginalMappings = smc.__originalMappings = [];\n\t\n\t for (var i = 0, length = generatedMappings.length; i < length; i++) {\n\t var srcMapping = generatedMappings[i];\n\t var destMapping = new Mapping;\n\t destMapping.generatedLine = srcMapping.generatedLine;\n\t destMapping.generatedColumn = srcMapping.generatedColumn;\n\t\n\t if (srcMapping.source) {\n\t destMapping.source = sources.indexOf(srcMapping.source);\n\t destMapping.originalLine = srcMapping.originalLine;\n\t destMapping.originalColumn = srcMapping.originalColumn;\n\t\n\t if (srcMapping.name) {\n\t destMapping.name = names.indexOf(srcMapping.name);\n\t }\n\t\n\t destOriginalMappings.push(destMapping);\n\t }\n\t\n\t destGeneratedMappings.push(destMapping);\n\t }\n\t\n\t quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\t\n\t return smc;\n\t };\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tBasicSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t return this._sources.toArray().map(function (s) {\n\t return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n\t }, this);\n\t }\n\t});\n\t\n\t/**\n\t * Provide the JIT with a nice shape / hidden class.\n\t */\n\tfunction Mapping() {\n\t this.generatedLine = 0;\n\t this.generatedColumn = 0;\n\t this.source = null;\n\t this.originalLine = null;\n\t this.originalColumn = null;\n\t this.name = null;\n\t}\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tBasicSourceMapConsumer.prototype._parseMappings =\n\t function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t var generatedLine = 1;\n\t var previousGeneratedColumn = 0;\n\t var previousOriginalLine = 0;\n\t var previousOriginalColumn = 0;\n\t var previousSource = 0;\n\t var previousName = 0;\n\t var length = aStr.length;\n\t var index = 0;\n\t var cachedSegments = {};\n\t var temp = {};\n\t var originalMappings = [];\n\t var generatedMappings = [];\n\t var mapping, str, segment, end, value;\n\t\n\t while (index < length) {\n\t if (aStr.charAt(index) === ';') {\n\t generatedLine++;\n\t index++;\n\t previousGeneratedColumn = 0;\n\t }\n\t else if (aStr.charAt(index) === ',') {\n\t index++;\n\t }\n\t else {\n\t mapping = new Mapping();\n\t mapping.generatedLine = generatedLine;\n\t\n\t // Because each offset is encoded relative to the previous one,\n\t // many segments often have the same encoding. We can exploit this\n\t // fact by caching the parsed variable length fields of each segment,\n\t // allowing us to avoid a second parse if we encounter the same\n\t // segment again.\n\t for (end = index; end < length; end++) {\n\t if (this._charIsMappingSeparator(aStr, end)) {\n\t break;\n\t }\n\t }\n\t str = aStr.slice(index, end);\n\t\n\t segment = cachedSegments[str];\n\t if (segment) {\n\t index += str.length;\n\t } else {\n\t segment = [];\n\t while (index < end) {\n\t base64VLQ.decode(aStr, index, temp);\n\t value = temp.value;\n\t index = temp.rest;\n\t segment.push(value);\n\t }\n\t\n\t if (segment.length === 2) {\n\t throw new Error('Found a source, but no line and column');\n\t }\n\t\n\t if (segment.length === 3) {\n\t throw new Error('Found a source and line, but no column');\n\t }\n\t\n\t cachedSegments[str] = segment;\n\t }\n\t\n\t // Generated column.\n\t mapping.generatedColumn = previousGeneratedColumn + segment[0];\n\t previousGeneratedColumn = mapping.generatedColumn;\n\t\n\t if (segment.length > 1) {\n\t // Original source.\n\t mapping.source = previousSource + segment[1];\n\t previousSource += segment[1];\n\t\n\t // Original line.\n\t mapping.originalLine = previousOriginalLine + segment[2];\n\t previousOriginalLine = mapping.originalLine;\n\t // Lines are stored 0-based\n\t mapping.originalLine += 1;\n\t\n\t // Original column.\n\t mapping.originalColumn = previousOriginalColumn + segment[3];\n\t previousOriginalColumn = mapping.originalColumn;\n\t\n\t if (segment.length > 4) {\n\t // Original name.\n\t mapping.name = previousName + segment[4];\n\t previousName += segment[4];\n\t }\n\t }\n\t\n\t generatedMappings.push(mapping);\n\t if (typeof mapping.originalLine === 'number') {\n\t originalMappings.push(mapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t this.__generatedMappings = generatedMappings;\n\t\n\t quickSort(originalMappings, util.compareByOriginalPositions);\n\t this.__originalMappings = originalMappings;\n\t };\n\t\n\t/**\n\t * Find the mapping that best 
matches the hypothetical \"needle\" mapping that\n\t * we are searching for in the given \"haystack\" of mappings.\n\t */\n\tBasicSourceMapConsumer.prototype._findMapping =\n\t function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n\t aColumnName, aComparator, aBias) {\n\t // To return the position we are searching for, we must first find the\n\t // mapping for the given position and then return the opposite position it\n\t // points to. Because the mappings are sorted, we can use binary search to\n\t // find the best mapping.\n\t\n\t if (aNeedle[aLineName] <= 0) {\n\t throw new TypeError('Line must be greater than or equal to 1, got '\n\t + aNeedle[aLineName]);\n\t }\n\t if (aNeedle[aColumnName] < 0) {\n\t throw new TypeError('Column must be greater than or equal to 0, got '\n\t + aNeedle[aColumnName]);\n\t }\n\t\n\t return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n\t };\n\t\n\t/**\n\t * Compute the last column for each generated mapping. The last column is\n\t * inclusive.\n\t */\n\tBasicSourceMapConsumer.prototype.computeColumnSpans =\n\t function SourceMapConsumer_computeColumnSpans() {\n\t for (var index = 0; index < this._generatedMappings.length; ++index) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t // Mappings do not contain a field for the last generated columnt. We\n\t // can come up with an optimistic estimate, however, by assuming that\n\t // mappings are contiguous (i.e. given two consecutive mappings, the\n\t // first mapping ends where the second one starts).\n\t if (index + 1 < this._generatedMappings.length) {\n\t var nextMapping = this._generatedMappings[index + 1];\n\t\n\t if (mapping.generatedLine === nextMapping.generatedLine) {\n\t mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n\t continue;\n\t }\n\t }\n\t\n\t // The last mapping for each line spans the entire line.\n\t mapping.lastGeneratedColumn = Infinity;\n\t }\n\t };\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source.\n\t * - column: The column number in the generated source.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null.\n\t * - column: The column number in the original source, or null.\n\t * - name: The original identifier, or null.\n\t */\n\tBasicSourceMapConsumer.prototype.originalPositionFor =\n\t function SourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._generatedMappings,\n\t \"generatedLine\",\n\t \"generatedColumn\",\n\t util.compareByGeneratedPositionsDeflated,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._generatedMappings[index];\n\t\n\t if (mapping.generatedLine === needle.generatedLine) {\n\t var source = util.getArg(mapping, 'source', null);\n\t if (source !== null) {\n\t source = this._sources.at(source);\n\t if (this.sourceRoot != null) {\n\t source = util.join(this.sourceRoot, source);\n\t }\n\t }\n\t var name = util.getArg(mapping, 'name', null);\n\t if (name !== null) {\n\t name = this._names.at(name);\n\t }\n\t return {\n\t source: source,\n\t line: util.getArg(mapping, 'originalLine', null),\n\t column: util.getArg(mapping, 'originalColumn', null),\n\t name: name\n\t };\n\t }\n\t }\n\t\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function BasicSourceMapConsumer_hasContentsOfAllSources() {\n\t if (!this.sourcesContent) {\n\t return false;\n\t }\n\t return this.sourcesContent.length >= this._sources.size() &&\n\t !this.sourcesContent.some(function (sc) { return sc == null; });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tBasicSourceMapConsumer.prototype.sourceContentFor =\n\t function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t if (!this.sourcesContent) {\n\t return null;\n\t }\n\t\n\t if (this.sourceRoot != null) {\n\t aSource = util.relative(this.sourceRoot, aSource);\n\t }\n\t\n\t if (this._sources.has(aSource)) {\n\t return this.sourcesContent[this._sources.indexOf(aSource)];\n\t }\n\t\n\t var url;\n\t if (this.sourceRoot != null\n\t && (url = util.urlParse(this.sourceRoot))) {\n\t // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n\t // many users. We can help them out when they expect file:// URIs to\n\t // behave like it would if they were running a local HTTP server. 
See\n\t // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n\t var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n\t if (url.scheme == \"file\"\n\t && this._sources.has(fileUriAbsPath)) {\n\t return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n\t }\n\t\n\t if ((!url.path || url.path == \"/\")\n\t && this._sources.has(\"/\" + aSource)) {\n\t return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n\t }\n\t }\n\t\n\t // This function is used recursively from\n\t // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we\n\t // don't want to throw if we can't find the source - we just want to\n\t // return null, so we provide a flag to exit gracefully.\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: The column number in the original source.\n\t * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n\t * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tBasicSourceMapConsumer.prototype.generatedPositionFor =\n\t function SourceMapConsumer_generatedPositionFor(aArgs) {\n\t var source = util.getArg(aArgs, 'source');\n\t if (this.sourceRoot != null) {\n\t source = util.relative(this.sourceRoot, source);\n\t }\n\t if (!this._sources.has(source)) {\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t }\n\t source = this._sources.indexOf(source);\n\t\n\t var needle = {\n\t source: source,\n\t originalLine: util.getArg(aArgs, 'line'),\n\t originalColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t var index = this._findMapping(\n\t needle,\n\t this._originalMappings,\n\t \"originalLine\",\n\t \"originalColumn\",\n\t util.compareByOriginalPositions,\n\t util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n\t );\n\t\n\t if (index >= 0) {\n\t var mapping = this._originalMappings[index];\n\t\n\t if (mapping.source === needle.source) {\n\t return {\n\t line: util.getArg(mapping, 'generatedLine', null),\n\t column: util.getArg(mapping, 'generatedColumn', null),\n\t lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n\t };\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null,\n\t lastColumn: null\n\t };\n\t };\n\t\n\texports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\t\n\t/**\n\t * An IndexedSourceMapConsumer instance represents a parsed source map which\n\t * we can query for information. It differs from BasicSourceMapConsumer in\n\t * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n\t * input.\n\t *\n\t * The only parameter is a raw source map (either as a JSON string, or already\n\t * parsed to an object). 
According to the spec for indexed source maps, they\n\t * have the following attributes:\n\t *\n\t * - version: Which version of the source map spec this map is following.\n\t * - file: Optional. The generated file this source map is associated with.\n\t * - sections: A list of section definitions.\n\t *\n\t * Each value under the \"sections\" field has two fields:\n\t * - offset: The offset into the original specified at which this section\n\t * begins to apply, defined as an object with a \"line\" and \"column\"\n\t * field.\n\t * - map: A source map definition. This source map could also be indexed,\n\t * but doesn't have to be.\n\t *\n\t * Instead of the \"map\" field, it's also possible to have a \"url\" field\n\t * specifying a URL to retrieve a source map from, but that's currently\n\t * unsupported.\n\t *\n\t * Here's an example source map, taken from the source map spec[0], but\n\t * modified to omit a section which uses the \"url\" field.\n\t *\n\t * {\n\t * version : 3,\n\t * file: \"app.js\",\n\t * sections: [{\n\t * offset: {line:100, column:10},\n\t * map: {\n\t * version : 3,\n\t * file: \"section.js\",\n\t * sources: [\"foo.js\", \"bar.js\"],\n\t * names: [\"src\", \"maps\", \"are\", \"fun\"],\n\t * mappings: \"AAAA,E;;ABCDE;\"\n\t * }\n\t * }],\n\t * }\n\t *\n\t * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n\t */\n\tfunction IndexedSourceMapConsumer(aSourceMap) {\n\t var sourceMap = aSourceMap;\n\t if (typeof aSourceMap === 'string') {\n\t sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n\t }\n\t\n\t var version = util.getArg(sourceMap, 'version');\n\t var sections = util.getArg(sourceMap, 'sections');\n\t\n\t if (version != this._version) {\n\t throw new Error('Unsupported version: ' + version);\n\t }\n\t\n\t this._sources = new ArraySet();\n\t this._names = new ArraySet();\n\t\n\t var lastOffset = {\n\t line: -1,\n\t column: 0\n\t };\n\t this._sections = sections.map(function (s) {\n\t if (s.url) {\n\t // The url field will require support for asynchronicity.\n\t // See https://github.com/mozilla/source-map/issues/16\n\t throw new Error('Support for url field in sections not implemented.');\n\t }\n\t var offset = util.getArg(s, 'offset');\n\t var offsetLine = util.getArg(offset, 'line');\n\t var offsetColumn = util.getArg(offset, 'column');\n\t\n\t if (offsetLine < lastOffset.line ||\n\t (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n\t throw new Error('Section offsets must be ordered and non-overlapping.');\n\t }\n\t lastOffset = offset;\n\t\n\t return {\n\t generatedOffset: {\n\t // The offset fields are 0-based, but we use 1-based indices when\n\t // encoding/decoding from VLQ.\n\t generatedLine: offsetLine + 1,\n\t generatedColumn: offsetColumn + 1\n\t },\n\t consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n\t }\n\t });\n\t}\n\t\n\tIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\n\tIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\t\n\t/**\n\t * The version of the source mapping spec that we are consuming.\n\t */\n\tIndexedSourceMapConsumer.prototype._version = 3;\n\t\n\t/**\n\t * The list of original sources.\n\t */\n\tObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n\t get: function () {\n\t var sources = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n\t 
sources.push(this._sections[i].consumer.sources[j]);\n\t }\n\t }\n\t return sources;\n\t }\n\t});\n\t\n\t/**\n\t * Returns the original source, line, and column information for the generated\n\t * source's line and column positions provided. The only argument is an object\n\t * with the following properties:\n\t *\n\t * - line: The line number in the generated source.\n\t * - column: The column number in the generated source.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - source: The original source file, or null.\n\t * - line: The line number in the original source, or null.\n\t * - column: The column number in the original source, or null.\n\t * - name: The original identifier, or null.\n\t */\n\tIndexedSourceMapConsumer.prototype.originalPositionFor =\n\t function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n\t var needle = {\n\t generatedLine: util.getArg(aArgs, 'line'),\n\t generatedColumn: util.getArg(aArgs, 'column')\n\t };\n\t\n\t // Find the section containing the generated position we're trying to map\n\t // to an original position.\n\t var sectionIndex = binarySearch.search(needle, this._sections,\n\t function(needle, section) {\n\t var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n\t if (cmp) {\n\t return cmp;\n\t }\n\t\n\t return (needle.generatedColumn -\n\t section.generatedOffset.generatedColumn);\n\t });\n\t var section = this._sections[sectionIndex];\n\t\n\t if (!section) {\n\t return {\n\t source: null,\n\t line: null,\n\t column: null,\n\t name: null\n\t };\n\t }\n\t\n\t return section.consumer.originalPositionFor({\n\t line: needle.generatedLine -\n\t (section.generatedOffset.generatedLine - 1),\n\t column: needle.generatedColumn -\n\t (section.generatedOffset.generatedLine === needle.generatedLine\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t bias: aArgs.bias\n\t });\n\t };\n\t\n\t/**\n\t * Return true if we have the source content for every source in the source\n\t * map, false otherwise.\n\t */\n\tIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n\t function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n\t return this._sections.every(function (s) {\n\t return s.consumer.hasContentsOfAllSources();\n\t });\n\t };\n\t\n\t/**\n\t * Returns the original source content. The only argument is the url of the\n\t * original source file. Returns null if no original source content is\n\t * available.\n\t */\n\tIndexedSourceMapConsumer.prototype.sourceContentFor =\n\t function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t var content = section.consumer.sourceContentFor(aSource, true);\n\t if (content) {\n\t return content;\n\t }\n\t }\n\t if (nullOnMissing) {\n\t return null;\n\t }\n\t else {\n\t throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n\t }\n\t };\n\t\n\t/**\n\t * Returns the generated line and column information for the original source,\n\t * line, and column positions provided. 
The only argument is an object with\n\t * the following properties:\n\t *\n\t * - source: The filename of the original source.\n\t * - line: The line number in the original source.\n\t * - column: The column number in the original source.\n\t *\n\t * and an object is returned with the following properties:\n\t *\n\t * - line: The line number in the generated source, or null.\n\t * - column: The column number in the generated source, or null.\n\t */\n\tIndexedSourceMapConsumer.prototype.generatedPositionFor =\n\t function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t\n\t // Only consider this section if the requested source is in the list of\n\t // sources of the consumer.\n\t if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n\t continue;\n\t }\n\t var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n\t if (generatedPosition) {\n\t var ret = {\n\t line: generatedPosition.line +\n\t (section.generatedOffset.generatedLine - 1),\n\t column: generatedPosition.column +\n\t (section.generatedOffset.generatedLine === generatedPosition.line\n\t ? section.generatedOffset.generatedColumn - 1\n\t : 0)\n\t };\n\t return ret;\n\t }\n\t }\n\t\n\t return {\n\t line: null,\n\t column: null\n\t };\n\t };\n\t\n\t/**\n\t * Parse the mappings in a string in to a data structure which we can easily\n\t * query (the ordered arrays in the `this.__generatedMappings` and\n\t * `this.__originalMappings` properties).\n\t */\n\tIndexedSourceMapConsumer.prototype._parseMappings =\n\t function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n\t this.__generatedMappings = [];\n\t this.__originalMappings = [];\n\t for (var i = 0; i < this._sections.length; i++) {\n\t var section = this._sections[i];\n\t var sectionMappings = section.consumer._generatedMappings;\n\t for (var j = 0; j < sectionMappings.length; j++) {\n\t var mapping = sectionMappings[j];\n\t\n\t var source = section.consumer._sources.at(mapping.source);\n\t if (section.consumer.sourceRoot !== null) {\n\t source = util.join(section.consumer.sourceRoot, source);\n\t }\n\t this._sources.add(source);\n\t source = this._sources.indexOf(source);\n\t\n\t var name = section.consumer._names.at(mapping.name);\n\t this._names.add(name);\n\t name = this._names.indexOf(name);\n\t\n\t // The mappings coming from the consumer for the section have\n\t // generated positions relative to the start of the section, so we\n\t // need to offset them to be relative to the start of the concatenated\n\t // generated file.\n\t var adjustedMapping = {\n\t source: source,\n\t generatedLine: mapping.generatedLine +\n\t (section.generatedOffset.generatedLine - 1),\n\t generatedColumn: mapping.generatedColumn +\n\t (section.generatedOffset.generatedLine === mapping.generatedLine\n\t ? 
section.generatedOffset.generatedColumn - 1\n\t : 0),\n\t originalLine: mapping.originalLine,\n\t originalColumn: mapping.originalColumn,\n\t name: name\n\t };\n\t\n\t this.__generatedMappings.push(adjustedMapping);\n\t if (typeof adjustedMapping.originalLine === 'number') {\n\t this.__originalMappings.push(adjustedMapping);\n\t }\n\t }\n\t }\n\t\n\t quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n\t quickSort(this.__originalMappings, util.compareByOriginalPositions);\n\t };\n\t\n\texports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n/***/ },\n/* 8 */\n/***/ function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\texports.GREATEST_LOWER_BOUND = 1;\n\texports.LEAST_UPPER_BOUND = 2;\n\t\n\t/**\n\t * Recursive implementation of binary search.\n\t *\n\t * @param aLow Indices here and lower do not contain the needle.\n\t * @param aHigh Indices here and higher do not contain the needle.\n\t * @param aNeedle The element being searched for.\n\t * @param aHaystack The non-empty array being searched.\n\t * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t */\n\tfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n\t // This function terminates when one of the following is true:\n\t //\n\t // 1. We find the exact element we are looking for.\n\t //\n\t // 2. We did not find the exact element, but we can return the index of\n\t // the next-closest element.\n\t //\n\t // 3. We did not find the exact element, and there is no next-closest\n\t // element than the one we are searching for, so we return -1.\n\t var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n\t var cmp = aCompare(aNeedle, aHaystack[mid], true);\n\t if (cmp === 0) {\n\t // Found the element we are looking for.\n\t return mid;\n\t }\n\t else if (cmp > 0) {\n\t // Our needle is greater than aHaystack[mid].\n\t if (aHigh - mid > 1) {\n\t // The element is in the upper half.\n\t return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // The exact needle element was not found in this haystack. Determine if\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return aHigh < aHaystack.length ? aHigh : -1;\n\t } else {\n\t return mid;\n\t }\n\t }\n\t else {\n\t // Our needle is less than aHaystack[mid].\n\t if (mid - aLow > 1) {\n\t // The element is in the lower half.\n\t return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n\t }\n\t\n\t // we are in termination case (3) or (2) and return the appropriate thing.\n\t if (aBias == exports.LEAST_UPPER_BOUND) {\n\t return mid;\n\t } else {\n\t return aLow < 0 ? -1 : aLow;\n\t }\n\t }\n\t}\n\t\n\t/**\n\t * This is an implementation of binary search which will always try and return\n\t * the index of the closest element if there is no exact hit. 
This is because\n\t * mappings between original and generated line/col pairs are single points,\n\t * and there is an implicit region between each of them, so a miss just means\n\t * that you aren't on the very start of a region.\n\t *\n\t * @param aNeedle The element you are looking for.\n\t * @param aHaystack The array that is being searched.\n\t * @param aCompare A function which takes the needle and an element in the\n\t * array and returns -1, 0, or 1 depending on whether the needle is less\n\t * than, equal to, or greater than the element, respectively.\n\t * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n\t * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n\t * closest element that is smaller than or greater than the one we are\n\t * searching for, respectively, if the exact element cannot be found.\n\t * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n\t */\n\texports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n\t if (aHaystack.length === 0) {\n\t return -1;\n\t }\n\t\n\t var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n\t aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n\t if (index < 0) {\n\t return -1;\n\t }\n\t\n\t // We have found either the exact element, or the next-closest element than\n\t // the one we are searching for. However, there may be more than one such\n\t // element. Make sure we always return the smallest of these.\n\t while (index - 1 >= 0) {\n\t if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n\t break;\n\t }\n\t --index;\n\t }\n\t\n\t return index;\n\t};\n\n\n/***/ },\n/* 9 */\n/***/ function(module, exports) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\t// It turns out that some (most?) JavaScript engines don't self-host\n\t// `Array.prototype.sort`. This makes sense because C++ will likely remain\n\t// faster than JS when doing raw CPU-intensive sorting. However, when using a\n\t// custom comparator function, calling back and forth between the VM's C++ and\n\t// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n\t// worse generated code for the comparator function than would be optimal. In\n\t// fact, when sorting with a comparator, these costs outweigh the benefits of\n\t// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n\t// a ~3500ms mean speed-up in `bench/bench.html`.\n\t\n\t/**\n\t * Swap the elements indexed by `x` and `y` in the array `ary`.\n\t *\n\t * @param {Array} ary\n\t * The array.\n\t * @param {Number} x\n\t * The index of the first item.\n\t * @param {Number} y\n\t * The index of the second item.\n\t */\n\tfunction swap(ary, x, y) {\n\t var temp = ary[x];\n\t ary[x] = ary[y];\n\t ary[y] = temp;\n\t}\n\t\n\t/**\n\t * Returns a random integer within the range `low .. 
high` inclusive.\n\t *\n\t * @param {Number} low\n\t * The lower bound on the range.\n\t * @param {Number} high\n\t * The upper bound on the range.\n\t */\n\tfunction randomIntInRange(low, high) {\n\t return Math.round(low + (Math.random() * (high - low)));\n\t}\n\t\n\t/**\n\t * The Quick Sort algorithm.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t * @param {Number} p\n\t * Start index of the array\n\t * @param {Number} r\n\t * End index of the array\n\t */\n\tfunction doQuickSort(ary, comparator, p, r) {\n\t // If our lower bound is less than our upper bound, we (1) partition the\n\t // array into two pieces and (2) recurse on each half. If it is not, this is\n\t // the empty array and our base case.\n\t\n\t if (p < r) {\n\t // (1) Partitioning.\n\t //\n\t // The partitioning chooses a pivot between `p` and `r` and moves all\n\t // elements that are less than or equal to the pivot to the before it, and\n\t // all the elements that are greater than it after it. The effect is that\n\t // once partition is done, the pivot is in the exact place it will be when\n\t // the array is put in sorted order, and it will not need to be moved\n\t // again. This runs in O(n) time.\n\t\n\t // Always choose a random pivot so that an input array which is reverse\n\t // sorted does not cause O(n^2) running time.\n\t var pivotIndex = randomIntInRange(p, r);\n\t var i = p - 1;\n\t\n\t swap(ary, pivotIndex, r);\n\t var pivot = ary[r];\n\t\n\t // Immediately after `j` is incremented in this loop, the following hold\n\t // true:\n\t //\n\t // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n\t //\n\t // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n\t for (var j = p; j < r; j++) {\n\t if (comparator(ary[j], pivot) <= 0) {\n\t i += 1;\n\t swap(ary, i, j);\n\t }\n\t }\n\t\n\t swap(ary, i + 1, j);\n\t var q = i + 1;\n\t\n\t // (2) Recurse on each half.\n\t\n\t doQuickSort(ary, comparator, p, q - 1);\n\t doQuickSort(ary, comparator, q + 1, r);\n\t }\n\t}\n\t\n\t/**\n\t * Sort the given array in-place with the given comparator function.\n\t *\n\t * @param {Array} ary\n\t * An array to sort.\n\t * @param {function} comparator\n\t * Function to use to compare two items.\n\t */\n\texports.quickSort = function (ary, comparator) {\n\t doQuickSort(ary, comparator, 0, ary.length - 1);\n\t};\n\n\n/***/ },\n/* 10 */\n/***/ function(module, exports, __webpack_require__) {\n\n\t/* -*- Mode: js; js-indent-level: 2; -*- */\n\t/*\n\t * Copyright 2011 Mozilla Foundation and contributors\n\t * Licensed under the New BSD license. See LICENSE or:\n\t * http://opensource.org/licenses/BSD-3-Clause\n\t */\n\t\n\tvar SourceMapGenerator = __webpack_require__(1).SourceMapGenerator;\n\tvar util = __webpack_require__(4);\n\t\n\t// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n\t// operating systems these days (capturing the result).\n\tvar REGEX_NEWLINE = /(\\r?\\n)/;\n\t\n\t// Newline character code for charCodeAt() comparisons\n\tvar NEWLINE_CODE = 10;\n\t\n\t// Private symbol for identifying `SourceNode`s when multiple versions of\n\t// the source-map library are loaded. 
This MUST NOT CHANGE across\n\t// versions!\n\tvar isSourceNode = \"$$$isSourceNode$$$\";\n\t\n\t/**\n\t * SourceNodes provide a way to abstract over interpolating/concatenating\n\t * snippets of generated JavaScript source code while maintaining the line and\n\t * column information associated with the original source code.\n\t *\n\t * @param aLine The original line number.\n\t * @param aColumn The original column number.\n\t * @param aSource The original source's filename.\n\t * @param aChunks Optional. An array of strings which are snippets of\n\t * generated JS, or other SourceNodes.\n\t * @param aName The original identifier.\n\t */\n\tfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n\t this.children = [];\n\t this.sourceContents = {};\n\t this.line = aLine == null ? null : aLine;\n\t this.column = aColumn == null ? null : aColumn;\n\t this.source = aSource == null ? null : aSource;\n\t this.name = aName == null ? null : aName;\n\t this[isSourceNode] = true;\n\t if (aChunks != null) this.add(aChunks);\n\t}\n\t\n\t/**\n\t * Creates a SourceNode from generated code and a SourceMapConsumer.\n\t *\n\t * @param aGeneratedCode The generated code\n\t * @param aSourceMapConsumer The SourceMap for the generated code\n\t * @param aRelativePath Optional. The path that relative sources in the\n\t * SourceMapConsumer should be relative to.\n\t */\n\tSourceNode.fromStringWithSourceMap =\n\t function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n\t // The SourceNode we want to fill with the generated code\n\t // and the SourceMap\n\t var node = new SourceNode();\n\t\n\t // All even indices of this array are one line of the generated code,\n\t // while all odd indices are the newlines between two adjacent lines\n\t // (since `REGEX_NEWLINE` captures its match).\n\t // Processed fragments are removed from this array, by calling `shiftNextLine`.\n\t var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n\t var shiftNextLine = function() {\n\t var lineContents = remainingLines.shift();\n\t // The last line of a file might not have a newline.\n\t var newLine = remainingLines.shift() || \"\";\n\t return lineContents + newLine;\n\t };\n\t\n\t // We need to remember the position of \"remainingLines\"\n\t var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\t\n\t // The generate SourceNodes we need a code range.\n\t // To extract it current and last mapping is used.\n\t // Here we store the last mapping.\n\t var lastMapping = null;\n\t\n\t aSourceMapConsumer.eachMapping(function (mapping) {\n\t if (lastMapping !== null) {\n\t // We add the code from \"lastMapping\" to \"mapping\":\n\t // First check if there is a new line in between.\n\t if (lastGeneratedLine < mapping.generatedLine) {\n\t // Associate first line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t lastGeneratedLine++;\n\t lastGeneratedColumn = 0;\n\t // The remaining code is added without mapping\n\t } else {\n\t // There is no new line in between.\n\t // Associate the code between \"lastGeneratedColumn\" and\n\t // \"mapping.generatedColumn\" with \"lastMapping\"\n\t var nextLine = remainingLines[0];\n\t var code = nextLine.substr(0, mapping.generatedColumn -\n\t lastGeneratedColumn);\n\t remainingLines[0] = nextLine.substr(mapping.generatedColumn -\n\t lastGeneratedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t addMappingWithCode(lastMapping, code);\n\t // No more remaining code, continue\n\t lastMapping = mapping;\n\t return;\n\t 
}\n\t }\n\t // We add the generated code until the first mapping\n\t // to the SourceNode without any mapping.\n\t // Each line is added as separate string.\n\t while (lastGeneratedLine < mapping.generatedLine) {\n\t node.add(shiftNextLine());\n\t lastGeneratedLine++;\n\t }\n\t if (lastGeneratedColumn < mapping.generatedColumn) {\n\t var nextLine = remainingLines[0];\n\t node.add(nextLine.substr(0, mapping.generatedColumn));\n\t remainingLines[0] = nextLine.substr(mapping.generatedColumn);\n\t lastGeneratedColumn = mapping.generatedColumn;\n\t }\n\t lastMapping = mapping;\n\t }, this);\n\t // We have processed all mappings.\n\t if (remainingLines.length > 0) {\n\t if (lastMapping) {\n\t // Associate the remaining code in the current line with \"lastMapping\"\n\t addMappingWithCode(lastMapping, shiftNextLine());\n\t }\n\t // and add the remaining lines without any mapping\n\t node.add(remainingLines.join(\"\"));\n\t }\n\t\n\t // Copy sourcesContent into SourceNode\n\t aSourceMapConsumer.sources.forEach(function (sourceFile) {\n\t var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n\t if (content != null) {\n\t if (aRelativePath != null) {\n\t sourceFile = util.join(aRelativePath, sourceFile);\n\t }\n\t node.setSourceContent(sourceFile, content);\n\t }\n\t });\n\t\n\t return node;\n\t\n\t function addMappingWithCode(mapping, code) {\n\t if (mapping === null || mapping.source === undefined) {\n\t node.add(code);\n\t } else {\n\t var source = aRelativePath\n\t ? util.join(aRelativePath, mapping.source)\n\t : mapping.source;\n\t node.add(new SourceNode(mapping.originalLine,\n\t mapping.originalColumn,\n\t source,\n\t code,\n\t mapping.name));\n\t }\n\t }\n\t };\n\t\n\t/**\n\t * Add a chunk of generated JS to this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.add = function SourceNode_add(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t aChunk.forEach(function (chunk) {\n\t this.add(chunk);\n\t }, this);\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t if (aChunk) {\n\t this.children.push(aChunk);\n\t }\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Add a chunk of generated JS to the beginning of this source node.\n\t *\n\t * @param aChunk A string snippet of generated JS code, another instance of\n\t * SourceNode, or an array where each member is one of those things.\n\t */\n\tSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n\t if (Array.isArray(aChunk)) {\n\t for (var i = aChunk.length-1; i >= 0; i--) {\n\t this.prepend(aChunk[i]);\n\t }\n\t }\n\t else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n\t this.children.unshift(aChunk);\n\t }\n\t else {\n\t throw new TypeError(\n\t \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n\t );\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Walk over the tree of JS snippets in this node and its children. 
The\n\t * walking function is called once for each snippet of JS and is passed that\n\t * snippet and the its original associated source's line/column location.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n\t var chunk;\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t chunk = this.children[i];\n\t if (chunk[isSourceNode]) {\n\t chunk.walk(aFn);\n\t }\n\t else {\n\t if (chunk !== '') {\n\t aFn(chunk, { source: this.source,\n\t line: this.line,\n\t column: this.column,\n\t name: this.name });\n\t }\n\t }\n\t }\n\t};\n\t\n\t/**\n\t * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between\n\t * each of `this.children`.\n\t *\n\t * @param aSep The separator.\n\t */\n\tSourceNode.prototype.join = function SourceNode_join(aSep) {\n\t var newChildren;\n\t var i;\n\t var len = this.children.length;\n\t if (len > 0) {\n\t newChildren = [];\n\t for (i = 0; i < len-1; i++) {\n\t newChildren.push(this.children[i]);\n\t newChildren.push(aSep);\n\t }\n\t newChildren.push(this.children[i]);\n\t this.children = newChildren;\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Call String.prototype.replace on the very right-most source snippet. Useful\n\t * for trimming whitespace from the end of a source node, etc.\n\t *\n\t * @param aPattern The pattern to replace.\n\t * @param aReplacement The thing to replace the pattern with.\n\t */\n\tSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n\t var lastChild = this.children[this.children.length - 1];\n\t if (lastChild[isSourceNode]) {\n\t lastChild.replaceRight(aPattern, aReplacement);\n\t }\n\t else if (typeof lastChild === 'string') {\n\t this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n\t }\n\t else {\n\t this.children.push(''.replace(aPattern, aReplacement));\n\t }\n\t return this;\n\t};\n\t\n\t/**\n\t * Set the source content for a source file. This will be added to the SourceMapGenerator\n\t * in the sourcesContent field.\n\t *\n\t * @param aSourceFile The filename of the source file\n\t * @param aSourceContent The content of the source file\n\t */\n\tSourceNode.prototype.setSourceContent =\n\t function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n\t this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n\t };\n\t\n\t/**\n\t * Walk over the tree of SourceNodes. The walking function is called for each\n\t * source file content and is passed the filename and source content.\n\t *\n\t * @param aFn The traversal function.\n\t */\n\tSourceNode.prototype.walkSourceContents =\n\t function SourceNode_walkSourceContents(aFn) {\n\t for (var i = 0, len = this.children.length; i < len; i++) {\n\t if (this.children[i][isSourceNode]) {\n\t this.children[i].walkSourceContents(aFn);\n\t }\n\t }\n\t\n\t var sources = Object.keys(this.sourceContents);\n\t for (var i = 0, len = sources.length; i < len; i++) {\n\t aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n\t }\n\t };\n\t\n\t/**\n\t * Return the string representation of this source node. 
Walks over the tree\n\t * and concatenates all the various snippets together to one string.\n\t */\n\tSourceNode.prototype.toString = function SourceNode_toString() {\n\t var str = \"\";\n\t this.walk(function (chunk) {\n\t str += chunk;\n\t });\n\t return str;\n\t};\n\t\n\t/**\n\t * Returns the string representation of this source node along with a source\n\t * map.\n\t */\n\tSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n\t var generated = {\n\t code: \"\",\n\t line: 1,\n\t column: 0\n\t };\n\t var map = new SourceMapGenerator(aArgs);\n\t var sourceMappingActive = false;\n\t var lastOriginalSource = null;\n\t var lastOriginalLine = null;\n\t var lastOriginalColumn = null;\n\t var lastOriginalName = null;\n\t this.walk(function (chunk, original) {\n\t generated.code += chunk;\n\t if (original.source !== null\n\t && original.line !== null\n\t && original.column !== null) {\n\t if(lastOriginalSource !== original.source\n\t || lastOriginalLine !== original.line\n\t || lastOriginalColumn !== original.column\n\t || lastOriginalName !== original.name) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t lastOriginalSource = original.source;\n\t lastOriginalLine = original.line;\n\t lastOriginalColumn = original.column;\n\t lastOriginalName = original.name;\n\t sourceMappingActive = true;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t }\n\t });\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t }\n\t for (var idx = 0, length = chunk.length; idx < length; idx++) {\n\t if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n\t generated.line++;\n\t generated.column = 0;\n\t // Mappings end at eol\n\t if (idx + 1 === length) {\n\t lastOriginalSource = null;\n\t sourceMappingActive = false;\n\t } else if (sourceMappingActive) {\n\t map.addMapping({\n\t source: original.source,\n\t original: {\n\t line: original.line,\n\t column: original.column\n\t },\n\t generated: {\n\t line: generated.line,\n\t column: generated.column\n\t },\n\t name: original.name\n\t });\n\t }\n\t } else {\n\t generated.column++;\n\t }\n\t }\n\t });\n\t this.walkSourceContents(function (sourceFile, sourceContent) {\n\t map.setSourceContent(sourceFile, sourceContent);\n\t });\n\t\n\t return { code: generated.code, map: map };\n\t};\n\t\n\texports.SourceNode = SourceNode;\n\n\n/***/ }\n/******/ ])\n});\n;\n\n\n/** WEBPACK FOOTER **\n ** source-map.min.js\n **/"," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId])\n \t\t\treturn installedModules[moduleId].exports;\n\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\texports: {},\n \t\t\tid: moduleId,\n \t\t\tloaded: false\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.loaded = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n 
\t__webpack_require__.c = installedModules;\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(0);\n\n\n\n/** WEBPACK FOOTER **\n ** webpack/bootstrap 5d9a9f4df3bd553ed2f9\n **/","/*\n * Copyright 2009-2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE.txt or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\nexports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator;\nexports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer;\nexports.SourceNode = require('./lib/source-node').SourceNode;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./source-map.js\n ** module id = 0\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar base64VLQ = require('./base64-vlq');\nvar util = require('./util');\nvar ArraySet = require('./array-set').ArraySet;\nvar MappingList = require('./mapping-list').MappingList;\n\n/**\n * An instance of the SourceMapGenerator represents a source map which is\n * being built incrementally. You may pass an object with the following\n * properties:\n *\n * - file: The filename of the generated source.\n * - sourceRoot: A root for all relative URLs in this source map.\n */\nfunction SourceMapGenerator(aArgs) {\n if (!aArgs) {\n aArgs = {};\n }\n this._file = util.getArg(aArgs, 'file', null);\n this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);\n this._skipValidation = util.getArg(aArgs, 'skipValidation', false);\n this._sources = new ArraySet();\n this._names = new ArraySet();\n this._mappings = new MappingList();\n this._sourcesContents = null;\n}\n\nSourceMapGenerator.prototype._version = 3;\n\n/**\n * Creates a new SourceMapGenerator based on a SourceMapConsumer\n *\n * @param aSourceMapConsumer The SourceMap.\n */\nSourceMapGenerator.fromSourceMap =\n function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {\n var sourceRoot = aSourceMapConsumer.sourceRoot;\n var generator = new SourceMapGenerator({\n file: aSourceMapConsumer.file,\n sourceRoot: sourceRoot\n });\n aSourceMapConsumer.eachMapping(function (mapping) {\n var newMapping = {\n generated: {\n line: mapping.generatedLine,\n column: mapping.generatedColumn\n }\n };\n\n if (mapping.source != null) {\n newMapping.source = mapping.source;\n if (sourceRoot != null) {\n newMapping.source = util.relative(sourceRoot, newMapping.source);\n }\n\n newMapping.original = {\n line: mapping.originalLine,\n column: mapping.originalColumn\n };\n\n if (mapping.name != null) {\n newMapping.name = mapping.name;\n }\n }\n\n generator.addMapping(newMapping);\n });\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n generator.setSourceContent(sourceFile, content);\n }\n });\n return generator;\n };\n\n/**\n * Add a single mapping from original source line and column to the generated\n * source's line and column for this source map being created. 
The mapping\n * object should have the following properties:\n *\n * - generated: An object with the generated line and column positions.\n * - original: An object with the original line and column positions.\n * - source: The original source file (relative to the sourceRoot).\n * - name: An optional original token name for this mapping.\n */\nSourceMapGenerator.prototype.addMapping =\n function SourceMapGenerator_addMapping(aArgs) {\n var generated = util.getArg(aArgs, 'generated');\n var original = util.getArg(aArgs, 'original', null);\n var source = util.getArg(aArgs, 'source', null);\n var name = util.getArg(aArgs, 'name', null);\n\n if (!this._skipValidation) {\n this._validateMapping(generated, original, source, name);\n }\n\n if (source != null) {\n source = String(source);\n if (!this._sources.has(source)) {\n this._sources.add(source);\n }\n }\n\n if (name != null) {\n name = String(name);\n if (!this._names.has(name)) {\n this._names.add(name);\n }\n }\n\n this._mappings.add({\n generatedLine: generated.line,\n generatedColumn: generated.column,\n originalLine: original != null && original.line,\n originalColumn: original != null && original.column,\n source: source,\n name: name\n });\n };\n\n/**\n * Set the source content for a source file.\n */\nSourceMapGenerator.prototype.setSourceContent =\n function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {\n var source = aSourceFile;\n if (this._sourceRoot != null) {\n source = util.relative(this._sourceRoot, source);\n }\n\n if (aSourceContent != null) {\n // Add the source content to the _sourcesContents map.\n // Create a new _sourcesContents map if the property is null.\n if (!this._sourcesContents) {\n this._sourcesContents = Object.create(null);\n }\n this._sourcesContents[util.toSetString(source)] = aSourceContent;\n } else if (this._sourcesContents) {\n // Remove the source file from the _sourcesContents map.\n // If the _sourcesContents map is empty, set the property to null.\n delete this._sourcesContents[util.toSetString(source)];\n if (Object.keys(this._sourcesContents).length === 0) {\n this._sourcesContents = null;\n }\n }\n };\n\n/**\n * Applies the mappings of a sub-source-map for a specific source file to the\n * source map being generated. Each mapping to the supplied source file is\n * rewritten using the supplied source map. Note: The resolution for the\n * resulting mappings is the minimium of this map and the supplied map.\n *\n * @param aSourceMapConsumer The source map to be applied.\n * @param aSourceFile Optional. The filename of the source file.\n * If omitted, SourceMapConsumer's file property will be used.\n * @param aSourceMapPath Optional. The dirname of the path to the source map\n * to be applied. If relative, it is relative to the SourceMapConsumer.\n * This parameter is needed when the two source maps aren't in the same\n * directory, and the source map to be applied contains relative source\n * paths. If so, those relative source paths need to be rewritten\n * relative to the SourceMapGenerator.\n */\nSourceMapGenerator.prototype.applySourceMap =\n function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {\n var sourceFile = aSourceFile;\n // If aSourceFile is omitted, we will use the file property of the SourceMap\n if (aSourceFile == null) {\n if (aSourceMapConsumer.file == null) {\n throw new Error(\n 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +\n 'or the source map\\'s \"file\" property. 
Both were omitted.'\n );\n }\n sourceFile = aSourceMapConsumer.file;\n }\n var sourceRoot = this._sourceRoot;\n // Make \"sourceFile\" relative if an absolute Url is passed.\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n // Applying the SourceMap can add and remove items from the sources and\n // the names array.\n var newSources = new ArraySet();\n var newNames = new ArraySet();\n\n // Find mappings for the \"sourceFile\"\n this._mappings.unsortedForEach(function (mapping) {\n if (mapping.source === sourceFile && mapping.originalLine != null) {\n // Check if it can be mapped by the source map, then update the mapping.\n var original = aSourceMapConsumer.originalPositionFor({\n line: mapping.originalLine,\n column: mapping.originalColumn\n });\n if (original.source != null) {\n // Copy mapping\n mapping.source = original.source;\n if (aSourceMapPath != null) {\n mapping.source = util.join(aSourceMapPath, mapping.source)\n }\n if (sourceRoot != null) {\n mapping.source = util.relative(sourceRoot, mapping.source);\n }\n mapping.originalLine = original.line;\n mapping.originalColumn = original.column;\n if (original.name != null) {\n mapping.name = original.name;\n }\n }\n }\n\n var source = mapping.source;\n if (source != null && !newSources.has(source)) {\n newSources.add(source);\n }\n\n var name = mapping.name;\n if (name != null && !newNames.has(name)) {\n newNames.add(name);\n }\n\n }, this);\n this._sources = newSources;\n this._names = newNames;\n\n // Copy sourcesContents of applied map.\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aSourceMapPath != null) {\n sourceFile = util.join(aSourceMapPath, sourceFile);\n }\n if (sourceRoot != null) {\n sourceFile = util.relative(sourceRoot, sourceFile);\n }\n this.setSourceContent(sourceFile, content);\n }\n }, this);\n };\n\n/**\n * A mapping can have one of the three levels of data:\n *\n * 1. Just the generated position.\n * 2. The Generated position, original position, and original source.\n * 3. 
Generated and original position, original source, as well as a name\n * token.\n *\n * To maintain consistency, we validate that any new mapping being added falls\n * in to one of these categories.\n */\nSourceMapGenerator.prototype._validateMapping =\n function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,\n aName) {\n if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aGenerated.line > 0 && aGenerated.column >= 0\n && !aOriginal && !aSource && !aName) {\n // Case 1.\n return;\n }\n else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated\n && aOriginal && 'line' in aOriginal && 'column' in aOriginal\n && aGenerated.line > 0 && aGenerated.column >= 0\n && aOriginal.line > 0 && aOriginal.column >= 0\n && aSource) {\n // Cases 2 and 3.\n return;\n }\n else {\n throw new Error('Invalid mapping: ' + JSON.stringify({\n generated: aGenerated,\n source: aSource,\n original: aOriginal,\n name: aName\n }));\n }\n };\n\n/**\n * Serialize the accumulated mappings in to the stream of base 64 VLQs\n * specified by the source map format.\n */\nSourceMapGenerator.prototype._serializeMappings =\n function SourceMapGenerator_serializeMappings() {\n var previousGeneratedColumn = 0;\n var previousGeneratedLine = 1;\n var previousOriginalColumn = 0;\n var previousOriginalLine = 0;\n var previousName = 0;\n var previousSource = 0;\n var result = '';\n var next;\n var mapping;\n var nameIdx;\n var sourceIdx;\n\n var mappings = this._mappings.toArray();\n for (var i = 0, len = mappings.length; i < len; i++) {\n mapping = mappings[i];\n next = ''\n\n if (mapping.generatedLine !== previousGeneratedLine) {\n previousGeneratedColumn = 0;\n while (mapping.generatedLine !== previousGeneratedLine) {\n next += ';';\n previousGeneratedLine++;\n }\n }\n else {\n if (i > 0) {\n if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {\n continue;\n }\n next += ',';\n }\n }\n\n next += base64VLQ.encode(mapping.generatedColumn\n - previousGeneratedColumn);\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (mapping.source != null) {\n sourceIdx = this._sources.indexOf(mapping.source);\n next += base64VLQ.encode(sourceIdx - previousSource);\n previousSource = sourceIdx;\n\n // lines are stored 0-based in SourceMap spec version 3\n next += base64VLQ.encode(mapping.originalLine - 1\n - previousOriginalLine);\n previousOriginalLine = mapping.originalLine - 1;\n\n next += base64VLQ.encode(mapping.originalColumn\n - previousOriginalColumn);\n previousOriginalColumn = mapping.originalColumn;\n\n if (mapping.name != null) {\n nameIdx = this._names.indexOf(mapping.name);\n next += base64VLQ.encode(nameIdx - previousName);\n previousName = nameIdx;\n }\n }\n\n result += next;\n }\n\n return result;\n };\n\nSourceMapGenerator.prototype._generateSourcesContent =\n function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {\n return aSources.map(function (source) {\n if (!this._sourcesContents) {\n return null;\n }\n if (aSourceRoot != null) {\n source = util.relative(aSourceRoot, source);\n }\n var key = util.toSetString(source);\n return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)\n ? 
this._sourcesContents[key]\n : null;\n }, this);\n };\n\n/**\n * Externalize the source map.\n */\nSourceMapGenerator.prototype.toJSON =\n function SourceMapGenerator_toJSON() {\n var map = {\n version: this._version,\n sources: this._sources.toArray(),\n names: this._names.toArray(),\n mappings: this._serializeMappings()\n };\n if (this._file != null) {\n map.file = this._file;\n }\n if (this._sourceRoot != null) {\n map.sourceRoot = this._sourceRoot;\n }\n if (this._sourcesContents) {\n map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);\n }\n\n return map;\n };\n\n/**\n * Render the source map being generated to a string.\n */\nSourceMapGenerator.prototype.toString =\n function SourceMapGenerator_toString() {\n return JSON.stringify(this.toJSON());\n };\n\nexports.SourceMapGenerator = SourceMapGenerator;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-map-generator.js\n ** module id = 1\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n *\n * Based on the Base 64 VLQ implementation in Closure Compiler:\n * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java\n *\n * Copyright 2011 The Closure Compiler Authors. All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * * Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * * Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials provided\n * with the distribution.\n * * Neither the name of Google Inc. nor the names of its\n * contributors may be used to endorse or promote products derived\n * from this software without specific prior written permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n * \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\nvar base64 = require('./base64');\n\n// A single base 64 digit can contain 6 bits of data. For the base 64 variable\n// length quantities we use in the source map spec, the first bit is the sign,\n// the next four bits are the actual value, and the 6th bit is the\n// continuation bit. 
The continuation bit tells us whether there are more\n// digits in this value following this digit.\n//\n// Continuation\n// | Sign\n// | |\n// V V\n// 101011\n\nvar VLQ_BASE_SHIFT = 5;\n\n// binary: 100000\nvar VLQ_BASE = 1 << VLQ_BASE_SHIFT;\n\n// binary: 011111\nvar VLQ_BASE_MASK = VLQ_BASE - 1;\n\n// binary: 100000\nvar VLQ_CONTINUATION_BIT = VLQ_BASE;\n\n/**\n * Converts from a two-complement value to a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)\n * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)\n */\nfunction toVLQSigned(aValue) {\n return aValue < 0\n ? ((-aValue) << 1) + 1\n : (aValue << 1) + 0;\n}\n\n/**\n * Converts to a two-complement value from a value where the sign bit is\n * placed in the least significant bit. For example, as decimals:\n * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1\n * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2\n */\nfunction fromVLQSigned(aValue) {\n var isNegative = (aValue & 1) === 1;\n var shifted = aValue >> 1;\n return isNegative\n ? -shifted\n : shifted;\n}\n\n/**\n * Returns the base 64 VLQ encoded value.\n */\nexports.encode = function base64VLQ_encode(aValue) {\n var encoded = \"\";\n var digit;\n\n var vlq = toVLQSigned(aValue);\n\n do {\n digit = vlq & VLQ_BASE_MASK;\n vlq >>>= VLQ_BASE_SHIFT;\n if (vlq > 0) {\n // There are still more digits in this value, so we must make sure the\n // continuation bit is marked.\n digit |= VLQ_CONTINUATION_BIT;\n }\n encoded += base64.encode(digit);\n } while (vlq > 0);\n\n return encoded;\n};\n\n/**\n * Decodes the next base 64 VLQ value from the given string and returns the\n * value and the rest of the string via the out parameter.\n */\nexports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) {\n var strLen = aStr.length;\n var result = 0;\n var shift = 0;\n var continuation, digit;\n\n do {\n if (aIndex >= strLen) {\n throw new Error(\"Expected more digits in base 64 VLQ value.\");\n }\n\n digit = base64.decode(aStr.charCodeAt(aIndex++));\n if (digit === -1) {\n throw new Error(\"Invalid base64 digit: \" + aStr.charAt(aIndex - 1));\n }\n\n continuation = !!(digit & VLQ_CONTINUATION_BIT);\n digit &= VLQ_BASE_MASK;\n result = result + (digit << shift);\n shift += VLQ_BASE_SHIFT;\n } while (continuation);\n\n aOutParam.value = fromVLQSigned(result);\n aOutParam.rest = aIndex;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/base64-vlq.js\n ** module id = 2\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');\n\n/**\n * Encode an integer in the range of 0 to 63 to a single base 64 digit.\n */\nexports.encode = function (number) {\n if (0 <= number && number < intToCharMap.length) {\n return intToCharMap[number];\n }\n throw new TypeError(\"Must be between 0 and 63: \" + number);\n};\n\n/**\n * Decode a single base 64 character code digit to an integer. 
Returns -1 on\n * failure.\n */\nexports.decode = function (charCode) {\n var bigA = 65; // 'A'\n var bigZ = 90; // 'Z'\n\n var littleA = 97; // 'a'\n var littleZ = 122; // 'z'\n\n var zero = 48; // '0'\n var nine = 57; // '9'\n\n var plus = 43; // '+'\n var slash = 47; // '/'\n\n var littleOffset = 26;\n var numberOffset = 52;\n\n // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ\n if (bigA <= charCode && charCode <= bigZ) {\n return (charCode - bigA);\n }\n\n // 26 - 51: abcdefghijklmnopqrstuvwxyz\n if (littleA <= charCode && charCode <= littleZ) {\n return (charCode - littleA + littleOffset);\n }\n\n // 52 - 61: 0123456789\n if (zero <= charCode && charCode <= nine) {\n return (charCode - zero + numberOffset);\n }\n\n // 62: +\n if (charCode == plus) {\n return 62;\n }\n\n // 63: /\n if (charCode == slash) {\n return 63;\n }\n\n // Invalid base64 digit.\n return -1;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/base64.js\n ** module id = 3\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n/**\n * This is a helper function for getting values from parameter/options\n * objects.\n *\n * @param args The object we are extracting values from\n * @param name The name of the property we are getting.\n * @param defaultValue An optional value to return if the property is missing\n * from the object. If this is not specified and the property is missing, an\n * error will be thrown.\n */\nfunction getArg(aArgs, aName, aDefaultValue) {\n if (aName in aArgs) {\n return aArgs[aName];\n } else if (arguments.length === 3) {\n return aDefaultValue;\n } else {\n throw new Error('\"' + aName + '\" is a required argument.');\n }\n}\nexports.getArg = getArg;\n\nvar urlRegexp = /^(?:([\\w+\\-.]+):)?\\/\\/(?:(\\w+:\\w+)@)?([\\w.]*)(?::(\\d+))?(\\S*)$/;\nvar dataUrlRegexp = /^data:.+\\,.+$/;\n\nfunction urlParse(aUrl) {\n var match = aUrl.match(urlRegexp);\n if (!match) {\n return null;\n }\n return {\n scheme: match[1],\n auth: match[2],\n host: match[3],\n port: match[4],\n path: match[5]\n };\n}\nexports.urlParse = urlParse;\n\nfunction urlGenerate(aParsedUrl) {\n var url = '';\n if (aParsedUrl.scheme) {\n url += aParsedUrl.scheme + ':';\n }\n url += '//';\n if (aParsedUrl.auth) {\n url += aParsedUrl.auth + '@';\n }\n if (aParsedUrl.host) {\n url += aParsedUrl.host;\n }\n if (aParsedUrl.port) {\n url += \":\" + aParsedUrl.port\n }\n if (aParsedUrl.path) {\n url += aParsedUrl.path;\n }\n return url;\n}\nexports.urlGenerate = urlGenerate;\n\n/**\n * Normalizes a path, or the path portion of a URL:\n *\n * - Replaces consequtive slashes with one slash.\n * - Removes unnecessary '.' parts.\n * - Removes unnecessary '/..' parts.\n *\n * Based on code in the Node.js 'path' core module.\n *\n * @param aPath The path or url to normalize.\n */\nfunction normalize(aPath) {\n var path = aPath;\n var url = urlParse(aPath);\n if (url) {\n if (!url.path) {\n return aPath;\n }\n path = url.path;\n }\n var isAbsolute = exports.isAbsolute(path);\n\n var parts = path.split(/\\/+/);\n for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {\n part = parts[i];\n if (part === '.') {\n parts.splice(i, 1);\n } else if (part === '..') {\n up++;\n } else if (up > 0) {\n if (part === '') {\n // The first part is blank if the path is absolute. Trying to go\n // above the root is a no-op. Therefore we can remove all '..' 
parts\n // directly after the root.\n parts.splice(i + 1, up);\n up = 0;\n } else {\n parts.splice(i, 2);\n up--;\n }\n }\n }\n path = parts.join('/');\n\n if (path === '') {\n path = isAbsolute ? '/' : '.';\n }\n\n if (url) {\n url.path = path;\n return urlGenerate(url);\n }\n return path;\n}\nexports.normalize = normalize;\n\n/**\n * Joins two paths/URLs.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be joined with the root.\n *\n * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a\n * scheme-relative URL: Then the scheme of aRoot, if any, is prepended\n * first.\n * - Otherwise aPath is a path. If aRoot is a URL, then its path portion\n * is updated with the result and aRoot is returned. Otherwise the result\n * is returned.\n * - If aPath is absolute, the result is aPath.\n * - Otherwise the two paths are joined with a slash.\n * - Joining for example 'http://' and 'www.example.com' is also supported.\n */\nfunction join(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n if (aPath === \"\") {\n aPath = \".\";\n }\n var aPathUrl = urlParse(aPath);\n var aRootUrl = urlParse(aRoot);\n if (aRootUrl) {\n aRoot = aRootUrl.path || '/';\n }\n\n // `join(foo, '//www.example.org')`\n if (aPathUrl && !aPathUrl.scheme) {\n if (aRootUrl) {\n aPathUrl.scheme = aRootUrl.scheme;\n }\n return urlGenerate(aPathUrl);\n }\n\n if (aPathUrl || aPath.match(dataUrlRegexp)) {\n return aPath;\n }\n\n // `join('http://', 'www.example.com')`\n if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {\n aRootUrl.host = aPath;\n return urlGenerate(aRootUrl);\n }\n\n var joined = aPath.charAt(0) === '/'\n ? aPath\n : normalize(aRoot.replace(/\\/+$/, '') + '/' + aPath);\n\n if (aRootUrl) {\n aRootUrl.path = joined;\n return urlGenerate(aRootUrl);\n }\n return joined;\n}\nexports.join = join;\n\nexports.isAbsolute = function (aPath) {\n return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp);\n};\n\n/**\n * Make a path relative to a URL or another path.\n *\n * @param aRoot The root path or URL.\n * @param aPath The path or URL to be made relative to aRoot.\n */\nfunction relative(aRoot, aPath) {\n if (aRoot === \"\") {\n aRoot = \".\";\n }\n\n aRoot = aRoot.replace(/\\/$/, '');\n\n // It is possible for the path to be above the root. In this case, simply\n // checking whether the root is a prefix of the path won't work. Instead, we\n // need to remove components from the root one by one, until either we find\n // a prefix that fits, or we run out of components to remove.\n var level = 0;\n while (aPath.indexOf(aRoot + '/') !== 0) {\n var index = aRoot.lastIndexOf(\"/\");\n if (index < 0) {\n return aPath;\n }\n\n // If the only part of the root that is left is the scheme (i.e. 
http://,\n // file:///, etc.), one or more slashes (/), or simply nothing at all, we\n // have exhausted all components, so the path is not relative to the root.\n aRoot = aRoot.slice(0, index);\n if (aRoot.match(/^([^\\/]+:\\/)?\\/*$/)) {\n return aPath;\n }\n\n ++level;\n }\n\n // Make sure we add a \"../\" for each component we removed from the root.\n return Array(level + 1).join(\"../\") + aPath.substr(aRoot.length + 1);\n}\nexports.relative = relative;\n\nvar supportsNullProto = (function () {\n var obj = Object.create(null);\n return !('__proto__' in obj);\n}());\n\nfunction identity (s) {\n return s;\n}\n\n/**\n * Because behavior goes wacky when you set `__proto__` on objects, we\n * have to prefix all the strings in our set with an arbitrary character.\n *\n * See https://github.com/mozilla/source-map/pull/31 and\n * https://github.com/mozilla/source-map/issues/30\n *\n * @param String aStr\n */\nfunction toSetString(aStr) {\n if (isProtoString(aStr)) {\n return '$' + aStr;\n }\n\n return aStr;\n}\nexports.toSetString = supportsNullProto ? identity : toSetString;\n\nfunction fromSetString(aStr) {\n if (isProtoString(aStr)) {\n return aStr.slice(1);\n }\n\n return aStr;\n}\nexports.fromSetString = supportsNullProto ? identity : fromSetString;\n\nfunction isProtoString(s) {\n if (!s) {\n return false;\n }\n\n var length = s.length;\n\n if (length < 9 /* \"__proto__\".length */) {\n return false;\n }\n\n if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||\n s.charCodeAt(length - 2) !== 95 /* '_' */ ||\n s.charCodeAt(length - 3) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 4) !== 116 /* 't' */ ||\n s.charCodeAt(length - 5) !== 111 /* 'o' */ ||\n s.charCodeAt(length - 6) !== 114 /* 'r' */ ||\n s.charCodeAt(length - 7) !== 112 /* 'p' */ ||\n s.charCodeAt(length - 8) !== 95 /* '_' */ ||\n s.charCodeAt(length - 9) !== 95 /* '_' */) {\n return false;\n }\n\n for (var i = length - 10; i >= 0; i--) {\n if (s.charCodeAt(i) !== 36 /* '$' */) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Comparator between two mappings where the original positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same original source/line/column, but different generated\n * line and column the same. Useful when searching for a mapping with a\n * stubbed out mapping.\n */\nfunction compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {\n var cmp = mappingA.source - mappingB.source;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0 || onlyCompareOriginal) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n return mappingA.name - mappingB.name;\n}\nexports.compareByOriginalPositions = compareByOriginalPositions;\n\n/**\n * Comparator between two mappings with deflated source and name indices where\n * the generated positions are compared.\n *\n * Optionally pass in `true` as `onlyCompareGenerated` to consider two\n * mappings with the same generated line and column, but different\n * source/name/original line and column the same. 
Useful when searching for a\n * mapping with a stubbed out mapping.\n */\nfunction compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0 || onlyCompareGenerated) {\n return cmp;\n }\n\n cmp = mappingA.source - mappingB.source;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return mappingA.name - mappingB.name;\n}\nexports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;\n\nfunction strcmp(aStr1, aStr2) {\n if (aStr1 === aStr2) {\n return 0;\n }\n\n if (aStr1 > aStr2) {\n return 1;\n }\n\n return -1;\n}\n\n/**\n * Comparator between two mappings with inflated source and name strings where\n * the generated positions are compared.\n */\nfunction compareByGeneratedPositionsInflated(mappingA, mappingB) {\n var cmp = mappingA.generatedLine - mappingB.generatedLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.generatedColumn - mappingB.generatedColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = strcmp(mappingA.source, mappingB.source);\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalLine - mappingB.originalLine;\n if (cmp !== 0) {\n return cmp;\n }\n\n cmp = mappingA.originalColumn - mappingB.originalColumn;\n if (cmp !== 0) {\n return cmp;\n }\n\n return strcmp(mappingA.name, mappingB.name);\n}\nexports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/util.js\n ** module id = 4\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar has = Object.prototype.hasOwnProperty;\n\n/**\n * A data structure which is a combination of an array and a set. Adding a new\n * member is O(1), testing for membership is O(1), and finding the index of an\n * element is O(1). Removing elements from the set is not supported. Only\n * strings are supported for membership.\n */\nfunction ArraySet() {\n this._array = [];\n this._set = Object.create(null);\n}\n\n/**\n * Static method for creating ArraySet instances from an existing array.\n */\nArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {\n var set = new ArraySet();\n for (var i = 0, len = aArray.length; i < len; i++) {\n set.add(aArray[i], aAllowDuplicates);\n }\n return set;\n};\n\n/**\n * Return how many unique items are in this ArraySet. 
If duplicates have been\n * added, than those do not count towards the size.\n *\n * @returns Number\n */\nArraySet.prototype.size = function ArraySet_size() {\n return Object.getOwnPropertyNames(this._set).length;\n};\n\n/**\n * Add the given string to this set.\n *\n * @param String aStr\n */\nArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {\n var sStr = util.toSetString(aStr);\n var isDuplicate = has.call(this._set, sStr);\n var idx = this._array.length;\n if (!isDuplicate || aAllowDuplicates) {\n this._array.push(aStr);\n }\n if (!isDuplicate) {\n this._set[sStr] = idx;\n }\n};\n\n/**\n * Is the given string a member of this set?\n *\n * @param String aStr\n */\nArraySet.prototype.has = function ArraySet_has(aStr) {\n var sStr = util.toSetString(aStr);\n return has.call(this._set, sStr);\n};\n\n/**\n * What is the index of the given string in the array?\n *\n * @param String aStr\n */\nArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {\n var sStr = util.toSetString(aStr);\n if (has.call(this._set, sStr)) {\n return this._set[sStr];\n }\n throw new Error('\"' + aStr + '\" is not in the set.');\n};\n\n/**\n * What is the element at the given index?\n *\n * @param Number aIdx\n */\nArraySet.prototype.at = function ArraySet_at(aIdx) {\n if (aIdx >= 0 && aIdx < this._array.length) {\n return this._array[aIdx];\n }\n throw new Error('No element indexed by ' + aIdx);\n};\n\n/**\n * Returns the array representation of this set (which has the proper indices\n * indicated by indexOf). Note that this is a copy of the internal array used\n * for storing the members so that no one can mess with internal state.\n */\nArraySet.prototype.toArray = function ArraySet_toArray() {\n return this._array.slice();\n};\n\nexports.ArraySet = ArraySet;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/array-set.js\n ** module id = 5\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2014 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\n\n/**\n * Determine whether mappingB is after mappingA with respect to generated\n * position.\n */\nfunction generatedPositionAfter(mappingA, mappingB) {\n // Optimized for most common case\n var lineA = mappingA.generatedLine;\n var lineB = mappingB.generatedLine;\n var columnA = mappingA.generatedColumn;\n var columnB = mappingB.generatedColumn;\n return lineB > lineA || lineB == lineA && columnB >= columnA ||\n util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;\n}\n\n/**\n * A data structure to provide a sorted view of accumulated mappings in a\n * performance conscious manner. It trades a neglibable overhead in general\n * case for a large speedup in case of mappings being added in order.\n */\nfunction MappingList() {\n this._array = [];\n this._sorted = true;\n // Serves as infimum\n this._last = {generatedLine: -1, generatedColumn: 0};\n}\n\n/**\n * Iterate through internal items. 
This method takes the same arguments that\n * `Array.prototype.forEach` takes.\n *\n * NOTE: The order of the mappings is NOT guaranteed.\n */\nMappingList.prototype.unsortedForEach =\n function MappingList_forEach(aCallback, aThisArg) {\n this._array.forEach(aCallback, aThisArg);\n };\n\n/**\n * Add the given source mapping.\n *\n * @param Object aMapping\n */\nMappingList.prototype.add = function MappingList_add(aMapping) {\n if (generatedPositionAfter(this._last, aMapping)) {\n this._last = aMapping;\n this._array.push(aMapping);\n } else {\n this._sorted = false;\n this._array.push(aMapping);\n }\n};\n\n/**\n * Returns the flat, sorted array of mappings. The mappings are sorted by\n * generated position.\n *\n * WARNING: This method returns internal data without copying, for\n * performance. The return value must NOT be mutated, and should be treated as\n * an immutable borrow. If you want to take ownership, you must make your own\n * copy.\n */\nMappingList.prototype.toArray = function MappingList_toArray() {\n if (!this._sorted) {\n this._array.sort(util.compareByGeneratedPositionsInflated);\n this._sorted = true;\n }\n return this._array;\n};\n\nexports.MappingList = MappingList;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/mapping-list.js\n ** module id = 6\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar util = require('./util');\nvar binarySearch = require('./binary-search');\nvar ArraySet = require('./array-set').ArraySet;\nvar base64VLQ = require('./base64-vlq');\nvar quickSort = require('./quick-sort').quickSort;\n\nfunction SourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n return sourceMap.sections != null\n ? new IndexedSourceMapConsumer(sourceMap)\n : new BasicSourceMapConsumer(sourceMap);\n}\n\nSourceMapConsumer.fromSourceMap = function(aSourceMap) {\n return BasicSourceMapConsumer.fromSourceMap(aSourceMap);\n}\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nSourceMapConsumer.prototype._version = 3;\n\n// `__generatedMappings` and `__originalMappings` are arrays that hold the\n// parsed mapping coordinates from the source map's \"mappings\" attribute. They\n// are lazily instantiated, accessed via the `_generatedMappings` and\n// `_originalMappings` getters respectively, and we only parse the mappings\n// and create these arrays once queried for a source location. 
We jump through\n// these hoops because there can be many thousands of mappings, and parsing\n// them is expensive, so we only want to do it if we must.\n//\n// Each object in the arrays is of the form:\n//\n// {\n// generatedLine: The line number in the generated code,\n// generatedColumn: The column number in the generated code,\n// source: The path to the original source file that generated this\n// chunk of code,\n// originalLine: The line number in the original source that\n// corresponds to this chunk of generated code,\n// originalColumn: The column number in the original source that\n// corresponds to this chunk of generated code,\n// name: The name of the original symbol which generated this chunk of\n// code.\n// }\n//\n// All properties except for `generatedLine` and `generatedColumn` can be\n// `null`.\n//\n// `_generatedMappings` is ordered by the generated positions.\n//\n// `_originalMappings` is ordered by the original positions.\n\nSourceMapConsumer.prototype.__generatedMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {\n get: function () {\n if (!this.__generatedMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__generatedMappings;\n }\n});\n\nSourceMapConsumer.prototype.__originalMappings = null;\nObject.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {\n get: function () {\n if (!this.__originalMappings) {\n this._parseMappings(this._mappings, this.sourceRoot);\n }\n\n return this.__originalMappings;\n }\n});\n\nSourceMapConsumer.prototype._charIsMappingSeparator =\n function SourceMapConsumer_charIsMappingSeparator(aStr, index) {\n var c = aStr.charAt(index);\n return c === \";\" || c === \",\";\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n throw new Error(\"Subclasses must implement _parseMappings\");\n };\n\nSourceMapConsumer.GENERATED_ORDER = 1;\nSourceMapConsumer.ORIGINAL_ORDER = 2;\n\nSourceMapConsumer.GREATEST_LOWER_BOUND = 1;\nSourceMapConsumer.LEAST_UPPER_BOUND = 2;\n\n/**\n * Iterate over each mapping between an original source/line/column and a\n * generated line/column in this source map.\n *\n * @param Function aCallback\n * The function that is called with each mapping.\n * @param Object aContext\n * Optional. If specified, this object will be the value of `this` every\n * time that `aCallback` is called.\n * @param aOrder\n * Either `SourceMapConsumer.GENERATED_ORDER` or\n * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to\n * iterate over the mappings sorted by the generated file's line/column\n * order or the original's source/line/column order, respectively. 
Defaults to\n * `SourceMapConsumer.GENERATED_ORDER`.\n */\nSourceMapConsumer.prototype.eachMapping =\n function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {\n var context = aContext || null;\n var order = aOrder || SourceMapConsumer.GENERATED_ORDER;\n\n var mappings;\n switch (order) {\n case SourceMapConsumer.GENERATED_ORDER:\n mappings = this._generatedMappings;\n break;\n case SourceMapConsumer.ORIGINAL_ORDER:\n mappings = this._originalMappings;\n break;\n default:\n throw new Error(\"Unknown order of iteration.\");\n }\n\n var sourceRoot = this.sourceRoot;\n mappings.map(function (mapping) {\n var source = mapping.source === null ? null : this._sources.at(mapping.source);\n if (source != null && sourceRoot != null) {\n source = util.join(sourceRoot, source);\n }\n return {\n source: source,\n generatedLine: mapping.generatedLine,\n generatedColumn: mapping.generatedColumn,\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: mapping.name === null ? null : this._names.at(mapping.name)\n };\n }, this).forEach(aCallback, context);\n };\n\n/**\n * Returns all generated line and column information for the original source,\n * line, and column provided. If no column is provided, returns all mappings\n * corresponding to a either the line we are searching for or the next\n * closest line that has any mappings. Otherwise, returns all mappings\n * corresponding to the given line and either the column we are searching for\n * or the next closest column that has any offsets.\n *\n * The only argument is an object with the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: Optional. the column number in the original source.\n *\n * and an array of objects is returned, each with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nSourceMapConsumer.prototype.allGeneratedPositionsFor =\n function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {\n var line = util.getArg(aArgs, 'line');\n\n // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping\n // returns the index of the closest mapping less than the needle. By\n // setting needle.originalColumn to 0, we thus find the last mapping for\n // the given line, provided such a mapping exists.\n var needle = {\n source: util.getArg(aArgs, 'source'),\n originalLine: line,\n originalColumn: util.getArg(aArgs, 'column', 0)\n };\n\n if (this.sourceRoot != null) {\n needle.source = util.relative(this.sourceRoot, needle.source);\n }\n if (!this._sources.has(needle.source)) {\n return [];\n }\n needle.source = this._sources.indexOf(needle.source);\n\n var mappings = [];\n\n var index = this._findMapping(needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n binarySearch.LEAST_UPPER_BOUND);\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (aArgs.column === undefined) {\n var originalLine = mapping.originalLine;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we found. 
Since\n // mappings are sorted, this is guaranteed to find all mappings for\n // the line we found.\n while (mapping && mapping.originalLine === originalLine) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n } else {\n var originalColumn = mapping.originalColumn;\n\n // Iterate until either we run out of mappings, or we run into\n // a mapping for a different line than the one we were searching for.\n // Since mappings are sorted, this is guaranteed to find all mappings for\n // the line we are searching for.\n while (mapping &&\n mapping.originalLine === line &&\n mapping.originalColumn == originalColumn) {\n mappings.push({\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n });\n\n mapping = this._originalMappings[++index];\n }\n }\n }\n\n return mappings;\n };\n\nexports.SourceMapConsumer = SourceMapConsumer;\n\n/**\n * A BasicSourceMapConsumer instance represents a parsed source map which we can\n * query for information about the original file positions by giving it a file\n * position in the generated source.\n *\n * The only parameter is the raw source map (either as a JSON string, or\n * already parsed to an object). According to the spec, source maps have the\n * following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - sources: An array of URLs to the original source files.\n * - names: An array of identifiers which can be referrenced by individual mappings.\n * - sourceRoot: Optional. The URL root from which all sources are relative.\n * - sourcesContent: Optional. An array of contents of the original source files.\n * - mappings: A string of base64 VLQs which contain the actual mappings.\n * - file: Optional. The generated file this source map is associated with.\n *\n * Here is an example source map, taken from the source map spec[0]:\n *\n * {\n * version : 3,\n * file: \"out.js\",\n * sourceRoot : \"\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AA,AB;;ABCDE;\"\n * }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#\n */\nfunction BasicSourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sources = util.getArg(sourceMap, 'sources');\n // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which\n // requires the array) to play nice here.\n var names = util.getArg(sourceMap, 'names', []);\n var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);\n var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);\n var mappings = util.getArg(sourceMap, 'mappings');\n var file = util.getArg(sourceMap, 'file', null);\n\n // Once again, Sass deviates from the spec and supplies the version as a\n // string rather than a number, so we use loose equality checking here.\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n sources = sources\n .map(String)\n // Some source maps produce relative source paths like \"./foo.js\" instead of\n // \"foo.js\". 
Normalize these first so that future comparisons will succeed.\n // See bugzil.la/1090768.\n .map(util.normalize)\n // Always ensure that absolute sources are internally stored relative to\n // the source root, if the source root is absolute. Not doing this would\n // be particularly problematic when the source root is a prefix of the\n // source (valid, but why??). See github issue #199 and bugzil.la/1188982.\n .map(function (source) {\n return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source)\n ? util.relative(sourceRoot, source)\n : source;\n });\n\n // Pass `true` below to allow duplicate names and sources. While source maps\n // are intended to be compressed and deduplicated, the TypeScript compiler\n // sometimes generates source maps with duplicates in them. See Github issue\n // #72 and bugzil.la/889492.\n this._names = ArraySet.fromArray(names.map(String), true);\n this._sources = ArraySet.fromArray(sources, true);\n\n this.sourceRoot = sourceRoot;\n this.sourcesContent = sourcesContent;\n this._mappings = mappings;\n this.file = file;\n}\n\nBasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nBasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;\n\n/**\n * Create a BasicSourceMapConsumer from a SourceMapGenerator.\n *\n * @param SourceMapGenerator aSourceMap\n * The source map that will be consumed.\n * @returns BasicSourceMapConsumer\n */\nBasicSourceMapConsumer.fromSourceMap =\n function SourceMapConsumer_fromSourceMap(aSourceMap) {\n var smc = Object.create(BasicSourceMapConsumer.prototype);\n\n var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);\n var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);\n smc.sourceRoot = aSourceMap._sourceRoot;\n smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),\n smc.sourceRoot);\n smc.file = aSourceMap._file;\n\n // Because we are modifying the entries (by converting string sources and\n // names to indices into the sources and names ArraySets), we have to make\n // a copy of the entry or else bad things happen. Shared mutable state\n // strikes again! See github issue #191.\n\n var generatedMappings = aSourceMap._mappings.toArray().slice();\n var destGeneratedMappings = smc.__generatedMappings = [];\n var destOriginalMappings = smc.__originalMappings = [];\n\n for (var i = 0, length = generatedMappings.length; i < length; i++) {\n var srcMapping = generatedMappings[i];\n var destMapping = new Mapping;\n destMapping.generatedLine = srcMapping.generatedLine;\n destMapping.generatedColumn = srcMapping.generatedColumn;\n\n if (srcMapping.source) {\n destMapping.source = sources.indexOf(srcMapping.source);\n destMapping.originalLine = srcMapping.originalLine;\n destMapping.originalColumn = srcMapping.originalColumn;\n\n if (srcMapping.name) {\n destMapping.name = names.indexOf(srcMapping.name);\n }\n\n destOriginalMappings.push(destMapping);\n }\n\n destGeneratedMappings.push(destMapping);\n }\n\n quickSort(smc.__originalMappings, util.compareByOriginalPositions);\n\n return smc;\n };\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nBasicSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {\n get: function () {\n return this._sources.toArray().map(function (s) {\n return this.sourceRoot != null ? 
util.join(this.sourceRoot, s) : s;\n }, this);\n }\n});\n\n/**\n * Provide the JIT with a nice shape / hidden class.\n */\nfunction Mapping() {\n this.generatedLine = 0;\n this.generatedColumn = 0;\n this.source = null;\n this.originalLine = null;\n this.originalColumn = null;\n this.name = null;\n}\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nBasicSourceMapConsumer.prototype._parseMappings =\n function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n var generatedLine = 1;\n var previousGeneratedColumn = 0;\n var previousOriginalLine = 0;\n var previousOriginalColumn = 0;\n var previousSource = 0;\n var previousName = 0;\n var length = aStr.length;\n var index = 0;\n var cachedSegments = {};\n var temp = {};\n var originalMappings = [];\n var generatedMappings = [];\n var mapping, str, segment, end, value;\n\n while (index < length) {\n if (aStr.charAt(index) === ';') {\n generatedLine++;\n index++;\n previousGeneratedColumn = 0;\n }\n else if (aStr.charAt(index) === ',') {\n index++;\n }\n else {\n mapping = new Mapping();\n mapping.generatedLine = generatedLine;\n\n // Because each offset is encoded relative to the previous one,\n // many segments often have the same encoding. We can exploit this\n // fact by caching the parsed variable length fields of each segment,\n // allowing us to avoid a second parse if we encounter the same\n // segment again.\n for (end = index; end < length; end++) {\n if (this._charIsMappingSeparator(aStr, end)) {\n break;\n }\n }\n str = aStr.slice(index, end);\n\n segment = cachedSegments[str];\n if (segment) {\n index += str.length;\n } else {\n segment = [];\n while (index < end) {\n base64VLQ.decode(aStr, index, temp);\n value = temp.value;\n index = temp.rest;\n segment.push(value);\n }\n\n if (segment.length === 2) {\n throw new Error('Found a source, but no line and column');\n }\n\n if (segment.length === 3) {\n throw new Error('Found a source and line, but no column');\n }\n\n cachedSegments[str] = segment;\n }\n\n // Generated column.\n mapping.generatedColumn = previousGeneratedColumn + segment[0];\n previousGeneratedColumn = mapping.generatedColumn;\n\n if (segment.length > 1) {\n // Original source.\n mapping.source = previousSource + segment[1];\n previousSource += segment[1];\n\n // Original line.\n mapping.originalLine = previousOriginalLine + segment[2];\n previousOriginalLine = mapping.originalLine;\n // Lines are stored 0-based\n mapping.originalLine += 1;\n\n // Original column.\n mapping.originalColumn = previousOriginalColumn + segment[3];\n previousOriginalColumn = mapping.originalColumn;\n\n if (segment.length > 4) {\n // Original name.\n mapping.name = previousName + segment[4];\n previousName += segment[4];\n }\n }\n\n generatedMappings.push(mapping);\n if (typeof mapping.originalLine === 'number') {\n originalMappings.push(mapping);\n }\n }\n }\n\n quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated);\n this.__generatedMappings = generatedMappings;\n\n quickSort(originalMappings, util.compareByOriginalPositions);\n this.__originalMappings = originalMappings;\n };\n\n/**\n * Find the mapping that best matches the hypothetical \"needle\" mapping that\n * we are searching for in the given \"haystack\" of mappings.\n */\nBasicSourceMapConsumer.prototype._findMapping =\n function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,\n aColumnName, 
aComparator, aBias) {\n // To return the position we are searching for, we must first find the\n // mapping for the given position and then return the opposite position it\n // points to. Because the mappings are sorted, we can use binary search to\n // find the best mapping.\n\n if (aNeedle[aLineName] <= 0) {\n throw new TypeError('Line must be greater than or equal to 1, got '\n + aNeedle[aLineName]);\n }\n if (aNeedle[aColumnName] < 0) {\n throw new TypeError('Column must be greater than or equal to 0, got '\n + aNeedle[aColumnName]);\n }\n\n return binarySearch.search(aNeedle, aMappings, aComparator, aBias);\n };\n\n/**\n * Compute the last column for each generated mapping. The last column is\n * inclusive.\n */\nBasicSourceMapConsumer.prototype.computeColumnSpans =\n function SourceMapConsumer_computeColumnSpans() {\n for (var index = 0; index < this._generatedMappings.length; ++index) {\n var mapping = this._generatedMappings[index];\n\n // Mappings do not contain a field for the last generated columnt. We\n // can come up with an optimistic estimate, however, by assuming that\n // mappings are contiguous (i.e. given two consecutive mappings, the\n // first mapping ends where the second one starts).\n if (index + 1 < this._generatedMappings.length) {\n var nextMapping = this._generatedMappings[index + 1];\n\n if (mapping.generatedLine === nextMapping.generatedLine) {\n mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;\n continue;\n }\n }\n\n // The last mapping for each line spans the entire line.\n mapping.lastGeneratedColumn = Infinity;\n }\n };\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source.\n * - column: The column number in the generated source.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. 
Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null.\n * - column: The column number in the original source, or null.\n * - name: The original identifier, or null.\n */\nBasicSourceMapConsumer.prototype.originalPositionFor =\n function SourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._generatedMappings,\n \"generatedLine\",\n \"generatedColumn\",\n util.compareByGeneratedPositionsDeflated,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._generatedMappings[index];\n\n if (mapping.generatedLine === needle.generatedLine) {\n var source = util.getArg(mapping, 'source', null);\n if (source !== null) {\n source = this._sources.at(source);\n if (this.sourceRoot != null) {\n source = util.join(this.sourceRoot, source);\n }\n }\n var name = util.getArg(mapping, 'name', null);\n if (name !== null) {\n name = this._names.at(name);\n }\n return {\n source: source,\n line: util.getArg(mapping, 'originalLine', null),\n column: util.getArg(mapping, 'originalColumn', null),\n name: name\n };\n }\n }\n\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nBasicSourceMapConsumer.prototype.hasContentsOfAllSources =\n function BasicSourceMapConsumer_hasContentsOfAllSources() {\n if (!this.sourcesContent) {\n return false;\n }\n return this.sourcesContent.length >= this._sources.size() &&\n !this.sourcesContent.some(function (sc) { return sc == null; });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nBasicSourceMapConsumer.prototype.sourceContentFor =\n function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n if (!this.sourcesContent) {\n return null;\n }\n\n if (this.sourceRoot != null) {\n aSource = util.relative(this.sourceRoot, aSource);\n }\n\n if (this._sources.has(aSource)) {\n return this.sourcesContent[this._sources.indexOf(aSource)];\n }\n\n var url;\n if (this.sourceRoot != null\n && (url = util.urlParse(this.sourceRoot))) {\n // XXX: file:// URIs and absolute paths lead to unexpected behavior for\n // many users. We can help them out when they expect file:// URIs to\n // behave like it would if they were running a local HTTP server. See\n // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.\n var fileUriAbsPath = aSource.replace(/^file:\\/\\//, \"\");\n if (url.scheme == \"file\"\n && this._sources.has(fileUriAbsPath)) {\n return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]\n }\n\n if ((!url.path || url.path == \"/\")\n && this._sources.has(\"/\" + aSource)) {\n return this.sourcesContent[this._sources.indexOf(\"/\" + aSource)];\n }\n }\n\n // This function is used recursively from\n // IndexedSourceMapConsumer.prototype.sourceContentFor. 
In that case, we\n // don't want to throw if we can't find the source - we just want to\n // return null, so we provide a flag to exit gracefully.\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: The column number in the original source.\n * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or\n * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nBasicSourceMapConsumer.prototype.generatedPositionFor =\n function SourceMapConsumer_generatedPositionFor(aArgs) {\n var source = util.getArg(aArgs, 'source');\n if (this.sourceRoot != null) {\n source = util.relative(this.sourceRoot, source);\n }\n if (!this._sources.has(source)) {\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n }\n source = this._sources.indexOf(source);\n\n var needle = {\n source: source,\n originalLine: util.getArg(aArgs, 'line'),\n originalColumn: util.getArg(aArgs, 'column')\n };\n\n var index = this._findMapping(\n needle,\n this._originalMappings,\n \"originalLine\",\n \"originalColumn\",\n util.compareByOriginalPositions,\n util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)\n );\n\n if (index >= 0) {\n var mapping = this._originalMappings[index];\n\n if (mapping.source === needle.source) {\n return {\n line: util.getArg(mapping, 'generatedLine', null),\n column: util.getArg(mapping, 'generatedColumn', null),\n lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)\n };\n }\n }\n\n return {\n line: null,\n column: null,\n lastColumn: null\n };\n };\n\nexports.BasicSourceMapConsumer = BasicSourceMapConsumer;\n\n/**\n * An IndexedSourceMapConsumer instance represents a parsed source map which\n * we can query for information. It differs from BasicSourceMapConsumer in\n * that it takes \"indexed\" source maps (i.e. ones with a \"sections\" field) as\n * input.\n *\n * The only parameter is a raw source map (either as a JSON string, or already\n * parsed to an object). According to the spec for indexed source maps, they\n * have the following attributes:\n *\n * - version: Which version of the source map spec this map is following.\n * - file: Optional. The generated file this source map is associated with.\n * - sections: A list of section definitions.\n *\n * Each value under the \"sections\" field has two fields:\n * - offset: The offset into the original specified at which this section\n * begins to apply, defined as an object with a \"line\" and \"column\"\n * field.\n * - map: A source map definition. 
This source map could also be indexed,\n * but doesn't have to be.\n *\n * Instead of the \"map\" field, it's also possible to have a \"url\" field\n * specifying a URL to retrieve a source map from, but that's currently\n * unsupported.\n *\n * Here's an example source map, taken from the source map spec[0], but\n * modified to omit a section which uses the \"url\" field.\n *\n * {\n * version : 3,\n * file: \"app.js\",\n * sections: [{\n * offset: {line:100, column:10},\n * map: {\n * version : 3,\n * file: \"section.js\",\n * sources: [\"foo.js\", \"bar.js\"],\n * names: [\"src\", \"maps\", \"are\", \"fun\"],\n * mappings: \"AAAA,E;;ABCDE;\"\n * }\n * }],\n * }\n *\n * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt\n */\nfunction IndexedSourceMapConsumer(aSourceMap) {\n var sourceMap = aSourceMap;\n if (typeof aSourceMap === 'string') {\n sourceMap = JSON.parse(aSourceMap.replace(/^\\)\\]\\}'/, ''));\n }\n\n var version = util.getArg(sourceMap, 'version');\n var sections = util.getArg(sourceMap, 'sections');\n\n if (version != this._version) {\n throw new Error('Unsupported version: ' + version);\n }\n\n this._sources = new ArraySet();\n this._names = new ArraySet();\n\n var lastOffset = {\n line: -1,\n column: 0\n };\n this._sections = sections.map(function (s) {\n if (s.url) {\n // The url field will require support for asynchronicity.\n // See https://github.com/mozilla/source-map/issues/16\n throw new Error('Support for url field in sections not implemented.');\n }\n var offset = util.getArg(s, 'offset');\n var offsetLine = util.getArg(offset, 'line');\n var offsetColumn = util.getArg(offset, 'column');\n\n if (offsetLine < lastOffset.line ||\n (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {\n throw new Error('Section offsets must be ordered and non-overlapping.');\n }\n lastOffset = offset;\n\n return {\n generatedOffset: {\n // The offset fields are 0-based, but we use 1-based indices when\n // encoding/decoding from VLQ.\n generatedLine: offsetLine + 1,\n generatedColumn: offsetColumn + 1\n },\n consumer: new SourceMapConsumer(util.getArg(s, 'map'))\n }\n });\n}\n\nIndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);\nIndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;\n\n/**\n * The version of the source mapping spec that we are consuming.\n */\nIndexedSourceMapConsumer.prototype._version = 3;\n\n/**\n * The list of original sources.\n */\nObject.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {\n get: function () {\n var sources = [];\n for (var i = 0; i < this._sections.length; i++) {\n for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {\n sources.push(this._sections[i].consumer.sources[j]);\n }\n }\n return sources;\n }\n});\n\n/**\n * Returns the original source, line, and column information for the generated\n * source's line and column positions provided. 
The only argument is an object\n * with the following properties:\n *\n * - line: The line number in the generated source.\n * - column: The column number in the generated source.\n *\n * and an object is returned with the following properties:\n *\n * - source: The original source file, or null.\n * - line: The line number in the original source, or null.\n * - column: The column number in the original source, or null.\n * - name: The original identifier, or null.\n */\nIndexedSourceMapConsumer.prototype.originalPositionFor =\n function IndexedSourceMapConsumer_originalPositionFor(aArgs) {\n var needle = {\n generatedLine: util.getArg(aArgs, 'line'),\n generatedColumn: util.getArg(aArgs, 'column')\n };\n\n // Find the section containing the generated position we're trying to map\n // to an original position.\n var sectionIndex = binarySearch.search(needle, this._sections,\n function(needle, section) {\n var cmp = needle.generatedLine - section.generatedOffset.generatedLine;\n if (cmp) {\n return cmp;\n }\n\n return (needle.generatedColumn -\n section.generatedOffset.generatedColumn);\n });\n var section = this._sections[sectionIndex];\n\n if (!section) {\n return {\n source: null,\n line: null,\n column: null,\n name: null\n };\n }\n\n return section.consumer.originalPositionFor({\n line: needle.generatedLine -\n (section.generatedOffset.generatedLine - 1),\n column: needle.generatedColumn -\n (section.generatedOffset.generatedLine === needle.generatedLine\n ? section.generatedOffset.generatedColumn - 1\n : 0),\n bias: aArgs.bias\n });\n };\n\n/**\n * Return true if we have the source content for every source in the source\n * map, false otherwise.\n */\nIndexedSourceMapConsumer.prototype.hasContentsOfAllSources =\n function IndexedSourceMapConsumer_hasContentsOfAllSources() {\n return this._sections.every(function (s) {\n return s.consumer.hasContentsOfAllSources();\n });\n };\n\n/**\n * Returns the original source content. The only argument is the url of the\n * original source file. Returns null if no original source content is\n * available.\n */\nIndexedSourceMapConsumer.prototype.sourceContentFor =\n function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n var content = section.consumer.sourceContentFor(aSource, true);\n if (content) {\n return content;\n }\n }\n if (nullOnMissing) {\n return null;\n }\n else {\n throw new Error('\"' + aSource + '\" is not in the SourceMap.');\n }\n };\n\n/**\n * Returns the generated line and column information for the original source,\n * line, and column positions provided. 
The only argument is an object with\n * the following properties:\n *\n * - source: The filename of the original source.\n * - line: The line number in the original source.\n * - column: The column number in the original source.\n *\n * and an object is returned with the following properties:\n *\n * - line: The line number in the generated source, or null.\n * - column: The column number in the generated source, or null.\n */\nIndexedSourceMapConsumer.prototype.generatedPositionFor =\n function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n\n // Only consider this section if the requested source is in the list of\n // sources of the consumer.\n if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {\n continue;\n }\n var generatedPosition = section.consumer.generatedPositionFor(aArgs);\n if (generatedPosition) {\n var ret = {\n line: generatedPosition.line +\n (section.generatedOffset.generatedLine - 1),\n column: generatedPosition.column +\n (section.generatedOffset.generatedLine === generatedPosition.line\n ? section.generatedOffset.generatedColumn - 1\n : 0)\n };\n return ret;\n }\n }\n\n return {\n line: null,\n column: null\n };\n };\n\n/**\n * Parse the mappings in a string in to a data structure which we can easily\n * query (the ordered arrays in the `this.__generatedMappings` and\n * `this.__originalMappings` properties).\n */\nIndexedSourceMapConsumer.prototype._parseMappings =\n function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {\n this.__generatedMappings = [];\n this.__originalMappings = [];\n for (var i = 0; i < this._sections.length; i++) {\n var section = this._sections[i];\n var sectionMappings = section.consumer._generatedMappings;\n for (var j = 0; j < sectionMappings.length; j++) {\n var mapping = sectionMappings[j];\n\n var source = section.consumer._sources.at(mapping.source);\n if (section.consumer.sourceRoot !== null) {\n source = util.join(section.consumer.sourceRoot, source);\n }\n this._sources.add(source);\n source = this._sources.indexOf(source);\n\n var name = section.consumer._names.at(mapping.name);\n this._names.add(name);\n name = this._names.indexOf(name);\n\n // The mappings coming from the consumer for the section have\n // generated positions relative to the start of the section, so we\n // need to offset them to be relative to the start of the concatenated\n // generated file.\n var adjustedMapping = {\n source: source,\n generatedLine: mapping.generatedLine +\n (section.generatedOffset.generatedLine - 1),\n generatedColumn: mapping.generatedColumn +\n (section.generatedOffset.generatedLine === mapping.generatedLine\n ? 
section.generatedOffset.generatedColumn - 1\n : 0),\n originalLine: mapping.originalLine,\n originalColumn: mapping.originalColumn,\n name: name\n };\n\n this.__generatedMappings.push(adjustedMapping);\n if (typeof adjustedMapping.originalLine === 'number') {\n this.__originalMappings.push(adjustedMapping);\n }\n }\n }\n\n quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated);\n quickSort(this.__originalMappings, util.compareByOriginalPositions);\n };\n\nexports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-map-consumer.js\n ** module id = 7\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nexports.GREATEST_LOWER_BOUND = 1;\nexports.LEAST_UPPER_BOUND = 2;\n\n/**\n * Recursive implementation of binary search.\n *\n * @param aLow Indices here and lower do not contain the needle.\n * @param aHigh Indices here and higher do not contain the needle.\n * @param aNeedle The element being searched for.\n * @param aHaystack The non-empty array being searched.\n * @param aCompare Function which takes two elements and returns -1, 0, or 1.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n */\nfunction recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {\n // This function terminates when one of the following is true:\n //\n // 1. We find the exact element we are looking for.\n //\n // 2. We did not find the exact element, but we can return the index of\n // the next-closest element.\n //\n // 3. We did not find the exact element, and there is no next-closest\n // element than the one we are searching for, so we return -1.\n var mid = Math.floor((aHigh - aLow) / 2) + aLow;\n var cmp = aCompare(aNeedle, aHaystack[mid], true);\n if (cmp === 0) {\n // Found the element we are looking for.\n return mid;\n }\n else if (cmp > 0) {\n // Our needle is greater than aHaystack[mid].\n if (aHigh - mid > 1) {\n // The element is in the upper half.\n return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // The exact needle element was not found in this haystack. Determine if\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return aHigh < aHaystack.length ? aHigh : -1;\n } else {\n return mid;\n }\n }\n else {\n // Our needle is less than aHaystack[mid].\n if (mid - aLow > 1) {\n // The element is in the lower half.\n return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);\n }\n\n // we are in termination case (3) or (2) and return the appropriate thing.\n if (aBias == exports.LEAST_UPPER_BOUND) {\n return mid;\n } else {\n return aLow < 0 ? -1 : aLow;\n }\n }\n}\n\n/**\n * This is an implementation of binary search which will always try and return\n * the index of the closest element if there is no exact hit. 
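As a concrete illustration of the bias behaviour described here (not code from the library), searching plain numbers with a simple comparator:

var binarySearch = require('./binary-search'); // path as it appears inside the library

var haystack = [2, 4, 6];
var cmp = function (a, b) { return a - b; };

binarySearch.search(5, haystack, cmp);                                  // 1: element 4, the greatest lower bound (default)
binarySearch.search(5, haystack, cmp, binarySearch.LEAST_UPPER_BOUND);  // 2: element 6, the least upper bound
binarySearch.search(1, haystack, cmp);                                  // -1: no element is <= the needle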
This is because\n * mappings between original and generated line/col pairs are single points,\n * and there is an implicit region between each of them, so a miss just means\n * that you aren't on the very start of a region.\n *\n * @param aNeedle The element you are looking for.\n * @param aHaystack The array that is being searched.\n * @param aCompare A function which takes the needle and an element in the\n * array and returns -1, 0, or 1 depending on whether the needle is less\n * than, equal to, or greater than the element, respectively.\n * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or\n * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the\n * closest element that is smaller than or greater than the one we are\n * searching for, respectively, if the exact element cannot be found.\n * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.\n */\nexports.search = function search(aNeedle, aHaystack, aCompare, aBias) {\n if (aHaystack.length === 0) {\n return -1;\n }\n\n var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,\n aCompare, aBias || exports.GREATEST_LOWER_BOUND);\n if (index < 0) {\n return -1;\n }\n\n // We have found either the exact element, or the next-closest element than\n // the one we are searching for. However, there may be more than one such\n // element. Make sure we always return the smallest of these.\n while (index - 1 >= 0) {\n if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {\n break;\n }\n --index;\n }\n\n return index;\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/binary-search.js\n ** module id = 8\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\n// It turns out that some (most?) JavaScript engines don't self-host\n// `Array.prototype.sort`. This makes sense because C++ will likely remain\n// faster than JS when doing raw CPU-intensive sorting. However, when using a\n// custom comparator function, calling back and forth between the VM's C++ and\n// JIT'd JS is rather slow *and* loses JIT type information, resulting in\n// worse generated code for the comparator function than would be optimal. In\n// fact, when sorting with a comparator, these costs outweigh the benefits of\n// sorting in C++. By using our own JS-implemented Quick Sort (below), we get\n// a ~3500ms mean speed-up in `bench/bench.html`.\n\n/**\n * Swap the elements indexed by `x` and `y` in the array `ary`.\n *\n * @param {Array} ary\n * The array.\n * @param {Number} x\n * The index of the first item.\n * @param {Number} y\n * The index of the second item.\n */\nfunction swap(ary, x, y) {\n var temp = ary[x];\n ary[x] = ary[y];\n ary[y] = temp;\n}\n\n/**\n * Returns a random integer within the range `low .. 
high` inclusive.\n *\n * @param {Number} low\n * The lower bound on the range.\n * @param {Number} high\n * The upper bound on the range.\n */\nfunction randomIntInRange(low, high) {\n return Math.round(low + (Math.random() * (high - low)));\n}\n\n/**\n * The Quick Sort algorithm.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n * @param {Number} p\n * Start index of the array\n * @param {Number} r\n * End index of the array\n */\nfunction doQuickSort(ary, comparator, p, r) {\n // If our lower bound is less than our upper bound, we (1) partition the\n // array into two pieces and (2) recurse on each half. If it is not, this is\n // the empty array and our base case.\n\n if (p < r) {\n // (1) Partitioning.\n //\n // The partitioning chooses a pivot between `p` and `r` and moves all\n // elements that are less than or equal to the pivot to the before it, and\n // all the elements that are greater than it after it. The effect is that\n // once partition is done, the pivot is in the exact place it will be when\n // the array is put in sorted order, and it will not need to be moved\n // again. This runs in O(n) time.\n\n // Always choose a random pivot so that an input array which is reverse\n // sorted does not cause O(n^2) running time.\n var pivotIndex = randomIntInRange(p, r);\n var i = p - 1;\n\n swap(ary, pivotIndex, r);\n var pivot = ary[r];\n\n // Immediately after `j` is incremented in this loop, the following hold\n // true:\n //\n // * Every element in `ary[p .. i]` is less than or equal to the pivot.\n //\n // * Every element in `ary[i+1 .. j-1]` is greater than the pivot.\n for (var j = p; j < r; j++) {\n if (comparator(ary[j], pivot) <= 0) {\n i += 1;\n swap(ary, i, j);\n }\n }\n\n swap(ary, i + 1, j);\n var q = i + 1;\n\n // (2) Recurse on each half.\n\n doQuickSort(ary, comparator, p, q - 1);\n doQuickSort(ary, comparator, q + 1, r);\n }\n}\n\n/**\n * Sort the given array in-place with the given comparator function.\n *\n * @param {Array} ary\n * An array to sort.\n * @param {function} comparator\n * Function to use to compare two items.\n */\nexports.quickSort = function (ary, comparator) {\n doQuickSort(ary, comparator, 0, ary.length - 1);\n};\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/quick-sort.js\n ** module id = 9\n ** module chunks = 0\n **/","/* -*- Mode: js; js-indent-level: 2; -*- */\n/*\n * Copyright 2011 Mozilla Foundation and contributors\n * Licensed under the New BSD license. See LICENSE or:\n * http://opensource.org/licenses/BSD-3-Clause\n */\n\nvar SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;\nvar util = require('./util');\n\n// Matches a Windows-style `\\r\\n` newline or a `\\n` newline used by all other\n// operating systems these days (capturing the result).\nvar REGEX_NEWLINE = /(\\r?\\n)/;\n\n// Newline character code for charCodeAt() comparisons\nvar NEWLINE_CODE = 10;\n\n// Private symbol for identifying `SourceNode`s when multiple versions of\n// the source-map library are loaded. 
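A quick sketch of the exported quickSort helper above; it sorts in place and returns nothing (the array and comparator here are throwaway examples):

var quickSort = require('./quick-sort').quickSort;

var ary = [3, 1, 2];
quickSort(ary, function (a, b) { return a - b; });
// ary is now [1, 2, 3]; the library itself calls this with the mapping comparators from ./util.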
This MUST NOT CHANGE across\n// versions!\nvar isSourceNode = \"$$$isSourceNode$$$\";\n\n/**\n * SourceNodes provide a way to abstract over interpolating/concatenating\n * snippets of generated JavaScript source code while maintaining the line and\n * column information associated with the original source code.\n *\n * @param aLine The original line number.\n * @param aColumn The original column number.\n * @param aSource The original source's filename.\n * @param aChunks Optional. An array of strings which are snippets of\n * generated JS, or other SourceNodes.\n * @param aName The original identifier.\n */\nfunction SourceNode(aLine, aColumn, aSource, aChunks, aName) {\n this.children = [];\n this.sourceContents = {};\n this.line = aLine == null ? null : aLine;\n this.column = aColumn == null ? null : aColumn;\n this.source = aSource == null ? null : aSource;\n this.name = aName == null ? null : aName;\n this[isSourceNode] = true;\n if (aChunks != null) this.add(aChunks);\n}\n\n/**\n * Creates a SourceNode from generated code and a SourceMapConsumer.\n *\n * @param aGeneratedCode The generated code\n * @param aSourceMapConsumer The SourceMap for the generated code\n * @param aRelativePath Optional. The path that relative sources in the\n * SourceMapConsumer should be relative to.\n */\nSourceNode.fromStringWithSourceMap =\n function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {\n // The SourceNode we want to fill with the generated code\n // and the SourceMap\n var node = new SourceNode();\n\n // All even indices of this array are one line of the generated code,\n // while all odd indices are the newlines between two adjacent lines\n // (since `REGEX_NEWLINE` captures its match).\n // Processed fragments are removed from this array, by calling `shiftNextLine`.\n var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);\n var shiftNextLine = function() {\n var lineContents = remainingLines.shift();\n // The last line of a file might not have a newline.\n var newLine = remainingLines.shift() || \"\";\n return lineContents + newLine;\n };\n\n // We need to remember the position of \"remainingLines\"\n var lastGeneratedLine = 1, lastGeneratedColumn = 0;\n\n // The generate SourceNodes we need a code range.\n // To extract it current and last mapping is used.\n // Here we store the last mapping.\n var lastMapping = null;\n\n aSourceMapConsumer.eachMapping(function (mapping) {\n if (lastMapping !== null) {\n // We add the code from \"lastMapping\" to \"mapping\":\n // First check if there is a new line in between.\n if (lastGeneratedLine < mapping.generatedLine) {\n // Associate first line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n lastGeneratedLine++;\n lastGeneratedColumn = 0;\n // The remaining code is added without mapping\n } else {\n // There is no new line in between.\n // Associate the code between \"lastGeneratedColumn\" and\n // \"mapping.generatedColumn\" with \"lastMapping\"\n var nextLine = remainingLines[0];\n var code = nextLine.substr(0, mapping.generatedColumn -\n lastGeneratedColumn);\n remainingLines[0] = nextLine.substr(mapping.generatedColumn -\n lastGeneratedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n addMappingWithCode(lastMapping, code);\n // No more remaining code, continue\n lastMapping = mapping;\n return;\n }\n }\n // We add the generated code until the first mapping\n // to the SourceNode without any mapping.\n // Each line is added as separate string.\n while 
(lastGeneratedLine < mapping.generatedLine) {\n node.add(shiftNextLine());\n lastGeneratedLine++;\n }\n if (lastGeneratedColumn < mapping.generatedColumn) {\n var nextLine = remainingLines[0];\n node.add(nextLine.substr(0, mapping.generatedColumn));\n remainingLines[0] = nextLine.substr(mapping.generatedColumn);\n lastGeneratedColumn = mapping.generatedColumn;\n }\n lastMapping = mapping;\n }, this);\n // We have processed all mappings.\n if (remainingLines.length > 0) {\n if (lastMapping) {\n // Associate the remaining code in the current line with \"lastMapping\"\n addMappingWithCode(lastMapping, shiftNextLine());\n }\n // and add the remaining lines without any mapping\n node.add(remainingLines.join(\"\"));\n }\n\n // Copy sourcesContent into SourceNode\n aSourceMapConsumer.sources.forEach(function (sourceFile) {\n var content = aSourceMapConsumer.sourceContentFor(sourceFile);\n if (content != null) {\n if (aRelativePath != null) {\n sourceFile = util.join(aRelativePath, sourceFile);\n }\n node.setSourceContent(sourceFile, content);\n }\n });\n\n return node;\n\n function addMappingWithCode(mapping, code) {\n if (mapping === null || mapping.source === undefined) {\n node.add(code);\n } else {\n var source = aRelativePath\n ? util.join(aRelativePath, mapping.source)\n : mapping.source;\n node.add(new SourceNode(mapping.originalLine,\n mapping.originalColumn,\n source,\n code,\n mapping.name));\n }\n }\n };\n\n/**\n * Add a chunk of generated JS to this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.add = function SourceNode_add(aChunk) {\n if (Array.isArray(aChunk)) {\n aChunk.forEach(function (chunk) {\n this.add(chunk);\n }, this);\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n if (aChunk) {\n this.children.push(aChunk);\n }\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Add a chunk of generated JS to the beginning of this source node.\n *\n * @param aChunk A string snippet of generated JS code, another instance of\n * SourceNode, or an array where each member is one of those things.\n */\nSourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {\n if (Array.isArray(aChunk)) {\n for (var i = aChunk.length-1; i >= 0; i--) {\n this.prepend(aChunk[i]);\n }\n }\n else if (aChunk[isSourceNode] || typeof aChunk === \"string\") {\n this.children.unshift(aChunk);\n }\n else {\n throw new TypeError(\n \"Expected a SourceNode, string, or an array of SourceNodes and strings. Got \" + aChunk\n );\n }\n return this;\n};\n\n/**\n * Walk over the tree of JS snippets in this node and its children. The\n * walking function is called once for each snippet of JS and is passed that\n * snippet and the its original associated source's line/column location.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walk = function SourceNode_walk(aFn) {\n var chunk;\n for (var i = 0, len = this.children.length; i < len; i++) {\n chunk = this.children[i];\n if (chunk[isSourceNode]) {\n chunk.walk(aFn);\n }\n else {\n if (chunk !== '') {\n aFn(chunk, { source: this.source,\n line: this.line,\n column: this.column,\n name: this.name });\n }\n }\n }\n};\n\n/**\n * Like `String.prototype.join` except for SourceNodes. 
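A small sketch of assembling a SourceNode tree by hand with the constructor, add and prepend shown above, then walking it; the file name a.js is made up:

var SourceNode = require('./source-node').SourceNode;

// Two mapped snippets wrapped in an unmapped parent node.
var node = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'foo();\n'),
  new SourceNode(2, 0, 'a.js', 'bar();\n')
]);
node.prepend('// header\n'); // plain strings stay unmapped

node.walk(function (chunk, loc) {
  // chunk is each string snippet; loc is { source, line, column, name } (all null for the header).
  console.log(chunk, loc.source, loc.line);
});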
Inserts `aStr` between\n * each of `this.children`.\n *\n * @param aSep The separator.\n */\nSourceNode.prototype.join = function SourceNode_join(aSep) {\n var newChildren;\n var i;\n var len = this.children.length;\n if (len > 0) {\n newChildren = [];\n for (i = 0; i < len-1; i++) {\n newChildren.push(this.children[i]);\n newChildren.push(aSep);\n }\n newChildren.push(this.children[i]);\n this.children = newChildren;\n }\n return this;\n};\n\n/**\n * Call String.prototype.replace on the very right-most source snippet. Useful\n * for trimming whitespace from the end of a source node, etc.\n *\n * @param aPattern The pattern to replace.\n * @param aReplacement The thing to replace the pattern with.\n */\nSourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {\n var lastChild = this.children[this.children.length - 1];\n if (lastChild[isSourceNode]) {\n lastChild.replaceRight(aPattern, aReplacement);\n }\n else if (typeof lastChild === 'string') {\n this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);\n }\n else {\n this.children.push(''.replace(aPattern, aReplacement));\n }\n return this;\n};\n\n/**\n * Set the source content for a source file. This will be added to the SourceMapGenerator\n * in the sourcesContent field.\n *\n * @param aSourceFile The filename of the source file\n * @param aSourceContent The content of the source file\n */\nSourceNode.prototype.setSourceContent =\n function SourceNode_setSourceContent(aSourceFile, aSourceContent) {\n this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;\n };\n\n/**\n * Walk over the tree of SourceNodes. The walking function is called for each\n * source file content and is passed the filename and source content.\n *\n * @param aFn The traversal function.\n */\nSourceNode.prototype.walkSourceContents =\n function SourceNode_walkSourceContents(aFn) {\n for (var i = 0, len = this.children.length; i < len; i++) {\n if (this.children[i][isSourceNode]) {\n this.children[i].walkSourceContents(aFn);\n }\n }\n\n var sources = Object.keys(this.sourceContents);\n for (var i = 0, len = sources.length; i < len; i++) {\n aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);\n }\n };\n\n/**\n * Return the string representation of this source node. 
Walks over the tree\n * and concatenates all the various snippets together to one string.\n */\nSourceNode.prototype.toString = function SourceNode_toString() {\n var str = \"\";\n this.walk(function (chunk) {\n str += chunk;\n });\n return str;\n};\n\n/**\n * Returns the string representation of this source node along with a source\n * map.\n */\nSourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {\n var generated = {\n code: \"\",\n line: 1,\n column: 0\n };\n var map = new SourceMapGenerator(aArgs);\n var sourceMappingActive = false;\n var lastOriginalSource = null;\n var lastOriginalLine = null;\n var lastOriginalColumn = null;\n var lastOriginalName = null;\n this.walk(function (chunk, original) {\n generated.code += chunk;\n if (original.source !== null\n && original.line !== null\n && original.column !== null) {\n if(lastOriginalSource !== original.source\n || lastOriginalLine !== original.line\n || lastOriginalColumn !== original.column\n || lastOriginalName !== original.name) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n lastOriginalSource = original.source;\n lastOriginalLine = original.line;\n lastOriginalColumn = original.column;\n lastOriginalName = original.name;\n sourceMappingActive = true;\n } else if (sourceMappingActive) {\n map.addMapping({\n generated: {\n line: generated.line,\n column: generated.column\n }\n });\n lastOriginalSource = null;\n sourceMappingActive = false;\n }\n for (var idx = 0, length = chunk.length; idx < length; idx++) {\n if (chunk.charCodeAt(idx) === NEWLINE_CODE) {\n generated.line++;\n generated.column = 0;\n // Mappings end at eol\n if (idx + 1 === length) {\n lastOriginalSource = null;\n sourceMappingActive = false;\n } else if (sourceMappingActive) {\n map.addMapping({\n source: original.source,\n original: {\n line: original.line,\n column: original.column\n },\n generated: {\n line: generated.line,\n column: generated.column\n },\n name: original.name\n });\n }\n } else {\n generated.column++;\n }\n }\n });\n this.walkSourceContents(function (sourceFile, sourceContent) {\n map.setSourceContent(sourceFile, sourceContent);\n });\n\n return { code: generated.code, map: map };\n};\n\nexports.SourceNode = SourceNode;\n\n\n\n/*****************\n ** WEBPACK FOOTER\n ** ./lib/source-node.js\n ** module id = 10\n ** module chunks = 0\n **/"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/source-map/lib/array-set.js b/node_modules/source-map/lib/array-set.js new file mode 100644 index 0000000..51dffeb --- /dev/null +++ b/node_modules/source-map/lib/array-set.js @@ -0,0 +1,104 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var has = Object.prototype.hasOwnProperty; + +/** + * A data structure which is a combination of an array and a set. Adding a new + * member is O(1), testing for membership is O(1), and finding the index of an + * element is O(1). Removing elements from the set is not supported. Only + * strings are supported for membership. + */ +function ArraySet() { + this._array = []; + this._set = Object.create(null); +} + +/** + * Static method for creating ArraySet instances from an existing array. 
+ */ +ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { + var set = new ArraySet(); + for (var i = 0, len = aArray.length; i < len; i++) { + set.add(aArray[i], aAllowDuplicates); + } + return set; +}; + +/** + * Return how many unique items are in this ArraySet. If duplicates have been + * added, than those do not count towards the size. + * + * @returns Number + */ +ArraySet.prototype.size = function ArraySet_size() { + return Object.getOwnPropertyNames(this._set).length; +}; + +/** + * Add the given string to this set. + * + * @param String aStr + */ +ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { + var sStr = util.toSetString(aStr); + var isDuplicate = has.call(this._set, sStr); + var idx = this._array.length; + if (!isDuplicate || aAllowDuplicates) { + this._array.push(aStr); + } + if (!isDuplicate) { + this._set[sStr] = idx; + } +}; + +/** + * Is the given string a member of this set? + * + * @param String aStr + */ +ArraySet.prototype.has = function ArraySet_has(aStr) { + var sStr = util.toSetString(aStr); + return has.call(this._set, sStr); +}; + +/** + * What is the index of the given string in the array? + * + * @param String aStr + */ +ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { + var sStr = util.toSetString(aStr); + if (has.call(this._set, sStr)) { + return this._set[sStr]; + } + throw new Error('"' + aStr + '" is not in the set.'); +}; + +/** + * What is the element at the given index? + * + * @param Number aIdx + */ +ArraySet.prototype.at = function ArraySet_at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error('No element indexed by ' + aIdx); +}; + +/** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ +ArraySet.prototype.toArray = function ArraySet_toArray() { + return this._array.slice(); +}; + +exports.ArraySet = ArraySet; diff --git a/node_modules/source-map/lib/base64-vlq.js b/node_modules/source-map/lib/base64-vlq.js new file mode 100644 index 0000000..612b404 --- /dev/null +++ b/node_modules/source-map/lib/base64-vlq.js @@ -0,0 +1,140 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. 
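To make the ArraySet contract above concrete, a tiny usage sketch (not taken from the library's own tests):

var ArraySet = require('./array-set').ArraySet;

var set = ArraySet.fromArray(['a.js', 'b.js']);
set.has('a.js');      // true
set.indexOf('b.js');  // 1
set.at(0);            // 'a.js'
set.add('a.js');      // duplicate: ignored unless aAllowDuplicates is passed
set.size();           // still 2
set.toArray();        // ['a.js', 'b.js'] -- a copy, so callers cannot mutate internal state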
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +var base64 = require('./base64'); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +var VLQ_BASE_SHIFT = 5; + +// binary: 100000 +var VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +var VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +var VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +function fromVLQSigned(aValue) { + var isNegative = (aValue & 1) === 1; + var shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + var encoded = ""; + var digit; + + var vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; + +/** + * Decodes the next base 64 VLQ value from the given string and returns the + * value and the rest of the string via the out parameter. 
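A round-trip sketch of the VLQ helpers defined above; the encoded strings follow directly from the sign-bit scheme (the value is shifted left one bit and the sign stored in the low bit):

var base64VLQ = require('./base64-vlq');

base64VLQ.encode(1);   // "C"
base64VLQ.encode(-1);  // "D"
base64VLQ.encode(16);  // "gB" -- the first digit carries the continuation bit

var out = {};
base64VLQ.decode('gB', 0, out);
// out.value === 16, out.rest === 2 (the index just past the digits consumed)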
+ */ +exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { + var strLen = aStr.length; + var result = 0; + var shift = 0; + var continuation, digit; + + do { + if (aIndex >= strLen) { + throw new Error("Expected more digits in base 64 VLQ value."); + } + + digit = base64.decode(aStr.charCodeAt(aIndex++)); + if (digit === -1) { + throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); + } + + continuation = !!(digit & VLQ_CONTINUATION_BIT); + digit &= VLQ_BASE_MASK; + result = result + (digit << shift); + shift += VLQ_BASE_SHIFT; + } while (continuation); + + aOutParam.value = fromVLQSigned(result); + aOutParam.rest = aIndex; +}; diff --git a/node_modules/source-map/lib/base64.js b/node_modules/source-map/lib/base64.js new file mode 100644 index 0000000..8aa86b3 --- /dev/null +++ b/node_modules/source-map/lib/base64.js @@ -0,0 +1,67 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +exports.encode = function (number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; + +/** + * Decode a single base 64 character code digit to an integer. Returns -1 on + * failure. + */ +exports.decode = function (charCode) { + var bigA = 65; // 'A' + var bigZ = 90; // 'Z' + + var littleA = 97; // 'a' + var littleZ = 122; // 'z' + + var zero = 48; // '0' + var nine = 57; // '9' + + var plus = 43; // '+' + var slash = 47; // '/' + + var littleOffset = 26; + var numberOffset = 52; + + // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ + if (bigA <= charCode && charCode <= bigZ) { + return (charCode - bigA); + } + + // 26 - 51: abcdefghijklmnopqrstuvwxyz + if (littleA <= charCode && charCode <= littleZ) { + return (charCode - littleA + littleOffset); + } + + // 52 - 61: 0123456789 + if (zero <= charCode && charCode <= nine) { + return (charCode - zero + numberOffset); + } + + // 62: + + if (charCode == plus) { + return 62; + } + + // 63: / + if (charCode == slash) { + return 63; + } + + // Invalid base64 digit. + return -1; +}; diff --git a/node_modules/source-map/lib/binary-search.js b/node_modules/source-map/lib/binary-search.js new file mode 100644 index 0000000..010ac94 --- /dev/null +++ b/node_modules/source-map/lib/binary-search.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. 
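The single-digit helpers in base64.js quoted above map 0-63 onto A-Z, a-z, 0-9, '+' and '/'; a few spot checks:

var base64 = require('./base64');

base64.encode(0);   // 'A'
base64.encode(26);  // 'a'
base64.encode(63);  // '/'
base64.decode(65);  // 0  (the char code of 'A')
base64.decode(33);  // -1 ('!' is not a base64 digit)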
Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + var mid = Math.floor((aHigh - aLow) / 2) + aLow; + var cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } + else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } else { + return mid; + } + } + else { + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } else { + return aLow < 0 ? -1 : aLow; + } + } +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. 
+ while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/node_modules/source-map/lib/mapping-list.js b/node_modules/source-map/lib/mapping-list.js new file mode 100644 index 0000000..06d1274 --- /dev/null +++ b/node_modules/source-map/lib/mapping-list.js @@ -0,0 +1,79 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + var lineA = mappingA.generatedLine; + var lineB = mappingB.generatedLine; + var columnA = mappingA.generatedColumn; + var columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a neglibable overhead in general + * case for a large speedup in case of mappings being added in order. + */ +function MappingList() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; +} + +/** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ +MappingList.prototype.unsortedForEach = + function MappingList_forEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + }; + +/** + * Add the given source mapping. + * + * @param Object aMapping + */ +MappingList.prototype.add = function MappingList_add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } +}; + +/** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ +MappingList.prototype.toArray = function MappingList_toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; +}; + +exports.MappingList = MappingList; diff --git a/node_modules/source-map/lib/quick-sort.js b/node_modules/source-map/lib/quick-sort.js new file mode 100644 index 0000000..6a7caad --- /dev/null +++ b/node_modules/source-map/lib/quick-sort.js @@ -0,0 +1,114 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +// It turns out that some (most?) JavaScript engines don't self-host +// `Array.prototype.sort`. This makes sense because C++ will likely remain +// faster than JS when doing raw CPU-intensive sorting. 
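A short sketch of the MappingList fast path described above: mappings appended in generated order keep the list sorted, and an out-of-order add simply defers one sort to toArray (the mapping literals here are trimmed down for illustration):

var MappingList = require('./mapping-list').MappingList;

var list = new MappingList();
list.add({ generatedLine: 1, generatedColumn: 0 });
list.add({ generatedLine: 1, generatedColumn: 5 }); // still in order
list.add({ generatedLine: 1, generatedColumn: 2 }); // out of order: marks the list unsorted
list.toArray();                                     // sorts once by generated position, then returns the internal array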
However, when using a +// custom comparator function, calling back and forth between the VM's C++ and +// JIT'd JS is rather slow *and* loses JIT type information, resulting in +// worse generated code for the comparator function than would be optimal. In +// fact, when sorting with a comparator, these costs outweigh the benefits of +// sorting in C++. By using our own JS-implemented Quick Sort (below), we get +// a ~3500ms mean speed-up in `bench/bench.html`. + +/** + * Swap the elements indexed by `x` and `y` in the array `ary`. + * + * @param {Array} ary + * The array. + * @param {Number} x + * The index of the first item. + * @param {Number} y + * The index of the second item. + */ +function swap(ary, x, y) { + var temp = ary[x]; + ary[x] = ary[y]; + ary[y] = temp; +} + +/** + * Returns a random integer within the range `low .. high` inclusive. + * + * @param {Number} low + * The lower bound on the range. + * @param {Number} high + * The upper bound on the range. + */ +function randomIntInRange(low, high) { + return Math.round(low + (Math.random() * (high - low))); +} + +/** + * The Quick Sort algorithm. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + * @param {Number} p + * Start index of the array + * @param {Number} r + * End index of the array + */ +function doQuickSort(ary, comparator, p, r) { + // If our lower bound is less than our upper bound, we (1) partition the + // array into two pieces and (2) recurse on each half. If it is not, this is + // the empty array and our base case. + + if (p < r) { + // (1) Partitioning. + // + // The partitioning chooses a pivot between `p` and `r` and moves all + // elements that are less than or equal to the pivot to the before it, and + // all the elements that are greater than it after it. The effect is that + // once partition is done, the pivot is in the exact place it will be when + // the array is put in sorted order, and it will not need to be moved + // again. This runs in O(n) time. + + // Always choose a random pivot so that an input array which is reverse + // sorted does not cause O(n^2) running time. + var pivotIndex = randomIntInRange(p, r); + var i = p - 1; + + swap(ary, pivotIndex, r); + var pivot = ary[r]; + + // Immediately after `j` is incremented in this loop, the following hold + // true: + // + // * Every element in `ary[p .. i]` is less than or equal to the pivot. + // + // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. + for (var j = p; j < r; j++) { + if (comparator(ary[j], pivot) <= 0) { + i += 1; + swap(ary, i, j); + } + } + + swap(ary, i + 1, j); + var q = i + 1; + + // (2) Recurse on each half. + + doQuickSort(ary, comparator, p, q - 1); + doQuickSort(ary, comparator, q + 1, r); + } +} + +/** + * Sort the given array in-place with the given comparator function. + * + * @param {Array} ary + * An array to sort. + * @param {function} comparator + * Function to use to compare two items. + */ +exports.quickSort = function (ary, comparator) { + doQuickSort(ary, comparator, 0, ary.length - 1); +}; diff --git a/node_modules/source-map/lib/source-map-consumer.js b/node_modules/source-map/lib/source-map-consumer.js new file mode 100644 index 0000000..6abcc28 --- /dev/null +++ b/node_modules/source-map/lib/source-map-consumer.js @@ -0,0 +1,1082 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var util = require('./util'); +var binarySearch = require('./binary-search'); +var ArraySet = require('./array-set').ArraySet; +var base64VLQ = require('./base64-vlq'); +var quickSort = require('./quick-sort').quickSort; + +function SourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + return sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap) + : new BasicSourceMapConsumer(sourceMap); +} + +SourceMapConsumer.fromSourceMap = function(aSourceMap) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap); +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; + +// `__generatedMappings` and `__originalMappings` are arrays that hold the +// parsed mapping coordinates from the source map's "mappings" attribute. They +// are lazily instantiated, accessed via the `_generatedMappings` and +// `_originalMappings` getters respectively, and we only parse the mappings +// and create these arrays once queried for a source location. We jump through +// these hoops because there can be many thousands of mappings, and parsing +// them is expensive, so we only want to do it if we must. +// +// Each object in the arrays is of the form: +// +// { +// generatedLine: The line number in the generated code, +// generatedColumn: The column number in the generated code, +// source: The path to the original source file that generated this +// chunk of code, +// originalLine: The line number in the original source that +// corresponds to this chunk of generated code, +// originalColumn: The column number in the original source that +// corresponds to this chunk of generated code, +// name: The name of the original symbol which generated this chunk of +// code. +// } +// +// All properties except for `generatedLine` and `generatedColumn` can be +// `null`. +// +// `_generatedMappings` is ordered by the generated positions. +// +// `_originalMappings` is ordered by the original positions. + +SourceMapConsumer.prototype.__generatedMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { + get: function () { + if (!this.__generatedMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappings; + } +}); + +SourceMapConsumer.prototype.__originalMappings = null; +Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { + get: function () { + if (!this.__originalMappings) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappings; + } +}); + +SourceMapConsumer.prototype._charIsMappingSeparator = + function SourceMapConsumer_charIsMappingSeparator(aStr, index) { + var c = aStr.charAt(index); + return c === ";" || c === ","; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). 
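The constructor above accepts either a JSON string (stripping an optional ")]}'" XSSI prefix) or an already-parsed object, and dispatches on the presence of a sections field; a sketch with made-up map literals:

var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;

// Plain v3 map -> BasicSourceMapConsumer.
var basic = new SourceMapConsumer(
  '{"version":3,"sources":["foo.js"],"names":[],"mappings":"AAAA"}');

// Index map with a "sections" array -> IndexedSourceMapConsumer.
var indexed = new SourceMapConsumer({ version: 3, sections: [] });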
+ */ +SourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + }; + +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +/** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ +SourceMapConsumer.prototype.eachMapping = + function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { + var context = aContext || null; + var order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + var mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + var sourceRoot = this.sourceRoot; + mappings.map(function (mapping) { + var source = mapping.source === null ? null : this._sources.at(mapping.source); + if (source != null && sourceRoot != null) { + source = util.join(sourceRoot, source); + } + return { + source: source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + }; + +/** + * Returns all generated line and column information for the original source, + * line, and column provided. If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: Optional. the column number in the original source. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ +SourceMapConsumer.prototype.allGeneratedPositionsFor = + function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { + var line = util.getArg(aArgs, 'line'); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. 
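A sketch of eachMapping as documented above; consumer is assumed to be an existing SourceMapConsumer instance:

consumer.eachMapping(function (m) {
  // m carries generatedLine/generatedColumn plus source, originalLine, originalColumn and name (possibly null).
  console.log(m.generatedLine + ':' + m.generatedColumn, '->', m.source, m.originalLine + ':' + m.originalColumn);
}, null, SourceMapConsumer.ORIGINAL_ORDER); // omit the third argument to iterate in GENERATED_ORDER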
+ var needle = { + source: util.getArg(aArgs, 'source'), + originalLine: line, + originalColumn: util.getArg(aArgs, 'column', 0) + }; + + if (this.sourceRoot != null) { + needle.source = util.relative(this.sourceRoot, needle.source); + } + if (!this._sources.has(needle.source)) { + return []; + } + needle.source = this._sources.indexOf(needle.source); + + var mappings = []; + + var index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + var originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. + while (mapping && mapping.originalLine === originalLine) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } else { + var originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + mappings.push({ + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + }; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The only parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referrenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. 
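A sketch of allGeneratedPositionsFor as described above (consumer and foo.js are assumed); omitting column returns every mapping on the matched original line:

var positions = consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 10 });
// e.g. [ { line: 3, column: 4, lastColumn: null }, ... ]; an empty array when foo.js is not in the map.
// lastColumn stays null until computeColumnSpans() (defined further down) has been called.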
+ * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +function BasicSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sources = util.getArg(sourceMap, 'sources'); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. + var names = util.getArg(sourceMap, 'names', []); + var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); + var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); + var mappings = util.getArg(sourceMap, 'mappings'); + var file = util.getArg(sourceMap, 'file', null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function (source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + this._names = ArraySet.fromArray(names.map(String), true); + this._sources = ArraySet.fromArray(sources, true); + + this.sourceRoot = sourceRoot; + this.sourcesContent = sourcesContent; + this._mappings = mappings; + this.file = file; +} + +BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; + +/** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @returns BasicSourceMapConsumer + */ +BasicSourceMapConsumer.fromSourceMap = + function SourceMapConsumer_fromSourceMap(aSourceMap) { + var smc = Object.create(BasicSourceMapConsumer.prototype); + + var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); + var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); + smc.sourceRoot = aSourceMap._sourceRoot; + smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), + smc.sourceRoot); + smc.file = aSourceMap._file; + + // Because we are modifying the entries (by converting string sources and + // names to indices into the sources and names ArraySets), we have to make + // a copy of the entry or else bad things happen. Shared mutable state + // strikes again! See github issue #191. + + var generatedMappings = aSourceMap._mappings.toArray().slice(); + var destGeneratedMappings = smc.__generatedMappings = []; + var destOriginalMappings = smc.__originalMappings = []; + + for (var i = 0, length = generatedMappings.length; i < length; i++) { + var srcMapping = generatedMappings[i]; + var destMapping = new Mapping; + destMapping.generatedLine = srcMapping.generatedLine; + destMapping.generatedColumn = srcMapping.generatedColumn; + + if (srcMapping.source) { + destMapping.source = sources.indexOf(srcMapping.source); + destMapping.originalLine = srcMapping.originalLine; + destMapping.originalColumn = srcMapping.originalColumn; + + if (srcMapping.name) { + destMapping.name = names.indexOf(srcMapping.name); + } + + destOriginalMappings.push(destMapping); + } + + destGeneratedMappings.push(destMapping); + } + + quickSort(smc.__originalMappings, util.compareByOriginalPositions); + + return smc; + }; + +/** + * The version of the source mapping spec that we are consuming. + */ +BasicSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { + get: function () { + return this._sources.toArray().map(function (s) { + return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; + }, this); + } +}); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +BasicSourceMapConsumer.prototype._parseMappings = + function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { + var generatedLine = 1; + var previousGeneratedColumn = 0; + var previousOriginalLine = 0; + var previousOriginalColumn = 0; + var previousSource = 0; + var previousName = 0; + var length = aStr.length; + var index = 0; + var cachedSegments = {}; + var temp = {}; + var originalMappings = []; + var generatedMappings = []; + var mapping, str, segment, end, value; + + while (index < length) { + if (aStr.charAt(index) === ';') { + generatedLine++; + index++; + previousGeneratedColumn = 0; + } + else if (aStr.charAt(index) === ',') { + index++; + } + else { + mapping = new Mapping(); + mapping.generatedLine = generatedLine; + + // Because each offset is encoded relative to the previous one, + // many segments often have the same encoding. 
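fromSourceMap above builds a consumer directly from an in-memory SourceMapGenerator without a serialise/re-parse round trip; a sketch, where generator is assumed to be a SourceMapGenerator from this same library:

var consumer = SourceMapConsumer.fromSourceMap(generator);
consumer.sources; // original file names, joined with the source root when one is set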
We can exploit this + // fact by caching the parsed variable length fields of each segment, + // allowing us to avoid a second parse if we encounter the same + // segment again. + for (end = index; end < length; end++) { + if (this._charIsMappingSeparator(aStr, end)) { + break; + } + } + str = aStr.slice(index, end); + + segment = cachedSegments[str]; + if (segment) { + index += str.length; + } else { + segment = []; + while (index < end) { + base64VLQ.decode(aStr, index, temp); + value = temp.value; + index = temp.rest; + segment.push(value); + } + + if (segment.length === 2) { + throw new Error('Found a source, but no line and column'); + } + + if (segment.length === 3) { + throw new Error('Found a source and line, but no column'); + } + + cachedSegments[str] = segment; + } + + // Generated column. + mapping.generatedColumn = previousGeneratedColumn + segment[0]; + previousGeneratedColumn = mapping.generatedColumn; + + if (segment.length > 1) { + // Original source. + mapping.source = previousSource + segment[1]; + previousSource += segment[1]; + + // Original line. + mapping.originalLine = previousOriginalLine + segment[2]; + previousOriginalLine = mapping.originalLine; + // Lines are stored 0-based + mapping.originalLine += 1; + + // Original column. + mapping.originalColumn = previousOriginalColumn + segment[3]; + previousOriginalColumn = mapping.originalColumn; + + if (segment.length > 4) { + // Original name. + mapping.name = previousName + segment[4]; + previousName += segment[4]; + } + } + + generatedMappings.push(mapping); + if (typeof mapping.originalLine === 'number') { + originalMappings.push(mapping); + } + } + } + + quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = generatedMappings; + + quickSort(originalMappings, util.compareByOriginalPositions); + this.__originalMappings = originalMappings; + }; + +/** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ +BasicSourceMapConsumer.prototype._findMapping = + function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError('Line must be greater than or equal to 1, got ' + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError('Column must be greater than or equal to 0, got ' + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + }; + +/** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ +BasicSourceMapConsumer.prototype.computeColumnSpans = + function SourceMapConsumer_computeColumnSpans() { + for (var index = 0; index < this._generatedMappings.length; ++index) { + var mapping = this._generatedMappings[index]; + + // Mappings do not contain a field for the last generated columnt. We + // can come up with an optimistic estimate, however, by assuming that + // mappings are contiguous (i.e. given two consecutive mappings, the + // first mapping ends where the second one starts). 
+ if (index + 1 < this._generatedMappings.length) { + var nextMapping = this._generatedMappings[index + 1]; + + if (mapping.generatedLine === nextMapping.generatedLine) { + mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; + continue; + } + } + + // The last mapping for each line spans the entire line. + mapping.lastGeneratedColumn = Infinity; + } + }; + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ +BasicSourceMapConsumer.prototype.originalPositionFor = + function SourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._generatedMappings, + "generatedLine", + "generatedColumn", + util.compareByGeneratedPositionsDeflated, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._generatedMappings[index]; + + if (mapping.generatedLine === needle.generatedLine) { + var source = util.getArg(mapping, 'source', null); + if (source !== null) { + source = this._sources.at(source); + if (this.sourceRoot != null) { + source = util.join(this.sourceRoot, source); + } + } + var name = util.getArg(mapping, 'name', null); + if (name !== null) { + name = this._names.at(name); + } + return { + source: source, + line: util.getArg(mapping, 'originalLine', null), + column: util.getArg(mapping, 'originalColumn', null), + name: name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +BasicSourceMapConsumer.prototype.hasContentsOfAllSources = + function BasicSourceMapConsumer_hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function (sc) { return sc == null; }); + }; + +/** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
+ */ +BasicSourceMapConsumer.prototype.sourceContentFor = + function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + if (this.sourceRoot != null) { + aSource = util.relative(this.sourceRoot, aSource); + } + + if (this._sources.has(aSource)) { + return this.sourcesContent[this._sources.indexOf(aSource)]; + } + + var url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + aSource)) { + return this.sourcesContent[this._sources.indexOf("/" + aSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. 
+ */ +BasicSourceMapConsumer.prototype.generatedPositionFor = + function SourceMapConsumer_generatedPositionFor(aArgs) { + var source = util.getArg(aArgs, 'source'); + if (this.sourceRoot != null) { + source = util.relative(this.sourceRoot, source); + } + if (!this._sources.has(source)) { + return { + line: null, + column: null, + lastColumn: null + }; + } + source = this._sources.indexOf(source); + + var needle = { + source: source, + originalLine: util.getArg(aArgs, 'line'), + originalColumn: util.getArg(aArgs, 'column') + }; + + var index = this._findMapping( + needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) + ); + + if (index >= 0) { + var mapping = this._originalMappings[index]; + + if (mapping.source === needle.source) { + return { + line: util.getArg(mapping, 'generatedLine', null), + column: util.getArg(mapping, 'generatedColumn', null), + lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + }; + +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The only parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +function IndexedSourceMapConsumer(aSourceMap) { + var sourceMap = aSourceMap; + if (typeof aSourceMap === 'string') { + sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); + } + + var version = util.getArg(sourceMap, 'version'); + var sections = util.getArg(sourceMap, 'sections'); + + if (version != this._version) { + throw new Error('Unsupported version: ' + version); + } + + this._sources = new ArraySet(); + this._names = new ArraySet(); + + var lastOffset = { + line: -1, + column: 0 + }; + this._sections = sections.map(function (s) { + if (s.url) { + // The url field will require support for asynchronicity. 
+ // See https://github.com/mozilla/source-map/issues/16 + throw new Error('Support for url field in sections not implemented.'); + } + var offset = util.getArg(s, 'offset'); + var offsetLine = util.getArg(offset, 'line'); + var offsetColumn = util.getArg(offset, 'column'); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error('Section offsets must be ordered and non-overlapping.'); + } + lastOffset = offset; + + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer: new SourceMapConsumer(util.getArg(s, 'map')) + } + }); +} + +IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); +IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; + +/** + * The version of the source mapping spec that we are consuming. + */ +IndexedSourceMapConsumer.prototype._version = 3; + +/** + * The list of original sources. + */ +Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { + get: function () { + var sources = []; + for (var i = 0; i < this._sections.length; i++) { + for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } +}); + +/** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. + * - column: The column number in the generated source. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. + * - column: The column number in the original source, or null. + * - name: The original identifier, or null. + */ +IndexedSourceMapConsumer.prototype.originalPositionFor = + function IndexedSourceMapConsumer_originalPositionFor(aArgs) { + var needle = { + generatedLine: util.getArg(aArgs, 'line'), + generatedColumn: util.getArg(aArgs, 'column') + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + var sectionIndex = binarySearch.search(needle, this._sections, + function(needle, section) { + var cmp = needle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (needle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + var section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + }; + +/** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ +IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = + function IndexedSourceMapConsumer_hasContentsOfAllSources() { + return this._sections.every(function (s) { + return s.consumer.hasContentsOfAllSources(); + }); + }; + +/** + * Returns the original source content. 
The only argument is the url of the + * original source file. Returns null if no original source content is + * available. + */ +IndexedSourceMapConsumer.prototype.sourceContentFor = + function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + var content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + else { + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + }; + +/** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. + * - column: The column number in the original source. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. + * - column: The column number in the generated source, or null. + */ +IndexedSourceMapConsumer.prototype.generatedPositionFor = + function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { + continue; + } + var generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + var ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + }; + +/** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ +IndexedSourceMapConsumer.prototype._parseMappings = + function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { + this.__generatedMappings = []; + this.__originalMappings = []; + for (var i = 0; i < this._sections.length; i++) { + var section = this._sections[i]; + var sectionMappings = section.consumer._generatedMappings; + for (var j = 0; j < sectionMappings.length; j++) { + var mapping = sectionMappings[j]; + + var source = section.consumer._sources.at(mapping.source); + if (section.consumer.sourceRoot !== null) { + source = util.join(section.consumer.sourceRoot, source); + } + this._sources.add(source); + source = this._sources.indexOf(source); + + var name = section.consumer._names.at(mapping.name); + this._names.add(name); + name = this._names.indexOf(name); + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + var adjustedMapping = { + source: source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: name + }; + + this.__generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === 'number') { + this.__originalMappings.push(adjustedMapping); + } + } + } + + quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); + quickSort(this.__originalMappings, util.compareByOriginalPositions); + }; + +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; diff --git a/node_modules/source-map/lib/source-map-generator.js b/node_modules/source-map/lib/source-map-generator.js new file mode 100644 index 0000000..8fbb8e8 --- /dev/null +++ b/node_modules/source-map/lib/source-map-generator.js @@ -0,0 +1,404 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var base64VLQ = require('./base64-vlq'); +var util = require('./util'); +var ArraySet = require('./array-set').ArraySet; +var MappingList = require('./mapping-list').MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ +function SourceMapGenerator(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, 'file', null); + this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); + this._skipValidation = util.getArg(aArgs, 'skipValidation', false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; +} + +SourceMapGenerator.prototype._version = 3; + +/** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. + */ +SourceMapGenerator.fromSourceMap = + function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { + var sourceRoot = aSourceMapConsumer.sourceRoot; + var generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot: sourceRoot + }); + aSourceMapConsumer.eachMapping(function (mapping) { + var newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + }; + +/** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). 
+ * - name: An optional original token name for this mapping. + */ +SourceMapGenerator.prototype.addMapping = + function SourceMapGenerator_addMapping(aArgs) { + var generated = util.getArg(aArgs, 'generated'); + var original = util.getArg(aArgs, 'original', null); + var source = util.getArg(aArgs, 'source', null); + var name = util.getArg(aArgs, 'name', null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source: source, + name: name + }); + }; + +/** + * Set the source content for a source file. + */ +SourceMapGenerator.prototype.setSourceContent = + function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { + var source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + }; + +/** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ +SourceMapGenerator.prototype.applySourceMap = + function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + var sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + var sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. 
+ if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + var newSources = new ArraySet(); + var newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function (mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + var original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source) + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + var source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + var name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aSourceMapPath != null) { + sourceFile = util.join(aSourceMapPath, sourceFile); + } + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + this.setSourceContent(sourceFile, content); + } + }, this); + }; + +/** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. + * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ +SourceMapGenerator.prototype._validateMapping = + function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, + aName) { + if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + return; + } + else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated + && aOriginal && 'line' in aOriginal && 'column' in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + return; + } + else { + throw new Error('Invalid mapping: ' + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + }; + +/** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. 
+ */ +SourceMapGenerator.prototype._serializeMappings = + function SourceMapGenerator_serializeMappings() { + var previousGeneratedColumn = 0; + var previousGeneratedLine = 1; + var previousOriginalColumn = 0; + var previousOriginalLine = 0; + var previousName = 0; + var previousSource = 0; + var result = ''; + var next; + var mapping; + var nameIdx; + var sourceIdx; + + var mappings = this._mappings.toArray(); + for (var i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = '' + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ';'; + previousGeneratedLine++; + } + } + else { + if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ','; + } + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + }; + +SourceMapGenerator.prototype._generateSourcesContent = + function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function (source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + var key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? this._sourcesContents[key] + : null; + }, this); + }; + +/** + * Externalize the source map. + */ +SourceMapGenerator.prototype.toJSON = + function SourceMapGenerator_toJSON() { + var map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + }; + +/** + * Render the source map being generated to a string. + */ +SourceMapGenerator.prototype.toString = + function SourceMapGenerator_toString() { + return JSON.stringify(this.toJSON()); + }; + +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/node_modules/source-map/lib/source-node.js b/node_modules/source-map/lib/source-node.js new file mode 100644 index 0000000..e927f66 --- /dev/null +++ b/node_modules/source-map/lib/source-node.js @@ -0,0 +1,407 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; +var util = require('./util'); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +var REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +var NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +var isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +function SourceNode(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); +} + +/** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ +SourceNode.fromStringWithSourceMap = + function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + var node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are removed from this array, by calling `shiftNextLine`. + var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + var shiftNextLine = function() { + var lineContents = remainingLines.shift(); + // The last line of a file might not have a newline. + var newLine = remainingLines.shift() || ""; + return lineContents + newLine; + }; + + // We need to remember the position of "remainingLines" + var lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + var lastMapping = null; + + aSourceMapConsumer.eachMapping(function (mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. 
+ // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + var nextLine = remainingLines[0]; + var code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[0] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + var nextLine = remainingLines[0]; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[0] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLines.length > 0) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function (sourceFile) { + var content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + var source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + }; + +/** + * Add a chunk of generated JS to this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.add = function SourceNode_add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function (chunk) { + this.add(chunk); + }, this); + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ +SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (var i = aChunk.length-1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } + else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } + else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; +}; + +/** + * Walk over the tree of JS snippets in this node and its children. 
The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walk = function SourceNode_walk(aFn) { + var chunk; + for (var i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } + else { + if (chunk !== '') { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } +}; + +/** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ +SourceNode.prototype.join = function SourceNode_join(aSep) { + var newChildren; + var i; + var len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len-1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; +}; + +/** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ +SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { + var lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } + else if (typeof lastChild === 'string') { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } + else { + this.children.push(''.replace(aPattern, aReplacement)); + } + return this; +}; + +/** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ +SourceNode.prototype.setSourceContent = + function SourceNode_setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + }; + +/** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. + */ +SourceNode.prototype.walkSourceContents = + function SourceNode_walkSourceContents(aFn) { + for (var i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + var sources = Object.keys(this.sourceContents); + for (var i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + }; + +/** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ +SourceNode.prototype.toString = function SourceNode_toString() { + var str = ""; + this.walk(function (chunk) { + str += chunk; + }); + return str; +}; + +/** + * Returns the string representation of this source node along with a source + * map. 
+ */ +SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { + var generated = { + code: "", + line: 1, + column: 0 + }; + var map = new SourceMapGenerator(aArgs); + var sourceMappingActive = false; + var lastOriginalSource = null; + var lastOriginalLine = null; + var lastOriginalColumn = null; + var lastOriginalName = null; + this.walk(function (chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if(lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (var idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function (sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map: map }; +}; + +exports.SourceNode = SourceNode; diff --git a/node_modules/source-map/lib/util.js b/node_modules/source-map/lib/util.js new file mode 100644 index 0000000..44e0e45 --- /dev/null +++ b/node_modules/source-map/lib/util.js @@ -0,0 +1,417 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. If this is not specified and the property is missing, an + * error will be thrown. 
+ */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } else { + throw new Error('"' + aName + '" is a required argument.'); + } +} +exports.getArg = getArg; + +var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; +var dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + var match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + var url = ''; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ':'; + } + url += '//'; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + '@'; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +function normalize(aPath) { + var path = aPath; + var url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + var isAbsolute = exports.isAbsolute(path); + + var parts = path.split(/\/+/); + for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { + part = parts[i]; + if (part === '.') { + parts.splice(i, 1); + } else if (part === '..') { + up++; + } else if (up > 0) { + if (part === '') { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join('/'); + + if (path === '') { + path = isAbsolute ? '/' : '.'; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +} +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. 
+ */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + var aPathUrl = urlParse(aPath); + var aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || '/'; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + var joined = aPath.charAt(0) === '/' + ? aPath + : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function (aPath) { + return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ''); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + var level = 0; + while (aPath.indexOf(aRoot + '/') !== 0) { + var index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +var supportsNullProto = (function () { + var obj = Object.create(null); + return !('__proto__' in obj); +}()); + +function identity (s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return '$' + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? 
identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + var length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + + for (var i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + var cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. + */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = mappingA.source - mappingB.source; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return mappingA.name - mappingB.name; +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. 
+ */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + var cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; diff --git a/node_modules/source-map/package.json b/node_modules/source-map/package.json new file mode 100644 index 0000000..5f793ff --- /dev/null +++ b/node_modules/source-map/package.json @@ -0,0 +1,71 @@ +{ + "name": "source-map", + "description": "Generates and consumes source maps", + "version": "0.5.6", + "homepage": "https://github.com/mozilla/source-map", + "author": "Nick Fitzgerald ", + "contributors": [ + "Tobias Koppers ", + "Duncan Beevers ", + "Stephen Crane ", + "Ryan Seddon ", + "Miles Elam ", + "Mihai Bazon ", + "Michael Ficarra ", + "Todd Wolfson ", + "Alexander Solovyov ", + "Felix Gnass ", + "Conrad Irwin ", + "usrbincc ", + "David Glasser ", + "Chase Douglas ", + "Evan Wallace ", + "Heather Arthur ", + "Hugh Kennedy ", + "David Glasser ", + "Simon Lydell ", + "Jmeas Smith ", + "Michael Z Goddard ", + "azu ", + "John Gozde ", + "Adam Kirkton ", + "Chris Montgomery ", + "J. Ryan Stinnett ", + "Jack Herrington ", + "Chris Truter ", + "Daniel Espeset ", + "Jamie Wong ", + "Eddy Bruël ", + "Hawken Rives ", + "Gilad Peleg ", + "djchie ", + "Gary Ye ", + "Nicolas Lalevée " + ], + "repository": { + "type": "git", + "url": "http://github.com/mozilla/source-map.git" + }, + "main": "./source-map.js", + "files": [ + "source-map.js", + "lib/", + "dist/source-map.debug.js", + "dist/source-map.js", + "dist/source-map.min.js", + "dist/source-map.min.js.map" + ], + "engines": { + "node": ">=0.10.0" + }, + "license": "BSD-3-Clause", + "scripts": { + "test": "npm run build && node test/run-tests.js", + "build": "webpack --color", + "toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md" + }, + "devDependencies": { + "doctoc": "^0.15.0", + "webpack": "^1.12.0" + } +} diff --git a/node_modules/source-map/source-map.js b/node_modules/source-map/source-map.js new file mode 100644 index 0000000..bc88fe8 --- /dev/null +++ b/node_modules/source-map/source-map.js @@ -0,0 +1,8 @@ +/* + * Copyright 2009-2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE.txt or: + * http://opensource.org/licenses/BSD-3-Clause + */ +exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; +exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; +exports.SourceNode = require('./lib/source-node').SourceNode; diff --git a/node_modules/sshpk/.npmignore b/node_modules/sshpk/.npmignore new file mode 100644 index 0000000..8000b59 --- /dev/null +++ b/node_modules/sshpk/.npmignore @@ -0,0 +1,9 @@ +.gitmodules +deps +docs +Makefile +node_modules +test +tools +coverage +man/src diff --git a/node_modules/sshpk/.travis.yml b/node_modules/sshpk/.travis.yml new file mode 100644 index 0000000..c3394c2 --- /dev/null +++ b/node_modules/sshpk/.travis.yml @@ -0,0 +1,11 @@ +language: node_js +node_js: + - "5.10" + - "4.4" + - "4.1" + - "0.12" + - "0.10" +before_install: + - "make check" +after_success: + - '[ "${TRAVIS_NODE_VERSION}" = "4.4" ] && make codecovio' diff --git a/node_modules/sshpk/LICENSE b/node_modules/sshpk/LICENSE new file mode 100644 index 0000000..f6d947d --- /dev/null +++ b/node_modules/sshpk/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/sshpk/README.md b/node_modules/sshpk/README.md new file mode 100644 index 0000000..00a4055 --- /dev/null +++ b/node_modules/sshpk/README.md @@ -0,0 +1,666 @@ +sshpk +========= + +Parse, convert, fingerprint and use SSH keys (both public and private) in pure +node -- no `ssh-keygen` or other external dependencies. + +Supports RSA, DSA, ECDSA (nistp-\*) and ED25519 key types, in PEM (PKCS#1, +PKCS#8) and OpenSSH formats. + +This library has been extracted from +[`node-http-signature`](https://github.com/joyent/node-http-signature) +(work by [Mark Cavage](https://github.com/mcavage) and +[Dave Eddy](https://github.com/bahamas10)) and +[`node-ssh-fingerprint`](https://github.com/bahamas10/node-ssh-fingerprint) +(work by Dave Eddy), with additions (including ECDSA support) by +[Alex Wilson](https://github.com/arekinath). 
+ +Install +------- + +``` +npm install sshpk +``` + +Examples +-------- + +```js +var sshpk = require('sshpk'); + +var fs = require('fs'); + +/* Read in an OpenSSH-format public key */ +var keyPub = fs.readFileSync('id_rsa.pub'); +var key = sshpk.parseKey(keyPub, 'ssh'); + +/* Get metadata about the key */ +console.log('type => %s', key.type); +console.log('size => %d bits', key.size); +console.log('comment => %s', key.comment); + +/* Compute key fingerprints, in new OpenSSH (>6.7) format, and old MD5 */ +console.log('fingerprint => %s', key.fingerprint().toString()); +console.log('old-style fingerprint => %s', key.fingerprint('md5').toString()); +``` + +Example output: + +``` +type => rsa +size => 2048 bits +comment => foo@foo.com +fingerprint => SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w +old-style fingerprint => a0:c8:ad:6c:32:9a:32:fa:59:cc:a9:8c:0a:0d:6e:bd +``` + +More examples: converting between formats: + +```js +/* Read in a PEM public key */ +var keyPem = fs.readFileSync('id_rsa.pem'); +var key = sshpk.parseKey(keyPem, 'pem'); + +/* Convert to PEM PKCS#8 public key format */ +var pemBuf = key.toBuffer('pkcs8'); + +/* Convert to SSH public key format (and return as a string) */ +var sshKey = key.toString('ssh'); +``` + +Signing and verifying: + +```js +/* Read in an OpenSSH/PEM *private* key */ +var keyPriv = fs.readFileSync('id_ecdsa'); +var key = sshpk.parsePrivateKey(keyPriv, 'pem'); + +var data = 'some data'; + +/* Sign some data with the key */ +var s = key.createSign('sha1'); +s.update(data); +var signature = s.sign(); + +/* Now load the public key (could also use just key.toPublic()) */ +var keyPub = fs.readFileSync('id_ecdsa.pub'); +key = sshpk.parseKey(keyPub, 'ssh'); + +/* Make a crypto.Verifier with this key */ +var v = key.createVerify('sha1'); +v.update(data); +var valid = v.verify(signature); +/* => true! */ +``` + +Matching fingerprints with keys: + +```js +var fp = sshpk.parseFingerprint('SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w'); + +var keys = [sshpk.parseKey(...), sshpk.parseKey(...), ...]; + +keys.forEach(function (key) { + if (fp.matches(key)) + console.log('found it!'); +}); +``` + +Usage +----- + +## Public keys + +### `parseKey(data[, format = 'auto'[, options]])` + +Parses a key from a given data format and returns a new `Key` object. + +Parameters + +- `data` -- Either a Buffer or String, containing the key +- `format` -- String name of format to use, valid options are: + - `auto`: choose automatically from all below + - `pem`: supports both PKCS#1 and PKCS#8 + - `ssh`: standard OpenSSH format, + - `pkcs1`, `pkcs8`: variants of `pem` + - `rfc4253`: raw OpenSSH wire format + - `openssh`: new post-OpenSSH 6.5 internal format, produced by + `ssh-keygen -o` +- `options` -- Optional Object, extra options, with keys: + - `filename` -- Optional String, name for the key being parsed + (eg. the filename that was opened). Used to generate + Error messages + - `passphrase` -- Optional String, encryption passphrase used to decrypt an + encrypted PEM file + +### `Key.isKey(obj)` + +Returns `true` if the given object is a valid `Key` object created by a version +of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Key#type` + +String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. + +### `Key#size` + +Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; +for ECDSA this is the bit size of the curve in use. 
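+
+For example (a sketch -- the file name here is a placeholder, and it assumes the
+`Key` class is reachable as `sshpk.Key`), the `filename` option makes parse
+errors easier to trace, and `Key.isKey` confirms what came back:
+
+```js
+var data = fs.readFileSync('deploy_key.pub');
+var key = sshpk.parseKey(data, 'auto', { filename: 'deploy_key.pub' });
+
+console.log(sshpk.Key.isKey(key));                  /* => true */
+console.log('%s key, %d bits', key.type, key.size);
+```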
+ +### `Key#comment` + +Optional string, a key comment used by some formats (eg the `ssh` format). + +### `Key#curve` + +Only present if `this.type === 'ecdsa'`, string containing the name of the +named curve used with this key. Possible values include `nistp256`, `nistp384` +and `nistp521`. + +### `Key#toBuffer([format = 'ssh'])` + +Convert the key into a given data format and return the serialized key as +a Buffer. + +Parameters + +- `format` -- String name of format to use, for valid options see `parseKey()` + +### `Key#toString([format = 'ssh])` + +Same as `this.toBuffer(format).toString()`. + +### `Key#fingerprint([algorithm = 'sha256'])` + +Creates a new `Fingerprint` object representing this Key's fingerprint. + +Parameters + +- `algorithm` -- String name of hash algorithm to use, valid options are `md5`, + `sha1`, `sha256`, `sha384`, `sha512` + +### `Key#createVerify([hashAlgorithm])` + +Creates a `crypto.Verifier` specialized to use this Key (and the correct public +key algorithm to match it). The returned Verifier has the same API as a regular +one, except that the `verify()` function takes only the target signature as an +argument. + +Parameters + +- `hashAlgorithm` -- optional String name of hash algorithm to use, any + supported by OpenSSL are valid, usually including + `sha1`, `sha256`. + +`v.verify(signature[, format])` Parameters + +- `signature` -- either a Signature object, or a Buffer or String +- `format` -- optional String, name of format to interpret given String with. + Not valid if `signature` is a Signature or Buffer. + +### `Key#createDiffieHellman()` +### `Key#createDH()` + +Creates a Diffie-Hellman key exchange object initialized with this key and all +necessary parameters. This has the same API as a `crypto.DiffieHellman` +instance, except that functions take `Key` and `PrivateKey` objects as +arguments, and return them where indicated for. + +This is only valid for keys belonging to a cryptosystem that supports DHE +or a close analogue (i.e. `dsa`, `ecdsa` and `curve25519` keys). An attempt +to call this function on other keys will yield an `Error`. + +## Private keys + +### `parsePrivateKey(data[, format = 'auto'[, options]])` + +Parses a private key from a given data format and returns a new +`PrivateKey` object. + +Parameters + +- `data` -- Either a Buffer or String, containing the key +- `format` -- String name of format to use, valid options are: + - `auto`: choose automatically from all below + - `pem`: supports both PKCS#1 and PKCS#8 + - `ssh`, `openssh`: new post-OpenSSH 6.5 internal format, produced by + `ssh-keygen -o` + - `pkcs1`, `pkcs8`: variants of `pem` + - `rfc4253`: raw OpenSSH wire format +- `options` -- Optional Object, extra options, with keys: + - `filename` -- Optional String, name for the key being parsed + (eg. the filename that was opened). Used to generate + Error messages + - `passphrase` -- Optional String, encryption passphrase used to decrypt an + encrypted PEM file + +### `PrivateKey.isPrivateKey(obj)` + +Returns `true` if the given object is a valid `PrivateKey` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `PrivateKey#type` + +String, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`. + +### `PrivateKey#size` + +Integer, "size" of the key in bits. For RSA/DSA this is the size of the modulus; +for ECDSA this is the bit size of the curve in use. 
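+
+As a sketch (the file name and passphrase below are placeholders), a
+password-protected key can be handled by retrying with the `passphrase`
+option when a `KeyEncryptedError` is thrown:
+
+```js
+var data = fs.readFileSync('id_rsa');
+var key;
+try {
+    key = sshpk.parsePrivateKey(data, 'auto', { filename: 'id_rsa' });
+} catch (e) {
+    if (e.name !== 'KeyEncryptedError')
+        throw (e);
+    /* retry with the passphrase (hard-coded here; normally prompted for) */
+    key = sshpk.parsePrivateKey(data, 'auto',
+        { filename: 'id_rsa', passphrase: 'opensesame' });
+}
+console.log('%s private key, %d bits', key.type, key.size);
+```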
+ +### `PrivateKey#curve` + +Only present if `this.type === 'ecdsa'`, string containing the name of the +named curve used with this key. Possible values include `nistp256`, `nistp384` +and `nistp521`. + +### `PrivateKey#toBuffer([format = 'pkcs1'])` + +Convert the key into a given data format and return the serialized key as +a Buffer. + +Parameters + +- `format` -- String name of format to use, valid options are listed under + `parsePrivateKey`. Note that ED25519 keys default to `openssh` + format instead (as they have no `pkcs1` representation). + +### `PrivateKey#toString([format = 'pkcs1'])` + +Same as `this.toBuffer(format).toString()`. + +### `PrivateKey#toPublic()` + +Extract just the public part of this private key, and return it as a `Key` +object. + +### `PrivateKey#fingerprint([algorithm = 'sha256'])` + +Same as `this.toPublic().fingerprint()`. + +### `PrivateKey#createVerify([hashAlgorithm])` + +Same as `this.toPublic().createVerify()`. + +### `PrivateKey#createSign([hashAlgorithm])` + +Creates a `crypto.Sign` specialized to use this PrivateKey (and the correct +key algorithm to match it). The returned Signer has the same API as a regular +one, except that the `sign()` function takes no arguments, and returns a +`Signature` object. + +Parameters + +- `hashAlgorithm` -- optional String name of hash algorithm to use, any + supported by OpenSSL are valid, usually including + `sha1`, `sha256`. + +`v.sign()` Parameters + +- none + +### `PrivateKey#derive(newType)` + +Derives a related key of type `newType` from this key. Currently this is +only supported to change between `ed25519` and `curve25519` keys which are +stored with the same private key (but usually distinct public keys in order +to avoid degenerate keys that lead to a weak Diffie-Hellman exchange). + +Parameters + +- `newType` -- String, type of key to derive, either `ed25519` or `curve25519` + +## Fingerprints + +### `parseFingerprint(fingerprint[, algorithms])` + +Pre-parses a fingerprint, creating a `Fingerprint` object that can be used to +quickly locate a key by using the `Fingerprint#matches` function. + +Parameters + +- `fingerprint` -- String, the fingerprint value, in any supported format +- `algorithms` -- Optional list of strings, names of hash algorithms to limit + support to. If `fingerprint` uses a hash algorithm not on + this list, throws `InvalidAlgorithmError`. + +### `Fingerprint.isFingerprint(obj)` + +Returns `true` if the given object is a valid `Fingerprint` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Fingerprint#toString([format])` + +Returns a fingerprint as a string, in the given format. + +Parameters + +- `format` -- Optional String, format to use, valid options are `hex` and + `base64`. If this `Fingerprint` uses the `md5` algorithm, the + default format is `hex`. Otherwise, the default is `base64`. + +### `Fingerprint#matches(key)` + +Verifies whether or not this `Fingerprint` matches a given `Key`. This function +uses double-hashing to avoid leaking timing information. Returns a boolean. + +Parameters + +- `key` -- a `Key` object, the key to match this fingerprint against + +## Signatures + +### `parseSignature(signature, algorithm, format)` + +Parses a signature in a given format, creating a `Signature` object. Useful +for converting between the SSH and ASN.1 (PKCS/OpenSSL) signature formats, and +also returned as output from `PrivateKey#createSign().sign()`. 
+ +A Signature object can also be passed to a verifier produced by +`Key#createVerify()` and it will automatically be converted internally into the +correct format for verification. + +Parameters + +- `signature` -- a Buffer (binary) or String (base64), data of the actual + signature in the given format +- `algorithm` -- a String, name of the algorithm to be used, possible values + are `rsa`, `dsa`, `ecdsa` +- `format` -- a String, either `asn1` or `ssh` + +### `Signature.isSignature(obj)` + +Returns `true` if the given object is a valid `Signature` object created by a +version of `sshpk` compatible with this one. + +Parameters + +- `obj` -- Object to identify + +### `Signature#toBuffer([format = 'asn1'])` + +Converts a Signature to the given format and returns it as a Buffer. + +Parameters + +- `format` -- a String, either `asn1` or `ssh` + +### `Signature#toString([format = 'asn1'])` + +Same as `this.toBuffer(format).toString('base64')`. + +## Certificates + +`sshpk` includes basic support for parsing certificates in X.509 (PEM) format +and the OpenSSH certificate format. This feature is intended to be used mainly +to access basic metadata about certificates, extract public keys from them, and +also to generate simple self-signed certificates from an existing key. + +Notably, there is no implementation of CA chain-of-trust verification, and no +support for key usage restrictions (or other kinds of restrictions). Please do +the security world a favour, and DO NOT use this code for certificate +verification in the traditional X.509 CA chain style. + +### `parseCertificate(data, format)` + +Parameters + + - `data` -- a Buffer or String + - `format` -- a String, format to use, one of `'openssh'`, `'pem'` (X.509 in a + PEM wrapper), or `'x509'` (raw DER encoded) + +### `createSelfSignedCertificate(subject, privateKey[, options])` + +Parameters + + - `subject` -- an Identity, the subject of the certificate + - `privateKey` -- a PrivateKey, the key of the subject: will be used both to be + placed in the certificate and also to sign it (since this is + a self-signed certificate) + - `options` -- optional Object, with keys: + - `lifetime` -- optional Number, lifetime of the certificate from now in + seconds + - `validFrom`, `validUntil` -- optional Dates, beginning and end of + certificate validity period. If given + `lifetime` will be ignored + - `serial` -- optional Buffer, the serial number of the certificate + +### `createCertificate(subject, key, issuer, issuerKey[, options])` + +Parameters + + - `subject` -- an Identity, the subject of the certificate + - `key` -- a Key, the public key of the subject + - `issuer` -- an Identity, the issuer of the certificate who will sign it + - `issuerKey` -- a PrivateKey, the issuer's private key for signing + - `options` -- optional Object, with keys: + - `lifetime` -- optional Number, lifetime of the certificate from now in + seconds + - `validFrom`, `validUntil` -- optional Dates, beginning and end of + certificate validity period. If given + `lifetime` will be ignored + - `serial` -- optional Buffer, the serial number of the certificate + +### `Certificate#subjects` + +Array of `Identity` instances describing the subject of this certificate. + +### `Certificate#issuer` + +The `Identity` of the Certificate's issuer (signer). + +### `Certificate#subjectKey` + +The public key of the subject of the certificate, as a `Key` instance. + +### `Certificate#issuerKey` + +The public key of the signing issuer of this certificate, as a `Key` instance. 
+May be `undefined` if the issuer's key is unknown (e.g. on an X509 certificate). + +### `Certificate#serial` + +The serial number of the certificate. As this is normally a 64-bit or wider +integer, it is returned as a Buffer. + +### `Certificate#isExpired([when])` + +Tests whether the Certificate is currently expired (i.e. the `validFrom` and +`validUntil` dates specify a range of time that does not include the current +time). + +Parameters + + - `when` -- optional Date, if specified, tests whether the Certificate was or + will be expired at the specified time instead of now + +Returns a Boolean. + +### `Certificate#isSignedByKey(key)` + +Tests whether the Certificate was validly signed by the given (public) Key. + +Parameters + + - `key` -- a Key instance + +Returns a Boolean. + +### `Certificate#isSignedBy(certificate)` + +Tests whether this Certificate was validly signed by the subject of the given +certificate. Also tests that the issuer Identity of this Certificate and the +subject Identity of the other Certificate are equivalent. + +Parameters + + - `certificate` -- another Certificate instance + +Returns a Boolean. + +### `Certificate#fingerprint([hashAlgo])` + +Returns the X509-style fingerprint of the entire certificate (as a Fingerprint +instance). This matches what a web-browser or similar would display as the +certificate fingerprint and should not be confused with the fingerprint of the +subject's public key. + +Parameters + + - `hashAlgo` -- an optional String, any hash function name + +### `Certificate#toBuffer([format])` + +Serializes the Certificate to a Buffer and returns it. + +Parameters + + - `format` -- an optional String, output format, one of `'openssh'`, `'pem'` or + `'x509'`. Defaults to `'x509'`. + +Returns a Buffer. + +### `Certificate#toString([format])` + + - `format` -- an optional String, output format, one of `'openssh'`, `'pem'` or + `'x509'`. Defaults to `'pem'`. + +Returns a String. + +## Certificate identities + +### `identityForHost(hostname)` + +Constructs a host-type Identity for a given hostname. + +Parameters + + - `hostname` -- the fully qualified DNS name of the host + +Returns an Identity instance. + +### `identityForUser(uid)` + +Constructs a user-type Identity for a given UID. + +Parameters + + - `uid` -- a String, user identifier (login name) + +Returns an Identity instance. + +### `identityForEmail(email)` + +Constructs an email-type Identity for a given email address. + +Parameters + + - `email` -- a String, email address + +Returns an Identity instance. + +### `identityFromDN(dn)` + +Parses an LDAP-style DN string (e.g. `'CN=foo, C=US'`) and turns it into an +Identity instance. + +Parameters + + - `dn` -- a String + +Returns an Identity instance. + +### `Identity#toString()` + +Returns the identity as an LDAP-style DN string. +e.g. `'CN=foo, O=bar corp, C=us'` + +### `Identity#type` + +The type of identity. One of `'host'`, `'user'`, `'email'` or `'unknown'` + +### `Identity#hostname` +### `Identity#uid` +### `Identity#email` + +Set when `type` is `'host'`, `'user'`, or `'email'`, respectively. Strings. + +### `Identity#cn` + +The value of the first `CN=` in the DN, if any. + +Errors +------ + +### `InvalidAlgorithmError` + +The specified algorithm is not valid, either because it is not supported, or +because it was not included on a list of allowed algorithms. + +Thrown by `Fingerprint.parse`, `Key#fingerprint`. 
+ +Properties + +- `algorithm` -- the algorithm that could not be validated + +### `FingerprintFormatError` + +The fingerprint string given could not be parsed as a supported fingerprint +format, or the specified fingerprint format is invalid. + +Thrown by `Fingerprint.parse`, `Fingerprint#toString`. + +Properties + +- `fingerprint` -- if caused by a fingerprint, the string value given +- `format` -- if caused by an invalid format specification, the string value given + +### `KeyParseError` + +The key data given could not be parsed as a valid key. + +Properties + +- `keyName` -- `filename` that was given to `parseKey` +- `format` -- the `format` that was trying to parse the key (see `parseKey`) +- `innerErr` -- the inner Error thrown by the format parser + +### `KeyEncryptedError` + +The key is encrypted with a symmetric key (ie, it is password protected). The +parsing operation would succeed if it was given the `passphrase` option. + +Properties + +- `keyName` -- `filename` that was given to `parseKey` +- `format` -- the `format` that was trying to parse the key (currently can only + be `"pem"`) + +### `CertificateParseError` + +The certificate data given could not be parsed as a valid certificate. + +Properties + +- `certName` -- `filename` that was given to `parseCertificate` +- `format` -- the `format` that was trying to parse the key + (see `parseCertificate`) +- `innerErr` -- the inner Error thrown by the format parser + +Friends of sshpk +---------------- + + * [`sshpk-agent`](https://github.com/arekinath/node-sshpk-agent) is a library + for speaking the `ssh-agent` protocol from node.js, which uses `sshpk` diff --git a/node_modules/sshpk/bin/sshpk-conv b/node_modules/sshpk/bin/sshpk-conv new file mode 100755 index 0000000..444045a --- /dev/null +++ b/node_modules/sshpk/bin/sshpk-conv @@ -0,0 +1,201 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. 
+ +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); +var tty = require('tty'); +var readline = require('readline'); +var getPassword = require('getpass').getPass; + +var options = [ + { + names: ['outformat', 't'], + type: 'string', + help: 'Output format' + }, + { + names: ['informat', 'T'], + type: 'string', + help: 'Input format' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input file name (default stdin)' + }, + { + names: ['out', 'o'], + type: 'string', + help: 'Output file name (default stdout)' + }, + { + names: ['private', 'p'], + type: 'bool', + help: 'Produce a private key as output' + }, + { + names: ['derive', 'd'], + type: 'string', + help: 'Output a new key derived from this one, with given algo' + }, + { + names: ['identify', 'i'], + type: 'bool', + help: 'Print key metadata instead of converting' + }, + { + names: ['comment', 'c'], + type: 'string', + help: 'Set key comment, if output format supports' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-conv: error: %s', e.message); + process.exit(1); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-conv: converts between SSH key formats\n'); + console.error(help); + console.error('\navailable formats:'); + console.error(' - pem, pkcs1 eg id_rsa'); + console.error(' - ssh eg id_rsa.pub'); + console.error(' - pkcs8 format you want for openssl'); + console.error(' - openssh like output of ssh-keygen -o'); + console.error(' - rfc4253 raw OpenSSH wire format'); + process.exit(1); + } + + /* + * Key derivation can only be done on private keys, so use of the -d + * option necessarily implies -p. 
+ */ + if (opts.derive) + opts.private = true; + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = path.basename(inFilePath); + + try { + if (inFilePath) { + fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-conv: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var outFile = process.stdout; + + try { + if (opts.out && !opts.identify) { + fs.accessSync(path.dirname(opts.out), fs.W_OK); + outFile = fs.createWriteStream(opts.out); + } + } catch (e) { + console.error('sshpk-conv: error opening output file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var bufs = []; + inFile.on('readable', function () { + var data; + while ((data = inFile.read())) + bufs.push(data); + }); + var parseOpts = {}; + parseOpts.filename = inFileName; + inFile.on('end', function processKey() { + var buf = Buffer.concat(bufs); + var fmt = 'auto'; + if (opts.informat) + fmt = opts.informat; + var f = sshpk.parseKey; + if (opts.private) + f = sshpk.parsePrivateKey; + try { + var key = f(buf, fmt, parseOpts); + } catch (e) { + if (e.name === 'KeyEncryptedError') { + getPassword(function (err, pw) { + if (err) { + console.log('sshpk-conv: ' + + err.name + ': ' + + err.message); + process.exit(1); + } + parseOpts.passphrase = pw; + processKey(); + }); + return; + } + console.error('sshpk-conv: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (opts.derive) + key = key.derive(opts.derive); + + if (opts.comment) + key.comment = opts.comment; + + if (!opts.identify) { + fmt = undefined; + if (opts.outformat) + fmt = opts.outformat; + outFile.write(key.toBuffer(fmt)); + if (fmt === 'ssh' || + (!opts.private && fmt === undefined)) + outFile.write('\n'); + outFile.once('drain', function () { + process.exit(0); + }); + } else { + var kind = 'public'; + if (sshpk.PrivateKey.isPrivateKey(key)) + kind = 'private'; + console.log('%s: a %d bit %s %s key', inFileName, + key.size, key.type.toUpperCase(), kind); + if (key.type === 'ecdsa') + console.log('ECDSA curve: %s', key.curve); + if (key.comment) + console.log('Comment: %s', key.comment); + console.log('Fingerprint:'); + console.log(' ' + key.fingerprint().toString()); + console.log(' ' + key.fingerprint('md5').toString()); + process.exit(0); + } + }); +} diff --git a/node_modules/sshpk/bin/sshpk-sign b/node_modules/sshpk/bin/sshpk-sign new file mode 100755 index 0000000..673fc98 --- /dev/null +++ b/node_modules/sshpk/bin/sshpk-sign @@ -0,0 +1,191 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. 
+ +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); +var getPassword = require('getpass').getPass; + +var options = [ + { + names: ['hash', 'H'], + type: 'string', + help: 'Hash algorithm (sha1, sha256, sha384, sha512)' + }, + { + names: ['verbose', 'v'], + type: 'bool', + help: 'Display verbose info about key and hash used' + }, + { + names: ['identity', 'i'], + type: 'string', + help: 'Path to key to use' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input filename' + }, + { + names: ['out', 'o'], + type: 'string', + help: 'Output filename' + }, + { + names: ['format', 't'], + type: 'string', + help: 'Signature format (asn1, ssh, raw)' + }, + { + names: ['binary', 'b'], + type: 'bool', + help: 'Output raw binary instead of base64' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +var parseOpts = {}; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-sign: error: %s', e.message); + process.exit(1); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-sign: sign data using an SSH key\n'); + console.error(help); + process.exit(1); + } + + if (!opts.identity) { + var help = parser.help({}).trimRight(); + console.error('sshpk-sign: the -i or --identity option ' + + 'is required\n'); + console.error(help); + process.exit(1); + } + + var keyData = fs.readFileSync(opts.identity); + parseOpts.filename = opts.identity; + + run(); +} + +function run() { + var key; + try { + key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts); + } catch (e) { + if (e.name === 'KeyEncryptedError') { + getPassword(function (err, pw) { + parseOpts.passphrase = pw; + run(); + }); + return; + } + console.error('sshpk-sign: error loading private key "' + + opts.identity + '": ' + e.name + ': ' + e.message); + process.exit(1); + } + + var hash = opts.hash || key.defaultHashAlgorithm(); + + var signer; + try { + signer = key.createSign(hash); + } catch (e) { + console.error('sshpk-sign: error creating signer: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (opts.verbose) { + console.error('sshpk-sign: using %s-%s with a %d bit key', + key.type, hash, key.size); + } + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = path.basename(inFilePath); + + try { + if (inFilePath) { + fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-sign: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + var outFile = process.stdout; + + try { + if (opts.out && !opts.identify) { + fs.accessSync(path.dirname(opts.out), fs.W_OK); + outFile = fs.createWriteStream(opts.out); + } + } catch (e) { + console.error('sshpk-sign: error opening output file' + + ': ' + e.name + ': ' + e.message); + process.exit(1); + } + + inFile.pipe(signer); + inFile.on('end', function () { + var sig; + try { + sig = signer.sign(); + } catch (e) { + console.error('sshpk-sign: error signing data: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + var fmt = opts.format || 'asn1'; + var output; + try { + output = sig.toBuffer(fmt); + 
if (!opts.binary) + output = output.toString('base64'); + } catch (e) { + console.error('sshpk-sign: error converting signature' + + ' to ' + fmt + ' format: ' + e.name + ': ' + + e.message); + process.exit(1); + } + + outFile.write(output); + if (!opts.binary) + outFile.write('\n'); + outFile.once('drain', function () { + process.exit(0); + }); + }); +} diff --git a/node_modules/sshpk/bin/sshpk-verify b/node_modules/sshpk/bin/sshpk-verify new file mode 100755 index 0000000..a1669f4 --- /dev/null +++ b/node_modules/sshpk/bin/sshpk-verify @@ -0,0 +1,166 @@ +#!/usr/bin/env node +// -*- mode: js -*- +// vim: set filetype=javascript : +// Copyright 2015 Joyent, Inc. All rights reserved. + +var dashdash = require('dashdash'); +var sshpk = require('../lib/index'); +var fs = require('fs'); +var path = require('path'); + +var options = [ + { + names: ['hash', 'H'], + type: 'string', + help: 'Hash algorithm (sha1, sha256, sha384, sha512)' + }, + { + names: ['verbose', 'v'], + type: 'bool', + help: 'Display verbose info about key and hash used' + }, + { + names: ['identity', 'i'], + type: 'string', + help: 'Path to (public) key to use' + }, + { + names: ['file', 'f'], + type: 'string', + help: 'Input filename' + }, + { + names: ['format', 't'], + type: 'string', + help: 'Signature format (asn1, ssh, raw)' + }, + { + names: ['signature', 's'], + type: 'string', + help: 'base64-encoded signature data' + }, + { + names: ['help', 'h'], + type: 'bool', + help: 'Shows this help text' + } +]; + +if (require.main === module) { + var parser = dashdash.createParser({ + options: options + }); + + try { + var opts = parser.parse(process.argv); + } catch (e) { + console.error('sshpk-verify: error: %s', e.message); + process.exit(3); + } + + if (opts.help || opts._args.length > 1) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: sign data using an SSH key\n'); + console.error(help); + process.exit(3); + } + + if (!opts.identity) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: the -i or --identity option ' + + 'is required\n'); + console.error(help); + process.exit(3); + } + + if (!opts.signature) { + var help = parser.help({}).trimRight(); + console.error('sshpk-verify: the -s or --signature option ' + + 'is required\n'); + console.error(help); + process.exit(3); + } + + var keyData = fs.readFileSync(opts.identity); + + var key; + try { + key = sshpk.parseKey(keyData); + } catch (e) { + console.error('sshpk-verify: error loading key "' + + opts.identity + '": ' + e.name + ': ' + e.message); + process.exit(2); + } + + var fmt = opts.format || 'asn1'; + var sigData = new Buffer(opts.signature, 'base64'); + + var sig; + try { + sig = sshpk.parseSignature(sigData, key.type, fmt); + } catch (e) { + console.error('sshpk-verify: error parsing signature: ' + + e.name + ': ' + e.message); + process.exit(2); + } + + var hash = opts.hash || key.defaultHashAlgorithm(); + + var verifier; + try { + verifier = key.createVerify(hash); + } catch (e) { + console.error('sshpk-verify: error creating verifier: ' + + e.name + ': ' + e.message); + process.exit(2); + } + + if (opts.verbose) { + console.error('sshpk-verify: using %s-%s with a %d bit key', + key.type, hash, key.size); + } + + var inFile = process.stdin; + var inFileName = 'stdin'; + + var inFilePath; + if (opts.file) { + inFilePath = opts.file; + } else if (opts._args.length === 1) { + inFilePath = opts._args[0]; + } + + if (inFilePath) + inFileName = path.basename(inFilePath); + + try { + if (inFilePath) { + 
fs.accessSync(inFilePath, fs.R_OK); + inFile = fs.createReadStream(inFilePath); + } + } catch (e) { + console.error('sshpk-verify: error opening input file' + + ': ' + e.name + ': ' + e.message); + process.exit(2); + } + + inFile.pipe(verifier); + inFile.on('end', function () { + var ret; + try { + ret = verifier.verify(sig); + } catch (e) { + console.error('sshpk-verify: error verifying data: ' + + e.name + ': ' + e.message); + process.exit(1); + } + + if (ret) { + console.error('OK'); + process.exit(0); + } + + console.error('NOT OK'); + process.exit(1); + }); +} diff --git a/node_modules/sshpk/lib/algs.js b/node_modules/sshpk/lib/algs.js new file mode 100644 index 0000000..f30af56 --- /dev/null +++ b/node_modules/sshpk/lib/algs.js @@ -0,0 +1,168 @@ +// Copyright 2015 Joyent, Inc. + +var algInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y'], + sizePart: 'p' + }, + 'rsa': { + parts: ['e', 'n'], + sizePart: 'n' + }, + 'ecdsa': { + parts: ['curve', 'Q'], + sizePart: 'Q' + }, + 'ed25519': { + parts: ['R'], + normalize: false, + sizePart: 'R' + } +}; +algInfo['curve25519'] = algInfo['ed25519']; + +var algPrivInfo = { + 'dsa': { + parts: ['p', 'q', 'g', 'y', 'x'] + }, + 'rsa': { + parts: ['n', 'e', 'd', 'iqmp', 'p', 'q'] + }, + 'ecdsa': { + parts: ['curve', 'Q', 'd'] + }, + 'ed25519': { + parts: ['R', 'r'], + normalize: false + } +}; +algPrivInfo['curve25519'] = algPrivInfo['ed25519']; + +var hashAlgs = { + 'md5': true, + 'sha1': true, + 'sha256': true, + 'sha384': true, + 'sha512': true +}; + +/* + * Taken from + * http://csrc.nist.gov/groups/ST/toolkit/documents/dss/NISTReCur.pdf + */ +var curves = { + 'nistp256': { + size: 256, + pkcs8oid: '1.2.840.10045.3.1.7', + p: new Buffer(('00' + + 'ffffffff 00000001 00000000 00000000' + + '00000000 ffffffff ffffffff ffffffff'). + replace(/ /g, ''), 'hex'), + a: new Buffer(('00' + + 'FFFFFFFF 00000001 00000000 00000000' + + '00000000 FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(( + '5ac635d8 aa3a93e7 b3ebbd55 769886bc' + + '651d06b0 cc53b0f6 3bce3c3e 27d2604b'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'c49d3608 86e70493 6a6678e1 139d26b7' + + '819f7e90'). + replace(/ /g, ''), 'hex'), + n: new Buffer(('00' + + 'ffffffff 00000000 ffffffff ffffffff' + + 'bce6faad a7179e84 f3b9cac2 fc632551'). + replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + '6b17d1f2 e12c4247 f8bce6e5 63a440f2' + + '77037d81 2deb33a0 f4a13945 d898c296' + + '4fe342e2 fe1a7f9b 8ee7eb4a 7c0f9e16' + + '2bce3357 6b315ece cbb64068 37bf51f5'). + replace(/ /g, ''), 'hex') + }, + 'nistp384': { + size: 384, + pkcs8oid: '1.3.132.0.34', + p: new Buffer(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffe' + + 'ffffffff 00000000 00000000 ffffffff'). + replace(/ /g, ''), 'hex'), + a: new Buffer(('00' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFE' + + 'FFFFFFFF 00000000 00000000 FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(( + 'b3312fa7 e23ee7e4 988e056b e3f82d19' + + '181d9c6e fe814112 0314088f 5013875a' + + 'c656398d 8a2ed19d 2a85c8ed d3ec2aef'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'a335926a a319a27a 1d00896a 6773a482' + + '7acdac73'). + replace(/ /g, ''), 'hex'), + n: new Buffer(('00' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff c7634d81 f4372ddf' + + '581a0db2 48b0a77a ecec196a ccc52973'). 
+ replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + 'aa87ca22 be8b0537 8eb1c71e f320ad74' + + '6e1d3b62 8ba79b98 59f741e0 82542a38' + + '5502f25d bf55296c 3a545e38 72760ab7' + + '3617de4a 96262c6f 5d9e98bf 9292dc29' + + 'f8f41dbd 289a147c e9da3113 b5f0b8c0' + + '0a60b1ce 1d7e819d 7a431d7c 90ea0e5f'). + replace(/ /g, ''), 'hex') + }, + 'nistp521': { + size: 521, + pkcs8oid: '1.3.132.0.35', + p: new Buffer(( + '01ffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffff').replace(/ /g, ''), 'hex'), + a: new Buffer(('01FF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFF' + + 'FFFFFFFF FFFFFFFF FFFFFFFF FFFFFFFC'). + replace(/ /g, ''), 'hex'), + b: new Buffer(('51' + + '953eb961 8e1c9a1f 929a21a0 b68540ee' + + 'a2da725b 99b315f3 b8b48991 8ef109e1' + + '56193951 ec7e937b 1652c0bd 3bb1bf07' + + '3573df88 3d2c34f1 ef451fd4 6b503f00'). + replace(/ /g, ''), 'hex'), + s: new Buffer(('00' + + 'd09e8800 291cb853 96cc6717 393284aa' + + 'a0da64ba').replace(/ /g, ''), 'hex'), + n: new Buffer(('01ff' + + 'ffffffff ffffffff ffffffff ffffffff' + + 'ffffffff ffffffff ffffffff fffffffa' + + '51868783 bf2f966b 7fcc0148 f709a5d0' + + '3bb5c9b8 899c47ae bb6fb71e 91386409'). + replace(/ /g, ''), 'hex'), + G: new Buffer(('04' + + '00c6 858e06b7 0404e9cd 9e3ecb66 2395b442' + + '9c648139 053fb521 f828af60 6b4d3dba' + + 'a14b5e77 efe75928 fe1dc127 a2ffa8de' + + '3348b3c1 856a429b f97e7e31 c2e5bd66' + + '0118 39296a78 9a3bc004 5c8a5fb4 2c7d1bd9' + + '98f54449 579b4468 17afbd17 273e662c' + + '97ee7299 5ef42640 c550b901 3fad0761' + + '353c7086 a272c240 88be9476 9fd16650'). + replace(/ /g, ''), 'hex') + } +}; + +module.exports = { + info: algInfo, + privInfo: algPrivInfo, + hashAlgs: hashAlgs, + curves: curves +}; diff --git a/node_modules/sshpk/lib/certificate.js b/node_modules/sshpk/lib/certificate.js new file mode 100644 index 0000000..4fbe6ab --- /dev/null +++ b/node_modules/sshpk/lib/certificate.js @@ -0,0 +1,291 @@ +// Copyright 2016 Joyent, Inc. 
+ +module.exports = Certificate; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var errs = require('./errors'); +var util = require('util'); +var utils = require('./utils'); +var Key = require('./key'); +var PrivateKey = require('./private-key'); +var Identity = require('./identity'); + +var formats = {}; +formats['openssh'] = require('./formats/openssh-cert'); +formats['x509'] = require('./formats/x509'); +formats['pem'] = require('./formats/x509-pem'); + +var CertificateParseError = errs.CertificateParseError; +var InvalidAlgorithmError = errs.InvalidAlgorithmError; + +function Certificate(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.subjects, 'options.subjects'); + utils.assertCompatible(opts.subjects[0], Identity, [1, 0], + 'options.subjects'); + utils.assertCompatible(opts.subjectKey, Key, [1, 0], + 'options.subjectKey'); + utils.assertCompatible(opts.issuer, Identity, [1, 0], 'options.issuer'); + if (opts.issuerKey !== undefined) { + utils.assertCompatible(opts.issuerKey, Key, [1, 0], + 'options.issuerKey'); + } + assert.object(opts.signatures, 'options.signatures'); + assert.buffer(opts.serial, 'options.serial'); + assert.date(opts.validFrom, 'options.validFrom'); + assert.date(opts.validUntil, 'optons.validUntil'); + + this._hashCache = {}; + + this.subjects = opts.subjects; + this.issuer = opts.issuer; + this.subjectKey = opts.subjectKey; + this.issuerKey = opts.issuerKey; + this.signatures = opts.signatures; + this.serial = opts.serial; + this.validFrom = opts.validFrom; + this.validUntil = opts.validUntil; +} + +Certificate.formats = formats; + +Certificate.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'x509'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + return (formats[format].write(this, options)); +}; + +Certificate.prototype.toString = function (format, options) { + if (format === undefined) + format = 'pem'; + return (this.toBuffer(format, options).toString()); +}; + +Certificate.prototype.fingerprint = function (algo) { + if (algo === undefined) + algo = 'sha256'; + assert.string(algo, 'algorithm'); + var opts = { + type: 'certificate', + hash: this.hash(algo), + algorithm: algo + }; + return (new Fingerprint(opts)); +}; + +Certificate.prototype.hash = function (algo) { + assert.string(algo, 'algorithm'); + algo = algo.toLowerCase(); + if (algs.hashAlgs[algo] === undefined) + throw (new InvalidAlgorithmError(algo)); + + if (this._hashCache[algo]) + return (this._hashCache[algo]); + + var hash = crypto.createHash(algo). + update(this.toBuffer('x509')).digest(); + this._hashCache[algo] = hash; + return (hash); +}; + +Certificate.prototype.isExpired = function (when) { + if (when === undefined) + when = new Date(); + return (!((when.getTime() >= this.validFrom.getTime()) && + (when.getTime() < this.validUntil.getTime()))); +}; + +Certificate.prototype.isSignedBy = function (issuerCert) { + utils.assertCompatible(issuerCert, Certificate, [1, 0], 'issuer'); + + if (!this.issuer.equals(issuerCert.subjects[0])) + return (false); + + return (this.isSignedByKey(issuerCert.subjectKey)); +}; + +Certificate.prototype.isSignedByKey = function (issuerKey) { + utils.assertCompatible(issuerKey, Key, [1, 2], 'issuerKey'); + + if (this.issuerKey !== undefined) { + return (this.issuerKey. 
+ fingerprint('sha512').matches(issuerKey)); + } + + var fmt = Object.keys(this.signatures)[0]; + var valid = formats[fmt].verify(this, issuerKey); + if (valid) + this.issuerKey = issuerKey; + return (valid); +}; + +Certificate.prototype.signWith = function (key) { + utils.assertCompatible(key, PrivateKey, [1, 2], 'key'); + var fmts = Object.keys(formats); + var didOne = false; + for (var i = 0; i < fmts.length; ++i) { + if (fmts[i] !== 'pem') { + var ret = formats[fmts[i]].sign(this, key); + if (ret === true) + didOne = true; + } + } + if (!didOne) { + throw (new Error('Failed to sign the certificate for any ' + + 'available certificate formats')); + } +}; + +Certificate.createSelfSigned = function (subjectOrSubjects, key, options) { + var subjects; + if (Array.isArray(subjectOrSubjects)) + subjects = subjectOrSubjects; + else + subjects = [subjectOrSubjects]; + + assert.arrayOfObject(subjects); + subjects.forEach(function (subject) { + utils.assertCompatible(subject, Identity, [1, 0], 'subject'); + }); + + utils.assertCompatible(key, PrivateKey, [1, 2], 'private key'); + + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalObject(options.validFrom, 'options.validFrom'); + assert.optionalObject(options.validUntil, 'options.validUntil'); + var validFrom = options.validFrom; + var validUntil = options.validUntil; + if (validFrom === undefined) + validFrom = new Date(); + if (validUntil === undefined) { + assert.optionalNumber(options.lifetime, 'options.lifetime'); + var lifetime = options.lifetime; + if (lifetime === undefined) + lifetime = 10*365*24*3600; + validUntil = new Date(); + validUntil.setTime(validUntil.getTime() + lifetime*1000); + } + assert.optionalBuffer(options.serial, 'options.serial'); + var serial = options.serial; + if (serial === undefined) + serial = new Buffer('0000000000000001', 'hex'); + + var cert = new Certificate({ + subjects: subjects, + issuer: subjects[0], + subjectKey: key.toPublic(), + issuerKey: key.toPublic(), + signatures: {}, + serial: serial, + validFrom: validFrom, + validUntil: validUntil + }); + cert.signWith(key); + + return (cert); +}; + +Certificate.create = + function (subjectOrSubjects, key, issuer, issuerKey, options) { + var subjects; + if (Array.isArray(subjectOrSubjects)) + subjects = subjectOrSubjects; + else + subjects = [subjectOrSubjects]; + + assert.arrayOfObject(subjects); + subjects.forEach(function (subject) { + utils.assertCompatible(subject, Identity, [1, 0], 'subject'); + }); + + utils.assertCompatible(key, Key, [1, 0], 'key'); + if (PrivateKey.isPrivateKey(key)) + key = key.toPublic(); + utils.assertCompatible(issuer, Identity, [1, 0], 'issuer'); + utils.assertCompatible(issuerKey, PrivateKey, [1, 2], 'issuer key'); + + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalObject(options.validFrom, 'options.validFrom'); + assert.optionalObject(options.validUntil, 'options.validUntil'); + var validFrom = options.validFrom; + var validUntil = options.validUntil; + if (validFrom === undefined) + validFrom = new Date(); + if (validUntil === undefined) { + assert.optionalNumber(options.lifetime, 'options.lifetime'); + var lifetime = options.lifetime; + if (lifetime === undefined) + lifetime = 10*365*24*3600; + validUntil = new Date(); + validUntil.setTime(validUntil.getTime() + lifetime*1000); + } + assert.optionalBuffer(options.serial, 'options.serial'); + var serial = options.serial; + if (serial === undefined) + serial = new 
Buffer('0000000000000001', 'hex'); + + var cert = new Certificate({ + subjects: subjects, + issuer: issuer, + subjectKey: key, + issuerKey: issuerKey.toPublic(), + signatures: {}, + serial: serial, + validFrom: validFrom, + validUntil: validUntil + }); + cert.signWith(issuerKey); + + return (cert); +}; + +Certificate.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + return (k); + } catch (e) { + throw (new CertificateParseError(options.filename, format, e)); + } +}; + +Certificate.isCertificate = function (obj, ver) { + return (utils.isCompatible(obj, Certificate, ver)); +}; + +/* + * API versions for Certificate: + * [1,0] -- initial ver + */ +Certificate.prototype._sshpkApiVersion = [1, 0]; + +Certificate._oldVersionDetect = function (obj) { + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/dhe.js b/node_modules/sshpk/lib/dhe.js new file mode 100644 index 0000000..8f9548c --- /dev/null +++ b/node_modules/sshpk/lib/dhe.js @@ -0,0 +1,311 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = DiffieHellman; + +var assert = require('assert-plus'); +var crypto = require('crypto'); +var algs = require('./algs'); +var utils = require('./utils'); +var ed; + +var Key = require('./key'); +var PrivateKey = require('./private-key'); + +var CRYPTO_HAVE_ECDH = (crypto.createECDH !== undefined); + +var ecdh, ec, jsbn; + +function DiffieHellman(key) { + utils.assertCompatible(key, Key, [1, 4], 'key'); + this._isPriv = PrivateKey.isPrivateKey(key, [1, 3]); + this._algo = key.type; + this._curve = key.curve; + this._key = key; + if (key.type === 'dsa') { + if (!CRYPTO_HAVE_ECDH) { + throw (new Error('Due to bugs in the node 0.10 ' + + 'crypto API, node 0.12.x or later is required ' + + 'to use DH')); + } + this._dh = crypto.createDiffieHellman( + key.part.p.data, undefined, + key.part.g.data, undefined); + this._p = key.part.p; + this._g = key.part.g; + if (this._isPriv) + this._dh.setPrivateKey(key.part.x.data); + this._dh.setPublicKey(key.part.y.data); + + } else if (key.type === 'ecdsa') { + if (!CRYPTO_HAVE_ECDH) { + if (ecdh === undefined) + ecdh = require('ecc-jsbn'); + if (ec === undefined) + ec = require('ecc-jsbn/lib/ec'); + if (jsbn === undefined) + jsbn = require('jsbn').BigInteger; + + this._ecParams = new X9ECParameters(this._curve); + + if (this._isPriv) { + this._priv = new ECPrivate( + this._ecParams, key.part.d.data); + } + return; + } + + var curve = { + 'nistp256': 'prime256v1', + 'nistp384': 'secp384r1', + 'nistp521': 'secp521r1' + }[key.curve]; + this._dh = crypto.createECDH(curve); + if (typeof (this._dh) !== 'object' || + typeof (this._dh.setPrivateKey) !== 'function') { + CRYPTO_HAVE_ECDH = false; + DiffieHellman.call(this, key); + return; + } + if (this._isPriv) + this._dh.setPrivateKey(key.part.d.data); + this._dh.setPublicKey(key.part.Q.data); + + } else if (key.type === 'curve25519') { + if (ed === undefined) + ed = require('jodid25519'); + + if (this._isPriv) { + this._priv = key.part.r.data; + if (this._priv[0] === 0x00) + 
this._priv = this._priv.slice(1); + this._priv = this._priv.slice(0, 32); + } + + } else { + throw (new Error('DH not supported for ' + key.type + ' keys')); + } +} + +DiffieHellman.prototype.getPublicKey = function () { + if (this._isPriv) + return (this._key.toPublic()); + return (this._key); +}; + +DiffieHellman.prototype.getPrivateKey = function () { + if (this._isPriv) + return (this._key); + else + return (undefined); +}; +DiffieHellman.prototype.getKey = DiffieHellman.prototype.getPrivateKey; + +DiffieHellman.prototype._keyCheck = function (pk, isPub) { + assert.object(pk, 'key'); + if (!isPub) + utils.assertCompatible(pk, PrivateKey, [1, 3], 'key'); + utils.assertCompatible(pk, Key, [1, 4], 'key'); + + if (pk.type !== this._algo) { + throw (new Error('A ' + pk.type + ' key cannot be used in ' + + this._algo + ' Diffie-Hellman')); + } + + if (pk.curve !== this._curve) { + throw (new Error('A key from the ' + pk.curve + ' curve ' + + 'cannot be used with a ' + this._curve + + ' Diffie-Hellman')); + } + + if (pk.type === 'dsa') { + assert.deepEqual(pk.part.p, this._p, + 'DSA key prime does not match'); + assert.deepEqual(pk.part.g, this._g, + 'DSA key generator does not match'); + } +}; + +DiffieHellman.prototype.setKey = function (pk) { + this._keyCheck(pk); + + if (pk.type === 'dsa') { + this._dh.setPrivateKey(pk.part.x.data); + this._dh.setPublicKey(pk.part.y.data); + + } else if (pk.type === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + this._dh.setPrivateKey(pk.part.d.data); + this._dh.setPublicKey(pk.part.Q.data); + } else { + this._priv = new ECPrivate( + this._ecParams, pk.part.d.data); + } + + } else if (pk.type === 'curve25519') { + this._priv = pk.part.r.data; + if (this._priv[0] === 0x00) + this._priv = this._priv.slice(1); + this._priv = this._priv.slice(0, 32); + } + this._key = pk; + this._isPriv = true; +}; +DiffieHellman.prototype.setPrivateKey = DiffieHellman.prototype.setKey; + +DiffieHellman.prototype.computeSecret = function (otherpk) { + this._keyCheck(otherpk, true); + if (!this._isPriv) + throw (new Error('DH exchange has not been initialized with ' + + 'a private key yet')); + + var pub; + if (this._algo === 'dsa') { + return (this._dh.computeSecret( + otherpk.part.y.data)); + + } else if (this._algo === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + return (this._dh.computeSecret( + otherpk.part.Q.data)); + } else { + pub = new ECPublic( + this._ecParams, otherpk.part.Q.data); + return (this._priv.deriveSharedSecret(pub)); + } + + } else if (this._algo === 'curve25519') { + pub = otherpk.part.R.data; + if (pub[0] === 0x00) + pub = pub.slice(1); + + var secret = ed.dh.computeKey( + this._priv.toString('binary'), + pub.toString('binary')); + + return (new Buffer(secret, 'binary')); + } + + throw (new Error('Invalid algorithm: ' + this._algo)); +}; + +DiffieHellman.prototype.generateKey = function () { + var parts = []; + var priv, pub; + if (this._algo === 'dsa') { + this._dh.generateKeys(); + + parts.push({name: 'p', data: this._p.data}); + parts.push({name: 'q', data: this._key.part.q.data}); + parts.push({name: 'g', data: this._g.data}); + parts.push({name: 'y', data: this._dh.getPublicKey()}); + parts.push({name: 'x', data: this._dh.getPrivateKey()}); + this._key = new PrivateKey({ + type: 'dsa', + parts: parts + }); + this._isPriv = true; + return (this._key); + + } else if (this._algo === 'ecdsa') { + if (CRYPTO_HAVE_ECDH) { + this._dh.generateKeys(); + + parts.push({name: 'curve', + data: new Buffer(this._curve)}); + parts.push({name: 'Q', data: 
this._dh.getPublicKey()}); + parts.push({name: 'd', data: this._dh.getPrivateKey()}); + this._key = new PrivateKey({ + type: 'ecdsa', + curve: this._curve, + parts: parts + }); + this._isPriv = true; + return (this._key); + + } else { + var n = this._ecParams.getN(); + var r = new jsbn(crypto.randomBytes(n.bitLength())); + var n1 = n.subtract(jsbn.ONE); + priv = r.mod(n1).add(jsbn.ONE); + pub = this._ecParams.getG().multiply(priv); + + priv = new Buffer(priv.toByteArray()); + pub = new Buffer(this._ecParams.getCurve(). + encodePointHex(pub), 'hex'); + + this._priv = new ECPrivate(this._ecParams, priv); + + parts.push({name: 'curve', + data: new Buffer(this._curve)}); + parts.push({name: 'Q', data: pub}); + parts.push({name: 'd', data: priv}); + + this._key = new PrivateKey({ + type: 'ecdsa', + curve: this._curve, + parts: parts + }); + this._isPriv = true; + return (this._key); + } + + } else if (this._algo === 'curve25519') { + priv = ed.dh.generateKey(); + pub = ed.dh.publicKey(priv); + this._priv = priv = new Buffer(priv, 'binary'); + pub = new Buffer(pub, 'binary'); + + parts.push({name: 'R', data: pub}); + parts.push({name: 'r', data: Buffer.concat([priv, pub])}); + this._key = new PrivateKey({ + type: 'curve25519', + parts: parts + }); + this._isPriv = true; + return (this._key); + } + + throw (new Error('Invalid algorithm: ' + this._algo)); +}; +DiffieHellman.prototype.generateKeys = DiffieHellman.prototype.generateKey; + +/* These are helpers for using ecc-jsbn (for node 0.10 compatibility). */ + +function X9ECParameters(name) { + var params = algs.curves[name]; + assert.object(params); + + var p = new jsbn(params.p); + var a = new jsbn(params.a); + var b = new jsbn(params.b); + var n = new jsbn(params.n); + var h = jsbn.ONE; + var curve = new ec.ECCurveFp(p, a, b); + var G = curve.decodePointHex(params.G.toString('hex')); + + this.curve = curve; + this.g = G; + this.n = n; + this.h = h; +} +X9ECParameters.prototype.getCurve = function () { return (this.curve); }; +X9ECParameters.prototype.getG = function () { return (this.g); }; +X9ECParameters.prototype.getN = function () { return (this.n); }; +X9ECParameters.prototype.getH = function () { return (this.h); }; + +function ECPublic(params, buffer) { + this._params = params; + if (buffer[0] === 0x00) + buffer = buffer.slice(1); + this._pub = params.getCurve().decodePointHex(buffer.toString('hex')); +} + +function ECPrivate(params, buffer) { + this._params = params; + this._priv = new jsbn(utils.mpNormalize(buffer)); +} +ECPrivate.prototype.deriveSharedSecret = function (pubKey) { + assert.ok(pubKey instanceof ECPublic); + var S = pubKey._pub.multiply(this._priv); + return (new Buffer(S.getX().toBigInteger().toByteArray())); +}; diff --git a/node_modules/sshpk/lib/ed-compat.js b/node_modules/sshpk/lib/ed-compat.js new file mode 100644 index 0000000..5365fb1 --- /dev/null +++ b/node_modules/sshpk/lib/ed-compat.js @@ -0,0 +1,96 @@ +// Copyright 2015 Joyent, Inc. 
+ +module.exports = { + Verifier: Verifier, + Signer: Signer +}; + +var nacl; +var stream = require('stream'); +var util = require('util'); +var assert = require('assert-plus'); +var Signature = require('./signature'); + +function Verifier(key, hashAlgo) { + if (nacl === undefined) + nacl = require('tweetnacl'); + + if (hashAlgo.toLowerCase() !== 'sha512') + throw (new Error('ED25519 only supports the use of ' + + 'SHA-512 hashes')); + + this.key = key; + this.chunks = []; + + stream.Writable.call(this, {}); +} +util.inherits(Verifier, stream.Writable); + +Verifier.prototype._write = function (chunk, enc, cb) { + this.chunks.push(chunk); + cb(); +}; + +Verifier.prototype.update = function (chunk) { + if (typeof (chunk) === 'string') + chunk = new Buffer(chunk, 'binary'); + this.chunks.push(chunk); +}; + +Verifier.prototype.verify = function (signature, fmt) { + var sig; + if (Signature.isSignature(signature, [2, 0])) { + if (signature.type !== 'ed25519') + return (false); + sig = signature.toBuffer('raw'); + + } else if (typeof (signature) === 'string') { + sig = new Buffer(signature, 'base64'); + + } else if (Signature.isSignature(signature, [1, 0])) { + throw (new Error('signature was created by too old ' + + 'a version of sshpk and cannot be verified')); + } + + assert.buffer(sig); + return (nacl.sign.detached.verify( + new Uint8Array(Buffer.concat(this.chunks)), + new Uint8Array(sig), + new Uint8Array(this.key.part.R.data))); +}; + +function Signer(key, hashAlgo) { + if (nacl === undefined) + nacl = require('tweetnacl'); + + if (hashAlgo.toLowerCase() !== 'sha512') + throw (new Error('ED25519 only supports the use of ' + + 'SHA-512 hashes')); + + this.key = key; + this.chunks = []; + + stream.Writable.call(this, {}); +} +util.inherits(Signer, stream.Writable); + +Signer.prototype._write = function (chunk, enc, cb) { + this.chunks.push(chunk); + cb(); +}; + +Signer.prototype.update = function (chunk) { + if (typeof (chunk) === 'string') + chunk = new Buffer(chunk, 'binary'); + this.chunks.push(chunk); +}; + +Signer.prototype.sign = function () { + var sig = nacl.sign.detached( + new Uint8Array(Buffer.concat(this.chunks)), + new Uint8Array(this.key.part.r.data)); + var sigBuf = new Buffer(sig); + var sigObj = Signature.parse(sigBuf, 'ed25519', 'raw'); + sigObj.hashAlgorithm = 'sha512'; + return (sigObj); +}; diff --git a/node_modules/sshpk/lib/errors.js b/node_modules/sshpk/lib/errors.js new file mode 100644 index 0000000..1cc09ec --- /dev/null +++ b/node_modules/sshpk/lib/errors.js @@ -0,0 +1,84 @@ +// Copyright 2015 Joyent, Inc. 
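The ed-compat Signer and Verifier streams above back Ed25519 signing with tweetnacl and only accept SHA-512. A minimal sketch of reaching them through a key object's createSign/createVerify methods; the parser call and file name are illustrative assumptions, not part of this file.

    var fs = require('fs');
    var sshpk = require('sshpk');

    // An ed25519 key in OpenSSH private-key format (readable via the pem/ssh-private readers).
    var priv = sshpk.parsePrivateKey(fs.readFileSync('id_ed25519'), 'pem');

    var signer = priv.createSign('sha512');     // routed to the Signer above for ed25519 keys
    signer.update('hello world');
    var sig = signer.sign();                    // Signature object with hashAlgorithm 'sha512'

    var verifier = priv.toPublic().createVerify('sha512');
    verifier.update('hello world');
    console.log(verifier.verify(sig));          // true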
+ +var assert = require('assert-plus'); +var util = require('util'); + +function FingerprintFormatError(fp, format) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, FingerprintFormatError); + this.name = 'FingerprintFormatError'; + this.fingerprint = fp; + this.format = format; + this.message = 'Fingerprint format is not supported, or is invalid: '; + if (fp !== undefined) + this.message += ' fingerprint = ' + fp; + if (format !== undefined) + this.message += ' format = ' + format; +} +util.inherits(FingerprintFormatError, Error); + +function InvalidAlgorithmError(alg) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, InvalidAlgorithmError); + this.name = 'InvalidAlgorithmError'; + this.algorithm = alg; + this.message = 'Algorithm "' + alg + '" is not supported'; +} +util.inherits(InvalidAlgorithmError, Error); + +function KeyParseError(name, format, innerErr) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, KeyParseError); + this.name = 'KeyParseError'; + this.format = format; + this.keyName = name; + this.innerErr = innerErr; + this.message = 'Failed to parse ' + name + ' as a valid ' + format + + ' format key: ' + innerErr.message; +} +util.inherits(KeyParseError, Error); + +function SignatureParseError(type, format, innerErr) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, SignatureParseError); + this.name = 'SignatureParseError'; + this.type = type; + this.format = format; + this.innerErr = innerErr; + this.message = 'Failed to parse the given data as a ' + type + + ' signature in ' + format + ' format: ' + innerErr.message; +} +util.inherits(SignatureParseError, Error); + +function CertificateParseError(name, format, innerErr) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, CertificateParseError); + this.name = 'CertificateParseError'; + this.format = format; + this.certName = name; + this.innerErr = innerErr; + this.message = 'Failed to parse ' + name + ' as a valid ' + format + + ' format certificate: ' + innerErr.message; +} +util.inherits(CertificateParseError, Error); + +function KeyEncryptedError(name, format) { + if (Error.captureStackTrace) + Error.captureStackTrace(this, KeyEncryptedError); + this.name = 'KeyEncryptedError'; + this.format = format; + this.keyName = name; + this.message = 'The ' + format + ' format key ' + name + ' is ' + + 'encrypted (password-protected), and no passphrase was ' + + 'provided in `options`'; +} +util.inherits(KeyEncryptedError, Error); + +module.exports = { + FingerprintFormatError: FingerprintFormatError, + InvalidAlgorithmError: InvalidAlgorithmError, + KeyParseError: KeyParseError, + SignatureParseError: SignatureParseError, + KeyEncryptedError: KeyEncryptedError, + CertificateParseError: CertificateParseError +}; diff --git a/node_modules/sshpk/lib/fingerprint.js b/node_modules/sshpk/lib/fingerprint.js new file mode 100644 index 0000000..7ed7e51 --- /dev/null +++ b/node_modules/sshpk/lib/fingerprint.js @@ -0,0 +1,161 @@ +// Copyright 2015 Joyent, Inc. 
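The error classes above carry structured fields (format, keyName, innerErr, and so on) alongside the message, so callers can branch on the kind of failure. An illustrative sketch, assuming the library's top-level parseKey entry point:

    var sshpk = require('sshpk');

    function tryParse(text) {
    	try {
    		return (sshpk.parseKey(text, 'auto'));
    	} catch (e) {
    		if (e.name === 'KeyParseError')
    			console.error('bad ' + e.format + ' key: ' + e.innerErr.message);
    		else if (e.name === 'KeyEncryptedError')
    			console.error('key ' + e.keyName + ' needs a passphrase');
    		else
    			throw (e);
    		return (undefined);
    	}
    }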
+ +module.exports = Fingerprint; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var errs = require('./errors'); +var Key = require('./key'); +var Certificate = require('./certificate'); +var utils = require('./utils'); + +var FingerprintFormatError = errs.FingerprintFormatError; +var InvalidAlgorithmError = errs.InvalidAlgorithmError; + +function Fingerprint(opts) { + assert.object(opts, 'options'); + assert.string(opts.type, 'options.type'); + assert.buffer(opts.hash, 'options.hash'); + assert.string(opts.algorithm, 'options.algorithm'); + + this.algorithm = opts.algorithm.toLowerCase(); + if (algs.hashAlgs[this.algorithm] !== true) + throw (new InvalidAlgorithmError(this.algorithm)); + + this.hash = opts.hash; + this.type = opts.type; +} + +Fingerprint.prototype.toString = function (format) { + if (format === undefined) { + if (this.algorithm === 'md5') + format = 'hex'; + else + format = 'base64'; + } + assert.string(format); + + switch (format) { + case 'hex': + return (addColons(this.hash.toString('hex'))); + case 'base64': + return (sshBase64Format(this.algorithm, + this.hash.toString('base64'))); + default: + throw (new FingerprintFormatError(undefined, format)); + } +}; + +Fingerprint.prototype.matches = function (other) { + assert.object(other, 'key or certificate'); + if (this.type === 'key') { + utils.assertCompatible(other, Key, [1, 0], 'key'); + } else { + utils.assertCompatible(other, Certificate, [1, 0], + 'certificate'); + } + + var theirHash = other.hash(this.algorithm); + var theirHash2 = crypto.createHash(this.algorithm). + update(theirHash).digest('base64'); + + if (this.hash2 === undefined) + this.hash2 = crypto.createHash(this.algorithm). + update(this.hash).digest('base64'); + + return (this.hash2 === theirHash2); +}; + +Fingerprint.parse = function (fp, options) { + assert.string(fp, 'fingerprint'); + + var alg, hash, enAlgs; + if (Array.isArray(options)) { + enAlgs = options; + options = {}; + } + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + if (options.enAlgs !== undefined) + enAlgs = options.enAlgs; + assert.optionalArrayOfString(enAlgs, 'algorithms'); + + var parts = fp.split(':'); + if (parts.length == 2) { + alg = parts[0].toLowerCase(); + /*JSSTYLED*/ + var base64RE = /^[A-Za-z0-9+\/=]+$/; + if (!base64RE.test(parts[1])) + throw (new FingerprintFormatError(fp)); + try { + hash = new Buffer(parts[1], 'base64'); + } catch (e) { + throw (new FingerprintFormatError(fp)); + } + } else if (parts.length > 2) { + alg = 'md5'; + if (parts[0].toLowerCase() === 'md5') + parts = parts.slice(1); + parts = parts.join(''); + /*JSSTYLED*/ + var md5RE = /^[a-fA-F0-9]+$/; + if (!md5RE.test(parts)) + throw (new FingerprintFormatError(fp)); + try { + hash = new Buffer(parts, 'hex'); + } catch (e) { + throw (new FingerprintFormatError(fp)); + } + } + + if (alg === undefined) + throw (new FingerprintFormatError(fp)); + + if (algs.hashAlgs[alg] === undefined) + throw (new InvalidAlgorithmError(alg)); + + if (enAlgs !== undefined) { + enAlgs = enAlgs.map(function (a) { return a.toLowerCase(); }); + if (enAlgs.indexOf(alg) === -1) + throw (new InvalidAlgorithmError(alg)); + } + + return (new Fingerprint({ + algorithm: alg, + hash: hash, + type: options.type || 'key' + })); +}; + +function addColons(s) { + /*JSSTYLED*/ + return (s.replace(/(.{2})(?=.)/g, '$1:')); +} + +function base64Strip(s) { + /*JSSTYLED*/ + return (s.replace(/=*$/, '')); +} + +function sshBase64Format(alg, h) { + 
return (alg.toUpperCase() + ':' + base64Strip(h)); +} + +Fingerprint.isFingerprint = function (obj, ver) { + return (utils.isCompatible(obj, Fingerprint, ver)); +}; + +/* + * API versions for Fingerprint: + * [1,0] -- initial ver + * [1,1] -- first tagged ver + */ +Fingerprint.prototype._sshpkApiVersion = [1, 1]; + +Fingerprint._oldVersionDetect = function (obj) { + assert.func(obj.toString); + assert.func(obj.matches); + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/formats/auto.js b/node_modules/sshpk/lib/formats/auto.js new file mode 100644 index 0000000..973c032 --- /dev/null +++ b/node_modules/sshpk/lib/formats/auto.js @@ -0,0 +1,73 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var pem = require('./pem'); +var ssh = require('./ssh'); +var rfc4253 = require('./rfc4253'); + +function read(buf, options) { + if (typeof (buf) === 'string') { + if (buf.trim().match(/^[-]+[ ]*BEGIN/)) + return (pem.read(buf, options)); + if (buf.match(/^\s*ssh-[a-z]/)) + return (ssh.read(buf, options)); + if (buf.match(/^\s*ecdsa-/)) + return (ssh.read(buf, options)); + buf = new Buffer(buf, 'binary'); + } else { + assert.buffer(buf); + if (findPEMHeader(buf)) + return (pem.read(buf, options)); + if (findSSHHeader(buf)) + return (ssh.read(buf, options)); + } + if (buf.readUInt32BE(0) < buf.length) + return (rfc4253.read(buf, options)); + throw (new Error('Failed to auto-detect format of key')); +} + +function findSSHHeader(buf) { + var offset = 0; + while (offset < buf.length && + (buf[offset] === 32 || buf[offset] === 10 || buf[offset] === 9)) + ++offset; + if (offset + 4 <= buf.length && + buf.slice(offset, offset + 4).toString('ascii') === 'ssh-') + return (true); + if (offset + 6 <= buf.length && + buf.slice(offset, offset + 6).toString('ascii') === 'ecdsa-') + return (true); + return (false); +} + +function findPEMHeader(buf) { + var offset = 0; + while (offset < buf.length && + (buf[offset] === 32 || buf[offset] === 10)) + ++offset; + if (buf[offset] !== 45) + return (false); + while (offset < buf.length && + (buf[offset] === 45)) + ++offset; + while (offset < buf.length && + (buf[offset] === 32)) + ++offset; + if (offset + 5 > buf.length || + buf.slice(offset, offset + 5).toString('ascii') !== 'BEGIN') + return (false); + return (true); +} + +function write(key, options) { + throw (new Error('"auto" format cannot be used for writing')); +} diff --git a/node_modules/sshpk/lib/formats/openssh-cert.js b/node_modules/sshpk/lib/formats/openssh-cert.js new file mode 100644 index 0000000..8ce7350 --- /dev/null +++ b/node_modules/sshpk/lib/formats/openssh-cert.js @@ -0,0 +1,289 @@ +// Copyright 2016 Joyent, Inc. 
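Fingerprint.parse and Fingerprint.prototype.matches, defined above, accept both colon-separated MD5 hex and SSH-style base64 strings (e.g. 'SHA256:...') and compare them against Key or Certificate objects. A small sketch; the key-parsing and parseFingerprint wrapper calls are assumed from the rest of the library:

    var fs = require('fs');
    var sshpk = require('sshpk');

    var key = sshpk.parseKey(fs.readFileSync('id_ecdsa.pub'), 'ssh');
    var fp = sshpk.parseFingerprint(process.argv[2]);   // e.g. 'SHA256:...' or 'aa:bb:cc:...'
    if (fp.matches(key))
    	console.log('matches; canonical form: ' + fp.toString());
    else
    	console.log('no match');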
+ +module.exports = { + read: read, + verify: verify, + sign: sign, + write: write, + + /* Internal private API */ + fromBuffer: fromBuffer, + toBuffer: toBuffer +}; + +var assert = require('assert-plus'); +var SSHBuffer = require('../ssh-buffer'); +var crypto = require('crypto'); +var algs = require('../algs'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var Identity = require('../identity'); +var rfc4253 = require('./rfc4253'); +var Signature = require('../signature'); +var utils = require('../utils'); +var Certificate = require('../certificate'); + +function verify(cert, key) { + /* + * We always give an issuerKey, so if our verify() is being called then + * there was no signature. Return false. + */ + return (false); +} + +var TYPES = { + 'user': 1, + 'host': 2 +}; +Object.keys(TYPES).forEach(function (k) { TYPES[TYPES[k]] = k; }); + +var ECDSA_ALGO = /^ecdsa-sha2-([^@-]+)-cert-v01@openssh.com$/; + +function read(buf, options) { + if (Buffer.isBuffer(buf)) + buf = buf.toString('ascii'); + var parts = buf.trim().split(/[ \t\n]+/g); + if (parts.length < 2 || parts.length > 3) + throw (new Error('Not a valid SSH certificate line')); + + var algo = parts[0]; + var data = parts[1]; + + data = new Buffer(data, 'base64'); + return (fromBuffer(data, algo)); +} + +function fromBuffer(data, algo, partial) { + var sshbuf = new SSHBuffer({ buffer: data }); + var innerAlgo = sshbuf.readString(); + if (algo !== undefined && innerAlgo !== algo) + throw (new Error('SSH certificate algorithm mismatch')); + if (algo === undefined) + algo = innerAlgo; + + var cert = {}; + cert.signatures = {}; + cert.signatures.openssh = {}; + + cert.signatures.openssh.nonce = sshbuf.readBuffer(); + + var key = {}; + var parts = (key.parts = []); + key.type = getAlg(algo); + + var partCount = algs.info[key.type].parts.length; + while (parts.length < partCount) + parts.push(sshbuf.readPart()); + assert.ok(parts.length >= 1, 'key must have at least one part'); + + var algInfo = algs.info[key.type]; + if (key.type === 'ecdsa') { + var res = ECDSA_ALGO.exec(algo); + assert.ok(res !== null); + assert.strictEqual(res[1], parts[0].data.toString()); + } + + for (var i = 0; i < algInfo.parts.length; ++i) { + parts[i].name = algInfo.parts[i]; + if (parts[i].name !== 'curve' && + algInfo.normalize !== false) { + var p = parts[i]; + p.data = utils.mpNormalize(p.data); + } + } + + cert.subjectKey = new Key(key); + + cert.serial = sshbuf.readInt64(); + + var type = TYPES[sshbuf.readInt()]; + assert.string(type, 'valid cert type'); + + cert.signatures.openssh.keyId = sshbuf.readString(); + + var principals = []; + var pbuf = sshbuf.readBuffer(); + var psshbuf = new SSHBuffer({ buffer: pbuf }); + while (!psshbuf.atEnd()) + principals.push(psshbuf.readString()); + if (principals.length === 0) + principals = ['*']; + + cert.subjects = principals.map(function (pr) { + if (type === 'user') + return (Identity.forUser(pr)); + else if (type === 'host') + return (Identity.forHost(pr)); + throw (new Error('Unknown identity type ' + type)); + }); + + cert.validFrom = int64ToDate(sshbuf.readInt64()); + cert.validUntil = int64ToDate(sshbuf.readInt64()); + + cert.signatures.openssh.critical = sshbuf.readBuffer(); + cert.signatures.openssh.exts = sshbuf.readBuffer(); + + /* reserved */ + sshbuf.readBuffer(); + + var signingKeyBuf = sshbuf.readBuffer(); + cert.issuerKey = rfc4253.read(signingKeyBuf); + + /* + * OpenSSH certs don't give the identity of the issuer, just their + * public key. 
So, we use an Identity that matches anything. The + * isSignedBy() function will later tell you if the key matches. + */ + cert.issuer = Identity.forHost('**'); + + var sigBuf = sshbuf.readBuffer(); + cert.signatures.openssh.signature = + Signature.parse(sigBuf, cert.issuerKey.type, 'ssh'); + + if (partial !== undefined) { + partial.remainder = sshbuf.remainder(); + partial.consumed = sshbuf._offset; + } + + return (new Certificate(cert)); +} + +function int64ToDate(buf) { + var i = buf.readUInt32BE(0) * 4294967296; + i += buf.readUInt32BE(4); + var d = new Date(); + d.setTime(i * 1000); + d.sourceInt64 = buf; + return (d); +} + +function dateToInt64(date) { + if (date.sourceInt64 !== undefined) + return (date.sourceInt64); + var i = Math.round(date.getTime() / 1000); + var upper = Math.floor(i / 4294967296); + var lower = Math.floor(i % 4294967296); + var buf = new Buffer(8); + buf.writeUInt32BE(upper, 0); + buf.writeUInt32BE(lower, 4); + return (buf); +} + +function sign(cert, key) { + if (cert.signatures.openssh === undefined) + cert.signatures.openssh = {}; + try { + var blob = toBuffer(cert, true); + } catch (e) { + delete (cert.signatures.openssh); + return (false); + } + var sig = cert.signatures.openssh; + var hashAlgo = undefined; + if (key.type === 'rsa' || key.type === 'dsa') + hashAlgo = 'sha1'; + var signer = key.createSign(hashAlgo); + signer.write(blob); + sig.signature = signer.sign(); + return (true); +} + +function write(cert, options) { + if (options === undefined) + options = {}; + + var blob = toBuffer(cert); + var out = getCertType(cert.subjectKey) + ' ' + blob.toString('base64'); + if (options.comment) + out = out + ' ' + options.comment; + return (out); +} + + +function toBuffer(cert, noSig) { + assert.object(cert.signatures.openssh, 'signature for openssh format'); + var sig = cert.signatures.openssh; + + if (sig.nonce === undefined) + sig.nonce = crypto.randomBytes(16); + var buf = new SSHBuffer({}); + buf.writeString(getCertType(cert.subjectKey)); + buf.writeBuffer(sig.nonce); + + var key = cert.subjectKey; + var algInfo = algs.info[key.type]; + algInfo.parts.forEach(function (part) { + buf.writePart(key.part[part]); + }); + + buf.writeInt64(cert.serial); + + var type = cert.subjects[0].type; + assert.notStrictEqual(type, 'unknown'); + cert.subjects.forEach(function (id) { + assert.strictEqual(id.type, type); + }); + type = TYPES[type]; + buf.writeInt(type); + + if (sig.keyId === undefined) { + sig.keyId = cert.subjects[0].type + '_' + + (cert.subjects[0].uid || cert.subjects[0].hostname); + } + buf.writeString(sig.keyId); + + var sub = new SSHBuffer({}); + cert.subjects.forEach(function (id) { + if (type === TYPES.host) + sub.writeString(id.hostname); + else if (type === TYPES.user) + sub.writeString(id.uid); + }); + buf.writeBuffer(sub.toBuffer()); + + buf.writeInt64(dateToInt64(cert.validFrom)); + buf.writeInt64(dateToInt64(cert.validUntil)); + + if (sig.critical === undefined) + sig.critical = new Buffer(0); + buf.writeBuffer(sig.critical); + + if (sig.exts === undefined) + sig.exts = new Buffer(0); + buf.writeBuffer(sig.exts); + + /* reserved */ + buf.writeBuffer(new Buffer(0)); + + sub = rfc4253.write(cert.issuerKey); + buf.writeBuffer(sub); + + if (!noSig) + buf.writeBuffer(sig.signature.toBuffer('ssh')); + + return (buf.toBuffer()); +} + +function getAlg(certType) { + if (certType === 'ssh-rsa-cert-v01@openssh.com') + return ('rsa'); + if (certType === 'ssh-dss-cert-v01@openssh.com') + return ('dsa'); + if (certType.match(ECDSA_ALGO)) + return ('ecdsa'); 
+ if (certType === 'ssh-ed25519-cert-v01@openssh.com') + return ('ed25519'); + throw (new Error('Unsupported cert type ' + certType)); +} + +function getCertType(key) { + if (key.type === 'rsa') + return ('ssh-rsa-cert-v01@openssh.com'); + if (key.type === 'dsa') + return ('ssh-dss-cert-v01@openssh.com'); + if (key.type === 'ecdsa') + return ('ecdsa-sha2-' + key.curve + '-cert-v01@openssh.com'); + if (key.type === 'ed25519') + return ('ssh-ed25519-cert-v01@openssh.com'); + throw (new Error('Unsupported key type ' + key.type)); +} diff --git a/node_modules/sshpk/lib/formats/pem.js b/node_modules/sshpk/lib/formats/pem.js new file mode 100644 index 0000000..c254e4e --- /dev/null +++ b/node_modules/sshpk/lib/formats/pem.js @@ -0,0 +1,186 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var crypto = require('crypto'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var pkcs1 = require('./pkcs1'); +var pkcs8 = require('./pkcs8'); +var sshpriv = require('./ssh-private'); +var rfc4253 = require('./rfc4253'); + +var errors = require('../errors'); + +/* + * For reading we support both PKCS#1 and PKCS#8. If we find a private key, + * we just take the public component of it and use that. + */ +function read(buf, options, forceType) { + var input = buf; + if (typeof (buf) !== 'string') { + assert.buffer(buf, 'buf'); + buf = buf.toString('ascii'); + } + + var lines = buf.trim().split('\n'); + + var m = lines[0].match(/*JSSTYLED*/ + /[-]+[ ]*BEGIN ([A-Z0-9]+ )?(PUBLIC|PRIVATE) KEY[ ]*[-]+/); + assert.ok(m, 'invalid PEM header'); + + var m2 = lines[lines.length - 1].match(/*JSSTYLED*/ + /[-]+[ ]*END ([A-Z0-9]+ )?(PUBLIC|PRIVATE) KEY[ ]*[-]+/); + assert.ok(m2, 'invalid PEM footer'); + + /* Begin and end banners must match key type */ + assert.equal(m[2], m2[2]); + var type = m[2].toLowerCase(); + + var alg; + if (m[1]) { + /* They also must match algorithms, if given */ + assert.equal(m[1], m2[1], 'PEM header and footer mismatch'); + alg = m[1].trim(); + } + + var headers = {}; + while (true) { + lines = lines.slice(1); + m = lines[0].match(/*JSSTYLED*/ + /^([A-Za-z0-9-]+): (.+)$/); + if (!m) + break; + headers[m[1].toLowerCase()] = m[2]; + } + + var cipher, key, iv; + if (headers['proc-type']) { + var parts = headers['proc-type'].split(','); + if (parts[0] === '4' && parts[1] === 'ENCRYPTED') { + if (typeof (options.passphrase) === 'string') { + options.passphrase = new Buffer( + options.passphrase, 'utf-8'); + } + if (!Buffer.isBuffer(options.passphrase)) { + throw (new errors.KeyEncryptedError( + options.filename, 'PEM')); + } else { + parts = headers['dek-info'].split(','); + assert.ok(parts.length === 2); + cipher = parts[0].toLowerCase(); + iv = new Buffer(parts[1], 'hex'); + key = utils.opensslKeyDeriv(cipher, iv, + options.passphrase, 1).key; + } + } + } + + /* Chop off the first and last lines */ + lines = lines.slice(0, -1).join(''); + buf = new Buffer(lines, 'base64'); + + if (cipher && key && iv) { + var cipherStream = crypto.createDecipheriv(cipher, key, iv); + var chunk, chunks = []; + cipherStream.once('error', function (e) { + if (e.toString().indexOf('bad decrypt') !== -1) { + throw (new Error('Incorrect passphrase ' + + 'supplied, could not decrypt key')); + } + throw (e); + }); + cipherStream.write(buf); + cipherStream.end(); + while ((chunk = cipherStream.read()) !== null) + 
chunks.push(chunk); + buf = Buffer.concat(chunks); + } + + /* The new OpenSSH internal format abuses PEM headers */ + if (alg && alg.toLowerCase() === 'openssh') + return (sshpriv.readSSHPrivate(type, buf, options)); + if (alg && alg.toLowerCase() === 'ssh2') + return (rfc4253.readType(type, buf, options)); + + var der = new asn1.BerReader(buf); + der.originalInput = input; + + /* + * All of the PEM file types start with a sequence tag, so chop it + * off here + */ + der.readSequence(); + + /* PKCS#1 type keys name an algorithm in the banner explicitly */ + if (alg) { + if (forceType) + assert.strictEqual(forceType, 'pkcs1'); + return (pkcs1.readPkcs1(alg, type, der)); + } else { + if (forceType) + assert.strictEqual(forceType, 'pkcs8'); + return (pkcs8.readPkcs8(alg, type, der)); + } +} + +function write(key, options, type) { + assert.object(key); + + var alg = {'ecdsa': 'EC', 'rsa': 'RSA', 'dsa': 'DSA'}[key.type]; + var header; + + var der = new asn1.BerWriter(); + + if (PrivateKey.isPrivateKey(key)) { + if (type && type === 'pkcs8') { + header = 'PRIVATE KEY'; + pkcs8.writePkcs8(der, key); + } else { + if (type) + assert.strictEqual(type, 'pkcs1'); + header = alg + ' PRIVATE KEY'; + pkcs1.writePkcs1(der, key); + } + + } else if (Key.isKey(key)) { + if (type && type === 'pkcs1') { + header = alg + ' PUBLIC KEY'; + pkcs1.writePkcs1(der, key); + } else { + if (type) + assert.strictEqual(type, 'pkcs8'); + header = 'PUBLIC KEY'; + pkcs8.writePkcs8(der, key); + } + + } else { + throw (new Error('key is not a Key or PrivateKey')); + } + + var tmp = der.buffer.toString('base64'); + var len = tmp.length + (tmp.length / 64) + + 18 + 16 + header.length*2 + 10; + var buf = new Buffer(len); + var o = 0; + o += buf.write('-----BEGIN ' + header + '-----\n', o); + for (var i = 0; i < tmp.length; ) { + var limit = i + 64; + if (limit > tmp.length) + limit = tmp.length; + o += buf.write(tmp.slice(i, limit), o); + buf[o++] = 10; + i = limit; + } + o += buf.write('-----END ' + header + '-----\n', o); + + return (buf.slice(0, o)); +} diff --git a/node_modules/sshpk/lib/formats/pkcs1.js b/node_modules/sshpk/lib/formats/pkcs1.js new file mode 100644 index 0000000..a5676af --- /dev/null +++ b/node_modules/sshpk/lib/formats/pkcs1.js @@ -0,0 +1,320 @@ +// Copyright 2015 Joyent, Inc. 
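The pem reader above understands traditional OpenSSL 'Proc-Type: 4,ENCRYPTED' headers: it decrypts with options.passphrase when one is supplied and raises KeyEncryptedError otherwise. A sketch of the retry pattern, assuming the top-level parsePrivateKey passes its options object through to this reader:

    var fs = require('fs');
    var sshpk = require('sshpk');

    var data = fs.readFileSync('id_ecdsa.pem');     // example path
    var key;
    try {
    	key = sshpk.parsePrivateKey(data, 'pem', { filename: 'id_ecdsa.pem' });
    } catch (e) {
    	if (e.name !== 'KeyEncryptedError')
    		throw (e);
    	// askUserForPassphrase() is a stand-in for however the caller obtains the secret.
    	key = sshpk.parsePrivateKey(data, 'pem',
    	    { filename: 'id_ecdsa.pem', passphrase: askUserForPassphrase() });
    }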
+ +module.exports = { + read: read, + readPkcs1: readPkcs1, + write: write, + writePkcs1: writePkcs1 +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); + +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); + +var pkcs8 = require('./pkcs8'); +var readECDSACurve = pkcs8.readECDSACurve; + +function read(buf, options) { + return (pem.read(buf, options, 'pkcs1')); +} + +function write(key, options) { + return (pem.write(key, options, 'pkcs1')); +} + +/* Helper to read in a single mpint */ +function readMPInt(der, nm) { + assert.strictEqual(der.peek(), asn1.Ber.Integer, + nm + ' is not an Integer'); + return (utils.mpNormalize(der.readString(asn1.Ber.Integer, true))); +} + +function readPkcs1(alg, type, der) { + switch (alg) { + case 'RSA': + if (type === 'public') + return (readPkcs1RSAPublic(der)); + else if (type === 'private') + return (readPkcs1RSAPrivate(der)); + throw (new Error('Unknown key type: ' + type)); + case 'DSA': + if (type === 'public') + return (readPkcs1DSAPublic(der)); + else if (type === 'private') + return (readPkcs1DSAPrivate(der)); + throw (new Error('Unknown key type: ' + type)); + case 'EC': + case 'ECDSA': + if (type === 'private') + return (readPkcs1ECDSAPrivate(der)); + else if (type === 'public') + return (readPkcs1ECDSAPublic(der)); + throw (new Error('Unknown key type: ' + type)); + default: + throw (new Error('Unknown key algo: ' + alg)); + } +} + +function readPkcs1RSAPublic(der) { + // modulus and exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'exponent'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'e', data: e }, + { name: 'n', data: n } + ] + }; + + return (new Key(key)); +} + +function readPkcs1RSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version[0], 0); + + // modulus then public exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'public exponent'); + var d = readMPInt(der, 'private exponent'); + var p = readMPInt(der, 'prime1'); + var q = readMPInt(der, 'prime2'); + var dmodp = readMPInt(der, 'exponent1'); + var dmodq = readMPInt(der, 'exponent2'); + var iqmp = readMPInt(der, 'iqmp'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'n', data: n }, + { name: 'e', data: e }, + { name: 'd', data: d }, + { name: 'iqmp', data: iqmp }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'dmodp', data: dmodp }, + { name: 'dmodq', data: dmodq } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs1DSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version.readUInt8(0), 0); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + var y = readMPInt(der, 'y'); + var x = readMPInt(der, 'x'); + + // now, make the key + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y }, + { name: 'x', data: x } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs1DSAPublic(der) { + var y = readMPInt(der, 'y'); + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + var key = { + type: 'dsa', + parts: [ + { name: 'y', data: y }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g } + ] + }; + + return (new Key(key)); +} + +function 
readPkcs1ECDSAPublic(der) { + der.readSequence(); + + var oid = der.readOID(); + assert.strictEqual(oid, '1.2.840.10045.2.1', 'must be ecPublicKey'); + + var curveOid = der.readOID(); + + var curve; + var curves = Object.keys(algs.curves); + for (var j = 0; j < curves.length; ++j) { + var c = curves[j]; + var cd = algs.curves[c]; + if (cd.pkcs8oid === curveOid) { + curve = c; + break; + } + } + assert.string(curve, 'a known ECDSA named curve'); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curve) }, + { name: 'Q', data: Q } + ] + }; + + return (new Key(key)); +} + +function readPkcs1ECDSAPrivate(der) { + var version = readMPInt(der, 'version'); + assert.strictEqual(version.readUInt8(0), 1); + + // private key + var d = der.readString(asn1.Ber.OctetString, true); + + der.readSequence(0xa0); + var curve = readECDSACurve(der); + assert.string(curve, 'a known elliptic curve'); + + der.readSequence(0xa1); + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curve) }, + { name: 'Q', data: Q }, + { name: 'd', data: d } + ] + }; + + return (new PrivateKey(key)); +} + +function writePkcs1(der, key) { + der.startSequence(); + + switch (key.type) { + case 'rsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1RSAPrivate(der, key); + else + writePkcs1RSAPublic(der, key); + break; + case 'dsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1DSAPrivate(der, key); + else + writePkcs1DSAPublic(der, key); + break; + case 'ecdsa': + if (PrivateKey.isPrivateKey(key)) + writePkcs1ECDSAPrivate(der, key); + else + writePkcs1ECDSAPublic(der, key); + break; + default: + throw (new Error('Unknown key algo: ' + key.type)); + } + + der.endSequence(); +} + +function writePkcs1RSAPublic(der, key) { + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); +} + +function writePkcs1RSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 0; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.writeBuffer(key.part.d.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + if (!key.part.dmodp || !key.part.dmodq) + utils.addRSAMissing(key); + der.writeBuffer(key.part.dmodp.data, asn1.Ber.Integer); + der.writeBuffer(key.part.dmodq.data, asn1.Ber.Integer); + der.writeBuffer(key.part.iqmp.data, asn1.Ber.Integer); +} + +function writePkcs1DSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 0; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.writeBuffer(key.part.x.data, asn1.Ber.Integer); +} + +function writePkcs1DSAPublic(der, key) { + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); +} + +function writePkcs1ECDSAPublic(der, key) { + der.startSequence(); + + der.writeOID('1.2.840.10045.2.1'); /* ecPublicKey */ + var curve = key.part.curve.data.toString(); + var curveOid = 
algs.curves[curve].pkcs8oid; + assert.string(curveOid, 'a known ECDSA named curve'); + der.writeOID(curveOid); + + der.endSequence(); + + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); +} + +function writePkcs1ECDSAPrivate(der, key) { + var ver = new Buffer(1); + ver[0] = 1; + der.writeBuffer(ver, asn1.Ber.Integer); + + der.writeBuffer(key.part.d.data, asn1.Ber.OctetString); + + der.startSequence(0xa0); + var curve = key.part.curve.data.toString(); + var curveOid = algs.curves[curve].pkcs8oid; + assert.string(curveOid, 'a known ECDSA named curve'); + der.writeOID(curveOid); + der.endSequence(); + + der.startSequence(0xa1); + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); + der.endSequence(); +} diff --git a/node_modules/sshpk/lib/formats/pkcs8.js b/node_modules/sshpk/lib/formats/pkcs8.js new file mode 100644 index 0000000..4ccbefc --- /dev/null +++ b/node_modules/sshpk/lib/formats/pkcs8.js @@ -0,0 +1,505 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + readPkcs8: readPkcs8, + write: write, + writePkcs8: writePkcs8, + + readECDSACurve: readECDSACurve, + writeECDSACurve: writeECDSACurve +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); + +function read(buf, options) { + return (pem.read(buf, options, 'pkcs8')); +} + +function write(key, options) { + return (pem.write(key, options, 'pkcs8')); +} + +/* Helper to read in a single mpint */ +function readMPInt(der, nm) { + assert.strictEqual(der.peek(), asn1.Ber.Integer, + nm + ' is not an Integer'); + return (utils.mpNormalize(der.readString(asn1.Ber.Integer, true))); +} + +function readPkcs8(alg, type, der) { + /* Private keys in pkcs#8 format have a weird extra int */ + if (der.peek() === asn1.Ber.Integer) { + assert.strictEqual(type, 'private', + 'unexpected Integer at start of public key'); + der.readString(asn1.Ber.Integer, true); + } + + der.readSequence(); + var next = der.offset + der.length; + + var oid = der.readOID(); + switch (oid) { + case '1.2.840.113549.1.1.1': + der._offset = next; + if (type === 'public') + return (readPkcs8RSAPublic(der)); + else + return (readPkcs8RSAPrivate(der)); + case '1.2.840.10040.4.1': + if (type === 'public') + return (readPkcs8DSAPublic(der)); + else + return (readPkcs8DSAPrivate(der)); + case '1.2.840.10045.2.1': + if (type === 'public') + return (readPkcs8ECDSAPublic(der)); + else + return (readPkcs8ECDSAPrivate(der)); + default: + throw (new Error('Unknown key type OID ' + oid)); + } +} + +function readPkcs8RSAPublic(der) { + // bit string sequence + der.readSequence(asn1.Ber.BitString); + der.readByte(); + der.readSequence(); + + // modulus + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'exponent'); + + // now, make the key + var key = { + type: 'rsa', + source: der.originalInput, + parts: [ + { name: 'e', data: e }, + { name: 'n', data: n } + ] + }; + + return (new Key(key)); +} + +function readPkcs8RSAPrivate(der) { + der.readSequence(asn1.Ber.OctetString); + der.readSequence(); + + var ver = readMPInt(der, 'version'); + assert.equal(ver[0], 0x0, 'unknown RSA private key version'); + + // modulus then public exponent + var n = readMPInt(der, 'modulus'); + var e = readMPInt(der, 'public exponent'); + var d = readMPInt(der, 'private exponent'); + var p = readMPInt(der, 'prime1'); + 
var q = readMPInt(der, 'prime2'); + var dmodp = readMPInt(der, 'exponent1'); + var dmodq = readMPInt(der, 'exponent2'); + var iqmp = readMPInt(der, 'iqmp'); + + // now, make the key + var key = { + type: 'rsa', + parts: [ + { name: 'n', data: n }, + { name: 'e', data: e }, + { name: 'd', data: d }, + { name: 'iqmp', data: iqmp }, + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'dmodp', data: dmodp }, + { name: 'dmodq', data: dmodq } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs8DSAPublic(der) { + der.readSequence(); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + // bit string sequence + der.readSequence(asn1.Ber.BitString); + der.readByte(); + + var y = readMPInt(der, 'y'); + + // now, make the key + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y } + ] + }; + + return (new Key(key)); +} + +function readPkcs8DSAPrivate(der) { + der.readSequence(); + + var p = readMPInt(der, 'p'); + var q = readMPInt(der, 'q'); + var g = readMPInt(der, 'g'); + + der.readSequence(asn1.Ber.OctetString); + var x = readMPInt(der, 'x'); + + /* The pkcs#8 format does not include the public key */ + var y = utils.calculateDSAPublic(g, p, x); + + var key = { + type: 'dsa', + parts: [ + { name: 'p', data: p }, + { name: 'q', data: q }, + { name: 'g', data: g }, + { name: 'y', data: y }, + { name: 'x', data: x } + ] + }; + + return (new PrivateKey(key)); +} + +function readECDSACurve(der) { + var curveName, curveNames; + var j, c, cd; + + if (der.peek() === asn1.Ber.OID) { + var oid = der.readOID(); + + curveNames = Object.keys(algs.curves); + for (j = 0; j < curveNames.length; ++j) { + c = curveNames[j]; + cd = algs.curves[c]; + if (cd.pkcs8oid === oid) { + curveName = c; + break; + } + } + + } else { + // ECParameters sequence + der.readSequence(); + var version = der.readString(asn1.Ber.Integer, true); + assert.strictEqual(version[0], 1, 'ECDSA key not version 1'); + + var curve = {}; + + // FieldID sequence + der.readSequence(); + var fieldTypeOid = der.readOID(); + assert.strictEqual(fieldTypeOid, '1.2.840.10045.1.1', + 'ECDSA key is not from a prime-field'); + var p = curve.p = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + /* + * p always starts with a 1 bit, so count the zeros to get its + * real size. 
+ */ + curve.size = p.length * 8 - utils.countZeros(p); + + // Curve sequence + der.readSequence(); + curve.a = utils.mpNormalize( + der.readString(asn1.Ber.OctetString, true)); + curve.b = utils.mpNormalize( + der.readString(asn1.Ber.OctetString, true)); + if (der.peek() === asn1.Ber.BitString) + curve.s = der.readString(asn1.Ber.BitString, true); + + // Combined Gx and Gy + curve.G = der.readString(asn1.Ber.OctetString, true); + assert.strictEqual(curve.G[0], 0x4, + 'uncompressed G is required'); + + curve.n = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + curve.h = utils.mpNormalize( + der.readString(asn1.Ber.Integer, true)); + assert.strictEqual(curve.h[0], 0x1, 'a cofactor=1 curve is ' + + 'required'); + + curveNames = Object.keys(algs.curves); + var ks = Object.keys(curve); + for (j = 0; j < curveNames.length; ++j) { + c = curveNames[j]; + cd = algs.curves[c]; + var equal = true; + for (var i = 0; i < ks.length; ++i) { + var k = ks[i]; + if (cd[k] === undefined) + continue; + if (typeof (cd[k]) === 'object' && + cd[k].equals !== undefined) { + if (!cd[k].equals(curve[k])) { + equal = false; + break; + } + } else if (Buffer.isBuffer(cd[k])) { + if (cd[k].toString('binary') + !== curve[k].toString('binary')) { + equal = false; + break; + } + } else { + if (cd[k] !== curve[k]) { + equal = false; + break; + } + } + } + if (equal) { + curveName = c; + break; + } + } + } + return (curveName); +} + +function readPkcs8ECDSAPrivate(der) { + var curveName = readECDSACurve(der); + assert.string(curveName, 'a known elliptic curve'); + + der.readSequence(asn1.Ber.OctetString); + der.readSequence(); + + var version = readMPInt(der, 'version'); + assert.equal(version[0], 1, 'unknown version of ECDSA key'); + + var d = der.readString(asn1.Ber.OctetString, true); + der.readSequence(0xa1); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curveName) }, + { name: 'Q', data: Q }, + { name: 'd', data: d } + ] + }; + + return (new PrivateKey(key)); +} + +function readPkcs8ECDSAPublic(der) { + var curveName = readECDSACurve(der); + assert.string(curveName, 'a known elliptic curve'); + + var Q = der.readString(asn1.Ber.BitString, true); + Q = utils.ecNormalize(Q); + + var key = { + type: 'ecdsa', + parts: [ + { name: 'curve', data: new Buffer(curveName) }, + { name: 'Q', data: Q } + ] + }; + + return (new Key(key)); +} + +function writePkcs8(der, key) { + der.startSequence(); + + if (PrivateKey.isPrivateKey(key)) { + var sillyInt = new Buffer(1); + sillyInt[0] = 0x0; + der.writeBuffer(sillyInt, asn1.Ber.Integer); + } + + der.startSequence(); + switch (key.type) { + case 'rsa': + der.writeOID('1.2.840.113549.1.1.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8RSAPrivate(key, der); + else + writePkcs8RSAPublic(key, der); + break; + case 'dsa': + der.writeOID('1.2.840.10040.4.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8DSAPrivate(key, der); + else + writePkcs8DSAPublic(key, der); + break; + case 'ecdsa': + der.writeOID('1.2.840.10045.2.1'); + if (PrivateKey.isPrivateKey(key)) + writePkcs8ECDSAPrivate(key, der); + else + writePkcs8ECDSAPublic(key, der); + break; + default: + throw (new Error('Unsupported key type: ' + key.type)); + } + + der.endSequence(); +} + +function writePkcs8RSAPrivate(key, der) { + der.writeNull(); + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.startSequence(); + + var version = new Buffer(1); + version[0] = 0; + 
der.writeBuffer(version, asn1.Ber.Integer); + + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.writeBuffer(key.part.d.data, asn1.Ber.Integer); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + if (!key.part.dmodp || !key.part.dmodq) + utils.addRSAMissing(key); + der.writeBuffer(key.part.dmodp.data, asn1.Ber.Integer); + der.writeBuffer(key.part.dmodq.data, asn1.Ber.Integer); + der.writeBuffer(key.part.iqmp.data, asn1.Ber.Integer); + + der.endSequence(); + der.endSequence(); +} + +function writePkcs8RSAPublic(key, der) { + der.writeNull(); + der.endSequence(); + + der.startSequence(asn1.Ber.BitString); + der.writeByte(0x00); + + der.startSequence(); + der.writeBuffer(key.part.n.data, asn1.Ber.Integer); + der.writeBuffer(key.part.e.data, asn1.Ber.Integer); + der.endSequence(); + + der.endSequence(); +} + +function writePkcs8DSAPrivate(key, der) { + der.startSequence(); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.endSequence(); + + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.writeBuffer(key.part.x.data, asn1.Ber.Integer); + der.endSequence(); +} + +function writePkcs8DSAPublic(key, der) { + der.startSequence(); + der.writeBuffer(key.part.p.data, asn1.Ber.Integer); + der.writeBuffer(key.part.q.data, asn1.Ber.Integer); + der.writeBuffer(key.part.g.data, asn1.Ber.Integer); + der.endSequence(); + der.endSequence(); + + der.startSequence(asn1.Ber.BitString); + der.writeByte(0x00); + der.writeBuffer(key.part.y.data, asn1.Ber.Integer); + der.endSequence(); +} + +function writeECDSACurve(key, der) { + var curve = algs.curves[key.curve]; + if (curve.pkcs8oid) { + /* This one has a name in pkcs#8, so just write the oid */ + der.writeOID(curve.pkcs8oid); + + } else { + // ECParameters sequence + der.startSequence(); + + var version = new Buffer(1); + version.writeUInt8(1, 0); + der.writeBuffer(version, asn1.Ber.Integer); + + // FieldID sequence + der.startSequence(); + der.writeOID('1.2.840.10045.1.1'); // prime-field + der.writeBuffer(curve.p, asn1.Ber.Integer); + der.endSequence(); + + // Curve sequence + der.startSequence(); + var a = curve.p; + if (a[0] === 0x0) + a = a.slice(1); + der.writeBuffer(a, asn1.Ber.OctetString); + der.writeBuffer(curve.b, asn1.Ber.OctetString); + der.writeBuffer(curve.s, asn1.Ber.BitString); + der.endSequence(); + + der.writeBuffer(curve.G, asn1.Ber.OctetString); + der.writeBuffer(curve.n, asn1.Ber.Integer); + var h = curve.h; + if (!h) { + h = new Buffer(1); + h[0] = 1; + } + der.writeBuffer(h, asn1.Ber.Integer); + + // ECParameters + der.endSequence(); + } +} + +function writePkcs8ECDSAPublic(key, der) { + writeECDSACurve(key, der); + der.endSequence(); + + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); +} + +function writePkcs8ECDSAPrivate(key, der) { + writeECDSACurve(key, der); + der.endSequence(); + + der.startSequence(asn1.Ber.OctetString); + der.startSequence(); + + var version = new Buffer(1); + version[0] = 1; + der.writeBuffer(version, asn1.Ber.Integer); + + der.writeBuffer(key.part.d.data, asn1.Ber.OctetString); + + der.startSequence(0xa1); + var Q = utils.ecNormalize(key.part.Q.data, true); + der.writeBuffer(Q, asn1.Ber.BitString); + der.endSequence(); + + der.endSequence(); + der.endSequence(); +} diff --git 
a/node_modules/sshpk/lib/formats/rfc4253.js b/node_modules/sshpk/lib/formats/rfc4253.js new file mode 100644 index 0000000..9d436dd --- /dev/null +++ b/node_modules/sshpk/lib/formats/rfc4253.js @@ -0,0 +1,146 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read.bind(undefined, false, undefined), + readType: read.bind(undefined, false), + write: write, + /* semi-private api, used by sshpk-agent */ + readPartial: read.bind(undefined, true), + + /* shared with ssh format */ + readInternal: read, + keyTypeToAlg: keyTypeToAlg, + algToKeyType: algToKeyType +}; + +var assert = require('assert-plus'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var SSHBuffer = require('../ssh-buffer'); + +function algToKeyType(alg) { + assert.string(alg); + if (alg === 'ssh-dss') + return ('dsa'); + else if (alg === 'ssh-rsa') + return ('rsa'); + else if (alg === 'ssh-ed25519') + return ('ed25519'); + else if (alg === 'ssh-curve25519') + return ('curve25519'); + else if (alg.match(/^ecdsa-sha2-/)) + return ('ecdsa'); + else + throw (new Error('Unknown algorithm ' + alg)); +} + +function keyTypeToAlg(key) { + assert.object(key); + if (key.type === 'dsa') + return ('ssh-dss'); + else if (key.type === 'rsa') + return ('ssh-rsa'); + else if (key.type === 'ed25519') + return ('ssh-ed25519'); + else if (key.type === 'curve25519') + return ('ssh-curve25519'); + else if (key.type === 'ecdsa') + return ('ecdsa-sha2-' + key.part.curve.data.toString()); + else + throw (new Error('Unknown key type ' + key.type)); +} + +function read(partial, type, buf, options) { + if (typeof (buf) === 'string') + buf = new Buffer(buf); + assert.buffer(buf, 'buf'); + + var key = {}; + + var parts = key.parts = []; + var sshbuf = new SSHBuffer({buffer: buf}); + + var alg = sshbuf.readString(); + assert.ok(!sshbuf.atEnd(), 'key must have at least one part'); + + key.type = algToKeyType(alg); + + var partCount = algs.info[key.type].parts.length; + if (type && type === 'private') + partCount = algs.privInfo[key.type].parts.length; + + while (!sshbuf.atEnd() && parts.length < partCount) + parts.push(sshbuf.readPart()); + while (!partial && !sshbuf.atEnd()) + parts.push(sshbuf.readPart()); + + assert.ok(parts.length >= 1, + 'key must have at least one part'); + assert.ok(partial || sshbuf.atEnd(), + 'leftover bytes at end of key'); + + var Constructor = Key; + var algInfo = algs.info[key.type]; + if (type === 'private' || algInfo.parts.length !== parts.length) { + algInfo = algs.privInfo[key.type]; + Constructor = PrivateKey; + } + assert.strictEqual(algInfo.parts.length, parts.length); + + if (key.type === 'ecdsa') { + var res = /^ecdsa-sha2-(.+)$/.exec(alg); + assert.ok(res !== null); + assert.strictEqual(res[1], parts[0].data.toString()); + } + + var normalized = true; + for (var i = 0; i < algInfo.parts.length; ++i) { + parts[i].name = algInfo.parts[i]; + if (parts[i].name !== 'curve' && + algInfo.normalize !== false) { + var p = parts[i]; + var nd = utils.mpNormalize(p.data); + if (nd !== p.data) { + p.data = nd; + normalized = false; + } + } + } + + if (normalized) + key._rfc4253Cache = sshbuf.toBuffer(); + + if (partial && typeof (partial) === 'object') { + partial.remainder = sshbuf.remainder(); + partial.consumed = sshbuf._offset; + } + + return (new Constructor(key)); +} + +function write(key, options) { + assert.object(key); + + var alg = keyTypeToAlg(key); + var i; + + var algInfo = algs.info[key.type]; + if 
(PrivateKey.isPrivateKey(key)) + algInfo = algs.privInfo[key.type]; + var parts = algInfo.parts; + + var buf = new SSHBuffer({}); + + buf.writeString(alg); + + for (i = 0; i < parts.length; ++i) { + var data = key.part[parts[i]].data; + if (algInfo.normalize !== false) + data = utils.mpNormalize(data); + buf.writeBuffer(data); + } + + return (buf.toBuffer()); +} diff --git a/node_modules/sshpk/lib/formats/ssh-private.js b/node_modules/sshpk/lib/formats/ssh-private.js new file mode 100644 index 0000000..2fcf719 --- /dev/null +++ b/node_modules/sshpk/lib/formats/ssh-private.js @@ -0,0 +1,261 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + readSSHPrivate: readSSHPrivate, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var crypto = require('crypto'); + +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); +var rfc4253 = require('./rfc4253'); +var SSHBuffer = require('../ssh-buffer'); +var errors = require('../errors'); + +var bcrypt; + +function read(buf, options) { + return (pem.read(buf, options)); +} + +var MAGIC = 'openssh-key-v1'; + +function readSSHPrivate(type, buf, options) { + buf = new SSHBuffer({buffer: buf}); + + var magic = buf.readCString(); + assert.strictEqual(magic, MAGIC, 'bad magic string'); + + var cipher = buf.readString(); + var kdf = buf.readString(); + var kdfOpts = buf.readBuffer(); + + var nkeys = buf.readInt(); + if (nkeys !== 1) { + throw (new Error('OpenSSH-format key file contains ' + + 'multiple keys: this is unsupported.')); + } + + var pubKey = buf.readBuffer(); + + if (type === 'public') { + assert.ok(buf.atEnd(), 'excess bytes left after key'); + return (rfc4253.read(pubKey)); + } + + var privKeyBlob = buf.readBuffer(); + assert.ok(buf.atEnd(), 'excess bytes left after key'); + + var kdfOptsBuf = new SSHBuffer({ buffer: kdfOpts }); + switch (kdf) { + case 'none': + if (cipher !== 'none') { + throw (new Error('OpenSSH-format key uses KDF "none" ' + + 'but specifies a cipher other than "none"')); + } + break; + case 'bcrypt': + var salt = kdfOptsBuf.readBuffer(); + var rounds = kdfOptsBuf.readInt(); + var cinf = utils.opensshCipherInfo(cipher); + if (bcrypt === undefined) { + bcrypt = require('bcrypt-pbkdf'); + } + + if (typeof (options.passphrase) === 'string') { + options.passphrase = new Buffer(options.passphrase, + 'utf-8'); + } + if (!Buffer.isBuffer(options.passphrase)) { + throw (new errors.KeyEncryptedError( + options.filename, 'OpenSSH')); + } + + var pass = new Uint8Array(options.passphrase); + var salti = new Uint8Array(salt); + /* Use the pbkdf to derive both the key and the IV. 
*/ + var out = new Uint8Array(cinf.keySize + cinf.blockSize); + var res = bcrypt.pbkdf(pass, pass.length, salti, salti.length, + out, out.length, rounds); + if (res !== 0) { + throw (new Error('bcrypt_pbkdf function returned ' + + 'failure, parameters invalid')); + } + out = new Buffer(out); + var ckey = out.slice(0, cinf.keySize); + var iv = out.slice(cinf.keySize, cinf.keySize + cinf.blockSize); + var cipherStream = crypto.createDecipheriv(cinf.opensslName, + ckey, iv); + cipherStream.setAutoPadding(false); + var chunk, chunks = []; + cipherStream.once('error', function (e) { + if (e.toString().indexOf('bad decrypt') !== -1) { + throw (new Error('Incorrect passphrase ' + + 'supplied, could not decrypt key')); + } + throw (e); + }); + cipherStream.write(privKeyBlob); + cipherStream.end(); + while ((chunk = cipherStream.read()) !== null) + chunks.push(chunk); + privKeyBlob = Buffer.concat(chunks); + break; + default: + throw (new Error( + 'OpenSSH-format key uses unknown KDF "' + kdf + '"')); + } + + buf = new SSHBuffer({buffer: privKeyBlob}); + + var checkInt1 = buf.readInt(); + var checkInt2 = buf.readInt(); + if (checkInt1 !== checkInt2) { + throw (new Error('Incorrect passphrase supplied, could not ' + + 'decrypt key')); + } + + var ret = {}; + var key = rfc4253.readInternal(ret, 'private', buf.remainder()); + + buf.skip(ret.consumed); + + var comment = buf.readString(); + key.comment = comment; + + return (key); +} + +function write(key, options) { + var pubKey; + if (PrivateKey.isPrivateKey(key)) + pubKey = key.toPublic(); + else + pubKey = key; + + var cipher = 'none'; + var kdf = 'none'; + var kdfopts = new Buffer(0); + var cinf = { blockSize: 8 }; + var passphrase; + if (options !== undefined) { + passphrase = options.passphrase; + if (typeof (passphrase) === 'string') + passphrase = new Buffer(passphrase, 'utf-8'); + if (passphrase !== undefined) { + assert.buffer(passphrase, 'options.passphrase'); + assert.optionalString(options.cipher, 'options.cipher'); + cipher = options.cipher; + if (cipher === undefined) + cipher = 'aes128-ctr'; + cinf = utils.opensshCipherInfo(cipher); + kdf = 'bcrypt'; + } + } + + var privBuf; + if (PrivateKey.isPrivateKey(key)) { + privBuf = new SSHBuffer({}); + var checkInt = crypto.randomBytes(4).readUInt32BE(0); + privBuf.writeInt(checkInt); + privBuf.writeInt(checkInt); + privBuf.write(key.toBuffer('rfc4253')); + privBuf.writeString(key.comment || ''); + + var n = 1; + while (privBuf._offset % cinf.blockSize !== 0) + privBuf.writeChar(n++); + privBuf = privBuf.toBuffer(); + } + + switch (kdf) { + case 'none': + break; + case 'bcrypt': + var salt = crypto.randomBytes(16); + var rounds = 16; + var kdfssh = new SSHBuffer({}); + kdfssh.writeBuffer(salt); + kdfssh.writeInt(rounds); + kdfopts = kdfssh.toBuffer(); + + if (bcrypt === undefined) { + bcrypt = require('bcrypt-pbkdf'); + } + var pass = new Uint8Array(passphrase); + var salti = new Uint8Array(salt); + /* Use the pbkdf to derive both the key and the IV. 
*/ + var out = new Uint8Array(cinf.keySize + cinf.blockSize); + var res = bcrypt.pbkdf(pass, pass.length, salti, salti.length, + out, out.length, rounds); + if (res !== 0) { + throw (new Error('bcrypt_pbkdf function returned ' + + 'failure, parameters invalid')); + } + out = new Buffer(out); + var ckey = out.slice(0, cinf.keySize); + var iv = out.slice(cinf.keySize, cinf.keySize + cinf.blockSize); + + var cipherStream = crypto.createCipheriv(cinf.opensslName, + ckey, iv); + cipherStream.setAutoPadding(false); + var chunk, chunks = []; + cipherStream.once('error', function (e) { + throw (e); + }); + cipherStream.write(privBuf); + cipherStream.end(); + while ((chunk = cipherStream.read()) !== null) + chunks.push(chunk); + privBuf = Buffer.concat(chunks); + break; + default: + throw (new Error('Unsupported kdf ' + kdf)); + } + + var buf = new SSHBuffer({}); + + buf.writeCString(MAGIC); + buf.writeString(cipher); /* cipher */ + buf.writeString(kdf); /* kdf */ + buf.writeBuffer(kdfopts); /* kdfoptions */ + + buf.writeInt(1); /* nkeys */ + buf.writeBuffer(pubKey.toBuffer('rfc4253')); + + if (privBuf) + buf.writeBuffer(privBuf); + + buf = buf.toBuffer(); + + var header; + if (PrivateKey.isPrivateKey(key)) + header = 'OPENSSH PRIVATE KEY'; + else + header = 'OPENSSH PUBLIC KEY'; + + var tmp = buf.toString('base64'); + var len = tmp.length + (tmp.length / 70) + + 18 + 16 + header.length*2 + 10; + buf = new Buffer(len); + var o = 0; + o += buf.write('-----BEGIN ' + header + '-----\n', o); + for (var i = 0; i < tmp.length; ) { + var limit = i + 70; + if (limit > tmp.length) + limit = tmp.length; + o += buf.write(tmp.slice(i, limit), o); + buf[o++] = 10; + i = limit; + } + o += buf.write('-----END ' + header + '-----\n', o); + + return (buf.slice(0, o)); +} diff --git a/node_modules/sshpk/lib/formats/ssh.js b/node_modules/sshpk/lib/formats/ssh.js new file mode 100644 index 0000000..655c9ea --- /dev/null +++ b/node_modules/sshpk/lib/formats/ssh.js @@ -0,0 +1,114 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + read: read, + write: write +}; + +var assert = require('assert-plus'); +var rfc4253 = require('./rfc4253'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); + +var sshpriv = require('./ssh-private'); + +/*JSSTYLED*/ +var SSHKEY_RE = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/]+[=]*)([\n \t]+([^\n]+))?$/; +/*JSSTYLED*/ +var SSHKEY_RE2 = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/ \t\n]+[=]*)(.*)$/; + +function read(buf, options) { + if (typeof (buf) !== 'string') { + assert.buffer(buf, 'buf'); + buf = buf.toString('ascii'); + } + + var trimmed = buf.trim().replace(/[\\\r]/g, ''); + var m = trimmed.match(SSHKEY_RE); + if (!m) + m = trimmed.match(SSHKEY_RE2); + assert.ok(m, 'key must match regex'); + + var type = rfc4253.algToKeyType(m[1]); + var kbuf = new Buffer(m[2], 'base64'); + + /* + * This is a bit tricky. If we managed to parse the key and locate the + * key comment with the regex, then do a non-partial read and assert + * that we have consumed all bytes. If we couldn't locate the key + * comment, though, there may be whitespace shenanigans going on that + * have conjoined the comment to the rest of the key. We do a partial + * read in this case to try to make the best out of a sorry situation. 
+ */ + var key; + var ret = {}; + if (m[4]) { + try { + key = rfc4253.read(kbuf); + + } catch (e) { + m = trimmed.match(SSHKEY_RE2); + assert.ok(m, 'key must match regex'); + kbuf = new Buffer(m[2], 'base64'); + key = rfc4253.readInternal(ret, 'public', kbuf); + } + } else { + key = rfc4253.readInternal(ret, 'public', kbuf); + } + + assert.strictEqual(type, key.type); + + if (m[4] && m[4].length > 0) { + key.comment = m[4]; + + } else if (ret.consumed) { + /* + * Now the magic: trying to recover the key comment when it's + * gotten conjoined to the key or otherwise shenanigan'd. + * + * Work out how much base64 we used, then drop all non-base64 + * chars from the beginning up to this point in the the string. + * Then offset in this and try to make up for missing = chars. + */ + var data = m[2] + m[3]; + var realOffset = Math.ceil(ret.consumed / 3) * 4; + data = data.slice(0, realOffset - 2). /*JSSTYLED*/ + replace(/[^a-zA-Z0-9+\/=]/g, '') + + data.slice(realOffset - 2); + + var padding = ret.consumed % 3; + if (padding > 0 && + data.slice(realOffset - 1, realOffset) !== '=') + realOffset--; + while (data.slice(realOffset, realOffset + 1) === '=') + realOffset++; + + /* Finally, grab what we think is the comment & clean it up. */ + var trailer = data.slice(realOffset); + trailer = trailer.replace(/[\r\n]/g, ' '). + replace(/^\s+/, ''); + if (trailer.match(/^[a-zA-Z0-9]/)) + key.comment = trailer; + } + + return (key); +} + +function write(key, options) { + assert.object(key); + if (!Key.isKey(key)) + throw (new Error('Must be a public key')); + + var parts = []; + var alg = rfc4253.keyTypeToAlg(key); + parts.push(alg); + + var buf = rfc4253.write(key); + parts.push(buf.toString('base64')); + + if (key.comment) + parts.push(key.comment); + + return (new Buffer(parts.join(' '))); +} diff --git a/node_modules/sshpk/lib/formats/x509-pem.js b/node_modules/sshpk/lib/formats/x509-pem.js new file mode 100644 index 0000000..c59c7d5 --- /dev/null +++ b/node_modules/sshpk/lib/formats/x509-pem.js @@ -0,0 +1,77 @@ +// Copyright 2016 Joyent, Inc. 
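+/*
+ * PEM armor around the DER X.509 certificate format below: read() strips the
+ * BEGIN/END CERTIFICATE lines and any headers, base64-decodes the body and
+ * hands the DER buffer to x509.js; write() does the reverse, wrapping the
+ * base64 output at 64 columns.
+ */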
+ +var x509 = require('./x509'); + +module.exports = { + read: read, + verify: x509.verify, + sign: x509.sign, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); +var Identity = require('../identity'); +var Signature = require('../signature'); +var Certificate = require('../certificate'); + +function read(buf, options) { + if (typeof (buf) !== 'string') { + assert.buffer(buf, 'buf'); + buf = buf.toString('ascii'); + } + + var lines = buf.trim().split(/[\r\n]+/g); + + var m = lines[0].match(/*JSSTYLED*/ + /[-]+[ ]*BEGIN CERTIFICATE[ ]*[-]+/); + assert.ok(m, 'invalid PEM header'); + + var m2 = lines[lines.length - 1].match(/*JSSTYLED*/ + /[-]+[ ]*END CERTIFICATE[ ]*[-]+/); + assert.ok(m2, 'invalid PEM footer'); + + var headers = {}; + while (true) { + lines = lines.slice(1); + m = lines[0].match(/*JSSTYLED*/ + /^([A-Za-z0-9-]+): (.+)$/); + if (!m) + break; + headers[m[1].toLowerCase()] = m[2]; + } + + /* Chop off the first and last lines */ + lines = lines.slice(0, -1).join(''); + buf = new Buffer(lines, 'base64'); + + return (x509.read(buf, options)); +} + +function write(cert, options) { + var dbuf = x509.write(cert, options); + + var header = 'CERTIFICATE'; + var tmp = dbuf.toString('base64'); + var len = tmp.length + (tmp.length / 64) + + 18 + 16 + header.length*2 + 10; + var buf = new Buffer(len); + var o = 0; + o += buf.write('-----BEGIN ' + header + '-----\n', o); + for (var i = 0; i < tmp.length; ) { + var limit = i + 64; + if (limit > tmp.length) + limit = tmp.length; + o += buf.write(tmp.slice(i, limit), o); + buf[o++] = 10; + i = limit; + } + o += buf.write('-----END ' + header + '-----\n', o); + + return (buf.slice(0, o)); +} diff --git a/node_modules/sshpk/lib/formats/x509.js b/node_modules/sshpk/lib/formats/x509.js new file mode 100644 index 0000000..a297540 --- /dev/null +++ b/node_modules/sshpk/lib/formats/x509.js @@ -0,0 +1,484 @@ +// Copyright 2016 Joyent, Inc. + +module.exports = { + read: read, + verify: verify, + sign: sign, + write: write +}; + +var assert = require('assert-plus'); +var asn1 = require('asn1'); +var algs = require('../algs'); +var utils = require('../utils'); +var Key = require('../key'); +var PrivateKey = require('../private-key'); +var pem = require('./pem'); +var Identity = require('../identity'); +var Signature = require('../signature'); +var Certificate = require('../certificate'); +var pkcs8 = require('./pkcs8'); + +/* + * This file is based on RFC5280 (X.509). 
+ */ + +/* Helper to read in a single mpint */ +function readMPInt(der, nm) { + assert.strictEqual(der.peek(), asn1.Ber.Integer, + nm + ' is not an Integer'); + return (utils.mpNormalize(der.readString(asn1.Ber.Integer, true))); +} + +function verify(cert, key) { + var sig = cert.signatures.x509; + assert.object(sig, 'x509 signature'); + + var algParts = sig.algo.split('-'); + if (algParts[0] !== key.type) + return (false); + + var blob = sig.cache; + if (blob === undefined) { + var der = new asn1.BerWriter(); + writeTBSCert(cert, der); + blob = der.buffer; + } + + var verifier = key.createVerify(algParts[1]); + verifier.write(blob); + return (verifier.verify(sig.signature)); +} + +function Local(i) { + return (asn1.Ber.Context | asn1.Ber.Constructor | i); +} + +function Context(i) { + return (asn1.Ber.Context | i); +} + +var SIGN_ALGS = { + 'rsa-md5': '1.2.840.113549.1.1.4', + 'rsa-sha1': '1.2.840.113549.1.1.5', + 'rsa-sha256': '1.2.840.113549.1.1.11', + 'rsa-sha384': '1.2.840.113549.1.1.12', + 'rsa-sha512': '1.2.840.113549.1.1.13', + 'dsa-sha1': '1.2.840.10040.4.3', + 'dsa-sha256': '2.16.840.1.101.3.4.3.2', + 'ecdsa-sha1': '1.2.840.10045.4.1', + 'ecdsa-sha256': '1.2.840.10045.4.3.2', + 'ecdsa-sha384': '1.2.840.10045.4.3.3', + 'ecdsa-sha512': '1.2.840.10045.4.3.4' +}; +Object.keys(SIGN_ALGS).forEach(function (k) { + SIGN_ALGS[SIGN_ALGS[k]] = k; +}); +SIGN_ALGS['1.3.14.3.2.3'] = 'rsa-md5'; +SIGN_ALGS['1.3.14.3.2.29'] = 'rsa-sha1'; + +var EXTS = { + 'issuerKeyId': '2.5.29.35', + 'altName': '2.5.29.17' +}; + +function read(buf, options) { + if (typeof (buf) === 'string') { + buf = new Buffer(buf, 'binary'); + } + assert.buffer(buf, 'buf'); + + var der = new asn1.BerReader(buf); + + der.readSequence(); + if (Math.abs(der.length - der.remain) > 1) { + throw (new Error('DER sequence does not contain whole byte ' + + 'stream')); + } + + var tbsStart = der.offset; + der.readSequence(); + var sigOffset = der.offset + der.length; + var tbsEnd = sigOffset; + + if (der.peek() === Local(0)) { + der.readSequence(Local(0)); + var version = der.readInt(); + assert.ok(version <= 3, + 'only x.509 versions up to v3 supported'); + } + + var cert = {}; + cert.signatures = {}; + var sig = (cert.signatures.x509 = {}); + sig.extras = {}; + + cert.serial = readMPInt(der, 'serial'); + + der.readSequence(); + var after = der.offset + der.length; + var certAlgOid = der.readOID(); + var certAlg = SIGN_ALGS[certAlgOid]; + if (certAlg === undefined) + throw (new Error('unknown signature algorithm ' + certAlgOid)); + + der._offset = after; + cert.issuer = Identity.parseAsn1(der); + + der.readSequence(); + cert.validFrom = readDate(der); + cert.validUntil = readDate(der); + + cert.subjects = [Identity.parseAsn1(der)]; + + der.readSequence(); + after = der.offset + der.length; + cert.subjectKey = pkcs8.readPkcs8(undefined, 'public', der); + der._offset = after; + + /* issuerUniqueID */ + if (der.peek() === Local(1)) { + der.readSequence(Local(1)); + sig.extras.issuerUniqueID = + buf.slice(der.offset, der.offset + der.length); + der._offset += der.length; + } + + /* subjectUniqueID */ + if (der.peek() === Local(2)) { + der.readSequence(Local(2)); + sig.extras.subjectUniqueID = + buf.slice(der.offset, der.offset + der.length); + der._offset += der.length; + } + + /* extensions */ + if (der.peek() === Local(3)) { + der.readSequence(Local(3)); + var extEnd = der.offset + der.length; + der.readSequence(); + + while (der.offset < extEnd) + readExtension(cert, buf, der); + + assert.strictEqual(der.offset, extEnd); + } + + 
assert.strictEqual(der.offset, sigOffset); + + der.readSequence(); + after = der.offset + der.length; + var sigAlgOid = der.readOID(); + var sigAlg = SIGN_ALGS[sigAlgOid]; + if (sigAlg === undefined) + throw (new Error('unknown signature algorithm ' + sigAlgOid)); + der._offset = after; + + var sigData = der.readString(asn1.Ber.BitString, true); + if (sigData[0] === 0) + sigData = sigData.slice(1); + var algParts = sigAlg.split('-'); + + sig.signature = Signature.parse(sigData, algParts[0], 'asn1'); + sig.signature.hashAlgorithm = algParts[1]; + sig.algo = sigAlg; + sig.cache = buf.slice(tbsStart, tbsEnd); + + return (new Certificate(cert)); +} + +function readDate(der) { + if (der.peek() === asn1.Ber.UTCTime) { + return (utcTimeToDate(der.readString(asn1.Ber.UTCTime))); + } else if (der.peek() === asn1.Ber.GeneralizedTime) { + return (gTimeToDate(der.readString(asn1.Ber.GeneralizedTime))); + } else { + throw (new Error('Unsupported date format')); + } +} + +/* RFC5280, section 4.2.1.6 (GeneralName type) */ +var ALTNAME = { + OtherName: Local(0), + RFC822Name: Context(1), + DNSName: Context(2), + X400Address: Local(3), + DirectoryName: Local(4), + EDIPartyName: Local(5), + URI: Context(6), + IPAddress: Context(7), + OID: Context(8) +}; + +function readExtension(cert, buf, der) { + der.readSequence(); + var after = der.offset + der.length; + var extId = der.readOID(); + var id; + var sig = cert.signatures.x509; + sig.extras.exts = []; + + var critical; + if (der.peek() === asn1.Ber.Boolean) + critical = der.readBoolean(); + + switch (extId) { + case (EXTS.altName): + der.readSequence(asn1.Ber.OctetString); + der.readSequence(); + var aeEnd = der.offset + der.length; + while (der.offset < aeEnd) { + switch (der.peek()) { + case ALTNAME.OtherName: + case ALTNAME.EDIPartyName: + der.readSequence(); + der._offset += der.length; + break; + case ALTNAME.OID: + der.readOID(ALTNAME.OID); + break; + case ALTNAME.RFC822Name: + /* RFC822 specifies email addresses */ + var email = der.readString(ALTNAME.RFC822Name); + id = Identity.forEmail(email); + if (!cert.subjects[0].equals(id)) + cert.subjects.push(id); + break; + case ALTNAME.DirectoryName: + der.readSequence(ALTNAME.DirectoryName); + id = Identity.parseAsn1(der); + if (!cert.subjects[0].equals(id)) + cert.subjects.push(id); + break; + case ALTNAME.DNSName: + var host = der.readString( + ALTNAME.DNSName); + id = Identity.forHost(host); + if (!cert.subjects[0].equals(id)) + cert.subjects.push(id); + break; + default: + der.readString(der.peek()); + break; + } + } + sig.extras.exts.push({ oid: extId, critical: critical }); + break; + default: + sig.extras.exts.push({ + oid: extId, + critical: critical, + data: der.readString(asn1.Ber.OctetString, true) + }); + break; + } + + der._offset = after; +} + +var UTCTIME_RE = + /^([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})?Z$/; +function utcTimeToDate(t) { + var m = t.match(UTCTIME_RE); + assert.ok(m, 'timestamps must be in UTC'); + var d = new Date(); + + var thisYear = d.getUTCFullYear(); + var century = Math.floor(thisYear / 100) * 100; + + var year = parseInt(m[1], 10); + if (thisYear % 100 < 50 && year >= 60) + year += (century - 1); + else + year += century; + d.setUTCFullYear(year, parseInt(m[2], 10) - 1, parseInt(m[3], 10)); + d.setUTCHours(parseInt(m[4], 10), parseInt(m[5], 10)); + if (m[6] && m[6].length > 0) + d.setUTCSeconds(parseInt(m[6], 10)); + return (d); +} + +var GTIME_RE = + /^([0-9]{4})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})?Z$/; +function gTimeToDate(t) { + 
var m = t.match(GTIME_RE); + assert.ok(m); + var d = new Date(); + + d.setUTCFullYear(parseInt(m[1], 10), parseInt(m[2], 10) - 1, + parseInt(m[3], 10)); + d.setUTCHours(parseInt(m[4], 10), parseInt(m[5], 10)); + if (m[6] && m[6].length > 0) + d.setUTCSeconds(parseInt(m[6], 10)); + return (d); +} + +function zeroPad(n) { + var s = '' + n; + while (s.length < 2) + s = '0' + s; + return (s); +} + +function dateToUTCTime(d) { + var s = ''; + s += zeroPad(d.getUTCFullYear() % 100); + s += zeroPad(d.getUTCMonth() + 1); + s += zeroPad(d.getUTCDate()); + s += zeroPad(d.getUTCHours()); + s += zeroPad(d.getUTCMinutes()); + s += zeroPad(d.getUTCSeconds()); + s += 'Z'; + return (s); +} + +function sign(cert, key) { + if (cert.signatures.x509 === undefined) + cert.signatures.x509 = {}; + var sig = cert.signatures.x509; + + sig.algo = key.type + '-' + key.defaultHashAlgorithm(); + if (SIGN_ALGS[sig.algo] === undefined) + return (false); + + var der = new asn1.BerWriter(); + writeTBSCert(cert, der); + var blob = der.buffer; + sig.cache = blob; + + var signer = key.createSign(); + signer.write(blob); + cert.signatures.x509.signature = signer.sign(); + + return (true); +} + +function write(cert, options) { + var sig = cert.signatures.x509; + assert.object(sig, 'x509 signature'); + + var der = new asn1.BerWriter(); + der.startSequence(); + if (sig.cache) { + der._ensure(sig.cache.length); + sig.cache.copy(der._buf, der._offset); + der._offset += sig.cache.length; + } else { + writeTBSCert(cert, der); + } + + der.startSequence(); + der.writeOID(SIGN_ALGS[sig.algo]); + if (sig.algo.match(/^rsa-/)) + der.writeNull(); + der.endSequence(); + + var sigData = sig.signature.toBuffer('asn1'); + var data = new Buffer(sigData.length + 1); + data[0] = 0; + sigData.copy(data, 1); + der.writeBuffer(data, asn1.Ber.BitString); + der.endSequence(); + + return (der.buffer); +} + +function writeTBSCert(cert, der) { + var sig = cert.signatures.x509; + assert.object(sig, 'x509 signature'); + + der.startSequence(); + + der.startSequence(Local(0)); + der.writeInt(2); + der.endSequence(); + + der.writeBuffer(utils.mpNormalize(cert.serial), asn1.Ber.Integer); + + der.startSequence(); + der.writeOID(SIGN_ALGS[sig.algo]); + der.endSequence(); + + cert.issuer.toAsn1(der); + + der.startSequence(); + der.writeString(dateToUTCTime(cert.validFrom), asn1.Ber.UTCTime); + der.writeString(dateToUTCTime(cert.validUntil), asn1.Ber.UTCTime); + der.endSequence(); + + var subject = cert.subjects[0]; + var altNames = cert.subjects.slice(1); + subject.toAsn1(der); + + pkcs8.writePkcs8(der, cert.subjectKey); + + if (sig.extras && sig.extras.issuerUniqueID) { + der.writeBuffer(sig.extras.issuerUniqueID, Local(1)); + } + + if (sig.extras && sig.extras.subjectUniqueID) { + der.writeBuffer(sig.extras.subjectUniqueID, Local(2)); + } + + if (altNames.length > 0 || subject.type === 'host' || + (sig.extras && sig.extras.exts)) { + der.startSequence(Local(3)); + der.startSequence(); + + var exts = [ + { oid: EXTS.altName } + ]; + if (sig.extras && sig.extras.exts) + exts = sig.extras.exts; + + for (var i = 0; i < exts.length; ++i) { + der.startSequence(); + der.writeOID(exts[i].oid); + + if (exts[i].critical !== undefined) + der.writeBoolean(exts[i].critical); + + if (exts[i].oid === EXTS.altName) { + der.startSequence(asn1.Ber.OctetString); + der.startSequence(); + if (subject.type === 'host') { + der.writeString(subject.hostname, + Context(2)); + } + for (var j = 0; j < altNames.length; ++j) { + if (altNames[j].type === 'host') { + der.writeString( + 
altNames[j].hostname, + ALTNAME.DNSName); + } else if (altNames[j].type === + 'email') { + der.writeString( + altNames[j].email, + ALTNAME.RFC822Name); + } else { + /* + * Encode anything else as a + * DN style name for now. + */ + der.startSequence( + ALTNAME.DirectoryName); + altNames[j].toAsn1(der); + der.endSequence(); + } + } + der.endSequence(); + der.endSequence(); + } else { + der.writeBuffer(exts[i].data, + asn1.Ber.OctetString); + } + + der.endSequence(); + } + + der.endSequence(); + der.endSequence(); + } + + der.endSequence(); +} diff --git a/node_modules/sshpk/lib/identity.js b/node_modules/sshpk/lib/identity.js new file mode 100644 index 0000000..b4f5cd7 --- /dev/null +++ b/node_modules/sshpk/lib/identity.js @@ -0,0 +1,255 @@ +// Copyright 2016 Joyent, Inc. + +module.exports = Identity; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var errs = require('./errors'); +var util = require('util'); +var utils = require('./utils'); +var asn1 = require('asn1'); + +/*JSSTYLED*/ +var DNS_NAME_RE = /^([*]|[a-z0-9][a-z0-9\-]{0,62})(?:\.([*]|[a-z0-9][a-z0-9\-]{0,62}))*$/i; + +var oids = {}; +oids.cn = '2.5.4.3'; +oids.o = '2.5.4.10'; +oids.ou = '2.5.4.11'; +oids.l = '2.5.4.7'; +oids.s = '2.5.4.8'; +oids.c = '2.5.4.6'; +oids.sn = '2.5.4.4'; +oids.dc = '0.9.2342.19200300.100.1.25'; +oids.uid = '0.9.2342.19200300.100.1.1'; +oids.mail = '0.9.2342.19200300.100.1.3'; + +var unoids = {}; +Object.keys(oids).forEach(function (k) { + unoids[oids[k]] = k; +}); + +function Identity(opts) { + var self = this; + assert.object(opts, 'options'); + assert.arrayOfObject(opts.components, 'options.components'); + this.components = opts.components; + this.componentLookup = {}; + this.components.forEach(function (c) { + if (c.name && !c.oid) + c.oid = oids[c.name]; + if (c.oid && !c.name) + c.name = unoids[c.oid]; + if (self.componentLookup[c.name] === undefined) + self.componentLookup[c.name] = []; + self.componentLookup[c.name].push(c); + }); + if (this.componentLookup.cn && this.componentLookup.cn.length > 0) { + this.cn = this.componentLookup.cn[0].value; + } + assert.optionalString(opts.type, 'options.type'); + if (opts.type === undefined) { + if (this.components.length === 1 && + this.componentLookup.cn && + this.componentLookup.cn.length === 1 && + this.componentLookup.cn[0].value.match(DNS_NAME_RE)) { + this.type = 'host'; + this.hostname = this.componentLookup.cn[0].value; + + } else if (this.componentLookup.dc && + this.components.length === this.componentLookup.dc.length) { + this.type = 'host'; + this.hostname = this.componentLookup.dc.map( + function (c) { + return (c.value); + }).join('.'); + + } else if (this.componentLookup.uid && + this.components.length === + this.componentLookup.uid.length) { + this.type = 'user'; + this.uid = this.componentLookup.uid[0].value; + + } else if (this.componentLookup.cn && + this.componentLookup.cn.length === 1 && + this.componentLookup.cn[0].value.match(DNS_NAME_RE)) { + this.type = 'host'; + this.hostname = this.componentLookup.cn[0].value; + + } else if (this.componentLookup.uid && + this.componentLookup.uid.length === 1) { + this.type = 'user'; + this.uid = this.componentLookup.uid[0].value; + + } else if (this.componentLookup.mail && + this.componentLookup.mail.length === 1) { + this.type = 'email'; + this.email = this.componentLookup.mail[0].value; + + } else if (this.componentLookup.cn && + 
this.componentLookup.cn.length === 1) { + this.type = 'user'; + this.uid = this.componentLookup.cn[0].value; + + } else { + this.type = 'unknown'; + } + } else { + this.type = opts.type; + if (this.type === 'host') + this.hostname = opts.hostname; + else if (this.type === 'user') + this.uid = opts.uid; + else if (this.type === 'email') + this.email = opts.email; + else + throw (new Error('Unknown type ' + this.type)); + } +} + +Identity.prototype.toString = function () { + return (this.components.map(function (c) { + return (c.name.toUpperCase() + '=' + c.value); + }).join(', ')); +}; + +Identity.prototype.toAsn1 = function (der, tag) { + der.startSequence(tag); + this.components.forEach(function (c) { + der.startSequence(asn1.Ber.Constructor | asn1.Ber.Set); + der.startSequence(); + der.writeOID(c.oid); + der.writeString(c.value, asn1.Ber.PrintableString); + der.endSequence(); + der.endSequence(); + }); + der.endSequence(); +}; + +function globMatch(a, b) { + if (a === '**' || b === '**') + return (true); + var aParts = a.split('.'); + var bParts = b.split('.'); + if (aParts.length !== bParts.length) + return (false); + for (var i = 0; i < aParts.length; ++i) { + if (aParts[i] === '*' || bParts[i] === '*') + continue; + if (aParts[i] !== bParts[i]) + return (false); + } + return (true); +} + +Identity.prototype.equals = function (other) { + if (!Identity.isIdentity(other, [1, 0])) + return (false); + if (other.components.length !== this.components.length) + return (false); + for (var i = 0; i < this.components.length; ++i) { + if (this.components[i].oid !== other.components[i].oid) + return (false); + if (!globMatch(this.components[i].value, + other.components[i].value)) { + return (false); + } + } + return (true); +}; + +Identity.forHost = function (hostname) { + assert.string(hostname, 'hostname'); + return (new Identity({ + type: 'host', + hostname: hostname, + components: [ { name: 'cn', value: hostname } ] + })); +}; + +Identity.forUser = function (uid) { + assert.string(uid, 'uid'); + return (new Identity({ + type: 'user', + uid: uid, + components: [ { name: 'uid', value: uid } ] + })); +}; + +Identity.forEmail = function (email) { + assert.string(email, 'email'); + return (new Identity({ + type: 'email', + email: email, + components: [ { name: 'mail', value: email } ] + })); +}; + +Identity.parseDN = function (dn) { + assert.string(dn, 'dn'); + var parts = dn.split(','); + var cmps = parts.map(function (c) { + c = c.trim(); + var eqPos = c.indexOf('='); + var name = c.slice(0, eqPos).toLowerCase(); + var value = c.slice(eqPos + 1); + return ({ name: name, value: value }); + }); + return (new Identity({ components: cmps })); +}; + +Identity.parseAsn1 = function (der, top) { + var components = []; + der.readSequence(top); + var end = der.offset + der.length; + while (der.offset < end) { + der.readSequence(asn1.Ber.Constructor | asn1.Ber.Set); + var after = der.offset + der.length; + der.readSequence(); + var oid = der.readOID(); + var type = der.peek(); + var value; + switch (type) { + case asn1.Ber.PrintableString: + case asn1.Ber.IA5String: + case asn1.Ber.OctetString: + case asn1.Ber.T61String: + value = der.readString(type); + break; + case asn1.Ber.Utf8String: + value = der.readString(type, true); + value = value.toString('utf8'); + break; + case asn1.Ber.CharacterString: + case asn1.Ber.BMPString: + value = der.readString(type, true); + value = value.toString('utf16le'); + break; + default: + throw (new Error('Unknown asn1 type ' + type)); + } + components.push({ oid: oid, 
value: value }); + der._offset = after; + } + der._offset = end; + return (new Identity({ + components: components + })); +}; + +Identity.isIdentity = function (obj, ver) { + return (utils.isCompatible(obj, Identity, ver)); +}; + +/* + * API versions for Identity: + * [1,0] -- initial ver + */ +Identity.prototype._sshpkApiVersion = [1, 0]; + +Identity._oldVersionDetect = function (obj) { + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/index.js b/node_modules/sshpk/lib/index.js new file mode 100644 index 0000000..96a1384 --- /dev/null +++ b/node_modules/sshpk/lib/index.js @@ -0,0 +1,38 @@ +// Copyright 2015 Joyent, Inc. + +var Key = require('./key'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var PrivateKey = require('./private-key'); +var Certificate = require('./certificate'); +var Identity = require('./identity'); +var errs = require('./errors'); + +module.exports = { + /* top-level classes */ + Key: Key, + parseKey: Key.parse, + Fingerprint: Fingerprint, + parseFingerprint: Fingerprint.parse, + Signature: Signature, + parseSignature: Signature.parse, + PrivateKey: PrivateKey, + parsePrivateKey: PrivateKey.parse, + Certificate: Certificate, + parseCertificate: Certificate.parse, + createSelfSignedCertificate: Certificate.createSelfSigned, + createCertificate: Certificate.create, + Identity: Identity, + identityFromDN: Identity.parseDN, + identityForHost: Identity.forHost, + identityForUser: Identity.forUser, + identityForEmail: Identity.forEmail, + + /* errors */ + FingerprintFormatError: errs.FingerprintFormatError, + InvalidAlgorithmError: errs.InvalidAlgorithmError, + KeyParseError: errs.KeyParseError, + SignatureParseError: errs.SignatureParseError, + KeyEncryptedError: errs.KeyEncryptedError, + CertificateParseError: errs.CertificateParseError +}; diff --git a/node_modules/sshpk/lib/key.js b/node_modules/sshpk/lib/key.js new file mode 100644 index 0000000..ff5c363 --- /dev/null +++ b/node_modules/sshpk/lib/key.js @@ -0,0 +1,270 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = Key; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var DiffieHellman = require('./dhe'); +var errs = require('./errors'); +var utils = require('./utils'); +var PrivateKey = require('./private-key'); +var edCompat; + +try { + edCompat = require('./ed-compat'); +} catch (e) { + /* Just continue through, and bail out if we try to use it. 
*/ +} + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; + +var formats = {}; +formats['auto'] = require('./formats/auto'); +formats['pem'] = require('./formats/pem'); +formats['pkcs1'] = require('./formats/pkcs1'); +formats['pkcs8'] = require('./formats/pkcs8'); +formats['rfc4253'] = require('./formats/rfc4253'); +formats['ssh'] = require('./formats/ssh'); +formats['ssh-private'] = require('./formats/ssh-private'); +formats['openssh'] = formats['ssh-private']; + +function Key(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.parts, 'options.parts'); + assert.string(opts.type, 'options.type'); + assert.optionalString(opts.comment, 'options.comment'); + + var algInfo = algs.info[opts.type]; + if (typeof (algInfo) !== 'object') + throw (new InvalidAlgorithmError(opts.type)); + + var partLookup = {}; + for (var i = 0; i < opts.parts.length; ++i) { + var part = opts.parts[i]; + partLookup[part.name] = part; + } + + this.type = opts.type; + this.parts = opts.parts; + this.part = partLookup; + this.comment = undefined; + this.source = opts.source; + + /* for speeding up hashing/fingerprint operations */ + this._rfc4253Cache = opts._rfc4253Cache; + this._hashCache = {}; + + var sz; + this.curve = undefined; + if (this.type === 'ecdsa') { + var curve = this.part.curve.data.toString(); + this.curve = curve; + sz = algs.curves[curve].size; + } else if (this.type === 'ed25519') { + sz = 256; + this.curve = 'curve25519'; + } else { + var szPart = this.part[algInfo.sizePart]; + sz = szPart.data.length; + sz = sz * 8 - utils.countZeros(szPart.data); + } + this.size = sz; +} + +Key.formats = formats; + +Key.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'ssh'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + if (format === 'rfc4253') { + if (this._rfc4253Cache === undefined) + this._rfc4253Cache = formats['rfc4253'].write(this); + return (this._rfc4253Cache); + } + + return (formats[format].write(this, options)); +}; + +Key.prototype.toString = function (format, options) { + return (this.toBuffer(format, options).toString()); +}; + +Key.prototype.hash = function (algo) { + assert.string(algo, 'algorithm'); + algo = algo.toLowerCase(); + if (algs.hashAlgs[algo] === undefined) + throw (new InvalidAlgorithmError(algo)); + + if (this._hashCache[algo]) + return (this._hashCache[algo]); + + var hash = crypto.createHash(algo). 
+ update(this.toBuffer('rfc4253')).digest(); + this._hashCache[algo] = hash; + return (hash); +}; + +Key.prototype.fingerprint = function (algo) { + if (algo === undefined) + algo = 'sha256'; + assert.string(algo, 'algorithm'); + var opts = { + type: 'key', + hash: this.hash(algo), + algorithm: algo + }; + return (new Fingerprint(opts)); +}; + +Key.prototype.defaultHashAlgorithm = function () { + var hashAlgo = 'sha1'; + if (this.type === 'rsa') + hashAlgo = 'sha256'; + if (this.type === 'dsa' && this.size > 1024) + hashAlgo = 'sha256'; + if (this.type === 'ed25519') + hashAlgo = 'sha512'; + if (this.type === 'ecdsa') { + if (this.size <= 256) + hashAlgo = 'sha256'; + else if (this.size <= 384) + hashAlgo = 'sha384'; + else + hashAlgo = 'sha512'; + } + return (hashAlgo); +}; + +Key.prototype.createVerify = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. */ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Verifier(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createVerify(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldVerify = v.verify.bind(v); + var key = this.toBuffer('pkcs8'); + var self = this; + v.verify = function (signature, fmt) { + if (Signature.isSignature(signature, [2, 0])) { + if (signature.type !== self.type) + return (false); + if (signature.hashAlgorithm && + signature.hashAlgorithm !== hashAlgo) + return (false); + return (oldVerify(key, signature.toBuffer('asn1'))); + + } else if (typeof (signature) === 'string' || + Buffer.isBuffer(signature)) { + return (oldVerify(key, signature, fmt)); + + /* + * Avoid doing this on valid arguments, walking the prototype + * chain can be quite slow. 
+ */ + } else if (Signature.isSignature(signature, [1, 0])) { + throw (new Error('signature was created by too old ' + + 'a version of sshpk and cannot be verified')); + + } else { + throw (new TypeError('signature must be a string, ' + + 'Buffer, or Signature object')); + } + }; + return (v); +}; + +Key.prototype.createDiffieHellman = function () { + if (this.type === 'rsa') + throw (new Error('RSA keys do not support Diffie-Hellman')); + + return (new DiffieHellman(this)); +}; +Key.prototype.createDH = Key.prototype.createDiffieHellman; + +Key.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + if (k instanceof PrivateKey) + k = k.toPublic(); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +Key.isKey = function (obj, ver) { + return (utils.isCompatible(obj, Key, ver)); +}; + +/* + * API versions for Key: + * [1,0] -- initial ver, may take Signature for createVerify or may not + * [1,1] -- added pkcs1, pkcs8 formats + * [1,2] -- added auto, ssh-private, openssh formats + * [1,3] -- added defaultHashAlgorithm + * [1,4] -- added ed support, createDH + * [1,5] -- first explicitly tagged version + */ +Key.prototype._sshpkApiVersion = [1, 5]; + +Key._oldVersionDetect = function (obj) { + assert.func(obj.toBuffer); + assert.func(obj.fingerprint); + if (obj.createDH) + return ([1, 4]); + if (obj.defaultHashAlgorithm) + return ([1, 3]); + if (obj.formats['auto']) + return ([1, 2]); + if (obj.formats['pkcs1']) + return ([1, 1]); + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/private-key.js b/node_modules/sshpk/lib/private-key.js new file mode 100644 index 0000000..f80d939 --- /dev/null +++ b/node_modules/sshpk/lib/private-key.js @@ -0,0 +1,231 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = PrivateKey; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var Fingerprint = require('./fingerprint'); +var Signature = require('./signature'); +var errs = require('./errors'); +var util = require('util'); +var utils = require('./utils'); +var edCompat; +var ed; + +try { + edCompat = require('./ed-compat'); +} catch (e) { + /* Just continue through, and bail out if we try to use it. 
*/ +} + +var Key = require('./key'); + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var KeyParseError = errs.KeyParseError; +var KeyEncryptedError = errs.KeyEncryptedError; + +var formats = {}; +formats['auto'] = require('./formats/auto'); +formats['pem'] = require('./formats/pem'); +formats['pkcs1'] = require('./formats/pkcs1'); +formats['pkcs8'] = require('./formats/pkcs8'); +formats['rfc4253'] = require('./formats/rfc4253'); +formats['ssh-private'] = require('./formats/ssh-private'); +formats['openssh'] = formats['ssh-private']; +formats['ssh'] = formats['ssh-private']; + +function PrivateKey(opts) { + assert.object(opts, 'options'); + Key.call(this, opts); + + this._pubCache = undefined; +} +util.inherits(PrivateKey, Key); + +PrivateKey.formats = formats; + +PrivateKey.prototype.toBuffer = function (format, options) { + if (format === undefined) + format = 'pkcs1'; + assert.string(format, 'format'); + assert.object(formats[format], 'formats[format]'); + assert.optionalObject(options, 'options'); + + return (formats[format].write(this, options)); +}; + +PrivateKey.prototype.hash = function (algo) { + return (this.toPublic().hash(algo)); +}; + +PrivateKey.prototype.toPublic = function () { + if (this._pubCache) + return (this._pubCache); + + var algInfo = algs.info[this.type]; + var pubParts = []; + for (var i = 0; i < algInfo.parts.length; ++i) { + var p = algInfo.parts[i]; + pubParts.push(this.part[p]); + } + + this._pubCache = new Key({ + type: this.type, + source: this, + parts: pubParts + }); + if (this.comment) + this._pubCache.comment = this.comment; + return (this._pubCache); +}; + +PrivateKey.prototype.derive = function (newType, newSize) { + assert.string(newType, 'type'); + assert.optionalNumber(newSize, 'size'); + var priv, pub; + + if (this.type === 'ed25519' && newType === 'curve25519') { + if (ed === undefined) + ed = require('jodid25519'); + + priv = this.part.r.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + priv = priv.slice(0, 32); + + pub = ed.dh.publicKey(priv); + priv = utils.mpNormalize(Buffer.concat([priv, pub])); + + return (new PrivateKey({ + type: 'curve25519', + parts: [ + { name: 'R', data: utils.mpNormalize(pub) }, + { name: 'r', data: priv } + ] + })); + } else if (this.type === 'curve25519' && newType === 'ed25519') { + if (ed === undefined) + ed = require('jodid25519'); + + priv = this.part.r.data; + if (priv[0] === 0x00) + priv = priv.slice(1); + priv = priv.slice(0, 32); + + pub = ed.eddsa.publicKey(priv.toString('binary')); + pub = new Buffer(pub, 'binary'); + + priv = utils.mpNormalize(Buffer.concat([priv, pub])); + + return (new PrivateKey({ + type: 'ed25519', + parts: [ + { name: 'R', data: utils.mpNormalize(pub) }, + { name: 'r', data: priv } + ] + })); + } + throw (new Error('Key derivation not supported from ' + this.type + + ' to ' + newType)); +}; + +PrivateKey.prototype.createVerify = function (hashAlgo) { + return (this.toPublic().createVerify(hashAlgo)); +}; + +PrivateKey.prototype.createSign = function (hashAlgo) { + if (hashAlgo === undefined) + hashAlgo = this.defaultHashAlgorithm(); + assert.string(hashAlgo, 'hash algorithm'); + + /* ED25519 is not supported by OpenSSL, use a javascript impl. 
*/ + if (this.type === 'ed25519' && edCompat !== undefined) + return (new edCompat.Signer(this, hashAlgo)); + if (this.type === 'curve25519') + throw (new Error('Curve25519 keys are not suitable for ' + + 'signing or verification')); + + var v, nm, err; + try { + nm = hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } catch (e) { + err = e; + } + if (v === undefined || (err instanceof Error && + err.message.match(/Unknown message digest/))) { + nm = 'RSA-'; + nm += hashAlgo.toUpperCase(); + v = crypto.createSign(nm); + } + assert.ok(v, 'failed to create verifier'); + var oldSign = v.sign.bind(v); + var key = this.toBuffer('pkcs1'); + var type = this.type; + v.sign = function () { + var sig = oldSign(key); + if (typeof (sig) === 'string') + sig = new Buffer(sig, 'binary'); + sig = Signature.parse(sig, type, 'asn1'); + sig.hashAlgorithm = hashAlgo; + return (sig); + }; + return (v); +}; + +PrivateKey.parse = function (data, format, options) { + if (typeof (data) !== 'string') + assert.buffer(data, 'data'); + if (format === undefined) + format = 'auto'; + assert.string(format, 'format'); + if (typeof (options) === 'string') + options = { filename: options }; + assert.optionalObject(options, 'options'); + if (options === undefined) + options = {}; + assert.optionalString(options.filename, 'options.filename'); + if (options.filename === undefined) + options.filename = '(unnamed)'; + + assert.object(formats[format], 'formats[format]'); + + try { + var k = formats[format].read(data, options); + assert.ok(k instanceof PrivateKey, 'key is not a private key'); + if (!k.comment) + k.comment = options.filename; + return (k); + } catch (e) { + if (e.name === 'KeyEncryptedError') + throw (e); + throw (new KeyParseError(options.filename, format, e)); + } +}; + +PrivateKey.isPrivateKey = function (obj, ver) { + return (utils.isCompatible(obj, PrivateKey, ver)); +}; + +/* + * API versions for PrivateKey: + * [1,0] -- initial ver + * [1,1] -- added auto, pkcs[18], openssh/ssh-private formats + * [1,2] -- added defaultHashAlgorithm + * [1,3] -- added derive, ed, createDH + * [1,4] -- first tagged version + */ +PrivateKey.prototype._sshpkApiVersion = [1, 4]; + +PrivateKey._oldVersionDetect = function (obj) { + assert.func(obj.toPublic); + assert.func(obj.createSign); + if (obj.derive) + return ([1, 3]); + if (obj.defaultHashAlgorithm) + return ([1, 2]); + if (obj.formats['auto']) + return ([1, 1]); + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/signature.js b/node_modules/sshpk/lib/signature.js new file mode 100644 index 0000000..964f55c --- /dev/null +++ b/node_modules/sshpk/lib/signature.js @@ -0,0 +1,245 @@ +// Copyright 2015 Joyent, Inc. 
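+/*
+ * A Signature wraps a parsed signature (rsa, dsa, ecdsa or ed25519) and can
+ * re-serialize it as ASN.1 DER or in SSH wire format.  A minimal usage
+ * sketch (the variable names here are illustrative only):
+ *
+ *   var sig = Signature.parse(sigBuf, 'ecdsa', 'asn1');
+ *   sig.toBuffer('ssh');    // same signature, SSH wire encoding
+ *   sig.toString('asn1');   // base64 of the DER encoding
+ */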
+ +module.exports = Signature; + +var assert = require('assert-plus'); +var algs = require('./algs'); +var crypto = require('crypto'); +var errs = require('./errors'); +var utils = require('./utils'); +var asn1 = require('asn1'); +var SSHBuffer = require('./ssh-buffer'); + +var InvalidAlgorithmError = errs.InvalidAlgorithmError; +var SignatureParseError = errs.SignatureParseError; + +function Signature(opts) { + assert.object(opts, 'options'); + assert.arrayOfObject(opts.parts, 'options.parts'); + assert.string(opts.type, 'options.type'); + + var partLookup = {}; + for (var i = 0; i < opts.parts.length; ++i) { + var part = opts.parts[i]; + partLookup[part.name] = part; + } + + this.type = opts.type; + this.hashAlgorithm = opts.hashAlgo; + this.parts = opts.parts; + this.part = partLookup; +} + +Signature.prototype.toBuffer = function (format) { + if (format === undefined) + format = 'asn1'; + assert.string(format, 'format'); + + var buf; + + switch (this.type) { + case 'rsa': + case 'ed25519': + if (format === 'ssh') { + buf = new SSHBuffer({}); + buf.writeString('ssh-' + this.type); + buf.writePart(this.part.sig); + return (buf.toBuffer()); + } else { + return (this.part.sig.data); + } + + case 'dsa': + case 'ecdsa': + var r, s; + if (format === 'asn1') { + var der = new asn1.BerWriter(); + der.startSequence(); + r = utils.mpNormalize(this.part.r.data); + s = utils.mpNormalize(this.part.s.data); + der.writeBuffer(r, asn1.Ber.Integer); + der.writeBuffer(s, asn1.Ber.Integer); + der.endSequence(); + return (der.buffer); + } else if (format === 'ssh' && this.type === 'dsa') { + buf = new SSHBuffer({}); + buf.writeString('ssh-dss'); + r = this.part.r.data; + if (r.length > 20 && r[0] === 0x00) + r = r.slice(1); + s = this.part.s.data; + if (s.length > 20 && s[0] === 0x00) + s = s.slice(1); + if ((this.hashAlgorithm && + this.hashAlgorithm !== 'sha1') || + r.length + s.length !== 40) { + throw (new Error('OpenSSH only supports ' + + 'DSA signatures with SHA1 hash')); + } + buf.writeBuffer(Buffer.concat([r, s])); + return (buf.toBuffer()); + } else if (format === 'ssh' && this.type === 'ecdsa') { + var inner = new SSHBuffer({}); + r = this.part.r.data; + inner.writeBuffer(r); + inner.writePart(this.part.s); + + buf = new SSHBuffer({}); + /* XXX: find a more proper way to do this? 
*/ + var curve; + if (r[0] === 0x00) + r = r.slice(1); + var sz = r.length * 8; + if (sz === 256) + curve = 'nistp256'; + else if (sz === 384) + curve = 'nistp384'; + else if (sz === 528) + curve = 'nistp521'; + buf.writeString('ecdsa-sha2-' + curve); + buf.writeBuffer(inner.toBuffer()); + return (buf.toBuffer()); + } + throw (new Error('Invalid signature format')); + default: + throw (new Error('Invalid signature data')); + } +}; + +Signature.prototype.toString = function (format) { + assert.optionalString(format, 'format'); + return (this.toBuffer(format).toString('base64')); +}; + +Signature.parse = function (data, type, format) { + if (typeof (data) === 'string') + data = new Buffer(data, 'base64'); + assert.buffer(data, 'data'); + assert.string(format, 'format'); + assert.string(type, 'type'); + + var opts = {}; + opts.type = type.toLowerCase(); + opts.parts = []; + + try { + assert.ok(data.length > 0, 'signature must not be empty'); + switch (opts.type) { + case 'rsa': + return (parseOneNum(data, type, format, opts, + 'ssh-rsa')); + case 'ed25519': + return (parseOneNum(data, type, format, opts, + 'ssh-ed25519')); + + case 'dsa': + case 'ecdsa': + if (format === 'asn1') + return (parseDSAasn1(data, type, format, opts)); + else if (opts.type === 'dsa') + return (parseDSA(data, type, format, opts)); + else + return (parseECDSA(data, type, format, opts)); + + default: + throw (new InvalidAlgorithmError(type)); + } + + } catch (e) { + if (e instanceof InvalidAlgorithmError) + throw (e); + throw (new SignatureParseError(type, format, e)); + } +}; + +function parseOneNum(data, type, format, opts, headType) { + if (format === 'ssh') { + try { + var buf = new SSHBuffer({buffer: data}); + var head = buf.readString(); + } catch (e) { + /* fall through */ + } + if (head === headType) { + var sig = buf.readPart(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + sig.name = 'sig'; + opts.parts.push(sig); + return (new Signature(opts)); + } + } + opts.parts.push({name: 'sig', data: data}); + return (new Signature(opts)); +} + +function parseDSAasn1(data, type, format, opts) { + var der = new asn1.BerReader(data); + der.readSequence(); + var r = der.readString(asn1.Ber.Integer, true); + var s = der.readString(asn1.Ber.Integer, true); + + opts.parts.push({name: 'r', data: utils.mpNormalize(r)}); + opts.parts.push({name: 's', data: utils.mpNormalize(s)}); + + return (new Signature(opts)); +} + +function parseDSA(data, type, format, opts) { + if (data.length != 40) { + var buf = new SSHBuffer({buffer: data}); + var d = buf.readBuffer(); + if (d.toString('ascii') === 'ssh-dss') + d = buf.readBuffer(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + assert.strictEqual(d.length, 40, 'invalid inner length'); + data = d; + } + opts.parts.push({name: 'r', data: data.slice(0, 20)}); + opts.parts.push({name: 's', data: data.slice(20, 40)}); + return (new Signature(opts)); +} + +function parseECDSA(data, type, format, opts) { + var buf = new SSHBuffer({buffer: data}); + + var r, s; + var inner = buf.readBuffer(); + if (inner.toString('ascii').match(/^ecdsa-/)) { + inner = buf.readBuffer(); + assert.ok(buf.atEnd(), 'extra trailing bytes on outer'); + buf = new SSHBuffer({buffer: inner}); + r = buf.readPart(); + } else { + r = {data: inner}; + } + + s = buf.readPart(); + assert.ok(buf.atEnd(), 'extra trailing bytes'); + + r.name = 'r'; + s.name = 's'; + + opts.parts.push(r); + opts.parts.push(s); + return (new Signature(opts)); +} + +Signature.isSignature = function (obj, ver) { + return 
(utils.isCompatible(obj, Signature, ver)); +}; + +/* + * API versions for Signature: + * [1,0] -- initial ver + * [2,0] -- support for rsa in full ssh format, compat with sshpk-agent + * hashAlgorithm property + * [2,1] -- first tagged version + */ +Signature.prototype._sshpkApiVersion = [2, 1]; + +Signature._oldVersionDetect = function (obj) { + assert.func(obj.toBuffer); + if (obj.hasOwnProperty('hashAlgorithm')) + return ([2, 0]); + return ([1, 0]); +}; diff --git a/node_modules/sshpk/lib/ssh-buffer.js b/node_modules/sshpk/lib/ssh-buffer.js new file mode 100644 index 0000000..8fc2cb8 --- /dev/null +++ b/node_modules/sshpk/lib/ssh-buffer.js @@ -0,0 +1,148 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = SSHBuffer; + +var assert = require('assert-plus'); + +function SSHBuffer(opts) { + assert.object(opts, 'options'); + if (opts.buffer !== undefined) + assert.buffer(opts.buffer, 'options.buffer'); + + this._size = opts.buffer ? opts.buffer.length : 1024; + this._buffer = opts.buffer || (new Buffer(this._size)); + this._offset = 0; +} + +SSHBuffer.prototype.toBuffer = function () { + return (this._buffer.slice(0, this._offset)); +}; + +SSHBuffer.prototype.atEnd = function () { + return (this._offset >= this._buffer.length); +}; + +SSHBuffer.prototype.remainder = function () { + return (this._buffer.slice(this._offset)); +}; + +SSHBuffer.prototype.skip = function (n) { + this._offset += n; +}; + +SSHBuffer.prototype.expand = function () { + this._size *= 2; + var buf = new Buffer(this._size); + this._buffer.copy(buf, 0); + this._buffer = buf; +}; + +SSHBuffer.prototype.readPart = function () { + return ({data: this.readBuffer()}); +}; + +SSHBuffer.prototype.readBuffer = function () { + var len = this._buffer.readUInt32BE(this._offset); + this._offset += 4; + assert.ok(this._offset + len <= this._buffer.length, + 'length out of bounds at +0x' + this._offset.toString(16) + + ' (data truncated?)'); + var buf = this._buffer.slice(this._offset, this._offset + len); + this._offset += len; + return (buf); +}; + +SSHBuffer.prototype.readString = function () { + return (this.readBuffer().toString()); +}; + +SSHBuffer.prototype.readCString = function () { + var offset = this._offset; + while (offset < this._buffer.length && + this._buffer[offset] !== 0x00) + offset++; + assert.ok(offset < this._buffer.length, 'c string does not terminate'); + var str = this._buffer.slice(this._offset, offset).toString(); + this._offset = offset + 1; + return (str); +}; + +SSHBuffer.prototype.readInt = function () { + var v = this._buffer.readUInt32BE(this._offset); + this._offset += 4; + return (v); +}; + +SSHBuffer.prototype.readInt64 = function () { + assert.ok(this._offset + 8 < this._buffer.length, + 'buffer not long enough to read Int64'); + var v = this._buffer.slice(this._offset, this._offset + 8); + this._offset += 8; + return (v); +}; + +SSHBuffer.prototype.readChar = function () { + var v = this._buffer[this._offset++]; + return (v); +}; + +SSHBuffer.prototype.writeBuffer = function (buf) { + while (this._offset + 4 + buf.length > this._size) + this.expand(); + this._buffer.writeUInt32BE(buf.length, this._offset); + this._offset += 4; + buf.copy(this._buffer, this._offset); + this._offset += buf.length; +}; + +SSHBuffer.prototype.writeString = function (str) { + this.writeBuffer(new Buffer(str, 'utf8')); +}; + +SSHBuffer.prototype.writeCString = function (str) { + while (this._offset + 1 + str.length > this._size) + this.expand(); + this._buffer.write(str, this._offset); + this._offset += str.length; + 
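+	/* C strings are NUL-terminated. */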
this._buffer[this._offset++] = 0; +}; + +SSHBuffer.prototype.writeInt = function (v) { + while (this._offset + 4 > this._size) + this.expand(); + this._buffer.writeUInt32BE(v, this._offset); + this._offset += 4; +}; + +SSHBuffer.prototype.writeInt64 = function (v) { + assert.buffer(v, 'value'); + if (v.length > 8) { + var lead = v.slice(0, v.length - 8); + for (var i = 0; i < lead.length; ++i) { + assert.strictEqual(lead[i], 0, + 'must fit in 64 bits of precision'); + } + v = v.slice(v.length - 8, v.length); + } + while (this._offset + 8 > this._size) + this.expand(); + v.copy(this._buffer, this._offset); + this._offset += 8; +}; + +SSHBuffer.prototype.writeChar = function (v) { + while (this._offset + 1 > this._size) + this.expand(); + this._buffer[this._offset++] = v; +}; + +SSHBuffer.prototype.writePart = function (p) { + this.writeBuffer(p.data); +}; + +SSHBuffer.prototype.write = function (buf) { + while (this._offset + buf.length > this._size) + this.expand(); + buf.copy(this._buffer, this._offset); + this._offset += buf.length; +}; diff --git a/node_modules/sshpk/lib/utils.js b/node_modules/sshpk/lib/utils.js new file mode 100644 index 0000000..466634c --- /dev/null +++ b/node_modules/sshpk/lib/utils.js @@ -0,0 +1,288 @@ +// Copyright 2015 Joyent, Inc. + +module.exports = { + bufferSplit: bufferSplit, + addRSAMissing: addRSAMissing, + calculateDSAPublic: calculateDSAPublic, + mpNormalize: mpNormalize, + ecNormalize: ecNormalize, + countZeros: countZeros, + assertCompatible: assertCompatible, + isCompatible: isCompatible, + opensslKeyDeriv: opensslKeyDeriv, + opensshCipherInfo: opensshCipherInfo +}; + +var assert = require('assert-plus'); +var PrivateKey = require('./private-key'); +var crypto = require('crypto'); + +var MAX_CLASS_DEPTH = 3; + +function isCompatible(obj, klass, needVer) { + if (obj === null || typeof (obj) !== 'object') + return (false); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return (true); + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + if (!proto || ++depth > MAX_CLASS_DEPTH) + return (false); + } + if (proto.constructor.name !== klass.name) + return (false); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + if (ver[0] != needVer[0] || ver[1] < needVer[1]) + return (false); + return (true); +} + +function assertCompatible(obj, klass, needVer, name) { + if (name === undefined) + name = 'object'; + assert.ok(obj, name + ' must not be null'); + assert.object(obj, name + ' must be an object'); + if (needVer === undefined) + needVer = klass.prototype._sshpkApiVersion; + if (obj instanceof klass && + klass.prototype._sshpkApiVersion[0] == needVer[0]) + return; + var proto = Object.getPrototypeOf(obj); + var depth = 0; + while (proto.constructor.name !== klass.name) { + proto = Object.getPrototypeOf(proto); + assert.ok(proto && ++depth <= MAX_CLASS_DEPTH, + name + ' must be a ' + klass.name + ' instance'); + } + assert.strictEqual(proto.constructor.name, klass.name, + name + ' must be a ' + klass.name + ' instance'); + var ver = proto._sshpkApiVersion; + if (ver === undefined) + ver = klass._oldVersionDetect(obj); + assert.ok(ver[0] == needVer[0] && ver[1] >= needVer[1], + name + ' must be compatible with ' + klass.name + ' klass ' + + 'version ' + needVer[0] + '.' 
+ needVer[1]); +} + +var CIPHER_LEN = { + 'des-ede3-cbc': { key: 7, iv: 8 }, + 'aes-128-cbc': { key: 16, iv: 16 } +}; +var PKCS5_SALT_LEN = 8; + +function opensslKeyDeriv(cipher, salt, passphrase, count) { + assert.buffer(salt, 'salt'); + assert.buffer(passphrase, 'passphrase'); + assert.number(count, 'iteration count'); + + var clen = CIPHER_LEN[cipher]; + assert.object(clen, 'supported cipher'); + + salt = salt.slice(0, PKCS5_SALT_LEN); + + var D, D_prev, bufs; + var material = new Buffer(0); + while (material.length < clen.key + clen.iv) { + bufs = []; + if (D_prev) + bufs.push(D_prev); + bufs.push(passphrase); + bufs.push(salt); + D = Buffer.concat(bufs); + for (var j = 0; j < count; ++j) + D = crypto.createHash('md5').update(D).digest(); + material = Buffer.concat([material, D]); + D_prev = D; + } + + return ({ + key: material.slice(0, clen.key), + iv: material.slice(clen.key, clen.key + clen.iv) + }); +} + +/* Count leading zero bits on a buffer */ +function countZeros(buf) { + var o = 0, obit = 8; + while (o < buf.length) { + var mask = (1 << obit); + if ((buf[o] & mask) === mask) + break; + obit--; + if (obit < 0) { + o++; + obit = 8; + } + } + return (o*8 + (8 - obit) - 1); +} + +function bufferSplit(buf, chr) { + assert.buffer(buf); + assert.string(chr); + + var parts = []; + var lastPart = 0; + var matches = 0; + for (var i = 0; i < buf.length; ++i) { + if (buf[i] === chr.charCodeAt(matches)) + ++matches; + else if (buf[i] === chr.charCodeAt(0)) + matches = 1; + else + matches = 0; + + if (matches >= chr.length) { + var newPart = i + 1; + parts.push(buf.slice(lastPart, newPart - matches)); + lastPart = newPart; + matches = 0; + } + } + if (lastPart <= buf.length) + parts.push(buf.slice(lastPart, buf.length)); + + return (parts); +} + +function ecNormalize(buf, addZero) { + assert.buffer(buf); + if (buf[0] === 0x00 && buf[1] === 0x04) { + if (addZero) + return (buf); + return (buf.slice(1)); + } else if (buf[0] === 0x04) { + if (!addZero) + return (buf); + } else { + while (buf[0] === 0x00) + buf = buf.slice(1); + if (buf[0] === 0x02 || buf[0] === 0x03) + throw (new Error('Compressed elliptic curve points ' + + 'are not supported')); + if (buf[0] !== 0x04) + throw (new Error('Not a valid elliptic curve point')); + if (!addZero) + return (buf); + } + var b = new Buffer(buf.length + 1); + b[0] = 0x0; + buf.copy(b, 1); + return (b); +} + +function mpNormalize(buf) { + assert.buffer(buf); + while (buf.length > 1 && buf[0] === 0x00 && (buf[1] & 0x80) === 0x00) + buf = buf.slice(1); + if ((buf[0] & 0x80) === 0x80) { + var b = new Buffer(buf.length + 1); + b[0] = 0x00; + buf.copy(b, 1); + buf = b; + } + return (buf); +} + +function bigintToMpBuf(bigint) { + var buf = new Buffer(bigint.toByteArray()); + buf = mpNormalize(buf); + return (buf); +} + +function calculateDSAPublic(g, p, x) { + assert.buffer(g); + assert.buffer(p); + assert.buffer(x); + try { + var bigInt = require('jsbn').BigInteger; + } catch (e) { + throw (new Error('To load a PKCS#8 format DSA private key, ' + + 'the node jsbn library is required.')); + } + g = new bigInt(g); + p = new bigInt(p); + x = new bigInt(x); + var y = g.modPow(x, p); + var ybuf = bigintToMpBuf(y); + return (ybuf); +} + +function addRSAMissing(key) { + assert.object(key); + assertCompatible(key, PrivateKey, [1, 1]); + try { + var bigInt = require('jsbn').BigInteger; + } catch (e) { + throw (new Error('To write a PEM private key from ' + + 'this source, the node jsbn lib is required.')); + } + + var d = new bigInt(key.part.d.data); + var buf; + + if 
(!key.part.dmodp) { + var p = new bigInt(key.part.p.data); + var dmodp = d.mod(p.subtract(1)); + + buf = bigintToMpBuf(dmodp); + key.part.dmodp = {name: 'dmodp', data: buf}; + key.parts.push(key.part.dmodp); + } + if (!key.part.dmodq) { + var q = new bigInt(key.part.q.data); + var dmodq = d.mod(q.subtract(1)); + + buf = bigintToMpBuf(dmodq); + key.part.dmodq = {name: 'dmodq', data: buf}; + key.parts.push(key.part.dmodq); + } +} + +function opensshCipherInfo(cipher) { + var inf = {}; + switch (cipher) { + case '3des-cbc': + inf.keySize = 24; + inf.blockSize = 8; + inf.opensslName = 'des-ede3-cbc'; + break; + case 'blowfish-cbc': + inf.keySize = 16; + inf.blockSize = 8; + inf.opensslName = 'bf-cbc'; + break; + case 'aes128-cbc': + case 'aes128-ctr': + case 'aes128-gcm@openssh.com': + inf.keySize = 16; + inf.blockSize = 16; + inf.opensslName = 'aes-128-' + cipher.slice(7, 10); + break; + case 'aes192-cbc': + case 'aes192-ctr': + case 'aes192-gcm@openssh.com': + inf.keySize = 24; + inf.blockSize = 16; + inf.opensslName = 'aes-192-' + cipher.slice(7, 10); + break; + case 'aes256-cbc': + case 'aes256-ctr': + case 'aes256-gcm@openssh.com': + inf.keySize = 32; + inf.blockSize = 16; + inf.opensslName = 'aes-256-' + cipher.slice(7, 10); + break; + default: + throw (new Error( + 'Unsupported openssl cipher "' + cipher + '"')); + } + return (inf); +} diff --git a/node_modules/sshpk/man/man1/sshpk-conv.1 b/node_modules/sshpk/man/man1/sshpk-conv.1 new file mode 100644 index 0000000..0887dce --- /dev/null +++ b/node_modules/sshpk/man/man1/sshpk-conv.1 @@ -0,0 +1,135 @@ +.TH sshpk\-conv 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-conv \- convert between key formats +.SH SYNOPSYS +.PP +\fB\fCsshpk\-conv\fR \-t FORMAT [FILENAME] [OPTIONS...] +.PP +\fB\fCsshpk\-conv\fR \-i [FILENAME] [OPTIONS...] +.SH DESCRIPTION +.PP +Reads in a public or private key and converts it between different formats, +particularly formats used in the SSH protocol and the well\-known PEM PKCS#1/7 +formats. +.PP +In the second form, with the \fB\fC\-i\fR option given, identifies a key and prints to +stderr information about its nature, size and fingerprint. +.SH EXAMPLES +.PP +Assume the following SSH\-format public key in \fB\fCid_ecdsa.pub\fR: +.PP +.RS +.nf +ecdsa\-sha2\-nistp256 AAAAE2VjZHNhLXNoYTI...9M/4c4= user@host +.fi +.RE +.PP +Identify it with \fB\fC\-i\fR: +.PP +.RS +.nf +$ sshpk\-conv \-i id_ecdsa.pub +id_ecdsa: a 256 bit ECDSA public key +ECDSA curve: nistp256 +Comment: user@host +Fingerprint: + SHA256:vCNX7eUkdvqqW0m4PoxQAZRv+CM4P4fS8+CbliAvS4k + 81:ad:d5:57:e5:6f:7d:a2:93:79:56:af:d7:c0:38:51 +.fi +.RE +.PP +Convert it to \fB\fCpkcs8\fR format, for use with e.g. OpenSSL: +.PP +.RS +.nf +$ sshpk\-conv \-t pkcs8 id_ecdsa +\-\-\-\-\-BEGIN PUBLIC KEY\-\-\-\-\- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAsA4R6N6AS3gzaPBeLjG2ObSgUsR +zOt+kWJoijLnw3ZMYUKmAx+lD0I5XUxdrPcs1vH5f3cn9TvRvO9L0z/hzg== +\-\-\-\-\-END PUBLIC KEY\-\-\-\-\- +.fi +.RE +.PP +Retrieve the public half of a private key: +.PP +.RS +.nf +$ openssl genrsa 2048 | sshpk\-conv \-t ssh \-c foo@bar +ssh\-rsa AAAAB3NzaC1yc2EAAA...koK7 foo@bar +.fi +.RE +.PP +Convert a private key to PKCS#1 (OpenSSL) format from a new\-style OpenSSH key +format (the \fB\fCssh\-keygen \-o\fR format): +.PP +.RS +.nf +$ ssh\-keygen \-o \-f foobar +\&... +$ sshpk\-conv \-p \-t pkcs1 foobar +\-\-\-\-\-BEGIN RSA PRIVATE KEY\-\-\-\-\- +MIIDpAIBAAKCAQEA6T/GYJndb1TRH3+NL.... 
+\-\-\-\-\-END RSA PRIVATE KEY\-\-\-\-\- +.fi +.RE +.SH OPTIONS +.TP +\fB\fC\-i, \-\-identify\fR +Instead of converting the key, output identifying information about it to +stderr, including its type, size and fingerprints. +.TP +\fB\fC\-p, \-\-private\fR +Treat the key as a private key instead of a public key (the default). If you +supply \fB\fCsshpk\-conv\fR with a private key and do not give this option, it will +extract only the public half of the key from it and work with that. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to take the key from instead of stdin. If a filename is supplied +as a positional argument, it is equivalent to using this option. +.TP +\fB\fC\-o PATH, \-\-out=PATH\fR +Output file name to use instead of stdout. +.PP +\fB\fC\-T FORMAT, \-\-informat=FORMAT\fR +.TP +\fB\fC\-t FORMAT, \-\-outformat=FORMAT\fR +Selects the input and output formats to be used (see FORMATS, below). +.TP +\fB\fC\-c TEXT, \-\-comment=TEXT\fR +Sets the key comment for the output file, if supported. +.SH FORMATS +.PP +Currently supported formats: +.TP +\fB\fCpem, pkcs1\fR +The standard PEM format used by older OpenSSH and most TLS libraries such as +OpenSSL. The classic \fB\fCid_rsa\fR file is usually in this format. It is an ASN.1 +encoded structure, base64\-encoded and placed between PEM headers. +.TP +\fB\fCssh\fR +The SSH public key text format (the format of an \fB\fCid_rsa.pub\fR file). A single +line, containing 3 space separated parts: the key type, key body and optional +key comment. +.TP +\fB\fCpkcs8\fR +A newer PEM format, usually used only for public keys by TLS libraries such +as OpenSSL. The ASN.1 structure is more generic than that of \fB\fCpkcs1\fR\&. +.TP +\fB\fCopenssh\fR +The new \fB\fCssh\-keygen \-o\fR format from OpenSSH. This can be mistaken for a PEM +encoding but is actually an OpenSSH internal format. +.TP +\fB\fCrfc4253\fR +The internal binary format of keys when sent over the wire in the SSH +protocol. This is also the format that the \fB\fCssh\-agent\fR uses in its protocol. +.SH SEE ALSO +.PP +.BR ssh-keygen (1), +.BR openssl (1) +.SH BUGS +.PP +Encrypted (password\-protected) keys are not supported. +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/sshpk/man/man1/sshpk-sign.1 b/node_modules/sshpk/man/man1/sshpk-sign.1 new file mode 100644 index 0000000..749916b --- /dev/null +++ b/node_modules/sshpk/man/man1/sshpk-sign.1 @@ -0,0 +1,81 @@ +.TH sshpk\-sign 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-sign \- sign data using an SSH key +.SH SYNOPSYS +.PP +\fB\fCsshpk\-sign\fR \-i KEYPATH [OPTION...] +.SH DESCRIPTION +.PP +Takes in arbitrary bytes, and signs them using an SSH private key. The key can +be of any type or format supported by the \fB\fCsshpk\fR library, including the +standard OpenSSH formats, as well as PEM PKCS#1 and PKCS#8. +.PP +The signature is printed out in Base64 encoding, unless the \fB\fC\-\-binary\fR or \fB\fC\-b\fR +option is given. +.SH EXAMPLES +.PP +Signing with default settings: +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa +MEUCIAMdLS/vXrrtWFepwe... +.fi +.RE +.PP +Signing in SSH (RFC 4253) format (rather than the default ASN.1): +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa \-t ssh +AAAAFGVjZHNhLXNoYTIt... +.fi +.RE +.PP +Saving the binary signature to a file: +.PP +.RS +.nf +$ printf 'foo' | sshpk\-sign \-i ~/.ssh/id_ecdsa \\ + \-o signature.bin \-b +$ cat signature.bin | base64 +MEUCIAMdLS/vXrrtWFepwe... 
+.fi +.RE +.SH OPTIONS +.TP +\fB\fC\-v, \-\-verbose\fR +Print extra information about the key and signature to stderr when signing. +.TP +\fB\fC\-b, \-\-binary\fR +Don't base64\-encode the signature before outputting it. +.TP +\fB\fC\-i KEY, \-\-identity=KEY\fR +Select the key to be used for signing. \fB\fCKEY\fR must be a relative or absolute +filesystem path to the key file. Any format supported by the \fB\fCsshpk\fR library +is supported, including OpenSSH formats and standard PEM PKCS. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to sign instead of stdin. +.TP +\fB\fC\-o PATH, \-\-out=PATH\fR +Output file to save signature in instead of stdout. +.TP +\fB\fC\-H HASH, \-\-hash=HASH\fR +Set the hash algorithm to be used for signing. This should be one of \fB\fCsha1\fR, +\fB\fCsha256\fR or \fB\fCsha512\fR\&. Some key types may place restrictions on which hash +algorithms may be used (e.g. ED25519 keys can only use SHA\-512). +.TP +\fB\fC\-t FORMAT, \-\-format=FORMAT\fR +Choose the signature format to use, from \fB\fCasn1\fR, \fB\fCssh\fR or \fB\fCraw\fR (only for +ED25519 signatures). The \fB\fCasn1\fR format is the default, as it is the format +used with TLS and typically the standard in most non\-SSH libraries (e.g. +OpenSSL). The \fB\fCssh\fR format is used in the SSH protocol and by the ssh\-agent. +.SH SEE ALSO +.PP +.BR sshpk-verify (1) +.SH BUGS +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/sshpk/man/man1/sshpk-verify.1 b/node_modules/sshpk/man/man1/sshpk-verify.1 new file mode 100644 index 0000000..f79169d --- /dev/null +++ b/node_modules/sshpk/man/man1/sshpk-verify.1 @@ -0,0 +1,68 @@ +.TH sshpk\-verify 1 "Jan 2016" sshpk "sshpk Commands" +.SH NAME +.PP +sshpk\-verify \- verify a signature on data using an SSH key +.SH SYNOPSYS +.PP +\fB\fCsshpk\-verify\fR \-i KEYPATH \-s SIGNATURE [OPTION...] +.SH DESCRIPTION +.PP +Takes in arbitrary bytes and a Base64\-encoded signature, and verifies that the +signature was produced by the private half of the given SSH public key. +.SH EXAMPLES +.PP +.RS +.nf +$ printf 'foo' | sshpk\-verify \-i ~/.ssh/id_ecdsa \-s MEUCIQCYp... +OK +$ printf 'foo' | sshpk\-verify \-i ~/.ssh/id_ecdsa \-s GARBAGE... +NOT OK +.fi +.RE +.SH EXIT STATUS +.TP +\fB\fC0\fR +Signature validates and matches the key. +.TP +\fB\fC1\fR +Signature is parseable and the correct length but does not match the key or +otherwise is invalid. +.TP +\fB\fC2\fR +The signature or key could not be parsed. +.TP +\fB\fC3\fR +Invalid commandline options were supplied. +.SH OPTIONS +.TP +\fB\fC\-v, \-\-verbose\fR +Print extra information about the key and signature to stderr when verifying. +.TP +\fB\fC\-i KEY, \-\-identity=KEY\fR +Select the key to be used for verification. \fB\fCKEY\fR must be a relative or +absolute filesystem path to the key file. Any format supported by the \fB\fCsshpk\fR +library is supported, including OpenSSH formats and standard PEM PKCS. +.TP +\fB\fC\-s BASE64, \-\-signature=BASE64\fR +Supplies the base64\-encoded signature to be verified. +.TP +\fB\fC\-f PATH, \-\-file=PATH\fR +Input file to verify instead of stdin. +.TP +\fB\fC\-H HASH, \-\-hash=HASH\fR +Set the hash algorithm to be used for signing. This should be one of \fB\fCsha1\fR, +\fB\fCsha256\fR or \fB\fCsha512\fR\&. Some key types may place restrictions on which hash +algorithms may be used (e.g. ED25519 keys can only use SHA\-512). 
+.TP +\fB\fC\-t FORMAT, \-\-format=FORMAT\fR +Choose the signature format to use, from \fB\fCasn1\fR, \fB\fCssh\fR or \fB\fCraw\fR (only for +ED25519 signatures). The \fB\fCasn1\fR format is the default, as it is the format +used with TLS and typically the standard in most non\-SSH libraries (e.g. +OpenSSL). The \fB\fCssh\fR format is used in the SSH protocol and by the ssh\-agent. +.SH SEE ALSO +.PP +.BR sshpk-sign (1) +.SH BUGS +.PP +Report bugs at Github +\[la]https://github.com/arekinath/node-sshpk/issues\[ra] diff --git a/node_modules/sshpk/node_modules/assert-plus/AUTHORS b/node_modules/sshpk/node_modules/assert-plus/AUTHORS new file mode 100644 index 0000000..1923524 --- /dev/null +++ b/node_modules/sshpk/node_modules/assert-plus/AUTHORS @@ -0,0 +1,6 @@ +Dave Eddy +Fred Kuo +Lars-Magnus Skog +Mark Cavage +Patrick Mooney +Rob Gulewich diff --git a/node_modules/sshpk/node_modules/assert-plus/CHANGES.md b/node_modules/sshpk/node_modules/assert-plus/CHANGES.md new file mode 100644 index 0000000..57d92bf --- /dev/null +++ b/node_modules/sshpk/node_modules/assert-plus/CHANGES.md @@ -0,0 +1,14 @@ +# assert-plus Changelog + +## 1.0.0 + +- *BREAKING* assert.number (and derivatives) now accept Infinity as valid input +- Add assert.finite check. Previous assert.number callers should use this if + they expect Infinity inputs to throw. + +## 0.2.0 + +- Fix `assert.object(null)` so it throws +- Fix optional/arrayOf exports for non-type-of asserts +- Add optiona/arrayOf exports for Stream/Date/Regex/uuid +- Add basic unit test coverage diff --git a/node_modules/sshpk/node_modules/assert-plus/README.md b/node_modules/sshpk/node_modules/assert-plus/README.md new file mode 100644 index 0000000..ec200d1 --- /dev/null +++ b/node_modules/sshpk/node_modules/assert-plus/README.md @@ -0,0 +1,162 @@ +# assert-plus + +This library is a super small wrapper over node's assert module that has two +things: (1) the ability to disable assertions with the environment variable +NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like +`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks +like this: + +```javascript + var assert = require('assert-plus'); + + function fooAccount(options, callback) { + assert.object(options, 'options'); + assert.number(options.id, 'options.id'); + assert.bool(options.isManager, 'options.isManager'); + assert.string(options.name, 'options.name'); + assert.arrayOfString(options.email, 'options.email'); + assert.func(callback, 'callback'); + + // Do stuff + callback(null, {}); + } +``` + +# API + +All methods that *aren't* part of node's core assert API are simply assumed to +take an argument, and then a string 'name' that's not a message; `AssertionError` +will be thrown if the assertion fails with a message like: + + AssertionError: foo (string) is required + at test (/home/mark/work/foo/foo.js:3:9) + at Object. (/home/mark/work/foo/foo.js:15:1) + at Module._compile (module.js:446:26) + at Object..js (module.js:464:10) + at Module.load (module.js:353:31) + at Function._load (module.js:311:12) + at Array.0 (module.js:484:10) + at EventEmitter._tickCallback (node.js:190:38) + +from: + +```javascript + function test(foo) { + assert.string(foo, 'foo'); + } +``` + +There you go. 
You can check that arrays are of a homogeneous type with `Arrayof$Type`: + +```javascript + function test(foo) { + assert.arrayOfString(foo, 'foo'); + } +``` + +You can assert IFF an argument is not `undefined` (i.e., an optional arg): + +```javascript + assert.optionalString(foo, 'foo'); +``` + +Lastly, you can opt-out of assertion checking altogether by setting the +environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have +lots of assertions, and don't want to pay `typeof ()` taxes to v8 in +production. Be advised: The standard functions re-exported from `assert` are +also disabled in assert-plus if NDEBUG is specified. Using them directly from +the `assert` module avoids this behavior. + +The complete list of APIs is: + +* assert.array +* assert.bool +* assert.buffer +* assert.func +* assert.number +* assert.finite +* assert.object +* assert.string +* assert.stream +* assert.date +* assert.regexp +* assert.uuid +* assert.arrayOfArray +* assert.arrayOfBool +* assert.arrayOfBuffer +* assert.arrayOfFunc +* assert.arrayOfNumber +* assert.arrayOfFinite +* assert.arrayOfObject +* assert.arrayOfString +* assert.arrayOfStream +* assert.arrayOfDate +* assert.arrayOfRegexp +* assert.arrayOfUuid +* assert.optionalArray +* assert.optionalBool +* assert.optionalBuffer +* assert.optionalFunc +* assert.optionalNumber +* assert.optionalFinite +* assert.optionalObject +* assert.optionalString +* assert.optionalStream +* assert.optionalDate +* assert.optionalRegexp +* assert.optionalUuid +* assert.optionalArrayOfArray +* assert.optionalArrayOfBool +* assert.optionalArrayOfBuffer +* assert.optionalArrayOfFunc +* assert.optionalArrayOfNumber +* assert.optionalArrayOfFinite +* assert.optionalArrayOfObject +* assert.optionalArrayOfString +* assert.optionalArrayOfStream +* assert.optionalArrayOfDate +* assert.optionalArrayOfRegexp +* assert.optionalArrayOfUuid +* assert.AssertionError +* assert.fail +* assert.ok +* assert.equal +* assert.notEqual +* assert.deepEqual +* assert.notDeepEqual +* assert.strictEqual +* assert.notStrictEqual +* assert.throws +* assert.doesNotThrow +* assert.ifError + +# Installation + + npm install assert-plus + +## License + +The MIT License (MIT) +Copyright (c) 2012 Mark Cavage + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +## Bugs + +See . 
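
The assert-plus README above describes the name-based error messages and the `NODE_NDEBUG` opt-out; a rough sketch of both behaviours (the values and names here are illustrative, not from the package):

```javascript
var assert = require('assert-plus');

// The second argument is a parameter *name*, not a message; it is
// interpolated into the AssertionError that gets thrown.
try {
    assert.string(42, 'foo');
} catch (err) {
    console.log(err.message); // 'foo (string) is required'
}

// Optional variants only check when the value is neither undefined nor null.
assert.optionalString(undefined, 'maybe'); // passes
assert.optionalString('hi', 'maybe');      // passes

// Running the same script with NODE_NDEBUG=1 in the environment turns every
// assert-plus check (including the re-exported core assertions) into a no-op,
// so the try/catch above would log nothing.
```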
diff --git a/node_modules/sshpk/node_modules/assert-plus/assert.js b/node_modules/sshpk/node_modules/assert-plus/assert.js new file mode 100644 index 0000000..26f944e --- /dev/null +++ b/node_modules/sshpk/node_modules/assert-plus/assert.js @@ -0,0 +1,211 @@ +// Copyright (c) 2012, Mark Cavage. All rights reserved. +// Copyright 2015 Joyent, Inc. + +var assert = require('assert'); +var Stream = require('stream').Stream; +var util = require('util'); + + +///--- Globals + +/* JSSTYLED */ +var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/; + + +///--- Internal + +function _capitalize(str) { + return (str.charAt(0).toUpperCase() + str.slice(1)); +} + +function _toss(name, expected, oper, arg, actual) { + throw new assert.AssertionError({ + message: util.format('%s (%s) is required', name, expected), + actual: (actual === undefined) ? typeof (arg) : actual(arg), + expected: expected, + operator: oper || '===', + stackStartFunction: _toss.caller + }); +} + +function _getClass(arg) { + return (Object.prototype.toString.call(arg).slice(8, -1)); +} + +function noop() { + // Why even bother with asserts? +} + + +///--- Exports + +var types = { + bool: { + check: function (arg) { return typeof (arg) === 'boolean'; } + }, + func: { + check: function (arg) { return typeof (arg) === 'function'; } + }, + string: { + check: function (arg) { return typeof (arg) === 'string'; } + }, + object: { + check: function (arg) { + return typeof (arg) === 'object' && arg !== null; + } + }, + number: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg); + } + }, + finite: { + check: function (arg) { + return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg); + } + }, + buffer: { + check: function (arg) { return Buffer.isBuffer(arg); }, + operator: 'Buffer.isBuffer' + }, + array: { + check: function (arg) { return Array.isArray(arg); }, + operator: 'Array.isArray' + }, + stream: { + check: function (arg) { return arg instanceof Stream; }, + operator: 'instanceof', + actual: _getClass + }, + date: { + check: function (arg) { return arg instanceof Date; }, + operator: 'instanceof', + actual: _getClass + }, + regexp: { + check: function (arg) { return arg instanceof RegExp; }, + operator: 'instanceof', + actual: _getClass + }, + uuid: { + check: function (arg) { + return typeof (arg) === 'string' && UUID_REGEXP.test(arg); + }, + operator: 'isUUID' + } +}; + +function _setExports(ndebug) { + var keys = Object.keys(types); + var out; + + /* re-export standard assert */ + if (process.env.NODE_NDEBUG) { + out = noop; + } else { + out = function (arg, msg) { + if (!arg) { + _toss(msg, 'true', arg); + } + }; + } + + /* standard checks */ + keys.forEach(function (k) { + if (ndebug) { + out[k] = noop; + return; + } + var type = types[k]; + out[k] = function (arg, msg) { + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* optional checks */ + keys.forEach(function (k) { + var name = 'optional' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!type.check(arg)) { + _toss(msg, k, type.operator, arg, type.actual); + } + }; + }); + + /* arrayOf checks */ + keys.forEach(function (k) { + var name = 'arrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if 
(!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* optionalArrayOf checks */ + keys.forEach(function (k) { + var name = 'optionalArrayOf' + _capitalize(k); + if (ndebug) { + out[name] = noop; + return; + } + var type = types[k]; + var expected = '[' + k + ']'; + out[name] = function (arg, msg) { + if (arg === undefined || arg === null) { + return; + } + if (!Array.isArray(arg)) { + _toss(msg, expected, type.operator, arg, type.actual); + } + var i; + for (i = 0; i < arg.length; i++) { + if (!type.check(arg[i])) { + _toss(msg, expected, type.operator, arg, type.actual); + } + } + }; + }); + + /* re-export built-in assertions */ + Object.keys(assert).forEach(function (k) { + if (k === 'AssertionError') { + out[k] = assert[k]; + return; + } + if (ndebug) { + out[k] = noop; + return; + } + out[k] = assert[k]; + }); + + /* export ourselves (for unit tests _only_) */ + out._setExports = _setExports; + + return out; +} + +module.exports = _setExports(process.env.NODE_NDEBUG); diff --git a/node_modules/sshpk/node_modules/assert-plus/package.json b/node_modules/sshpk/node_modules/assert-plus/package.json new file mode 100644 index 0000000..40d6a5c --- /dev/null +++ b/node_modules/sshpk/node_modules/assert-plus/package.json @@ -0,0 +1,23 @@ +{ + "author": "Mark Cavage ", + "name": "assert-plus", + "description": "Extra assertions on top of node's assert module", + "version": "1.0.0", + "license": "MIT", + "main": "./assert.js", + "devDependencies": { + "tape": "4.2.2", + "faucet": "0.0.1" + }, + "optionalDependencies": {}, + "scripts": { + "test": "./node_modules/.bin/tape tests/*.js | ./node_modules/.bin/faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/mcavage/node-assert-plus.git" + }, + "engines": { + "node": ">=0.8" + } +} diff --git a/node_modules/sshpk/package.json b/node_modules/sshpk/package.json new file mode 100644 index 0000000..873a226 --- /dev/null +++ b/node_modules/sshpk/package.json @@ -0,0 +1,60 @@ +{ + "name": "sshpk", + "version": "1.10.1", + "description": "A library for finding and using SSH public keys", + "main": "lib/index.js", + "scripts": { + "test": "tape test/*.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/arekinath/node-sshpk.git" + }, + "author": "Joyent, Inc", + "contributors": [ + { + "name": "Dave Eddy", + "email": "dave@daveeddy.com" + }, + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "Alex Wilson", + "email": "alex@cooperi.net" + } + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/arekinath/node-sshpk/issues" + }, + "engines": { + "node": ">=0.10.0" + }, + "directories": { + "bin": "./bin", + "lib": "./lib", + "man": "./man/man1" + }, + "homepage": "https://github.com/arekinath/node-sshpk#readme", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "dashdash": "^1.12.0", + "getpass": "^0.1.1" + }, + "optionalDependencies": { + "jsbn": "~0.1.0", + "tweetnacl": "~0.14.0", + "jodid25519": "^1.0.0", + "ecc-jsbn": "~0.1.1", + "bcrypt-pbkdf": "^1.0.0" + }, + "devDependencies": { + "tape": "^3.5.0", + "benchmark": "^1.0.0", + "sinon": "^1.17.2", + "temp": "^0.8.2" + } +} diff --git a/node_modules/statuses/HISTORY.md b/node_modules/statuses/HISTORY.md new file mode 100644 index 0000000..b175575 --- /dev/null +++ b/node_modules/statuses/HISTORY.md @@ -0,0 +1,50 @@ 
+1.3.0 / 2016-05-17 +================== + + * Add `421 Misdirected Request` + * perf: enable strict mode + +1.2.1 / 2015-02-01 +================== + + * Fix message for status 451 + - `451 Unavailable For Legal Reasons` + +1.2.0 / 2014-09-28 +================== + + * Add `208 Already Repored` + * Add `226 IM Used` + * Add `306 (Unused)` + * Add `415 Unable For Legal Reasons` + * Add `508 Loop Detected` + +1.1.1 / 2014-09-24 +================== + + * Add missing 308 to `codes.json` + +1.1.0 / 2014-09-21 +================== + + * Add `codes.json` for universal support + +1.0.4 / 2014-08-20 +================== + + * Package cleanup + +1.0.3 / 2014-06-08 +================== + + * Add 308 to `.redirect` category + +1.0.2 / 2014-03-13 +================== + + * Add `.retry` category + +1.0.1 / 2014-03-12 +================== + + * Initial release diff --git a/node_modules/statuses/LICENSE b/node_modules/statuses/LICENSE new file mode 100644 index 0000000..82af4df --- /dev/null +++ b/node_modules/statuses/LICENSE @@ -0,0 +1,23 @@ + +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong me@jongleberry.com +Copyright (c) 2016 Douglas Christopher Wilson doug@somethingdoug.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/statuses/README.md b/node_modules/statuses/README.md new file mode 100644 index 0000000..1a699da --- /dev/null +++ b/node_modules/statuses/README.md @@ -0,0 +1,114 @@ +# Statuses + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +HTTP status utility for node. + +## API + +```js +var status = require('statuses') +``` + +### var code = status(Integer || String) + +If `Integer` or `String` is a valid HTTP code or status message, then the appropriate `code` will be returned. Otherwise, an error will be thrown. + +```js +status(403) // => 403 +status('403') // => 403 +status('forbidden') // => 403 +status('Forbidden') // => 403 +status(306) // throws, as it's not supported by node.js +``` + +### status.codes + +Returns an array of all the status codes as `Integer`s. + +### var msg = status[code] + +Map of `code` to `status message`. `undefined` for invalid `code`s. + +```js +status[404] // => 'Not Found' +``` + +### var code = status[msg] + +Map of `status message` to `code`. `msg` can either be title-cased or lower-cased. `undefined` for invalid `status message`s. 
+ +```js +status['not found'] // => 404 +status['Not Found'] // => 404 +``` + +### status.redirect[code] + +Returns `true` if a status code is a valid redirect status. + +```js +status.redirect[200] // => undefined +status.redirect[301] // => true +``` + +### status.empty[code] + +Returns `true` if a status code expects an empty body. + +```js +status.empty[200] // => undefined +status.empty[204] // => true +status.empty[304] // => true +``` + +### status.retry[code] + +Returns `true` if you should retry the rest. + +```js +status.retry[501] // => undefined +status.retry[503] // => true +``` + +### statuses/codes.json + +```js +var codes = require('statuses/codes.json') +``` + +This is a JSON file of the status codes +taken from `require('http').STATUS_CODES`. +This is saved so that codes are consistent even in older node.js versions. +For example, `308` will be added in v0.12. + +## Adding Status Codes + +The status codes are primarily sourced from http://www.iana.org/assignments/http-status-codes/http-status-codes-1.csv. +Additionally, custom codes are added from http://en.wikipedia.org/wiki/List_of_HTTP_status_codes. +These are added manually in the `lib/*.json` files. +If you would like to add a status code, add it to the appropriate JSON file. + +To rebuild `codes.json`, run the following: + +```bash +# update src/iana.json +npm run fetch +# build codes.json +npm run build +``` + +[npm-image]: https://img.shields.io/npm/v/statuses.svg +[npm-url]: https://npmjs.org/package/statuses +[node-version-image]: https://img.shields.io/badge/node.js-%3E%3D_0.6-brightgreen.svg +[node-version-url]: https://nodejs.org/en/download +[travis-image]: https://img.shields.io/travis/jshttp/statuses.svg +[travis-url]: https://travis-ci.org/jshttp/statuses +[coveralls-image]: https://img.shields.io/coveralls/jshttp/statuses.svg +[coveralls-url]: https://coveralls.io/r/jshttp/statuses?branch=master +[downloads-image]: https://img.shields.io/npm/dm/statuses.svg +[downloads-url]: https://npmjs.org/package/statuses diff --git a/node_modules/statuses/codes.json b/node_modules/statuses/codes.json new file mode 100644 index 0000000..e765123 --- /dev/null +++ b/node_modules/statuses/codes.json @@ -0,0 +1,65 @@ +{ + "100": "Continue", + "101": "Switching Protocols", + "102": "Processing", + "200": "OK", + "201": "Created", + "202": "Accepted", + "203": "Non-Authoritative Information", + "204": "No Content", + "205": "Reset Content", + "206": "Partial Content", + "207": "Multi-Status", + "208": "Already Reported", + "226": "IM Used", + "300": "Multiple Choices", + "301": "Moved Permanently", + "302": "Found", + "303": "See Other", + "304": "Not Modified", + "305": "Use Proxy", + "306": "(Unused)", + "307": "Temporary Redirect", + "308": "Permanent Redirect", + "400": "Bad Request", + "401": "Unauthorized", + "402": "Payment Required", + "403": "Forbidden", + "404": "Not Found", + "405": "Method Not Allowed", + "406": "Not Acceptable", + "407": "Proxy Authentication Required", + "408": "Request Timeout", + "409": "Conflict", + "410": "Gone", + "411": "Length Required", + "412": "Precondition Failed", + "413": "Payload Too Large", + "414": "URI Too Long", + "415": "Unsupported Media Type", + "416": "Range Not Satisfiable", + "417": "Expectation Failed", + "418": "I'm a teapot", + "421": "Misdirected Request", + "422": "Unprocessable Entity", + "423": "Locked", + "424": "Failed Dependency", + "425": "Unordered Collection", + "426": "Upgrade Required", + "428": "Precondition Required", + "429": "Too Many Requests", + 
"431": "Request Header Fields Too Large", + "451": "Unavailable For Legal Reasons", + "500": "Internal Server Error", + "501": "Not Implemented", + "502": "Bad Gateway", + "503": "Service Unavailable", + "504": "Gateway Timeout", + "505": "HTTP Version Not Supported", + "506": "Variant Also Negotiates", + "507": "Insufficient Storage", + "508": "Loop Detected", + "509": "Bandwidth Limit Exceeded", + "510": "Not Extended", + "511": "Network Authentication Required" +} \ No newline at end of file diff --git a/node_modules/statuses/index.js b/node_modules/statuses/index.js new file mode 100644 index 0000000..c2b555a --- /dev/null +++ b/node_modules/statuses/index.js @@ -0,0 +1,110 @@ +/*! + * statuses + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var codes = require('./codes.json') + +/** + * Module exports. + * @public + */ + +module.exports = status + +// array of status codes +status.codes = populateStatusesMap(status, codes) + +// status codes for redirects +status.redirect = { + 300: true, + 301: true, + 302: true, + 303: true, + 305: true, + 307: true, + 308: true +} + +// status codes for empty bodies +status.empty = { + 204: true, + 205: true, + 304: true +} + +// status codes for when you should retry the request +status.retry = { + 502: true, + 503: true, + 504: true +} + +/** + * Populate the statuses map for given codes. + * @private + */ + +function populateStatusesMap (statuses, codes) { + var arr = [] + + Object.keys(codes).forEach(function forEachCode (code) { + var message = codes[code] + var status = Number(code) + + // Populate properties + statuses[status] = message + statuses[message] = status + statuses[message.toLowerCase()] = status + + // Add to array + arr.push(status) + }) + + return arr +} + +/** + * Get the status code. + * + * Given a number, this will throw if it is not a known status + * code, otherwise the code will be returned. Given a string, + * the string will be parsed for a number and return the code + * if valid, otherwise will lookup the code assuming this is + * the status message. 
+ * + * @param {string|number} code + * @returns {string} + * @public + */ + +function status (code) { + if (typeof code === 'number') { + if (!status[code]) throw new Error('invalid status code: ' + code) + return code + } + + if (typeof code !== 'string') { + throw new TypeError('code must be a number or string') + } + + // '403' + var n = parseInt(code, 10) + if (!isNaN(n)) { + if (!status[n]) throw new Error('invalid status code: ' + n) + return n + } + + n = status[code.toLowerCase()] + if (!n) throw new Error('invalid status message: "' + code + '"') + return n +} diff --git a/node_modules/statuses/package.json b/node_modules/statuses/package.json new file mode 100644 index 0000000..b8ebe06 --- /dev/null +++ b/node_modules/statuses/package.json @@ -0,0 +1,44 @@ +{ + "name": "statuses", + "description": "HTTP status utility", + "version": "1.3.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "repository": "jshttp/statuses", + "license": "MIT", + "keywords": [ + "http", + "status", + "code" + ], + "files": [ + "HISTORY.md", + "index.js", + "codes.json", + "LICENSE" + ], + "devDependencies": { + "csv-parse": "1.0.1", + "eslint": "2.10.2", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.1.0", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "1.21.5", + "stream-to-array": "2.2.0" + }, + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build.js", + "fetch": "node scripts/fetch.js", + "lint": "eslint **/*.js", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "update": "npm run fetch && npm run build" + } +} diff --git a/node_modules/stream-browserify/.travis.yml b/node_modules/stream-browserify/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/stream-browserify/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/stream-browserify/LICENSE b/node_modules/stream-browserify/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/stream-browserify/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
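
Stepping back to the statuses module added above, a minimal usage sketch of the lookups and category maps its README and `index.js` document (the sample codes are arbitrary):

```javascript
var status = require('statuses');

// Lookups work by number, numeric string, or message in any case.
status(403);          // => 403
status('Forbidden');  // => 403
status[404];          // => 'Not Found'
status['not found'];  // => 404

// Category maps hold `true` for member codes and are otherwise undefined.
status.redirect[301]; // => true
status.empty[204];    // => true
status.retry[503];    // => true

// Unknown codes throw, as the status() function above shows.
try {
    status(799);
} catch (err) {
    console.log(err.message); // 'invalid status code: 799'
}
```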
diff --git a/node_modules/stream-browserify/index.js b/node_modules/stream-browserify/index.js new file mode 100644 index 0000000..8d6a13a --- /dev/null +++ b/node_modules/stream-browserify/index.js @@ -0,0 +1,127 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +module.exports = Stream; + +var EE = require('events').EventEmitter; +var inherits = require('inherits'); + +inherits(Stream, EE); +Stream.Readable = require('readable-stream/readable.js'); +Stream.Writable = require('readable-stream/writable.js'); +Stream.Duplex = require('readable-stream/duplex.js'); +Stream.Transform = require('readable-stream/transform.js'); +Stream.PassThrough = require('readable-stream/passthrough.js'); + +// Backwards-compat with node 0.4.x +Stream.Stream = Stream; + + + +// old-style streams. Note that the pipe method (the only relevant +// part of this class) is overridden in the Readable class. + +function Stream() { + EE.call(this); +} + +Stream.prototype.pipe = function(dest, options) { + var source = this; + + function ondata(chunk) { + if (dest.writable) { + if (false === dest.write(chunk) && source.pause) { + source.pause(); + } + } + } + + source.on('data', ondata); + + function ondrain() { + if (source.readable && source.resume) { + source.resume(); + } + } + + dest.on('drain', ondrain); + + // If the 'end' option is not supplied, dest.end() will be called when + // source gets the 'end' or 'close' events. Only dest.end() once. + if (!dest._isStdio && (!options || options.end !== false)) { + source.on('end', onend); + source.on('close', onclose); + } + + var didOnEnd = false; + function onend() { + if (didOnEnd) return; + didOnEnd = true; + + dest.end(); + } + + + function onclose() { + if (didOnEnd) return; + didOnEnd = true; + + if (typeof dest.destroy === 'function') dest.destroy(); + } + + // don't leave dangling pipes when there are errors. + function onerror(er) { + cleanup(); + if (EE.listenerCount(this, 'error') === 0) { + throw er; // Unhandled stream error in pipe. + } + } + + source.on('error', onerror); + dest.on('error', onerror); + + // remove all the event listeners that were added. 
+ function cleanup() { + source.removeListener('data', ondata); + dest.removeListener('drain', ondrain); + + source.removeListener('end', onend); + source.removeListener('close', onclose); + + source.removeListener('error', onerror); + dest.removeListener('error', onerror); + + source.removeListener('end', cleanup); + source.removeListener('close', cleanup); + + dest.removeListener('close', cleanup); + } + + source.on('end', cleanup); + source.on('close', cleanup); + + dest.on('close', cleanup); + + dest.emit('pipe', source); + + // Allow for unix-like usage: A.pipe(B).pipe(C) + return dest; +}; diff --git a/node_modules/stream-browserify/package.json b/node_modules/stream-browserify/package.json new file mode 100644 index 0000000..800dbcd --- /dev/null +++ b/node_modules/stream-browserify/package.json @@ -0,0 +1,51 @@ +{ + "name": "stream-browserify", + "version": "1.0.0", + "description": "the stream module from node core for browsers", + "main": "index.js", + "dependencies": { + "inherits": "~2.0.1", + "readable-stream": "^1.0.27-1" + }, + "devDependencies": { + "typedarray": "~0.0.5", + "tape": "~2.3.2" + }, + "scripts": { + "test": "tape test/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/stream-browserify.git" + }, + "homepage": "https://github.com/substack/stream-browserify", + "keywords": [ + "stream", + "browser", + "browserify" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/3.5", + "firefox/10", + "firefox/nightly", + "chrome/10", + "chrome/latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/stream-browserify/readme.markdown b/node_modules/stream-browserify/readme.markdown new file mode 100644 index 0000000..9f55b94 --- /dev/null +++ b/node_modules/stream-browserify/readme.markdown @@ -0,0 +1,27 @@ +# stream-browserify + +the stream module from node core, for browsers! + +[![build status](https://secure.travis-ci.org/substack/stream-browserify.png)](http://travis-ci.org/substack/stream-browserify) + +[![testling badge](https://ci.testling.com/substack/stream-browserify.png)](https://ci.testling.com/substack/stream-browserify) + +# methods + +Consult the node core +[documentation on streams](http://nodejs.org/docs/latest/api/stream.html). + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install stream-browserify +``` + +but if you are using browserify you will get this module automatically when you +do `require('stream')`. 
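
The readme defers to the node core stream documentation for the API, so here is a small sketch of composing these streams in a browserify bundle; `Collect` is an illustrative writable in the style of the package's `test/buf.js`, not part of the module itself:

```javascript
// With browserify, require('stream') resolves to stream-browserify.
var stream = require('stream');
var inherits = require('inherits');

// A tiny Writable subclass that collects everything written to it.
function Collect() {
    stream.Writable.call(this);
    this.text = '';
}
inherits(Collect, stream.Writable);

Collect.prototype._write = function (chunk, encoding, cb) {
    this.text += chunk.toString();
    cb();
};

var source = new stream.PassThrough();
var sink = new Collect();

sink.on('finish', function () {
    console.log(sink.text); // 'hello, world'
});

// pipe() handles back-pressure and calls sink.end() once the source ends.
source.pipe(sink);
source.end('hello, world');
```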
+ +# license + +MIT diff --git a/node_modules/stream-browserify/test/buf.js b/node_modules/stream-browserify/test/buf.js new file mode 100644 index 0000000..8217819 --- /dev/null +++ b/node_modules/stream-browserify/test/buf.js @@ -0,0 +1,32 @@ +var path = require('path'); +var test = require('tape'); + +var Writable = require('../').Writable; +var inherits = require('inherits'); + +inherits(TestWritable, Writable); + +function TestWritable(opt) { + if (!(this instanceof TestWritable)) + return new TestWritable(opt); + Writable.call(this, opt); + this._written = []; +} + +TestWritable.prototype._write = function(chunk, encoding, cb) { + this._written.push(chunk); + cb(); +}; + +var buf = Buffer([ 88 ]); + +test('.writable writing ArrayBuffer', function(t) { + var writable = new TestWritable(); + + writable.write(buf); + writable.end(); + + t.equal(writable._written.length, 1); + t.equal(writable._written[0].toString(), 'X') + t.end() +}); diff --git a/node_modules/string-width/index.js b/node_modules/string-width/index.js new file mode 100644 index 0000000..b9bec62 --- /dev/null +++ b/node_modules/string-width/index.js @@ -0,0 +1,37 @@ +'use strict'; +var stripAnsi = require('strip-ansi'); +var codePointAt = require('code-point-at'); +var isFullwidthCodePoint = require('is-fullwidth-code-point'); + +// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345 +module.exports = function (str) { + if (typeof str !== 'string' || str.length === 0) { + return 0; + } + + var width = 0; + + str = stripAnsi(str); + + for (var i = 0; i < str.length; i++) { + var code = codePointAt(str, i); + + // ignore control characters + if (code <= 0x1f || (code >= 0x7f && code <= 0x9f)) { + continue; + } + + // surrogates + if (code >= 0x10000) { + i++; + } + + if (isFullwidthCodePoint(code)) { + width += 2; + } else { + width++; + } + } + + return width; +}; diff --git a/node_modules/string-width/license b/node_modules/string-width/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/string-width/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
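
Since the point of the string-width `index.js` above is to measure display columns rather than code units, a short sketch of the command-line alignment it enables (`padToColumn` is a hypothetical helper, not part of the package):

```javascript
'use strict';
var stringWidth = require('string-width');

// Pad to a fixed column using visual width, so fullwidth CJK characters and
// ANSI escape codes do not break the alignment that `.length` would.
function padToColumn(str, width) {
    var pad = Math.max(0, width - stringWidth(str));
    return str + new Array(pad + 1).join(' ');
}

console.log(padToColumn('abc', 8) + '| width 3');
console.log(padToColumn('古池や', 8) + '| width 6 (three fullwidth chars)');
console.log(padToColumn('\u001b[32mok\u001b[39m', 8) + '| width 2, ANSI stripped');
```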
diff --git a/node_modules/string-width/package.json b/node_modules/string-width/package.json new file mode 100644 index 0000000..5ba4361 --- /dev/null +++ b/node_modules/string-width/package.json @@ -0,0 +1,56 @@ +{ + "name": "string-width", + "version": "1.0.2", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "string", + "str", + "character", + "char", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/string-width/readme.md b/node_modules/string-width/readme.md new file mode 100644 index 0000000..1ab42c9 --- /dev/null +++ b/node_modules/string-width/readme.md @@ -0,0 +1,42 @@ +# string-width [![Build Status](https://travis-ci.org/sindresorhus/string-width.svg?branch=master)](https://travis-ci.org/sindresorhus/string-width) + +> Get the visual width of a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. + +Useful to be able to measure the actual width of command-line output. + + +## Install + +``` +$ npm install --save string-width +``` + + +## Usage + +```js +const stringWidth = require('string-width'); + +stringWidth('古'); +//=> 2 + +stringWidth('\u001b[1m古\u001b[22m'); +//=> 2 + +stringWidth('a'); +//=> 1 +``` + + +## Related + +- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module +- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string +- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/string_decoder/.npmignore b/node_modules/string_decoder/.npmignore new file mode 100644 index 0000000..206320c --- /dev/null +++ b/node_modules/string_decoder/.npmignore @@ -0,0 +1,2 @@ +build +test diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..6de584a --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,20 @@ +Copyright Joyent, Inc. and other Node contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..4d2aa00 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,7 @@ +**string_decoder.js** (`require('string_decoder')`) from Node.js core + +Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. + +Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** + +The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/node_modules/string_decoder/index.js b/node_modules/string_decoder/index.js new file mode 100644 index 0000000..b00e54f --- /dev/null +++ b/node_modules/string_decoder/index.js @@ -0,0 +1,221 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var Buffer = require('buffer').Buffer; + +var isBufferEncoding = Buffer.isEncoding + || function(encoding) { + switch (encoding && encoding.toLowerCase()) { + case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; + default: return false; + } + } + + +function assertEncoding(encoding) { + if (encoding && !isBufferEncoding(encoding)) { + throw new Error('Unknown encoding: ' + encoding); + } +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. CESU-8 is handled as part of the UTF-8 encoding. +// +// @TODO Handling all encodings inside a single object makes it very difficult +// to reason about this code, so it should be split up in the future. +// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code +// points as used by CESU-8. +var StringDecoder = exports.StringDecoder = function(encoding) { + this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); + assertEncoding(encoding); + switch (this.encoding) { + case 'utf8': + // CESU-8 represents each of Surrogate Pair by 3-bytes + this.surrogateSize = 3; + break; + case 'ucs2': + case 'utf16le': + // UTF-16 represents each of Surrogate Pair by 2-bytes + this.surrogateSize = 2; + this.detectIncompleteChar = utf16DetectIncompleteChar; + break; + case 'base64': + // Base-64 stores 3 bytes in 4 chars, and pads the remainder. + this.surrogateSize = 3; + this.detectIncompleteChar = base64DetectIncompleteChar; + break; + default: + this.write = passThroughWrite; + return; + } + + // Enough space to store all bytes of a single character. UTF-8 needs 4 + // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). + this.charBuffer = new Buffer(6); + // Number of bytes received for the current incomplete multi-byte character. + this.charReceived = 0; + // Number of bytes expected for the current incomplete multi-byte character. + this.charLength = 0; +}; + + +// write decodes the given buffer and returns it as JS string that is +// guaranteed to not contain any partial multi-byte characters. Any partial +// character found at the end of the buffer is buffered up, and will be +// returned when calling write again with the remaining bytes. +// +// Note: Converting a Buffer containing an orphan surrogate to a String +// currently works, but converting a String to a Buffer (via `new Buffer`, or +// Buffer#write) will replace incomplete surrogates with the unicode +// replacement character. See https://codereview.chromium.org/121173009/ . +StringDecoder.prototype.write = function(buffer) { + var charStr = ''; + // if our last write ended with an incomplete multibyte character + while (this.charLength) { + // determine how many remaining bytes this buffer has to offer for this char + var available = (buffer.length >= this.charLength - this.charReceived) ? + this.charLength - this.charReceived : + buffer.length; + + // add the new bytes to the char buffer + buffer.copy(this.charBuffer, this.charReceived, 0, available); + this.charReceived += available; + + if (this.charReceived < this.charLength) { + // still not enough chars in this buffer? wait for more ... 
+ return ''; + } + + // remove bytes belonging to the current character from the buffer + buffer = buffer.slice(available, buffer.length); + + // get the character that was split + charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); + + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + var charCode = charStr.charCodeAt(charStr.length - 1); + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + this.charLength += this.surrogateSize; + charStr = ''; + continue; + } + this.charReceived = this.charLength = 0; + + // if there are no more bytes in this buffer, just emit our char + if (buffer.length === 0) { + return charStr; + } + break; + } + + // determine and set charLength / charReceived + this.detectIncompleteChar(buffer); + + var end = buffer.length; + if (this.charLength) { + // buffer the incomplete character bytes we got + buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); + end -= this.charReceived; + } + + charStr += buffer.toString(this.encoding, 0, end); + + var end = charStr.length - 1; + var charCode = charStr.charCodeAt(end); + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + var size = this.surrogateSize; + this.charLength += size; + this.charReceived += size; + this.charBuffer.copy(this.charBuffer, size, 0, size); + buffer.copy(this.charBuffer, 0, 0, size); + return charStr.substring(0, end); + } + + // or just emit the charStr + return charStr; +}; + +// detectIncompleteChar determines if there is an incomplete UTF-8 character at +// the end of the given buffer. If so, it sets this.charLength to the byte +// length that character, and sets this.charReceived to the number of bytes +// that are available for this character. +StringDecoder.prototype.detectIncompleteChar = function(buffer) { + // determine how many bytes we have to check at the end of this buffer + var i = (buffer.length >= 3) ? 3 : buffer.length; + + // Figure out if one of the last i bytes of our buffer announces an + // incomplete char. + for (; i > 0; i--) { + var c = buffer[buffer.length - i]; + + // See http://en.wikipedia.org/wiki/UTF-8#Description + + // 110XXXXX + if (i == 1 && c >> 5 == 0x06) { + this.charLength = 2; + break; + } + + // 1110XXXX + if (i <= 2 && c >> 4 == 0x0E) { + this.charLength = 3; + break; + } + + // 11110XXX + if (i <= 3 && c >> 3 == 0x1E) { + this.charLength = 4; + break; + } + } + this.charReceived = i; +}; + +StringDecoder.prototype.end = function(buffer) { + var res = ''; + if (buffer && buffer.length) + res = this.write(buffer); + + if (this.charReceived) { + var cr = this.charReceived; + var buf = this.charBuffer; + var enc = this.encoding; + res += buf.slice(0, cr).toString(enc); + } + + return res; +}; + +function passThroughWrite(buffer) { + return buffer.toString(this.encoding); +} + +function utf16DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 2; + this.charLength = this.charReceived ? 2 : 0; +} + +function base64DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 3; + this.charLength = this.charReceived ? 
3 : 0; +} diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..f2dd499 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,25 @@ +{ + "name": "string_decoder", + "version": "0.10.31", + "description": "The string_decoder module from Node core", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/rvagg/string_decoder.git" + }, + "homepage": "https://github.com/rvagg/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/stringstream/.npmignore b/node_modules/stringstream/.npmignore new file mode 100644 index 0000000..7dccd97 --- /dev/null +++ b/node_modules/stringstream/.npmignore @@ -0,0 +1,15 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz + +pids +logs +results + +node_modules +npm-debug.log \ No newline at end of file diff --git a/node_modules/stringstream/.travis.yml b/node_modules/stringstream/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/stringstream/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/stringstream/LICENSE.txt b/node_modules/stringstream/LICENSE.txt new file mode 100644 index 0000000..ab861ac --- /dev/null +++ b/node_modules/stringstream/LICENSE.txt @@ -0,0 +1,22 @@ +Copyright (c) 2012 Michael Hart (michael.hart.au@gmail.com) + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/stringstream/README.md b/node_modules/stringstream/README.md new file mode 100644 index 0000000..32fc982 --- /dev/null +++ b/node_modules/stringstream/README.md @@ -0,0 +1,38 @@ +# Decode streams into strings The Right Way(tm) + +```javascript +var fs = require('fs') +var zlib = require('zlib') +var strs = require('stringstream') + +var utf8Stream = fs.createReadStream('massiveLogFile.gz') + .pipe(zlib.createGunzip()) + .pipe(strs('utf8')) +``` + +No need to deal with `setEncoding()` weirdness, just compose streams +like they were supposed to be! + +Handles input and output encoding: + +```javascript +// Stream from utf8 to hex to base64... Why not, ay. 
+var hex64Stream = fs.createReadStream('myFile') + .pipe(strs('utf8', 'hex')) + .pipe(strs('hex', 'base64')) +``` + +Also deals with `base64` output correctly by aligning each emitted data +chunk so that there are no dangling `=` characters: + +```javascript +var stream = fs.createReadStream('myFile').pipe(strs('base64')) + +var base64Str = '' + +stream.on('data', function(data) { base64Str += data }) +stream.on('end', function() { + console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding() + console.log('Original file is: ' + new Buffer(base64Str, 'base64')) +}) +``` diff --git a/node_modules/stringstream/example.js b/node_modules/stringstream/example.js new file mode 100644 index 0000000..f82b85e --- /dev/null +++ b/node_modules/stringstream/example.js @@ -0,0 +1,27 @@ +var fs = require('fs') +var zlib = require('zlib') +var strs = require('stringstream') + +var utf8Stream = fs.createReadStream('massiveLogFile.gz') + .pipe(zlib.createGunzip()) + .pipe(strs('utf8')) + +utf8Stream.pipe(process.stdout) + +// Stream from utf8 to hex to base64... Why not, ay. +var hex64Stream = fs.createReadStream('myFile') + .pipe(strs('utf8', 'hex')) + .pipe(strs('hex', 'base64')) + +hex64Stream.pipe(process.stdout) + +// Deals with base64 correctly by aligning chunks +var stream = fs.createReadStream('myFile').pipe(strs('base64')) + +var base64Str = '' + +stream.on('data', function(data) { base64Str += data }) +stream.on('end', function() { + console.log('My base64 encoded file is: ' + base64Str) // Wouldn't work with setEncoding() + console.log('Original file is: ' + new Buffer(base64Str, 'base64')) +}) diff --git a/node_modules/stringstream/package.json b/node_modules/stringstream/package.json new file mode 100644 index 0000000..e6f9019 --- /dev/null +++ b/node_modules/stringstream/package.json @@ -0,0 +1,18 @@ +{ + "name": "stringstream", + "version": "0.0.5", + "description": "Encode and decode streams into string streams", + "author": "Michael Hart (http://github.com/mhart)", + "main": "stringstream.js", + "keywords": [ + "string", + "stream", + "base64", + "gzip" + ], + "repository": { + "type": "git", + "url": "https://github.com/mhart/StringStream.git" + }, + "license": "MIT" +} diff --git a/node_modules/stringstream/stringstream.js b/node_modules/stringstream/stringstream.js new file mode 100644 index 0000000..4ece127 --- /dev/null +++ b/node_modules/stringstream/stringstream.js @@ -0,0 +1,102 @@ +var util = require('util') +var Stream = require('stream') +var StringDecoder = require('string_decoder').StringDecoder + +module.exports = StringStream +module.exports.AlignedStringDecoder = AlignedStringDecoder + +function StringStream(from, to) { + if (!(this instanceof StringStream)) return new StringStream(from, to) + + Stream.call(this) + + if (from == null) from = 'utf8' + + this.readable = this.writable = true + this.paused = false + this.toEncoding = (to == null ? from : to) + this.fromEncoding = (to == null ? 
'' : from) + this.decoder = new AlignedStringDecoder(this.toEncoding) +} +util.inherits(StringStream, Stream) + +StringStream.prototype.write = function(data) { + if (!this.writable) { + var err = new Error('stream not writable') + err.code = 'EPIPE' + this.emit('error', err) + return false + } + if (this.fromEncoding) { + if (Buffer.isBuffer(data)) data = data.toString() + data = new Buffer(data, this.fromEncoding) + } + var string = this.decoder.write(data) + if (string.length) this.emit('data', string) + return !this.paused +} + +StringStream.prototype.flush = function() { + if (this.decoder.flush) { + var string = this.decoder.flush() + if (string.length) this.emit('data', string) + } +} + +StringStream.prototype.end = function() { + if (!this.writable && !this.readable) return + this.flush() + this.emit('end') + this.writable = this.readable = false + this.destroy() +} + +StringStream.prototype.destroy = function() { + this.decoder = null + this.writable = this.readable = false + this.emit('close') +} + +StringStream.prototype.pause = function() { + this.paused = true +} + +StringStream.prototype.resume = function () { + if (this.paused) this.emit('drain') + this.paused = false +} + +function AlignedStringDecoder(encoding) { + StringDecoder.call(this, encoding) + + switch (this.encoding) { + case 'base64': + this.write = alignedWrite + this.alignedBuffer = new Buffer(3) + this.alignedBytes = 0 + break + } +} +util.inherits(AlignedStringDecoder, StringDecoder) + +AlignedStringDecoder.prototype.flush = function() { + if (!this.alignedBuffer || !this.alignedBytes) return '' + var leftover = this.alignedBuffer.toString(this.encoding, 0, this.alignedBytes) + this.alignedBytes = 0 + return leftover +} + +function alignedWrite(buffer) { + var rem = (this.alignedBytes + buffer.length) % this.alignedBuffer.length + if (!rem && !this.alignedBytes) return buffer.toString(this.encoding) + + var returnBuffer = new Buffer(this.alignedBytes + buffer.length - rem) + + this.alignedBuffer.copy(returnBuffer, 0, 0, this.alignedBytes) + buffer.copy(returnBuffer, this.alignedBytes, 0, buffer.length - rem) + + buffer.copy(this.alignedBuffer, 0, buffer.length - rem, buffer.length) + this.alignedBytes = rem + + return returnBuffer.toString(this.encoding) +} diff --git a/node_modules/strip-ansi/index.js b/node_modules/strip-ansi/index.js new file mode 100644 index 0000000..099480f --- /dev/null +++ b/node_modules/strip-ansi/index.js @@ -0,0 +1,6 @@ +'use strict'; +var ansiRegex = require('ansi-regex')(); + +module.exports = function (str) { + return typeof str === 'string' ? str.replace(ansiRegex, '') : str; +}; diff --git a/node_modules/strip-ansi/license b/node_modules/strip-ansi/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/strip-ansi/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/strip-ansi/package.json b/node_modules/strip-ansi/package.json new file mode 100644 index 0000000..301685b --- /dev/null +++ b/node_modules/strip-ansi/package.json @@ -0,0 +1,57 @@ +{ + "name": "strip-ansi", + "version": "3.0.1", + "description": "Strip ANSI escape codes", + "license": "MIT", + "repository": "chalk/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Boy Nicolai Appelman (jbna.nl)", + "JD Ballard (github.com/qix-)" + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/strip-ansi/readme.md b/node_modules/strip-ansi/readme.md new file mode 100644 index 0000000..cb7d9ff --- /dev/null +++ b/node_modules/strip-ansi/readme.md @@ -0,0 +1,33 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install --save strip-ansi +``` + + +## Usage + +```js +var stripAnsi = require('strip-ansi'); + +stripAnsi('\u001b[4mcake\u001b[0m'); +//=> 'cake' +``` + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/strip-json-comments/cli.js b/node_modules/strip-json-comments/cli.js new file mode 100755 index 0000000..aec5aa2 --- /dev/null +++ b/node_modules/strip-json-comments/cli.js @@ -0,0 +1,41 @@ +#!/usr/bin/env node +'use strict'; +var fs = require('fs'); +var strip = require('./strip-json-comments'); +var input = process.argv[2]; + + +function getStdin(cb) { + var ret = ''; + + process.stdin.setEncoding('utf8'); + + process.stdin.on('data', function (data) { + ret += data; + }); + + process.stdin.on('end', function () { + cb(ret); + }); +} + +if (process.argv.indexOf('-h') !== -1 || process.argv.indexOf('--help') !== -1) { + console.log('strip-json-comments input-file > output-file'); + console.log('or'); + console.log('strip-json-comments < input-file > output-file'); + return; +} + +if (process.argv.indexOf('-v') !== -1 || process.argv.indexOf('--version') !== -1) { + console.log(require('./package').version); + 
return; +} + +if (input) { + process.stdout.write(strip(fs.readFileSync(input, 'utf8'))); + return; +} + +getStdin(function (data) { + process.stdout.write(strip(data)); +}); diff --git a/node_modules/strip-json-comments/license b/node_modules/strip-json-comments/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/strip-json-comments/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/strip-json-comments/package.json b/node_modules/strip-json-comments/package.json new file mode 100644 index 0000000..20c9c25 --- /dev/null +++ b/node_modules/strip-json-comments/package.json @@ -0,0 +1,46 @@ +{ + "name": "strip-json-comments", + "version": "1.0.4", + "description": "Strip comments from JSON. Lets you use comments in your JSON files!", + "keywords": [ + "json", + "strip", + "remove", + "delete", + "trim", + "comments", + "multiline", + "parse", + "config", + "configuration", + "conf", + "settings", + "util", + "env", + "environment", + "cli", + "bin" + ], + "license": "MIT", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "files": [ + "cli.js", + "strip-json-comments.js" + ], + "main": "strip-json-comments", + "bin": "cli.js", + "repository": "sindresorhus/strip-json-comments", + "scripts": { + "test": "mocha --ui tdd" + }, + "devDependencies": { + "mocha": "*" + }, + "engines": { + "node": ">=0.8.0" + } +} diff --git a/node_modules/strip-json-comments/readme.md b/node_modules/strip-json-comments/readme.md new file mode 100644 index 0000000..63ce165 --- /dev/null +++ b/node_modules/strip-json-comments/readme.md @@ -0,0 +1,80 @@ +# strip-json-comments [![Build Status](https://travis-ci.org/sindresorhus/strip-json-comments.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-json-comments) + +> Strip comments from JSON. Lets you use comments in your JSON files! + +This is now possible: + +```js +{ + // rainbows + "unicorn": /* ❤ */ "cake" +} +``` + +It will remove single-line comments `//` and multi-line comments `/**/`. + +Also available as a [gulp](https://github.com/sindresorhus/gulp-strip-json-comments)/[grunt](https://github.com/sindresorhus/grunt-strip-json-comments)/[broccoli](https://github.com/sindresorhus/broccoli-strip-json-comments) plugin. 
+ +- + +*There's also [`json-comments`](https://npmjs.org/package/json-comments), but it's only for Node.js, inefficient, bloated as it also minifies, and comes with a `require` hook, which is :(* + + +## Install + +```sh +$ npm install --save strip-json-comments +``` + +```sh +$ bower install --save strip-json-comments +``` + +```sh +$ component install sindresorhus/strip-json-comments +``` + + +## Usage + +```js +var json = '{/*rainbows*/"unicorn":"cake"}'; +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` + + +## API + +### stripJsonComments(input) + +#### input + +Type: `string` + +Accepts a string with JSON and returns a string without comments. + + +## CLI + +```sh +$ npm install --global strip-json-comments +``` + +```sh +$ strip-json-comments --help + +strip-json-comments input-file > output-file +# or +strip-json-comments < input-file > output-file +``` + + +## Related + +- [`strip-css-comments`](https://github.com/sindresorhus/strip-css-comments) + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/strip-json-comments/strip-json-comments.js b/node_modules/strip-json-comments/strip-json-comments.js new file mode 100644 index 0000000..eb77ce7 --- /dev/null +++ b/node_modules/strip-json-comments/strip-json-comments.js @@ -0,0 +1,73 @@ +/*! + strip-json-comments + Strip comments from JSON. Lets you use comments in your JSON files! + https://github.com/sindresorhus/strip-json-comments + by Sindre Sorhus + MIT License +*/ +(function () { + 'use strict'; + + var singleComment = 1; + var multiComment = 2; + + function stripJsonComments(str) { + var currentChar; + var nextChar; + var insideString = false; + var insideComment = false; + var ret = ''; + + for (var i = 0; i < str.length; i++) { + currentChar = str[i]; + nextChar = str[i + 1]; + + if (!insideComment && currentChar === '"') { + var escaped = str[i - 1] === '\\' && str[i - 2] !== '\\'; + if (!insideComment && !escaped && currentChar === '"') { + insideString = !insideString; + } + } + + if (insideString) { + ret += currentChar; + continue; + } + + if (!insideComment && currentChar + nextChar === '//') { + insideComment = singleComment; + i++; + } else if (insideComment === singleComment && currentChar + nextChar === '\r\n') { + insideComment = false; + i++; + ret += currentChar; + ret += nextChar; + continue; + } else if (insideComment === singleComment && currentChar === '\n') { + insideComment = false; + } else if (!insideComment && currentChar + nextChar === '/*') { + insideComment = multiComment; + i++; + continue; + } else if (insideComment === multiComment && currentChar + nextChar === '*/') { + insideComment = false; + i++; + continue; + } + + if (insideComment) { + continue; + } + + ret += currentChar; + } + + return ret; + } + + if (typeof module !== 'undefined' && module.exports) { + module.exports = stripJsonComments; + } else { + window.stripJsonComments = stripJsonComments; + } +})(); diff --git a/node_modules/supports-color/browser.js b/node_modules/supports-color/browser.js new file mode 100644 index 0000000..ae7c87b --- /dev/null +++ b/node_modules/supports-color/browser.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = false; diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js new file mode 100644 index 0000000..113040d --- /dev/null +++ b/node_modules/supports-color/index.js @@ -0,0 +1,72 @@ +'use strict'; +var hasFlag = require('has-flag'); + +var support = function (level) { + if (level === 0) { + return false; + } 
+ + return { + level: level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +}; + +var supportLevel = (function () { + if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + return 1; + } + + if (process.stdout && !process.stdout.isTTY) { + return 0; + } + + if (process.platform === 'win32') { + return 1; + } + + if ('COLORTERM' in process.env) { + return 1; + } + + if (process.env.TERM === 'dumb') { + return 0; + } + + if (/^xterm-256(?:color)?/.test(process.env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) { + return 1; + } + + return 0; +})(); + +if (supportLevel === 0 && 'FORCE_COLOR' in process.env) { + supportLevel = 1; +} + +module.exports = process && support(supportLevel); diff --git a/node_modules/supports-color/license b/node_modules/supports-color/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/supports-color/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json new file mode 100644 index 0000000..0f08cb9 --- /dev/null +++ b/node_modules/supports-color/package.json @@ -0,0 +1,66 @@ +{ + "name": "supports-color", + "version": "3.1.2", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Appelman (jbnicolai.com)", + "JD Ballard (github.com/qix-)" + ], + "browser": "browser.js", + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "test": "xo && mocha", + "travis": "mocha" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m", + "million" + ], + "dependencies": { + "has-flag": "^1.0.0" + }, + "devDependencies": { + "mocha": "*", + "require-uncached": "^1.0.2", + "xo": "*" + }, + "xo": { + "envs": [ + "node", + "mocha" + ] + } +} diff --git a/node_modules/supports-color/readme.md b/node_modules/supports-color/readme.md new file mode 100644 index 0000000..f7bae4c --- /dev/null +++ b/node_modules/supports-color/readme.md @@ -0,0 +1,60 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install --save supports-color +``` + + +## Usage + +```js +var supportsColor = require('supports-color'); + +if (supportsColor) { + console.log('Terminal supports color'); +} + +if (supportsColor.has256) { + console.log('Terminal supports 256 colors'); +} + +if (supportsColor.has16m) { + console.log('Terminal supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `object`, or `false` if color is not supported. + +The returned object specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: 16 million (truecolor) support + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`. + +Explicit 256/truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. 
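As a rough illustration of the levels and flags described above (a sketch written for this note, not taken from the package itself), a consumer might branch on the detection result like so:

```js
var supportsColor = require('supports-color');

// The module exports either `false` or an object with a `level` property,
// as documented above. The mode names below are purely illustrative.
var mode;
if (!supportsColor) {
  mode = 'plain';        // e.g. piped output, --no-color, or TERM=dumb
} else if (supportsColor.has16m) {
  mode = 'truecolor';    // --color=16m / --color=truecolor
} else if (supportsColor.has256) {
  mode = '256';          // --color=256 or an xterm-256color terminal
} else {
  mode = 'basic';        // 16 colors
}

console.log('color mode: ' + mode);
```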
+ + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/tapable/.gitattributes b/node_modules/tapable/.gitattributes new file mode 100644 index 0000000..176a458 --- /dev/null +++ b/node_modules/tapable/.gitattributes @@ -0,0 +1 @@ +* text=auto diff --git a/node_modules/tapable/.npmignore b/node_modules/tapable/.npmignore new file mode 100644 index 0000000..99a52ed --- /dev/null +++ b/node_modules/tapable/.npmignore @@ -0,0 +1,11 @@ +/node_modules + +############ +## Windows +############ + +# Windows image file caches +Thumbs.db + +# Folder config file +Desktop.ini \ No newline at end of file diff --git a/node_modules/tapable/.travis.yml b/node_modules/tapable/.travis.yml new file mode 100644 index 0000000..8111245 --- /dev/null +++ b/node_modules/tapable/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 \ No newline at end of file diff --git a/node_modules/tapable/README.md b/node_modules/tapable/README.md new file mode 100644 index 0000000..82f6e5c --- /dev/null +++ b/node_modules/tapable/README.md @@ -0,0 +1,155 @@ +# Tapable + +``` javascript +var Tapable = require("tapable"); +``` + +`Tapable` is a class for plugin binding and applying. + +Just extend it. + +``` javascript +function MyClass() { + Tapable.call(this); +} + +MyClass.prototype = Object.create(Tapable.prototype); + +MyClass.prototype.method = function() {}; +``` + +Or mix it in. + +``` javascript +function MyClass2() { + EventEmitter.call(this); + Tapable.call(this); +} + +MyClass2.prototype = Object.create(EventEmitter.prototype); +Tapable.mixin(MyClass2.prototype); + +MyClass2.prototype.method = function() {}; +``` + +## Public functions + +### apply + +``` javascript +void apply(plugins: Plugin...) +``` + +Attaches all plugins passed as arguments to the instance, by calling `apply` on them. + +### plugin + +``` javascript +void plugin(names: string|string[], handler: Function) +``` + +`names` are the names (or a single name) of the plugin interfaces the class provides. + +`handler` is a callback function. The signature depends on the class. `this` is the instance of the class. + +### restartApplyPlugins + +``` javascript +void restartApplyPlugins() +``` + +Should only be called from a handler function. + +It restarts the process of applying handlers. + +## Protected functions + +### applyPlugins + +``` javascript +void applyPlugins(name: string, args: any...) +``` + +Synchronously applies all registered handlers for `name`. The handler functions are called with all args. + +### applyPluginsWaterfall + +``` javascript +any applyPluginsWaterfall(name: string, init: any, args: any...) +``` + +Synchronously applies all registered handlers for `name`. The handler functions are called with the return value of the previous handler and all args. For the first handler `init` is used, and the return value of the last handler is returned by `applyPluginsWaterfall` + +### applyPluginsAsync + +``` javascript +void applyPluginsAsync( + name: string, + args: any..., + callback: (err?: Error) -> void +) +``` + +Asynchronously applies all registered handlers for `name`. The handler functions are called with all args and a callback function with the signature `(err?: Error) -> void`. The handler functions are called in order of registration. + +`callback` is called after all handlers are called.
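For instance (a minimal sketch based on the signature above, not an excerpt from the package; the plugin name `compile` and its argument are invented for the example), two asynchronous handlers registered under the same name run in registration order, and the final callback fires once both have called back without an error:

``` javascript
var Tapable = require("tapable");

var t = new Tapable();

// each handler receives the args plus a `next(err?)` callback
t.plugin("compile", function (params, next) {
  console.log("first handler saw", params);
  setTimeout(next, 10); // succeed asynchronously
});
t.plugin("compile", function (params, next) {
  console.log("second handler saw", params);
  next();
});

t.applyPluginsAsync("compile", { entry: "./app.js" }, function (err) {
  if (err) return console.error("a handler failed:", err);
  console.log("all compile handlers finished");
});
```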
+ +### applyPluginsBailResult + +``` javascript +any applyPluginsBailResult(name: string, args: any...) +``` + +Synchronously applies all registered handlers for `name`. The handler functions are called with all args. If a handler function returns something `!== undefined`, that value is returned and no more handlers are applied. + +### applyPluginsAsyncWaterfall + +``` javascript +applyPluginsAsyncWaterfall( + name: string, + init: any, + callback: (err: Error, result: any) -> void +) +``` + +Asynchronously applies all registered handlers for `name`. The handler functions are called with the current value and a callback function with the signature `(err: Error, nextValue: any) -> void`. When called, `nextValue` becomes the current value for the next handler. The current value for the first handler is `init`. After all handlers are applied, `callback` is called with the last value. If any handler passes a value for `err`, the `callback` is called with this error and no more handlers are called. + +### applyPluginsAsyncSeries + +``` javascript +applyPluginsAsyncSeries( + name: string, + args: any..., + callback: (err: Error, result: any) -> void +) +``` + +Asynchronously applies all registered handlers for `name`. The handler functions are called with all `args` and a callback function with the signature `(err: Error) -> void`. The handlers are called in series, one at a time. After all handlers are applied, `callback` is called. If any handler passes a value for `err`, the `callback` is called with this error and no more handlers are called. + +### applyPluginsParallel + +``` javascript +applyPluginsParallel( + name: string, + args: any..., + callback: (err?: Error) -> void +) +``` + +Applies all registered handlers for `name` in parallel. The handler functions are called with all args and a callback function with the signature `(err?: Error) -> void`. The `callback` function is called when all handlers have called the callback without `err`. If any handler calls the callback with `err`, `callback` is invoked with this error and the other handlers are ignored. + +`restartApplyPlugins` cannot be used. + +### applyPluginsParallelBailResult + +``` javascript +applyPluginsParallelBailResult( + name: string, + args: any..., + callback: (err: Error, result: any) -> void +) +``` + +Applies all registered handlers for `name` in parallel. The handler functions are called with all args and a callback function with the signature `(err?: Error) -> void`. Handler functions must call the callback. They can either pass an error, pass undefined, or pass a value. The first result (either error or value) that is not undefined is passed to the `callback`. The order is defined by registration, not by the speed of the handler functions; this function compensates for that. + +`restartApplyPlugins` cannot be used.
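To round off the API reference above with a synchronous case (again a sketch; the plugin name `banner` and the values are invented), `applyPluginsWaterfall` threads each handler's return value into the next handler:

``` javascript
var Tapable = require("tapable");

var t = new Tapable();

// each handler receives the current value first, then the extra args
t.plugin("banner", function (text, name) {
  return text + " for " + name;
});
t.plugin("banner", function (text) {
  return text.toUpperCase();
});

// "hello" is the init value, "webpack" an extra argument
var result = t.applyPluginsWaterfall("banner", "hello", "webpack");
console.log(result); // "HELLO FOR WEBPACK"
```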
diff --git a/node_modules/tapable/lib/Tapable.js b/node_modules/tapable/lib/Tapable.js new file mode 100644 index 0000000..c42b6e4 --- /dev/null +++ b/node_modules/tapable/lib/Tapable.js @@ -0,0 +1,166 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +function Tapable() { + this._plugins = {}; +} +module.exports = Tapable; + +function copyProperties(from, to) { + for(var key in from) + to[key] = from[key]; + return to; +} + +Tapable.mixin = function mixinTapable(pt) { + copyProperties(Tapable.prototype, pt); +} + +Tapable.prototype.applyPlugins = function applyPlugins(name) { + if(!this._plugins[name]) return; + var args = Array.prototype.slice.call(arguments, 1); + var plugins = this._plugins[name]; + var old = this._currentPluginApply; + for(this._currentPluginApply = 0; this._currentPluginApply < plugins.length; this._currentPluginApply++) + plugins[this._currentPluginApply].apply(this, args); + this._currentPluginApply = old; +}; + +Tapable.prototype.applyPluginsWaterfall = function applyPlugins(name, init) { + if(!this._plugins[name]) return init; + var args = Array.prototype.slice.call(arguments, 2); + var plugins = this._plugins[name]; + var current = init; + var old = this._currentPluginApply; + for(this._currentPluginApply = 0; this._currentPluginApply < plugins.length; this._currentPluginApply++) + current = plugins[this._currentPluginApply].apply(this, [current].concat(args)); + this._currentPluginApply = old; + return current; +}; + +Tapable.prototype.applyPluginsBailResult = function applyPluginsBailResult(name) { + if(!this._plugins[name]) return; + var args = Array.prototype.slice.call(arguments, 1); + var plugins = this._plugins[name]; + var old = this._currentPluginApply + for(this._currentPluginApply = 0; this._currentPluginApply < plugins.length; this._currentPluginApply++) { + var result = plugins[this._currentPluginApply].apply(this, args); + if(typeof result !== "undefined") { + this._currentPluginApply = old; + return result; + } + } + this._currentPluginApply = old; +}; + +Tapable.prototype.applyPluginsAsyncSeries = Tapable.prototype.applyPluginsAsync = function applyPluginsAsync(name) { + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + if(!this._plugins[name] || this._plugins[name].length == 0) return callback(); + var plugins = this._plugins[name]; + var i = 0; + args.push(copyProperties(callback, function next(err) { + if(err) return callback(err); + i++; + if(i >= plugins.length) { + return callback(); + } + plugins[i].apply(this, args); + }.bind(this))); + plugins[0].apply(this, args); +}; + +Tapable.prototype.applyPluginsAsyncWaterfall = function applyPluginsAsyncWaterfall(name, init, callback) { + if(!this._plugins[name] || this._plugins[name].length == 0) return callback(null, init); + var plugins = this._plugins[name]; + var i = 0; + var next = copyProperties(callback, function(err, value) { + if(err) return callback(err); + i++; + if(i >= plugins.length) { + return callback(null, value); + } + plugins[i].call(this, value, next); + }.bind(this)); + plugins[0].call(this, init, next); +}; + +Tapable.prototype.applyPluginsParallel = function applyPluginsParallel(name) { + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + if(!this._plugins[name] || this._plugins[name].length == 0) return callback(); + var plugins = this._plugins[name]; + var remaining = plugins.length; + args.push(copyProperties(callback, function(err) { + if(remaining 
< 0) return; // ignore + if(err) { + remaining = -1; + return callback(err); + } + remaining--; + if(remaining == 0) { + return callback(); + } + })); + for(var i = 0; i < plugins.length; i++) { + plugins[i].apply(this, args); + if(remaining < 0) return; + } +}; + +Tapable.prototype.applyPluginsParallelBailResult = function applyPluginsParallelBailResult(name) { + var args = Array.prototype.slice.call(arguments, 1); + var callback = args[args.length-1]; + if(!this._plugins[name] || this._plugins[name].length == 0) return callback(); + var plugins = this._plugins[name]; + var currentPos = plugins.length; + var currentError, currentResult; + var done = []; + for(var i = 0; i < plugins.length; i++) { + args[args.length-1] = (function(i) { + return copyProperties(callback, function(err, result) { + if(i >= currentPos) return; // ignore + done.push(i); + if(err || result) { + currentPos = i + 1; + done = done.filter(function(item) { + return item <= i; + }); + currentError = err; + currentResult = result; + } + if(done.length == currentPos) { + callback(currentError, currentResult); + currentPos = 0; + } + }); + }(i)); + plugins[i].apply(this, args); + } +}; + + +Tapable.prototype.restartApplyPlugins = function restartApplyPlugins() { + if(typeof this._currentPluginApply !== "number") + throw new Error("Tapable.prototype.restartApplyPlugins can only be used inside of any sync plugins application"); + this._currentPluginApply = -1; +}; + + +Tapable.prototype.plugin = function plugin(name, fn) { + if(Array.isArray(name)) { + name.forEach(function(name) { + this.plugin(name, fn); + }, this); + return; + } + if(!this._plugins[name]) this._plugins[name] = [fn]; + else this._plugins[name].push(fn); +}; + +Tapable.prototype.apply = function apply() { + for(var i = 0; i < arguments.length; i++) { + arguments[i].apply(this); + } +}; diff --git a/node_modules/tapable/package.json b/node_modules/tapable/package.json new file mode 100644 index 0000000..e0b40ce --- /dev/null +++ b/node_modules/tapable/package.json @@ -0,0 +1,23 @@ +{ + "name": "tapable", + "version": "0.1.10", + "author": "Tobias Koppers @sokra", + "description": "Just a little module for plugins.", + "license": "MIT", + "repository": { + "type": "git", + "url": "http://github.com/webpack/tapable.git" + }, + "devDependencies": { + "mocha": "^2.2.4", + "should": "^5.2.0" + }, + "engines": { + "node": ">=0.6" + }, + "homepage": "https://github.com/webpack/tapable", + "main": "lib/Tapable.js", + "scripts": { + "test": "mocha --reporter spec" + } +} diff --git a/node_modules/tapable/test/applyPluginsParallelBailResultTest.js b/node_modules/tapable/test/applyPluginsParallelBailResultTest.js new file mode 100644 index 0000000..1ebd219 --- /dev/null +++ b/node_modules/tapable/test/applyPluginsParallelBailResultTest.js @@ -0,0 +1,140 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var Tapable = require("../lib/Tapable"); +var should = require("should"); + +function makeTestPlugin(arr, index) { + var last; + var f = function() { + f.shouldNotBeCalled(); + var args = Array.prototype.slice.call(arguments); + args.unshift(index); + last = args; + arr.push(args); + }; + f.issue = function() { + f.shouldBeCalled(); + last.pop().apply(null, arguments); + last = null; + }; + f.shouldNotBeCalled = function() { + if(last) throw new Error("Plugin " + index + " was called, but shouldn't be."); + }; + f.shouldBeCalled = function() { + if(!last) throw new Error("Plugin " + index + " was not called, but 
should be."); + }; + f.shouldBeCalledAsyncWith = function() { + f.shouldBeCalled(); + var args = Array.prototype.slice.call(arguments); + for(var i = 0; i < args.length && i < last.length - 2; i++) { + if(args[i] === null || args[i] === undefined) { + should.not.exist(last[i+1]); + } else { + should.exist(last[i+1]); + last[i+1].should.be.eql(args[i]); + } + } + args.length.should.be.eql(last.length - 2); + }; + return f; +} + +describe("applyPluginsParallelBailResult", function() { + it("should call all handlers", function() { + var tapable = new Tapable(); + var log = []; + var p1 = makeTestPlugin(log, 1); + var p2 = makeTestPlugin(log, 2); + var p3 = makeTestPlugin(log, 3); + var p4 = makeTestPlugin(log, 4); + var result = makeTestPlugin(log, 0); + tapable.plugin("test", p1); + tapable.plugin("test", p2); + tapable.plugin("xxxx", p3); + tapable.plugin("test", p4); + tapable.applyPluginsParallelBailResult("test", 1, 2, result); + p1.shouldBeCalledAsyncWith(1, 2); + p2.shouldBeCalledAsyncWith(1, 2); + p3.shouldNotBeCalled(); + p4.shouldBeCalledAsyncWith(1, 2); + p1.issue(); + p2.issue(null, "ok"); + p4.issue(null, "fail"); + log.should.be.eql([ + [1, 1, 2], + [2, 1, 2], + [4, 1, 2], + [0, null, "ok"] + ]); + }); + it("should save valid results", function() { + var tapable = new Tapable(); + var log = []; + var p1 = makeTestPlugin(log, 1); + var p2 = makeTestPlugin(log, 2); + var p3 = makeTestPlugin(log, 3); + tapable.plugin("test", p1); + tapable.plugin("test", p2); + tapable.plugin("test", p3); + var result = makeTestPlugin(log, 0); + tapable.applyPluginsParallelBailResult("test", "a", result); + p3.issue(null, "fail"); + p2.issue(null, "ok"); + p1.issue(); + log.should.be.eql([ + [1, "a"], + [2, "a"], + [3, "a"], + [0, null, "ok"], + ]); + }); + it("should take the first result", function() { + var tapable = new Tapable(); + var log = []; + var p1 = makeTestPlugin(log, 1); + var p2 = makeTestPlugin(log, 2); + var p3 = makeTestPlugin(log, 3); + tapable.plugin("test", p1); + tapable.plugin("test", p2); + tapable.plugin("test", p3); + var result = makeTestPlugin(log, 0); + tapable.applyPluginsParallelBailResult("test", "a", result); + log.length.should.be.eql(3); + p1.issue(null, "ok"); + log.length.should.be.eql(4); + p2.issue(new Error("fail")); + p3.issue(); + log.should.be.eql([ + [1, "a"], + [2, "a"], + [3, "a"], + [0, null, "ok"], + ]); + }); + it("should return errors", function() { + var tapable = new Tapable(); + var log = []; + var p1 = makeTestPlugin(log, 1); + var p2 = makeTestPlugin(log, 2); + var p3 = makeTestPlugin(log, 3); + tapable.plugin("test", p1); + tapable.plugin("test", p2); + tapable.plugin("test", p3); + var result = makeTestPlugin(log, 0); + tapable.applyPluginsParallelBailResult("test", "a", result); + log.length.should.be.eql(3); + p1.issue("ok"); + log.length.should.be.eql(4); + p2.issue(); + p3.issue(null, "fail"); + log.should.be.eql([ + [1, "a"], + [2, "a"], + [3, "a"], + [0, "ok", undefined], + ]); + }); + +}); \ No newline at end of file diff --git a/node_modules/tar-pack/.npmignore b/node_modules/tar-pack/.npmignore new file mode 100644 index 0000000..c6f386f --- /dev/null +++ b/node_modules/tar-pack/.npmignore @@ -0,0 +1,14 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid + +pids +logs +results + +npm-debug.log +node_modules diff --git a/node_modules/tar-pack/.travis.yml b/node_modules/tar-pack/.travis.yml new file mode 100644 index 0000000..cf5d636 --- /dev/null +++ b/node_modules/tar-pack/.travis.yml @@ -0,0 +1,7 @@ +sudo: false +language: 
node_js +node_js: + - stable + - "0.10" + - "0.8" +before_install: if [[ `npm --version` != 3* ]]; then npm install -g npm; fi; diff --git a/node_modules/tar-pack/LICENSE b/node_modules/tar-pack/LICENSE new file mode 100644 index 0000000..67b20da --- /dev/null +++ b/node_modules/tar-pack/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2014, Forbes Lindesay +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/tar-pack/README.md b/node_modules/tar-pack/README.md new file mode 100644 index 0000000..819c707 --- /dev/null +++ b/node_modules/tar-pack/README.md @@ -0,0 +1,84 @@ +# Tar Pack + +Package and un-package modules of some sort (in tar/gz bundles). This is mostly useful for package managers. Note that it doesn't check for or touch `package.json` so it can be used even if that's not the way you store your package info. + +[![Build Status](https://img.shields.io/travis/ForbesLindesay/tar-pack/master.svg)](https://travis-ci.org/ForbesLindesay/tar-pack) +[![Dependency Status](https://img.shields.io/david/ForbesLindesay/tar-pack.svg)](https://david-dm.org/ForbesLindesay/tar-pack) +[![NPM version](https://img.shields.io/npm/v/tar-pack.svg)](https://www.npmjs.com/package/tar-pack) + +## Installation + + $ npm install tar-pack + +## API + +### pack(folder|packer, [options]) + +Pack the folder at `folder` into a gzipped tarball and return the tgz as a stream. Files ignored by `.gitignore` will not be in the package. + +You can optionally pass a `fstream.DirReader` directly, instead of folder. For example, to create an npm package, do: + +```js +pack(require("fstream-npm")(folder), [options]) +``` + +Options: + + - `noProprietary` (defaults to `false`) Set this to `true` to prevent any proprietary attributes being added to the tarball. These attributes are allowed by the spec, but may trip up some poorly written tarball parsers. + - `fromBase` (defaults to `false`) Set this to `true` to make sure your tarball's root is the directory you pass in. + - `ignoreFiles` (defaults to `['.gitignore']`) These files can specify files to be excluded from the package using the syntax of `.gitignore`. This option is ignored if you pass a `fstream.DirReader` instead of a string for folder.
+ - `filter` (defaults to `entry => true`) A function that takes an entry and returns `true` if it should be included in the package and `false` if it should not. Entries are of the form `{path, basename, dirname, type}` (where type is "Directory" or "File"). This function is ignored if you pass a `fstream.DirReader` instead of a string for folder. + +Example: + +```js +var write = require('fs').createWriteStream +var pack = require('tar-pack').pack +pack(process.cwd()) + .pipe(write(__dirname + '/package.tar.gz')) + .on('error', function (err) { + console.error(err.stack) + }) + .on('close', function () { + console.log('done') + }) +``` + +### unpack(folder, [options,] cb) + +Return a stream that unpacks a tarball into a folder at `folder`. N.B. the output folder will be removed first if it already exists. + +The callback is called with an optional error and, as its second argument, a string which is one of: + + - `'directory'`, indicating that the extracted package was a directory (either `.tar.gz` or `.tar`) + - `'file'`, indicating that the extracted package was just a single file (extracted to `defaultName`, see options) + +Basic Options: + + - `defaultName` (defaults to `index.js`) If the package is a single file, rather than a tarball, it will be "extracted" to this file name; set to `false` to disable. + +Advanced Options (you probably don't need any of these): + + - `gid` - (defaults to `null`) the `gid` to use when writing files + - `uid` - (defaults to `null`) the `uid` to use when writing files + - `dmode` - (defaults to `0777`) The mode to use when creating directories + - `fmode` - (defaults to `0666`) The mode to use when creating files + - `unsafe` - (defaults to `false`) (on non win32 OSes it overrides `gid` and `uid` with the current process's IDs) + - `strip` - (defaults to `1`) Number of path segments to strip from the root when extracting + - `keepFiles` - (defaults to `false`) Set this to `true` to prevent the target directory from being removed. Extracted files overwrite existing files.
+ +Example: + +```js +var read = require('fs').createReadStream +var unpack = require('tar-pack').unpack +read(process.cwd() + '/package.tar.gz') + .pipe(unpack(__dirname + '/package/', function (err) { + if (err) console.error(err.stack) + else console.log('done') + })) +``` + +## License + + BSD diff --git a/node_modules/tar-pack/index.js b/node_modules/tar-pack/index.js new file mode 100644 index 0000000..2988c5c --- /dev/null +++ b/node_modules/tar-pack/index.js @@ -0,0 +1,245 @@ +"use strict" + +var debug = require('debug')('tar-pack') +var uidNumber = require('uid-number') +var rm = require('rimraf') +var tar = require('tar') +var once = require('once') +var fstream = require('fstream') +var packer = require('fstream-ignore') + +var PassThrough = require('stream').PassThrough || require('readable-stream').PassThrough +var zlib = require('zlib') +var path = require('path') + +var win32 = process.platform === 'win32' +var myUid = process.getuid && process.getuid() +var myGid = process.getgid && process.getgid() + +if (process.env.SUDO_UID && myUid === 0) { + if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID + if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID +} + +exports.pack = pack +exports.unpack = unpack + +function pack(folder, options) { + options = options || {} + if (typeof folder === 'string') { + + var filter = options.filter || function (entry) { return true; } + + folder = packer({ + path: folder, + type: 'Directory', + isDirectory: true, + ignoreFiles: options.ignoreFiles || ['.gitignore'], + filter: function (entry) { // {path, basename, dirname, type} (type is "Directory" or "File") + var basename = entry.basename + // some files are *never* allowed under any circumstances + // these files should always be either temporary files or + // version control related files + if (basename === '.git' || basename === '.lock-wscript' || basename.match(/^\.wafpickle-[0-9]+$/) || + basename === 'CVS' || basename === '.svn' || basename === '.hg' || basename.match(/^\..*\.swp$/) || + basename === '.DS_Store' || basename.match(/^\._/)) { + return false + } + //custom excludes + return filter(entry) + } + }) + } + // By default, npm includes some proprietary attributes in the + // package tarball. This is sane, and allowed by the spec. + // However, npm *itself* excludes these from its own package, + // so that it can be more easily bootstrapped using old and + // non-compliant tar implementations. + var tarPack = tar.Pack({ noProprietary: options.noProprietary || false, fromBase: options.fromBase || false }) + var gzip = zlib.Gzip() + + folder + .on('error', function (er) { + if (er) debug('Error reading folder') + return gzip.emit('error', er) + }) + tarPack + .on('error', function (er) { + if (er) debug('tar creation error') + gzip.emit('error', er) + }) + return folder.pipe(tarPack).pipe(gzip) +} + +function unpack(unpackTarget, options, cb) { + if (typeof options === 'function' && cb === undefined) cb = options, options = undefined + + var tarball = new PassThrough() + if (typeof cb === 'function') { + cb = once(cb) + tarball.on('error', cb) + tarball.on('close', function () { + cb() + }) + } + + var parent = path.dirname(unpackTarget) + var base = path.basename(unpackTarget) + + options = options || {} + var gid = options.gid || null + var uid = options.uid || null + var dMode = options.dmode || 0x0777 //npm.modes.exec + var fMode = options.fmode || 0x0666 //npm.modes.file + var defaultName = options.defaultName || (options.defaultName === false ? 
false : 'index.js') + var strip = (options.strip !== undefined) ? options.strip : 1 + + // figure out who we're supposed to be, if we're not pretending + // to be a specific user. + if (options.unsafe && !win32) { + uid = myUid + gid = myGid + } + + var pending = 2 + uidNumber(uid, gid, function (er, uid, gid) { + if (er) { + tarball.emit('error', er) + return tarball.end() + } + if (0 === --pending) next() + }) + if (!options.keepFiles) { + rm(unpackTarget, function (er) { + if (er) { + tarball.emit('error', er) + return tarball.end() + } + if (0 === --pending) next() + }) + } else { + next() + } + function next() { + // gzip {tarball} --decompress --stdout \ + // | tar -mvxpf - --strip-components={strip} -C {unpackTarget} + gunzTarPerm(tarball, unpackTarget, dMode, fMode, uid, gid, defaultName, strip) + } + return tarball +} + + +function gunzTarPerm(tarball, target, dMode, fMode, uid, gid, defaultName, strip) { + debug('modes %j', [dMode.toString(8), fMode.toString(8)]) + + function fixEntry(entry) { + debug('fixEntry %j', entry.path) + // never create things that are user-unreadable, + // or dirs that are user-un-listable. Only leads to headaches. + var originalMode = entry.mode = entry.mode || entry.props.mode + entry.mode = entry.mode | (entry.type === 'Directory' ? dMode : fMode) + entry.props.mode = entry.mode + if (originalMode !== entry.mode) { + debug('modified mode %j', [entry.path, originalMode, entry.mode]) + } + + // if there's a specific owner uid/gid that we want, then set that + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + entry.props.uid = entry.uid = uid + entry.props.gid = entry.gid = gid + } + } + + var extractOpts = { type: 'Directory', path: target, strip: strip } + + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + extractOpts.uid = uid + extractOpts.gid = gid + } + + extractOpts.filter = function () { + // symbolic links are not allowed in packages. + if (this.type.match(/^.*Link$/)) { + debug('excluding symbolic link: ' + this.path.substr(target.length + 1) + ' -> ' + this.linkpath) + return false + } + return true + } + + + type(tarball, function (err, type) { + if (err) return tarball.emit('error', err) + var strm = tarball + if (type === 'gzip') { + strm = strm.pipe(zlib.Unzip()) + strm.on('error', function (er) { + if (er) debug('unzip error') + tarball.emit('error', er) + }) + type = 'tar' + } + if (type === 'tar') { + strm + .pipe(tar.Extract(extractOpts)) + .on('entry', fixEntry) + .on('error', function (er) { + if (er) debug('untar error') + tarball.emit('error', er) + }) + .on('close', function () { + tarball.emit('close') + }) + return + } + if (type === 'naked-file' && defaultName) { + var jsOpts = { path: path.resolve(target, defaultName) } + + if (!win32 && typeof uid === 'number' && typeof gid === 'number') { + jsOpts.uid = uid + jsOpts.gid = gid + } + + strm + .pipe(fstream.Writer(jsOpts)) + .on('error', function (er) { + if (er) debug('copy error') + tarball.emit('error', er) + }) + .on('close', function () { + tarball.emit('close') + }) + return + } + + return cb(new Error('Unrecognised package type')); + }) +} + +function type(stream, callback) { + stream.on('error', handle) + stream.on('data', parse) + function handle(err) { + stream.removeListener('data', parse) + stream.removeListener('error', handle) + } + function parse(chunk) { + // detect what it is. + // Then, depending on that, we'll figure out whether it's + // a single-file module, gzipped tarball, or naked tarball. 
+ + // gzipped files all start with 1f8b08 + if (chunk[0] === 0x1F && chunk[1] === 0x8B && chunk[2] === 0x08) { + callback(null, 'gzip') + } else if (chunk.toString().match(/^package\/\u0000/)) { + // note, this will only pick up on tarballs with a root directory called package + callback(null, 'tar') + } else { + callback(null, 'naked-file') + } + + // now un-hook, and re-emit the chunk + stream.removeListener('data', parse) + stream.removeListener('error', handle) + stream.unshift(chunk) + } +} diff --git a/node_modules/tar-pack/node_modules/.bin/rimraf b/node_modules/tar-pack/node_modules/.bin/rimraf new file mode 120000 index 0000000..632d6da --- /dev/null +++ b/node_modules/tar-pack/node_modules/.bin/rimraf @@ -0,0 +1 @@ +../../../rimraf/bin.js \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/once/LICENSE b/node_modules/tar-pack/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/tar-pack/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tar-pack/node_modules/once/README.md b/node_modules/tar-pack/node_modules/once/README.md new file mode 100644 index 0000000..a2981ea --- /dev/null +++ b/node_modules/tar-pack/node_modules/once/README.md @@ -0,0 +1,51 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. 
+ +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` diff --git a/node_modules/tar-pack/node_modules/once/once.js b/node_modules/tar-pack/node_modules/once/once.js new file mode 100644 index 0000000..2e1e721 --- /dev/null +++ b/node_modules/tar-pack/node_modules/once/once.js @@ -0,0 +1,21 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} diff --git a/node_modules/tar-pack/node_modules/once/package.json b/node_modules/tar-pack/node_modules/once/package.json new file mode 100644 index 0000000..28fe2ff --- /dev/null +++ b/node_modules/tar-pack/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.3.3", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^1.2.0" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/tar-pack/node_modules/readable-stream/.npmignore b/node_modules/tar-pack/node_modules/readable-stream/.npmignore new file mode 100644 index 0000000..265ff73 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/.npmignore @@ -0,0 +1,8 @@ +build/ +test/ +examples/ +fs.js +zlib.js +.zuul.yml +.nyc_output +coverage diff --git a/node_modules/tar-pack/node_modules/readable-stream/.travis.yml b/node_modules/tar-pack/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..84504c9 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/.travis.yml @@ -0,0 +1,49 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - npm install -g npm +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: TASK=test + - node_js: '0.10' + env: TASK=test + - node_js: '0.11' + env: TASK=test + - node_js: '0.12' + env: TASK=test + - node_js: 1 + env: TASK=test + - node_js: 2 + env: TASK=test + - node_js: 3 + env: TASK=test + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 5 + env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" + - node_js: 5 + env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest +script: "npm run $TASK" +env: + global: + - secure: 
rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/tar-pack/node_modules/readable-stream/LICENSE b/node_modules/tar-pack/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/tar-pack/node_modules/readable-stream/README.md b/node_modules/tar-pack/node_modules/readable-stream/README.md new file mode 100644 index 0000000..9fb4fea --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/README.md @@ -0,0 +1,36 @@ +# readable-stream + +***Node-core v6.3.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core, including [documentation](doc/stream.md). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. 
+ +# Streams WG Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/node_modules/tar-pack/node_modules/readable-stream/doc/stream.md b/node_modules/tar-pack/node_modules/readable-stream/doc/stream.md new file mode 100644 index 0000000..fc269c8 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/doc/stream.md @@ -0,0 +1,2015 @@ +# Stream + + Stability: 2 - Stable + +A stream is an abstract interface for working with streaming data in Node.js. +The `stream` module provides a base API that makes it easy to build objects +that implement the stream interface. + +There are many stream objects provided by Node.js. For instance, a +[request to an HTTP server][http-incoming-message] and [`process.stdout`][] +are both stream instances. + +Streams can be readable, writable, or both. All streams are instances of +[`EventEmitter`][]. + +The `stream` module can be accessed using: + +```js +const stream = require('stream'); +``` + +While it is important for all Node.js users to understand how streams works, +the `stream` module itself is most useful for developer's that are creating new +types of stream instances. Developer's who are primarily *consuming* stream +objects will rarely (if ever) have need to use the `stream` module directly. + +## Organization of this document + +This document is divided into two primary sections and third section for +additional notes. The first section explains the elements of the stream API that +are required to *use* streams within an application. The second section explains +the elements of the API that are required to *implement* new types of streams. + +## Types of Streams + +There are four fundamental stream types within Node.js: + +* [Readable][] - streams from which data can be read (for example + [`fs.createReadStream()`][]). +* [Writable][] - streams to which data can be written (for example + [`fs.createWriteStream()`][]). +* [Duplex][] - streams that are both Readable and Writable (for example + [`net.Socket`][]). +* [Transform][] - Duplex streams that can modify or transform the data as it + is written and read (for example [`zlib.createDeflate()`][]). + +### Object Mode + +All streams created by Node.js APIs operate exclusively on strings and `Buffer` +objects. It is possible, however, for stream implementations to work with other +types of JavaScript values (with the exception of `null` which serves a special +purpose within streams). Such streams are considered to operate in "object +mode". + +Stream instances are switched into object mode using the `objectMode` option +when the stream is created. Attempting to switch an existing stream into +object mode is not safe. 
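+For illustration, here is a minimal sketch (not part of the original document) of a Readable stream created in object mode via the simplified constructor; the pushed values are made up:
+
+```js
+const Readable = require('stream').Readable;
+
+// `objectMode: true` makes the stream carry whole JavaScript values
+// instead of strings/Buffers.
+const objects = new Readable({
+  objectMode: true,
+  read() {
+    this.push({ user: 'ada' });
+    this.push({ user: 'grace' });
+    this.push(null); // signal end-of-stream
+  }
+});
+
+objects.on('data', (obj) => console.log(obj.user));
+// Prints: ada, then grace
+```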
+ +### Buffering + + + +Both [Writable][] and [Readable][] streams will store data in an internal +buffer that can be retrieved using `writable._writableState.getBuffer()` or +`readable._readableState.buffer`, respectively. + +The amount of data potentially buffered depends on the `highWaterMark` option +passed into the streams constructor. For normal streams, the `highWaterMark` +option specifies a total number of bytes. For streams operating in object mode, +the `highWaterMark` specifies a total number of objects. + +Data is buffered in Readable streams when the implementation calls +[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not +call [`stream.read()`][stream-read], the data will sit in the internal +queue until it is consumed. + +Once the total size of the internal read buffer reaches the threshold specified +by `highWaterMark`, the stream will temporarily stop reading data from the +underlying resource until the data currently buffered can be consumed (that is, +the stream will stop calling the internal `readable._read()` method that is +used to fill the read buffer). + +Data is buffered in Writable streams when the +[`writable.write(chunk)`][stream-write] method is called repeatedly. While the +total size of the internal write buffer is below the threshold set by +`highWaterMark`, calls to `writable.write()` will return `true`. Once the +the size of the internal buffer reaches or exceeds the `highWaterMark`, `false` +will be returned. + +A key goal of the `stream` API, and in particular the [`stream.pipe()`] method, +is to limit the buffering of data to acceptable levels such that sources and +destinations of differing speeds will not overwhelm the available memory. + +Because [Duplex][] and [Transform][] streams are both Readable and Writable, +each maintain *two* separate internal buffers used for reading and writing, +allowing each side to operate independently of the other while maintaining an +appropriate and efficient flow of data. For example, [`net.Socket`][] instances +are [Duplex][] streams whose Readable side allows consumption of data received +*from* the socket and whose Writable side allows writing data *to* the socket. +Because data may be written to the socket at a faster or slower rate than data +is received, it is important each side operate (and buffer) independently of +the other. + +## API for Stream Consumers + + + +Almost all Node.js applications, no matter how simple, use streams in some +manner. The following is an example of using streams in a Node.js application +that implements an HTTP server: + +```js +const http = require('http'); + +const server = http.createServer( (req, res) => { + // req is an http.IncomingMessage, which is a Readable Stream + // res is an http.ServerResponse, which is a Writable Stream + + let body = ''; + // Get the data as utf8 strings. + // If an encoding is not set, Buffer objects will be received. + req.setEncoding('utf8'); + + // Readable streams emit 'data' events once a listener is added + req.on('data', (chunk) => { + body += chunk; + }); + + // the end event indicates that the entire body has been received + req.on('end', () => { + try { + const data = JSON.parse(body); + } catch (er) { + // uh oh! bad json! 
+ res.statusCode = 400; + return res.end(`error: ${er.message}`); + } + + // write back something interesting to the user: + res.write(typeof data); + res.end(); + }); +}); + +server.listen(1337); + +// $ curl localhost:1337 -d '{}' +// object +// $ curl localhost:1337 -d '"foo"' +// string +// $ curl localhost:1337 -d 'not json' +// error: Unexpected token o +``` + +[Writable][] streams (such as `res` in the example) expose methods such as +`write()` and `end()` that are used to write data onto the stream. + +[Readable][] streams use the [`EventEmitter`][] API for notifying application +code when data is available to be read off the stream. That available data can +be read from the stream in multiple ways. + +Both [Writable][] and [Readable][] streams use the [`EventEmitter`][] API in +various ways to communicate the current state of the stream. + +[Duplex][] and [Transform][] streams are both [Writable][] and [Readable][]. + +Applications that are either writing data to or consuming data from a stream +are not required to implement the stream interfaces directly and will generally +have no reason to call `require('stream')`. + +Developers wishing to implement new types of streams should refer to the +section [API for Stream Implementers][]. + +### Writable Streams + +Writable streams are an abstraction for a *destination* to which data is +written. + +Examples of [Writable][] streams include: + +* [HTTP requests, on the client][] +* [HTTP responses, on the server][] +* [fs write streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdin][] +* [`process.stdout`][], [`process.stderr`][] + +*Note*: Some of these examples are actually [Duplex][] streams that implement +the [Writable][] interface. + +All [Writable][] streams implement the interface defined by the +`stream.Writable` class. + +While specific instances of [Writable][] streams may differ in various ways, +all Writable streams follow the same fundamental usage pattern as illustrated +in the example below: + +```js +const myStream = getWritableStreamSomehow(); +myStream.write('some data'); +myStream.write('some more data'); +myStream.end('done writing data'); +``` + +#### Class: stream.Writable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all Writable streams will emit the `'close'` event. + +##### Event: 'drain' + + +If a call to [`stream.write(chunk)`][stream-write] returns `false`, the +`'drain'` event will be emitted when it is appropriate to resume writing data +to the stream. + +```js +// Write the data to the supplied writable stream one million times. +// Be attentive to back-pressure. +function writeOneMillionTimes(writer, data, encoding, callback) { + let i = 1000000; + write(); + function write() { + var ok = true; + do { + i--; + if (i === 0) { + // last time! + writer.write(data, encoding, callback); + } else { + // see if we should continue, or wait + // don't pass the callback, because we're not done yet. + ok = writer.write(data, encoding); + } + } while (i > 0 && ok); + if (i > 0) { + // had to stop early! + // write some more once it drains + writer.once('drain', write); + } + } +} +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event is emitted if an error occurred while writing or piping +data. 
The listener callback is passed a single `Error` argument when called. + +*Note*: The stream is not closed when the `'error'` event is emitted. + +##### Event: 'finish' + + +The `'finish'` event is emitted after the [`stream.end()`][stream-end] method +has been called, and all data has been flushed to the underlying system. + +```js +const writer = getWritableStreamSomehow(); +for (var i = 0; i < 100; i++) { + writer.write(`hello, #${i}!\n`); +} +writer.end('This is the end\n'); +writer.on('finish', () => { + console.error('All writes are now complete.'); +}); +``` + +##### Event: 'pipe' + + +* `src` {stream.Readable} source stream that is piping to this writable + +The `'pipe'` event is emitted when the [`stream.pipe()`][] method is called on +a readable stream, adding this writable to its set of destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('pipe', (src) => { + console.error('something is piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +``` + +##### Event: 'unpipe' + + +* `src` {[Readable][] Stream} The source stream that + [unpiped][`stream.unpipe()`] this writable + +The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called +on a [Readable][] stream, removing this [Writable][] from its set of +destinations. + +```js +const writer = getWritableStreamSomehow(); +const reader = getReadableStreamSomehow(); +writer.on('unpipe', (src) => { + console.error('Something has stopped piping into the writer.'); + assert.equal(src, reader); +}); +reader.pipe(writer); +reader.unpipe(writer); +``` + +##### writable.cork() + + +The `writable.cork()` method forces all written data to be buffered in memory. +The buffered data will be flushed when either the [`stream.uncork()`][] or +[`stream.end()`][stream-end] methods are called. + +The primary intent of `writable.cork()` is to avoid a situation where writing +many small chunks of data to a stream does not cause a backup in the internal +buffer that would have an adverse impact on performance. In such situations, +implementations that implement the `writable._writev()` method can perform +buffered writes in a more optimized manner. + +##### writable.end([chunk][, encoding][, callback]) + + +* `chunk` {String|Buffer|any} Optional data to write. For streams not operating + in object mode, `chunk` must be a string or a `Buffer`. For object mode + streams, `chunk` may be any JavaScript value other than `null`. +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Optional callback for when the stream is finished + +Calling the `writable.end()` method signals that no more data will be written +to the [Writable][]. The optional `chunk` and `encoding` arguments allow one +final additional chunk of data to be written immediately before closing the +stream. If provided, the optional `callback` function is attached as a listener +for the [`'finish'`][] event. + +Calling the [`stream.write()`][stream-write] method after calling +[`stream.end()`][stream-end] will raise an error. + +```js +// write 'hello, ' and then end with 'world!' +const file = fs.createWriteStream('example.txt'); +file.write('hello, '); +file.end('world!'); +// writing more now is not allowed! +``` + +##### writable.setDefaultEncoding(encoding) + + +* `encoding` {String} The new default encoding +* Return: `this` + +The `writable.setDefaultEncoding()` method sets the default `encoding` for a +[Writable][] stream.
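+As a brief, hypothetical illustration (not from the original text; the file name is invented), the default encoding applies to later `write()` calls that pass plain strings:
+
+```js
+const fs = require('fs');
+
+const out = fs.createWriteStream('notes.txt');
+out.setDefaultEncoding('utf8'); // strings written below are encoded as UTF-8
+out.write('first line\n');
+out.end('last line\n');
+```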
+ +##### writable.uncork() + + +The `writable.uncork()` method flushes all data buffered since +[`stream.cork()`][] was called. + +When using `writable.cork()` and `writable.uncork()` to manage the buffering +of writes to a stream, it is recommended that calls to `writable.uncork()` be +deferred using `process.nextTick()`. Doing so allows batching of all +`writable.write()` calls that occur within a given Node.js event loop phase. + +```js +stream.cork(); +stream.write('some '); +stream.write('data '); +process.nextTick(() => stream.uncork()); +``` + +If the `writable.cork()` method is called multiple times on a stream, the same +number of calls to `writable.uncork()` must be called to flush the buffered +data. + +``` +stream.cork(); +stream.write('some '); +stream.cork(); +stream.write('data '); +process.nextTick(() => { + stream.uncork(); + // The data will not be flushed until uncork() is called a second time. + stream.uncork(); +}); +``` + +##### writable.write(chunk[, encoding][, callback]) + + +* `chunk` {String|Buffer} The data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Callback for when this chunk of data is flushed +* Returns: {Boolean} `false` if the stream wishes for the calling code to + wait for the `'drain'` event to be emitted before continuing to write + additional data; otherwise `true`. + +The `writable.write()` method writes some data to the stream, and calls the +supplied `callback` once the data has been fully handled. If an error +occurs, the `callback` *may or may not* be called with the error as its +first argument. To reliably detect write errors, add a listener for the +`'error'` event. + +The return value indicates whether the written `chunk` was buffered internally +and the buffer has exceeded the `highWaterMark` configured when the stream was +created. If `false` is returned, further attempts to write data to the stream +should be paused until the `'drain'` event is emitted. + +A Writable stream in object mode will always ignore the `encoding` argument. + +### Readable Streams + +Readable streams are an abstraction for a *source* from which data is +consumed. + +Examples of Readable streams include: + +* [HTTP responses, on the client][http-incoming-message] +* [HTTP requests, on the server][http-incoming-message] +* [fs read streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdout and stderr][] +* [`process.stdin`][] + +All [Readable][] streams implement the interface defined by the +`stream.Readable` class. + +#### Two Modes + +Readable streams effectively operate in one of two modes: flowing and paused. + +When in flowing mode, data is read from the underlying system automatically +and provided to an application as quickly as possible using events via the +[`EventEmitter`][] interface. + +In paused mode, the [`stream.read()`][stream-read] method must be called +explicitly to read chunks of data from the stream. + +All [Readable][] streams begin in paused mode but can be switched to flowing +mode in one of the following ways: + +* Adding a [`'data'`][] event handler. +* Calling the [`stream.resume()`][stream-resume] method. +* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. + +The Readable can switch back to paused mode using one of the following: + +* If there are no pipe destinations, by calling the + [`stream.pause()`][stream-pause] method. 
+* If there are pipe destinations, by removing any [`'data'`][] event + handlers, and removing all pipe destinations by calling the + [`stream.unpipe()`][] method. + +The important concept to remember is that a Readable will not generate data +until a mechanism for either consuming or ignoring that data is provided. If +the consuming mechanism is disabled or taken away, the Readable will *attempt* +to stop generating the data. + +*Note*: For backwards compatibility reasons, removing [`'data'`][] event +handlers will **not** automatically pause the stream. Also, if there are piped +destinations, then calling [`stream.pause()`][stream-pause] will not guarantee +that the stream will *remain* paused once those destinations drain and ask for +more data. + +*Note*: If a [Readable][] is switched into flowing mode and there are no +consumers available handle the data, that data will be lost. This can occur, +for instance, when the `readable.resume()` method is called without a listener +attached to the `'data'` event, or when a `'data'` event handler is removed +from the stream. + +#### Three States + +The "two modes" of operation for a Readable stream are a simplified abstraction +for the more complicated internal state management that is happening within the +Readable stream implementation. + +Specifically, at any given point in time, every Readable is in one of three +possible states: + +* `readable._readableState.flowing = null` +* `readable._readableState.flowing = false` +* `readable._readableState.flowing = true` + +When `readable._readableState.flowing` is `null`, no mechanism for consuming the +streams data is provided so the stream will not generate its data. + +Attaching a listener for the `'data'` event, calling the `readable.pipe()` +method, or calling the `readable.resume()` method will switch +`readable._readableState.flowing` to `true`, causing the Readable to begin +actively emitting events as data is generated. + +Calling `readable.pause()`, `readable.unpipe()`, or receiving "back pressure" +will cause the `readable._readableState.flowing` to be set as `false`, +temporarily halting the flowing of events but *not* halting the generation of +data. + +While `readable._readableState.flowing` is `false`, data may be accumulating +within the streams internal buffer. + +#### Choose One + +The Readable stream API evolved across multiple Node.js versions and provides +multiple methods of consuming stream data. In general, developers should choose +*one* of the methods of consuming data and *should never* use multiple methods +to consume data from a single stream. + +Use of the `readable.pipe()` method is recommended for most users as it has been +implemented to provide the easiest way of consuming stream data. Developers that +require more fine-grained control over the transfer and generation of data can +use the [`EventEmitter`][] and `readable.pause()`/`readable.resume()` APIs. + +#### Class: stream.Readable + + + + +##### Event: 'close' + + +The `'close'` event is emitted when the stream and any of its underlying +resources (a file descriptor, for example) have been closed. The event indicates +that no more events will be emitted, and no further computation will occur. + +Not all [Readable][] streams will emit the `'close'` event. + +##### Event: 'data' + + +* `chunk` {Buffer|String|any} The chunk of data. For streams that are not + operating in object mode, the chunk will be either a string or `Buffer`. 
+ For streams that are in object mode, the chunk can be any JavaScript value + other than `null`. + +The `'data'` event is emitted whenever the stream is relinquishing ownership of +a chunk of data to a consumer. This may occur whenever the stream is switched +in flowing mode by calling `readable.pipe()`, `readable.resume()`, or by +attaching a listener callback to the `'data'` event. The `'data'` event will +also be emitted whenever the `readable.read()` method is called and a chunk of +data is available to be returned. + +Attaching a `'data'` event listener to a stream that has not been explicitly +paused will switch the stream into flowing mode. Data will then be passed as +soon as it is available. + +The listener callback will be passed the chunk of data as a string if a default +encoding has been specified for the stream using the +`readable.setEncoding()` method; otherwise the data will be passed as a +`Buffer`. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +``` + +##### Event: 'end' + + +The `'end'` event is emitted when there is no more data to be consumed from +the stream. + +*Note*: The `'end'` event **will not be emitted** unless the data is +completely consumed. This can be accomplished by switching the stream into +flowing mode, or by calling [`stream.read()`][stream-read] repeatedly until +all data has been consumed. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); +}); +readable.on('end', () => { + console.log('There will be no more data.'); +}); +``` + +##### Event: 'error' + + +* {Error} + +The `'error'` event may be emitted by a Readable implementation at any time. +Typically, this may occur if the underlying stream in unable to generate data +due to an underlying internal failure, or when a stream implementation attempts +to push an invalid chunk of data. + +The listener callback will be passed a single `Error` object. + +##### Event: 'readable' + + +The `'readable'` event is emitted when there is data available to be read from +the stream. In some cases, attaching a listener for the `'readable'` event will +cause some amount of data to be read into an internal buffer. + +```javascript +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + // there is some data to read now +}); +``` +The `'readable'` event will also be emitted once the end of the stream data +has been reached but before the `'end'` event is emitted. + +Effectively, the `'readable'` event indicates that the stream has new +information: either new data is available or the end of the stream has been +reached. In the former case, [`stream.read()`][stream-read] will return the +available data. In the latter case, [`stream.read()`][stream-read] will return +`null`. For instance, in the following example, `foo.txt` is an empty file: + +```js +const fs = require('fs'); +const rr = fs.createReadStream('foo.txt'); +rr.on('readable', () => { + console.log('readable:', rr.read()); +}); +rr.on('end', () => { + console.log('end'); +}); +``` + +The output of running this script is: + +``` +$ node test.js +readable: null +end +``` + +*Note*: In general, the `readable.pipe()` and `'data'` event mechanisms are +preferred over the use of the `'readable'` event. 
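+To make that recommendation concrete, the following is a small sketch (not part of the original text; `example.txt` is a made-up file) of the common `'data'`/`'end'`/`'error'` consumption pattern:
+
+```js
+const fs = require('fs');
+
+const rs = fs.createReadStream('example.txt');
+rs.setEncoding('utf8');
+rs.on('data', (chunk) => process.stdout.write(chunk));
+rs.on('end', () => console.log('(no more data)'));
+rs.on('error', (err) => console.error('read failed:', err.message));
+```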
+ +##### readable.isPaused() + + +* Return: {Boolean} + +The `readable.isPaused()` method returns the current operating state of the +Readable. This is used primarily by the mechanism that underlies the +`readable.pipe()` method. In most typical cases, there will be no reason to +use this method directly. + +```js +const readable = new stream.Readable + +readable.isPaused() // === false +readable.pause() +readable.isPaused() // === true +readable.resume() +readable.isPaused() // === false +``` + +##### readable.pause() + + +* Return: `this` + +The `readable.pause()` method will cause a stream in flowing mode to stop +emitting [`'data'`][] events, switching out of flowing mode. Any data that +becomes available will remain in the internal buffer. + +```js +const readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log(`Received ${chunk.length} bytes of data.`); + readable.pause(); + console.log('There will be no additional data for 1 second.'); + setTimeout(() => { + console.log('Now data will start flowing again.'); + readable.resume(); + }, 1000); +}); +``` + +##### readable.pipe(destination[, options]) + + +* `destination` {stream.Writable} The destination for writing data +* `options` {Object} Pipe options + * `end` {Boolean} End the writer when the reader ends. Defaults to `true`. + +The `readable.pipe()` method attaches a [Writable][] stream to the `readable`, +causing it to switch automatically into flowing mode and push all of its data +to the attached [Writable][]. The flow of data will be automatically managed so +that the destination Writable stream is not overwhelmed by a faster Readable +stream. + +The following example pipes all of the data from the `readable` into a file +named `file.txt`: + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt' +readable.pipe(writable); +``` +It is possible to attach multiple Writable streams to a single Readable stream. + +The `readable.pipe()` method returns a reference to the *destination* stream +making it possible to set up chains of piped streams: + +```js +const r = fs.createReadStream('file.txt'); +const z = zlib.createGzip(); +const w = fs.createWriteStream('file.txt.gz'); +r.pipe(z).pipe(w); +``` + +By default, [`stream.end()`][stream-end] is called on the destination Writable +stream when the source Readable stream emits [`'end'`][], so that the +destination is no longer writable. To disable this default behavior, the `end` +option can be passed as `false`, causing the destination stream to remain open, +as illustrated in the following example: + +```js +reader.pipe(writer, { end: false }); +reader.on('end', () => { + writer.end('Goodbye\n'); +}); +``` + +One important caveat is that if the Readable stream emits an error during +processing, the Writable destination *is not closed* automatically. If an +error occurs, it will be necessary to *manually* close each stream in order +to prevent memory leaks. + +*Note*: The [`process.stderr`][] and [`process.stdout`][] Writable streams are +never closed until the Node.js process exits, regardless of the specified +options. + +##### readable.read([size]) + + +* `size` {Number} Optional argument to specify how much data to read. +* Return {String|Buffer|Null} + +The `readable.read()` method pulls some data out of the internal buffer and +returns it. If no data available to be read, `null` is returned. 
By default, +the data will be returned as a `Buffer` object unless an encoding has been +specified using the `readable.setEncoding()` method or the stream is operating +in object mode. + +The optional `size` argument specifies a specific number of bytes to read. If +`size` bytes are not available to be read, `null` will be returned *unless* +the stream has ended, in which case all of the data remaining in the internal +buffer will be returned (*even if it exceeds `size` bytes*). + +If the `size` argument is not specified, all of the data contained in the +internal buffer will be returned. + +The `readable.read()` method should only be called on Readable streams operating +in paused mode. In flowing mode, `readable.read()` is called automatically until +the internal buffer is fully drained. + +```js +const readable = getReadableStreamSomehow(); +readable.on('readable', () => { + var chunk; + while (null !== (chunk = readable.read())) { + console.log(`Received ${chunk.length} bytes of data.`); + } +}); +``` + +In general, it is recommended that developers avoid the use of the `'readable'` +event and the `readable.read()` method in favor of using either +`readable.pipe()` or the `'data'` event. + +A Readable stream in object mode will always return a single item from +a call to [`readable.read(size)`][stream-read], regardless of the value of the +`size` argument. + +*Note:* If the `readable.read()` method returns a chunk of data, a `'data'` +event will also be emitted. + +*Note*: Calling [`stream.read([size])`][stream-read] after the [`'end'`][] +event has been emitted will return `null`. No runtime error will be raised. + +##### readable.resume() + + +* Return: `this` + +The `readable.resume()` method causes an explicitly paused Readable stream to +resume emitting [`'data'`][] events, switching the stream into flowing mode. + +The `readable.resume()` method can be used to fully consume the data from a +stream without actually processing any of that data as illustrated in the +following example: + +```js +getReadableStreamSomehow() + .resume() + .on('end', () => { + console.log('Reached the end, but did not read anything.'); + }); +``` + +##### readable.setEncoding(encoding) + + +* `encoding` {String} The encoding to use. +* Return: `this` + +The `readable.setEncoding()` method sets the default character encoding for +data read from the Readable stream. + +Setting an encoding causes the stream data +to be returned as string of the specified encoding rather than as `Buffer` +objects. For instance, calling `readable.setEncoding('utf8')` will cause the +output data will be interpreted as UTF-8 data, and passed as strings. Calling +`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal +string format. + +The Readable stream will properly handle multi-byte characters delivered through +the stream that would otherwise become improperly decoded if simply pulled from +the stream as `Buffer` objects. + +Encoding can be disabled by calling `readable.setEncoding(null)`. This approach +is useful when working with binary data or with large multi-byte strings spread +out over multiple chunks. 
+ +```js +const readable = getReadableStreamSomehow(); +readable.setEncoding('utf8'); +readable.on('data', (chunk) => { + assert.equal(typeof chunk, 'string'); + console.log('got %d characters of string data', chunk.length); +}); +``` + +##### readable.unpipe([destination]) + + +* `destination` {stream.Writable} Optional specific stream to unpipe + +The `readable.unpipe()` method detaches a Writable stream previously attached +using the [`stream.pipe()`][] method. + +If the `destination` is not specified, then *all* pipes are detached. + +If the `destination` is specified, but no pipe is set up for it, then +the method does nothing. + +```js +const readable = getReadableStreamSomehow(); +const writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt', +// but only for the first second +readable.pipe(writable); +setTimeout(() => { + console.log('Stop writing to file.txt'); + readable.unpipe(writable); + console.log('Manually close the file stream'); + writable.end(); +}, 1000); +``` + +##### readable.unshift(chunk) + + +* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue + +The `readable.unshift()` method pushes a chunk of data back into the internal +buffer. This is useful in certain situations where a stream is being consumed by +code that needs to "un-consume" some amount of data that it has optimistically +pulled out of the source, so that the data can be passed on to some other party. + +*Note*: The `stream.unshift(chunk)` method cannot be called after the +[`'end'`][] event has been emitted or a runtime error will be thrown. + +Developers using `stream.unshift()` often should consider switching to +use of a [Transform][] stream instead. See the [API for Stream Implementers][] +section for more information. + +```js +// Pull off a header delimited by \n\n +// use unshift() if we get too much +// Call the callback with (error, header, stream) +const StringDecoder = require('string_decoder').StringDecoder; +function parseHeader(stream, callback) { + stream.on('error', callback); + stream.on('readable', onReadable); + const decoder = new StringDecoder('utf8'); + var header = ''; + function onReadable() { + var chunk; + while (null !== (chunk = stream.read())) { + var str = decoder.write(chunk); + if (str.match(/\n\n/)) { + // found the header boundary + var split = str.split(/\n\n/); + header += split.shift(); + const remaining = split.join('\n\n'); + const buf = Buffer.from(remaining, 'utf8'); + if (buf.length) + stream.unshift(buf); + stream.removeListener('error', callback); + stream.removeListener('readable', onReadable); + // now the body of the message can be read from the stream. + callback(null, header, stream); + } else { + // still reading the header. + header += str; + } + } + } +} +``` + +*Note*: Unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` +will not end the reading process by resetting the internal reading state of the +stream. This can cause unexpected results if `readable.unshift()` is called +during a read (i.e. from within a [`stream._read()`][stream-_read] +implementation on a custom stream). Following the call to `readable.unshift()` +with an immediate [`stream.push('')`][stream-push] will reset the reading state +appropriately, however it is best to simply avoid calling `readable.unshift()` +while in the process of performing a read. 
+ +##### readable.wrap(stream) + + +* `stream` {Stream} An "old style" readable stream + +Versions of Node.js prior to v0.10 had streams that did not implement the +entire `stream` module API as it is currently defined. (See [Compatibility][] +for more information.) + +When using an older Node.js library that emits [`'data'`][] events and has a +[`stream.pause()`][stream-pause] method that is advisory only, the +`readable.wrap()` method can be used to create a [Readable][] stream that uses +the old stream as its data source. + +It will rarely be necessary to use `readable.wrap()` but the method has been +provided as a convenience for interacting with older Node.js applications and +libraries. + +For example: + +```js +const OldReader = require('./old-api-module.js').OldReader; +const Readable = require('stream').Readable; +const oreader = new OldReader; +const myReader = new Readable().wrap(oreader); + +myReader.on('readable', () => { + myReader.read(); // etc. +}); +``` + +### Duplex and Transform Streams + +#### Class: stream.Duplex + + + + +Duplex streams are streams that implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Duplex streams include: + +* [TCP sockets][] +* [zlib streams][zlib] +* [crypto streams][crypto] + +#### Class: stream.Transform + + + + +Transform streams are [Duplex][] streams where the output is in some way +related to the input. Like all [Duplex][] streams, Transform streams +implement both the [Readable][] and [Writable][] interfaces. + +Examples of Transform streams include: + +* [zlib streams][zlib] +* [crypto streams][crypto] + + +## API for Stream Implementers + + + +The `stream` module API has been designed to make it possible to easily +implement streams using JavaScript's prototypical inheritance model. + +First, a stream developer would declare a new JavaScript class that extends one +of the four basic stream classes (`stream.Writable`, `stream.Readable`, +`stream.Duplex`, or `stream.Transform`), making sure the call the appropriate +parent class constructor: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + super(options); + } +} +``` + +The new stream class must then implement one or more specific methods, depending +on the type of stream being created, as detailed in the chart below: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Use-case | Class | Method(s) to implement |
+| -------- | ----- | ----------------------- |
+| Reading only | [Readable](#stream_class_stream_readable) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform) | [_transform][stream-_transform], [_flush][stream-_flush] |
+ +*Note*: The implementation code for a stream should *never* call the "public" +methods of a stream that are intended for use by consumers (as described in +the [API for Stream Consumers][] section). Doing so may lead to adverse +side effects in application code consuming the stream. + +### Simplified Construction + +For many simple cases, it is possible to construct a stream without relying on +inheritance. This can be accomplished by directly creating instances of the +`stream.Writable`, `stream.Readable`, `stream.Duplex` or `stream.Transform` +objects and passing appropriate methods as constructor options. + +For example: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + } +}); +``` + +### Implementing a Writable Stream + +The `stream.Writable` class is extended to implement a [Writable][] stream. + +Custom Writable streams *must* call the `new stream.Writable([options])` +constructor and implement the `writable._write()` method. The +`writable._writev()` method *may* also be implemented. + +#### Constructor: new stream.Writable([options]) + +* `options` {Object} + * `highWaterMark` {Number} Buffer level when + [`stream.write()`][stream-write] starts returning `false`. Defaults to + `16384` (16kb), or `16` for `objectMode` streams. + * `decodeStrings` {Boolean} Whether or not to decode strings into + Buffers before passing them to [`stream._write()`][stream-_write]. + Defaults to `true` + * `objectMode` {Boolean} Whether or not the + [`stream.write(anyObj)`][stream-write] is a valid operation. When set, + it becomes possible to write JavaScript values other than string or + `Buffer` if supported by the stream implementation. Defaults to `false` + * `write` {Function} Implementation for the + [`stream._write()`][stream-_write] method. + * `writev` {Function} Implementation for the + [`stream._writev()`][stream-_writev] method. + +For example: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + // Calls the stream.Writable() constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Writable = require('stream').Writable; +const util = require('util'); + +function MyWritable(options) { + if (!(this instanceof MyWritable)) + return new MyWritable(options); + Writable.call(this, options); +} +util.inherits(MyWritable, Writable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + // ... + }, + writev(chunks, callback) { + // ... + } +}); +``` + +#### writable.\_write(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be written. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then `encoding` is the + character encoding of that string. If chunk is a `Buffer`, or if the + stream is operating in object mode, `encoding` may be ignored. +* `callback` {Function} Call this function (optionally with an error + argument) when processing is complete for the supplied chunk. + +All Writable stream implementations must provide a +[`writable._write()`][stream-_write] method to send data to the underlying +resource. + +*Note*: [Transform][] streams provide their own implementation of the +[`writable._write()`][stream-_write]. 
+ +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called by the internal Writable +class methods only. + +The `callback` method must be called to signal either that the write completed +successfully or failed with an error. The first argument passed to the +`callback` must be the `Error` object if the call failed or `null` if the +write succeeded. + +It is important to note that all calls to `writable.write()` that occur between +the time `writable._write()` is called and the `callback` is called will cause +the written data to be buffered. Once the `callback` is invoked, the stream will +emit a `'drain'` event. If a stream implementation is capable of processing +multiple chunks of data at once, the `writable._writev()` method should be +implemented. + +If the `decodeStrings` property is set in the constructor options, then +`chunk` may be a string rather than a Buffer, and `encoding` will +indicate the character encoding of the string. This is to support +implementations that have an optimized handling for certain string +data encodings. If the `decodeStrings` property is explicitly set to `false`, +the `encoding` argument can be safely ignored, and `chunk` will always be a +`Buffer`. + +The `writable._write()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### writable.\_writev(chunks, callback) + +* `chunks` {Array} The chunks to be written. Each chunk has the following + format: `{ chunk: ..., encoding: ... }`. +* `callback` {Function} A callback function (optionally with an error + argument) to be invoked when processing is complete for the supplied chunks. + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called by the internal Writable +class methods only. + +The `writable._writev()` method may be implemented in addition to +`writable._write()` in stream implementations that are capable of processing +multiple chunks of data at once. If implemented, the method will be called with +all chunks of data currently buffered in the write queue. + +The `writable._writev()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### Errors While Writing + +It is recommended that errors occurring during the processing of the +`writable._write()` and `writable._writev()` methods are reported by invoking +the callback and passing the error as the first argument. This will cause an +`'error'` event to be emitted by the Writable. Throwing an Error from within +`writable._write()` can result in unexpected and inconsistent behavior depending +on how the stream is being used. Using the callback ensures consistent and +predictable handling of errors. + +```js +const Writable = require('stream').Writable; + +const myWritable = new Writable({ + write(chunk, encoding, callback) { + if (chunk.toString().indexOf('a') >= 0) { + callback(new Error('chunk is invalid')); + } else { + callback(); + } + } +}); +``` + +#### An Example Writable Stream + +The following illustrates a rather simplistic (and somewhat pointless) custom +Writable stream implementation.
While this specific Writable stream instance +is not particularly useful, the example illustrates each of the +required elements of a custom [Writable][] stream instance: + +```js +const Writable = require('stream').Writable; + +class MyWritable extends Writable { + constructor(options) { + super(options); + } + + _write(chunk, encoding, callback) { + if (chunk.toString().indexOf('a') >= 0) { + callback(new Error('chunk is invalid')); + } else { + callback(); + } + } +} +``` + +### Implementing a Readable Stream + +The `stream.Readable` class is extended to implement a [Readable][] stream. + +Custom Readable streams *must* call the `new stream.Readable([options])` +constructor and implement the `readable._read()` method. + +#### new stream.Readable([options]) + +* `options` {Object} + * `highWaterMark` {Number} The maximum number of bytes to store in + the internal buffer before ceasing to read from the underlying + resource. Defaults to `16384` (16kb), or `16` for `objectMode` streams + * `encoding` {String} If specified, then buffers will be decoded to + strings using the specified encoding. Defaults to `null` + * `objectMode` {Boolean} Whether this stream should behave + as a stream of objects, meaning that [`stream.read(n)`][stream-read] returns + a single value instead of a Buffer of size n. Defaults to `false` + * `read` {Function} Implementation for the [`stream._read()`][stream-_read] + method. + +For example: + +```js +const Readable = require('stream').Readable; + +class MyReadable extends Readable { + constructor(options) { + // Calls the stream.Readable(options) constructor + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Readable = require('stream').Readable; +const util = require('util'); + +function MyReadable(options) { + if (!(this instanceof MyReadable)) + return new MyReadable(options); + Readable.call(this, options); +} +util.inherits(MyReadable, Readable); +``` + +Or, using the Simplified Constructor approach: + +```js +const Readable = require('stream').Readable; + +const myReadable = new Readable({ + read(size) { + // ... + } +}); +``` + +#### readable.\_read(size) + +* `size` {Number} Number of bytes to read asynchronously + +*Note*: **This function MUST NOT be called by application code directly.** It +should be implemented by child classes, and called by the internal Readable +class methods only. + +All Readable stream implementations must provide an implementation of the +`readable._read()` method to fetch data from the underlying resource. + +When `readable._read()` is called, if data is available from the resource, the +implementation should begin pushing that data into the read queue using the +[`this.push(dataChunk)`][stream-push] method. `_read()` should continue reading +from the resource and pushing data until `readable.push()` returns `false`. Only +when `_read()` is called again after it has stopped should it resume pushing +additional data onto the queue. + +*Note*: Once the `readable._read()` method has been called, it will not be +called again until the [`readable.push()`][stream-push] method is called. + +The `size` argument is advisory. Implementations for which a "read" is a +single operation that returns data can use the `size` argument to determine how +much data to fetch. Other implementations may ignore this argument and simply +provide data whenever it becomes available. There is no need to "wait" until +`size` bytes are available before calling [`stream.push(chunk)`][stream-push].
+ +The `readable._read()` method is prefixed with an underscore because it is +internal to the class that defines it, and should never be called directly by +user programs. + +#### readable.push(chunk[, encoding]) + +* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue +* `encoding` {String} Encoding of String chunks. Must be a valid + Buffer encoding, such as `'utf8'` or `'ascii'` +* Returns {Boolean} `true` if additional chunks of data may continue to be + pushed; `false` otherwise. + +When `chunk` is a `Buffer` or `string`, the `chunk` of data will be added to the +internal queue for users of the stream to consume. Passing `chunk` as `null` +signals the end of the stream (EOF), after which no more data can be written. + +When the Readable is operating in paused mode, the data added with +`readable.push()` can be read out by calling the +[`readable.read()`][stream-read] method when the [`'readable'`][] event is +emitted. + +When the Readable is operating in flowing mode, the data added with +`readable.push()` will be delivered by emitting a `'data'` event. + +The `readable.push()` method is designed to be as flexible as possible. For +example, when wrapping a lower-level source that provides some form of +pause/resume mechanism, and a data callback, the low-level source can be wrapped +by the custom Readable instance as illustrated in the following example: + +```js +// source is an object with readStop() and readStart() methods, +// and an `ondata` member that gets called when it has data, and +// an `onend` member that gets called when the data is over. + +class SourceWrapper extends Readable { + constructor(options) { + super(options); + + this._source = getLowlevelSourceObject(); + + // Every time there's data, push it into the internal buffer. + this._source.ondata = (chunk) => { + // if push() returns false, then stop reading from source + if (!this.push(chunk)) + this._source.readStop(); + }; + + // When the source ends, push the EOF-signaling `null` chunk + this._source.onend = () => { + this.push(null); + }; + } + // _read will be called when the stream wants to pull more data in + // the advisory size argument is ignored in this case. + _read(size) { + this._source.readStart(); + } +} +``` +*Note*: The `readable.push()` method is intended to be called only by Readable +Implementers, and only from within the `readable._read()` method. + +#### Errors While Reading + +It is recommended that errors occurring during the processing of the +`readable._read()` method are emitted using the `'error'` event rather than +being thrown. Throwing an Error from within `readable._read()` can result in +unexpected and inconsistent behavior depending on whether the stream is operating +in flowing or paused mode. Using the `'error'` event ensures consistent and +predictable handling of errors. + +```js +const Readable = require('stream').Readable; + +const myReadable = new Readable({ + read(size) { + if (checkSomeErrorCondition()) { + process.nextTick(() => this.emit('error', err)); + return; + } + // do some work + } +}); +``` + +#### An Example Counting Stream + + + +The following is a basic example of a Readable stream that emits the numerals +from 1 to 1,000,000 in ascending order, and then ends.
+ +```js +const Readable = require('stream').Readable; + +class Counter extends Readable { + constructor(opt) { + super(opt); + this._max = 1000000; + this._index = 1; + } + + _read() { + var i = this._index++; + if (i > this._max) + this.push(null); + else { + var str = '' + i; + var buf = Buffer.from(str, 'ascii'); + this.push(buf); + } + } +} +``` + +### Implementing a Duplex Stream + +A [Duplex][] stream is one that implements both [Readable][] and [Writable][], +such as a TCP socket connection. + +Because Javascript does not have support for multiple inheritance, the +`stream.Duplex` class is extended to implement a [Duplex][] stream (as opposed +to extending the `stream.Readable` *and* `stream.Writable` classes). + +*Note*: The `stream.Duplex` class prototypically inherits from `stream.Readable` +and parasitically from `stream.Writable`. + +Custom Duplex streams *must* call the `new stream.Duplex([options])` +constructor and implement *both* the `readable._read()` and +`writable._write()` methods. + +#### new stream.Duplex(options) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `allowHalfOpen` {Boolean} Defaults to `true`. If set to `false`, then + the stream will automatically end the readable side when the + writable side ends and vice versa. + * `readableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` + for readable side of the stream. Has no effect if `objectMode` + is `true`. + * `writableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` + for writable side of the stream. Has no effect if `objectMode` + is `true`. + +For example: + +```js +const Duplex = require('stream').Duplex; + +class MyDuplex extends Duplex { + constructor(options) { + super(options); + } +} +``` + +Or, when using pre-ES6 style constructors: + +```js +const Duplex = require('stream').Duplex; +const util = require('util'); + +function MyDuplex(options) { + if (!(this instanceof MyDuplex)) + return new MyDuplex(options); + Duplex.call(this, options); +} +util.inherits(MyDuplex, Duplex); +``` + +Or, using the Simplified Constructor approach: + +```js +const Duplex = require('stream').Duplex; + +const myDuplex = new Duplex({ + read(size) { + // ... + }, + write(chunk, encoding, callback) { + // ... + } +}); +``` + +#### An Example Duplex Stream + +The following illustrates a simple example of a Duplex stream that wraps a +hypothetical lower-level source object to which data can be written, and +from which data can be read, albeit using an API that is not compatible with +Node.js streams. +The following illustrates a simple example of a Duplex stream that buffers +incoming written data via the [Writable][] interface that is read back out +via the [Readable][] interface. + +```js +const Duplex = require('stream').Duplex; +const kSource = Symbol('source'); + +class MyDuplex extends Duplex { + constructor(source, options) { + super(options); + this[kSource] = source; + } + + _write(chunk, encoding, callback) { + // The underlying source only deals with strings + if (Buffer.isBuffer(chunk)) + chunk = chunk.toString(encoding); + this[kSource].writeSomeData(chunk, encoding); + callback(); + } + + _read(size) { + this[kSource].fetchSomeData(size, (data, encoding) => { + this.push(Buffer.from(data, encoding)); + }); + } +} +``` + +The most important aspect of a Duplex stream is that the Readable and Writable +sides operate independently of one another despite co-existing within a single +object instance. 
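+As a self-contained toy sketch (not from the original text), the independence of the two sides can be seen in a Duplex whose Writable side merely counts bytes while its Readable side produces its own report:
+
+```js
+const Duplex = require('stream').Duplex;
+
+let bytesWritten = 0;
+const toy = new Duplex({
+  // Writable side: just count incoming bytes.
+  write(chunk, encoding, callback) {
+    bytesWritten += chunk.length;
+    callback();
+  },
+  // Readable side: independently produce a one-off report, then end.
+  read(size) {
+    this.push(`seen ${bytesWritten} bytes so far\n`);
+    this.push(null);
+  }
+});
+
+toy.write('hello');
+toy.on('data', (chunk) => process.stdout.write(chunk));
+// Prints: seen 5 bytes so far
+```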
+
+#### Object Mode Duplex Streams
+
+For Duplex streams, `objectMode` can be set exclusively for either the Readable
+or Writable side using the `readableObjectMode` and `writableObjectMode` options
+respectively.
+
+In the following example, for instance, a new Transform stream (which is a
+type of [Duplex][] stream) is created that has an object mode Writable side
+that accepts JavaScript numbers that are converted to hexadecimal strings on
+the Readable side.
+
+```js
+const Transform = require('stream').Transform;
+
+// All Transform streams are also Duplex Streams
+const myTransform = new Transform({
+  writableObjectMode: true,
+
+  transform(chunk, encoding, callback) {
+    // Coerce the chunk to a number if necessary
+    chunk |= 0;
+
+    // Transform the chunk into something else.
+    const data = chunk.toString(16);
+
+    // Push the data onto the readable queue.
+    callback(null, '0'.repeat(data.length % 2) + data);
+  }
+});
+
+myTransform.setEncoding('ascii');
+myTransform.on('data', (chunk) => console.log(chunk));
+
+myTransform.write(1);
+  // Prints: 01
+myTransform.write(10);
+  // Prints: 0a
+myTransform.write(100);
+  // Prints: 64
+```
+
+### Implementing a Transform Stream
+
+A [Transform][] stream is a [Duplex][] stream where the output is computed
+in some way from the input. Examples include [zlib][] streams or [crypto][]
+streams that compress, encrypt, or decrypt data.
+
+*Note*: There is no requirement that the output be the same size as the input,
+the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output which is
+provided when the input is ended. A `zlib` stream will produce output
+that is either much smaller or much larger than its input.
+
+The `stream.Transform` class is extended to implement a [Transform][] stream.
+
+The `stream.Transform` class prototypically inherits from `stream.Duplex` and
+implements its own versions of the `writable._write()` and `readable._read()`
+methods. Custom Transform implementations *must* implement the
+[`transform._transform()`][stream-_transform] method and *may* also implement
+the [`transform._flush()`][stream-_flush] method.
+
+*Note*: Care must be taken when using Transform streams in that data written
+to the stream can cause the Writable side of the stream to become paused if
+the output on the Readable side is not consumed.
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+  constructors. Also has the following fields:
+  * `transform` {Function} Implementation for the
+    [`stream._transform()`][stream-_transform] method.
+  * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
+    method.
+
+For example:
+
+```js
+const Transform = require('stream').Transform;
+
+class MyTransform extends Transform {
+  constructor(options) {
+    super(options);
+  }
+}
+```
+
+Or, when using pre-ES6 style constructors:
+
+```js
+const Transform = require('stream').Transform;
+const util = require('util');
+
+function MyTransform(options) {
+  if (!(this instanceof MyTransform))
+    return new MyTransform(options);
+  Transform.call(this, options);
+}
+util.inherits(MyTransform, Transform);
+```
+
+Or, using the Simplified Constructor approach:
+
+```js
+const Transform = require('stream').Transform;
+
+const myTransform = new Transform({
+  transform(chunk, encoding, callback) {
+    // ...
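+    // For instance (purely illustrative), a transform body might upper-case
+    // incoming text chunks and hand the result to the readable side:
+    //   this.push(chunk.toString().toUpperCase());
+    //   callback();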
+  }
+});
+```
+
+#### Events: 'finish' and 'end'
+
+The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable`
+and `stream.Readable` classes, respectively. The `'finish'` event is emitted
+after [`stream.end()`][stream-end] is called and all chunks have been processed
+by [`stream._transform()`][stream-_transform]. The `'end'` event is emitted
+after all data has been output, which occurs after the callback in
+[`transform._flush()`][stream-_flush] has been called.
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} A callback function (optionally with an error
+  argument) to be called when remaining data has been flushed.
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called by the internal Readable
+class methods only.
+
+In some cases, a transform operation may need to emit an additional bit of
+data at the end of the stream. For example, a `zlib` compression stream will
+store an amount of internal state used to optimally compress the output. When
+the stream ends, however, that additional data needs to be flushed so that the
+compressed data will be complete.
+
+Custom [Transform][] implementations *may* implement the `transform._flush()`
+method. This will be called when there is no more written data to be consumed,
+but before the [`'end'`][] event is emitted signaling the end of the
+[Readable][] stream.
+
+Within the `transform._flush()` implementation, the `readable.push()` method
+may be called zero or more times, as appropriate. The `callback` function must
+be called when the flush operation is complete.
+
+The `transform._flush()` method is prefixed with an underscore because it is
+internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
+  be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} A callback function (optionally with an error
+  argument and data) to be called after the supplied `chunk` has been
+  processed.
+
+*Note*: **This function MUST NOT be called by application code directly.** It
+should be implemented by child classes, and called by the internal Readable
+class methods only.
+
+All Transform stream implementations must provide a `_transform()`
+method to accept input and produce output. The `transform._transform()`
+implementation handles the bytes being written, computes an output, then passes
+that output off to the readable portion using the `readable.push()` method.
+
+The `transform.push()` method may be called zero or more times to generate
+output from a single input chunk, depending on how much is to be output
+as a result of the chunk.
+
+It is possible that no output is generated from any given chunk of input data.
+
+The `callback` function must be called only when the current chunk is completely
+consumed. The first argument passed to the `callback` must be an `Error` object
+if an error occurred while processing the input or `null` otherwise. If a second
+argument is passed to the `callback`, it will be forwarded on to the
+`readable.push()` method.
In other words, the following are equivalent:
+
+```js
+transform.prototype._transform = function (data, encoding, callback) {
+  this.push(data);
+  callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+  callback(null, data);
+};
+```
+
+The `transform._transform()` method is prefixed with an underscore because it
+is internal to the class that defines it, and should never be called directly by
+user programs.
+
+#### Class: stream.PassThrough
+
+The `stream.PassThrough` class is a trivial implementation of a [Transform][]
+stream that simply passes the input bytes across to the output. Its purpose is
+primarily for examples and testing, but there are some use cases where
+`stream.PassThrough` is useful as a building block for novel sorts of streams.
+
+## Additional Notes
+
+### Compatibility with Older Node.js Versions
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for calls to the [`stream.read()`][stream-read] method,
+  [`'data'`][] events would begin emitting immediately. Applications that
+  would need to perform some amount of work to decide how to handle data
+  were required to store read data into buffers so the data would not be lost.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+  guaranteed. This meant that it was still necessary to be prepared to receive
+  [`'data'`][] events *even when the stream was in a paused state*.
+
+In Node.js v0.10, the [Readable][] class was added. For backwards compatibility
+with older Node.js programs, Readable streams switch into "flowing mode" when a
+[`'data'`][] event handler is added, or when the
+[`stream.resume()`][stream-resume] method is called. The effect is that, even
+when not using the new [`stream.read()`][stream-read] method and
+[`'readable'`][] event, it is no longer necessary to worry about losing
+[`'data'`][] chunks.
+
+While most applications will continue to function normally, this introduces an
+edge case in the following conditions:
+
+* No [`'data'`][] event listener is added.
+* The [`stream.resume()`][stream-resume] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```js
+// WARNING! BROKEN!
+net.createServer((socket) => {
+
+  // we add an 'end' listener, but never consume the data
+  socket.on('end', () => {
+    // It will never get here.
+    socket.end('The message was received but was not processed.\n');
+  });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would be
+simply discarded. However, in Node.js v0.10 and beyond, the socket remains
+paused forever.
+
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to begin the flow of data:
+
+```js
+// Workaround
+net.createServer((socket) => {
+
+  socket.on('end', () => {
+    socket.end('The message was received but was not processed.\n');
+  });
+
+  // start the flow of data, discarding it.
+  socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+[`readable.wrap()`][`stream.wrap()`] method.
+
+### `readable.read(0)`
+
+There are some cases where it is necessary to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any
+data.
In such cases, it is possible to call `readable.read(0)`, which will +always return `null`. + +If the internal read buffer is below the `highWaterMark`, and the +stream is not currently reading, then calling `stream.read(0)` will trigger +a low-level [`stream._read()`][stream-_read] call. + +While most applications will almost never need to do this, there are +situations within Node.js where this is done, particularly in the +Readable stream class internals. + +### `readable.push('')` + +Use of `readable.push('')` is not recommended. + +Pushing a zero-byte string or `Buffer` to a stream that is not in object mode +has an interesting side effect. Because it *is* a call to +[`readable.push()`][stream-push], the call will end the reading process. +However, because the argument is an empty string, no data is added to the +readable buffer so there is nothing for a user to consume. + +[`'data'`]: #stream_event_data +[`'drain'`]: #stream_event_drain +[`'end'`]: #stream_event_end +[`'finish'`]: #stream_event_finish +[`'readable'`]: #stream_event_readable +[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.3.1/api/buffer.html#buffer_buf_tostring_encoding_start_end +[`EventEmitter`]: https://nodejs.org/docs/v6.3.1/api/events.html#events_class_eventemitter +[`process.stderr`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stderr +[`process.stdin`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdin +[`process.stdout`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdout +[`stream.cork()`]: #stream_writable_cork +[`stream.pipe()`]: #stream_readable_pipe_destination_options +[`stream.uncork()`]: #stream_writable_uncork +[`stream.unpipe()`]: #stream_readable_unpipe_destination +[`stream.wrap()`]: #stream_readable_wrap_stream +[`tls.CryptoStream`]: https://nodejs.org/docs/v6.3.1/api/tls.html#tls_class_cryptostream +[API for Stream Consumers]: #stream_api_for_stream_consumers +[API for Stream Implementers]: #stream_api_for_stream_implementers +[child process stdin]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdin +[child process stdout and stderr]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdout +[Compatibility]: #stream_compatibility_with_older_node_js_versions +[crypto]: crypto.html +[Duplex]: #stream_class_stream_duplex +[fs read streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_readstream +[fs write streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_writestream +[`fs.createReadStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createreadstream_path_options +[`fs.createWriteStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createwritestream_path_options +[`net.Socket`]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[`zlib.createDeflate()`]: https://nodejs.org/docs/v6.3.1/api/zlib.html#zlib_zlib_createdeflate_options +[HTTP requests, on the client]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_clientrequest +[HTTP responses, on the server]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_serverresponse +[http-incoming-message]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_incomingmessage +[Object mode]: #stream_object_mode +[Readable]: #stream_class_stream_readable +[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 +[stream-_flush]: #stream_transform_flush_callback +[stream-_read]: #stream_readable_read_size_1 +[stream-_transform]: 
#stream_transform_transform_chunk_encoding_callback +[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 +[stream-_writev]: #stream_writable_writev_chunks_callback +[stream-end]: #stream_writable_end_chunk_encoding_callback +[stream-pause]: #stream_readable_pause +[stream-push]: #stream_readable_push_chunk_encoding +[stream-read]: #stream_readable_read_size +[stream-resume]: #stream_readable_resume +[stream-write]: #stream_writable_write_chunk_encoding_callback +[TCP sockets]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket +[Transform]: #stream_class_stream_transform +[Writable]: #stream_class_stream_writable +[zlib]: zlib.html diff --git a/node_modules/tar-pack/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/tar-pack/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/tar-pack/node_modules/readable-stream/duplex.js b/node_modules/tar-pack/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..ca807af --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..736693b --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,75 @@ +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +var keys = objectKeys(Writable.prototype); +for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + processNextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..d06f71f --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,26 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..208cc65 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,937 @@ +'use strict'; + +module.exports = Readable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var isArray = require('isarray'); +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var StringDecoder; + +util.inherits(Readable, Stream); + +function prependListener(emitter, event, fn) { + if (typeof emitter.prependListener === 'function') { + return emitter.prependListener(event, fn); + } else { + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; + } +} + +var Duplex; +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. 
+ this.highWaterMark = ~ ~this.highWaterMark; + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +var Duplex; +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = bufferShim.from(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var _e = new Error('stream.unshift() after end event'); + stream.emit('error', _e); + } else { + var skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. 
+ if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + processNextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
+Readable.prototype._read = function (n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + }return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + processNextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + processNextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function (ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. 
+// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = bufferShim.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + processNextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..dbc996e --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,180 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) 
+// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function (er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. 
+ this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function () { + if (typeof this._flush === 'function') this._flush(function (er) { + done(stream, er); + });else done(stream); + }); +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('Not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) throw new Error('Calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..ed5efcb --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,526 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +util.inherits(Writable, Stream); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +var Duplex; +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. 
+ this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') + }); + } catch (_) {} +})(); + +var Duplex; +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + processNextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + // Always throw error if a null is written + // if we are not in object mode then throw + // if it is not a buffer, string, or undefined. 
+ if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + processNextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = bufferShim.from(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) processNextTick(cb, er);else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. 
+ // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) processNextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function (err) { + var entry = _this.entry; + _this.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/tar-pack/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..e4bfcf0 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,64 @@ +'use strict'; + +var Buffer = require('buffer').Buffer; +/**/ +var bufferShim = require('buffer-shims'); +/**/ + +module.exports = BufferList; + +function BufferList() { + this.head = null; + this.tail = null; + this.length = 0; +} + +BufferList.prototype.push = function (v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; +}; + +BufferList.prototype.unshift = function (v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; +}; + +BufferList.prototype.shift = function () { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; +}; + 
+BufferList.prototype.clear = function () { + this.head = this.tail = null; + this.length = 0; +}; + +BufferList.prototype.join = function (s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; +}; + +BufferList.prototype.concat = function (n) { + if (this.length === 0) return bufferShim.alloc(0); + if (this.length === 1) return this.head.data; + var ret = bufferShim.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + p.data.copy(ret, i); + i += p.data.length; + p = p.next; + } + return ret; +}; \ No newline at end of file diff --git a/node_modules/tar-pack/node_modules/readable-stream/package.json b/node_modules/tar-pack/node_modules/readable-stream/package.json new file mode 100644 index 0000000..4293090 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/package.json @@ -0,0 +1,49 @@ +{ + "name": "readable-stream", + "version": "2.1.5", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "buffer-shims": "^1.0.0", + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "~1.4.0", + "babel-polyfill": "^6.9.1", + "nyc": "^6.4.0", + "tap": "~0.7.1", + "tape": "~4.5.1", + "zuul": "~3.10.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js", + "browser": "npm run write-zuul && zuul --browser-retries 2 -- test/browser.js", + "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml", + "local": "zuul --local 3000 --no-coverage -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/tar-pack/node_modules/readable-stream/passthrough.js b/node_modules/tar-pack/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..27e8d8a --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/tar-pack/node_modules/readable-stream/readable.js b/node_modules/tar-pack/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..be2688a --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = (function (){ + try { + return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify + } catch(_){} +}()); +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = Stream || exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); + +if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; +} diff --git a/node_modules/tar-pack/node_modules/readable-stream/transform.js b/node_modules/tar-pack/node_modules/readable-stream/transform.js new file 
mode 100644 index 0000000..5d482f0 --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/tar-pack/node_modules/readable-stream/writable.js b/node_modules/tar-pack/node_modules/readable-stream/writable.js new file mode 100644 index 0000000..e1e9efd --- /dev/null +++ b/node_modules/tar-pack/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/tar-pack/package.json b/node_modules/tar-pack/package.json new file mode 100644 index 0000000..a216377 --- /dev/null +++ b/node_modules/tar-pack/package.json @@ -0,0 +1,28 @@ +{ + "name": "tar-pack", + "version": "3.3.0", + "description": "Package and un-package modules of some sort (in tar/gz bundles).", + "scripts": { + "test": "mocha -R list" + }, + "repository": { + "type": "git", + "url": "https://github.com/ForbesLindesay/tar-pack.git" + }, + "license": "BSD-2-Clause", + "dependencies": { + "debug": "~2.2.0", + "fstream": "~1.0.10", + "fstream-ignore": "~1.0.5", + "once": "~1.3.3", + "readable-stream": "~2.1.4", + "rimraf": "~2.5.1", + "tar": "~2.2.1", + "uid-number": "~0.0.6" + }, + "devDependencies": { + "mocha": "*", + "rfile": "*", + "mkdirp": "*" + } +} diff --git a/node_modules/tar-pack/test/fixtures/packed-file.txt b/node_modules/tar-pack/test/fixtures/packed-file.txt new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/node_modules/tar-pack/test/fixtures/packed-file.txt @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/node_modules/tar-pack/test/fixtures/packed.tar b/node_modules/tar-pack/test/fixtures/packed.tar new file mode 100644 index 0000000..35fd174 Binary files /dev/null and b/node_modules/tar-pack/test/fixtures/packed.tar differ diff --git a/node_modules/tar-pack/test/fixtures/packed.tar.gz b/node_modules/tar-pack/test/fixtures/packed.tar.gz new file mode 100644 index 0000000..89516b1 Binary files /dev/null and b/node_modules/tar-pack/test/fixtures/packed.tar.gz differ diff --git a/node_modules/tar-pack/test/fixtures/to-pack/bar.txt b/node_modules/tar-pack/test/fixtures/to-pack/bar.txt new file mode 100644 index 0000000..3f95386 --- /dev/null +++ b/node_modules/tar-pack/test/fixtures/to-pack/bar.txt @@ -0,0 +1 @@ +baz \ No newline at end of file diff --git a/node_modules/tar-pack/test/fixtures/to-pack/foo.txt b/node_modules/tar-pack/test/fixtures/to-pack/foo.txt new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/node_modules/tar-pack/test/fixtures/to-pack/foo.txt @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/node_modules/tar-pack/test/index.js b/node_modules/tar-pack/test/index.js new file mode 100644 index 0000000..77bed66 --- /dev/null +++ b/node_modules/tar-pack/test/index.js @@ -0,0 +1,109 @@ +var tar = require('../') +var path = require('path') +var rfile = require('rfile') +var rimraf = require('rimraf').sync +var mkdir = require('mkdirp').sync + +var read = require('fs').createReadStream +var write = require('fs').createWriteStream +var assert = require('assert') + +beforeEach(function () { + rimraf(__dirname + '/output') +}) +afterEach(function () { + rimraf(__dirname + '/output') +}) +describe('tarball.pipe(unpack(directory, callback))', function () { + it('unpacks the tarball into the directory', function (done) { + read(__dirname + '/fixtures/packed.tar').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + 
assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) + it('unpacks the tarball into the directory without deleting existing files', function (done) { + read(__dirname + '/fixtures/packed-file.txt').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + read(__dirname + '/fixtures/packed.tar').pipe(tar.unpack(__dirname + '/output/unpacked', {keepFiles: true}, function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/index.js'), rfile('./fixtures/packed-file.txt')) + done() + })) + })) + }) +}) +describe('tarball.pipe(unpack(directory, { strip: 0 }, callback))', function () { + it('unpacks the tarball into the directory with subdir package', function (done) { + read(__dirname + '/fixtures/packed.tar').pipe(tar.unpack(__dirname + '/output/unpacked', { strip: 0 } , function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/package/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/package/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) +}) +describe('gziptarball.pipe(unpack(directory, callback))', function () { + it('unpacks the tarball into the directory', function (done) { + read(__dirname + '/fixtures/packed.tar.gz').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) +}) +describe('file.pipe(unpack(directory, callback))', function () { + it('copies the file into the directory', function (done) { + read(__dirname + '/fixtures/packed-file.txt').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/index.js'), rfile('./fixtures/packed-file.txt')) + done() + })) + }) +}) +describe('pack(directory).pipe(tarball)', function () { + it('packs the directory into the output', function (done) { + var called = false + mkdir(__dirname + '/output/') + tar.pack(__dirname + '/fixtures/to-pack').pipe(write(__dirname + '/output/packed.tar.gz')) + .on('error', function (err) { + if (called) return + called = true + done(err) + }) + .on('close', function () { + if (called) return + called = true + read(__dirname + '/output/packed.tar.gz').pipe(tar.unpack(__dirname + '/output/unpacked', function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) + }) +}) +describe('pack(directory, { fromBase: true }).pipe(tarball)', function () { + it('packs the directory with input dir as base dir', function (done) { + var called = false + mkdir(__dirname + '/output/') + tar.pack(__dirname + '/fixtures/to-pack', { fromBase: true }).pipe(write(__dirname + '/output/packed.tar.gz')) + .on('error', function (err) { + if (called) return + called = true + done(err) + }) + .on('close', function () { + if (called) return + called = true + read(__dirname + '/output/packed.tar.gz').pipe(tar.unpack(__dirname + '/output/unpacked', { strip: 0 }, function (err) { + if (err) return done(err) + assert.equal(rfile('./output/unpacked/bar.txt'), 
rfile('./fixtures/to-pack/bar.txt')) + assert.equal(rfile('./output/unpacked/foo.txt'), rfile('./fixtures/to-pack/foo.txt')) + done() + })) + }) + }) +}) diff --git a/node_modules/tar/.npmignore b/node_modules/tar/.npmignore new file mode 100644 index 0000000..c167ad5 --- /dev/null +++ b/node_modules/tar/.npmignore @@ -0,0 +1,5 @@ +.*.swp +node_modules +examples/extract/ +test/tmp/ +test/fixtures/ diff --git a/node_modules/tar/.travis.yml b/node_modules/tar/.travis.yml new file mode 100644 index 0000000..fca8ef0 --- /dev/null +++ b/node_modules/tar/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.10 + - 0.11 diff --git a/node_modules/tar/LICENSE b/node_modules/tar/LICENSE new file mode 100644 index 0000000..019b7e4 --- /dev/null +++ b/node_modules/tar/LICENSE @@ -0,0 +1,12 @@ +The ISC License +Copyright (c) Isaac Z. Schlueter and Contributors +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md new file mode 100644 index 0000000..cfda2ac --- /dev/null +++ b/node_modules/tar/README.md @@ -0,0 +1,50 @@ +# node-tar + +Tar for Node.js. + +[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/) + +## API + +See `examples/` for usage examples. + +### var tar = require('tar') + +Returns an object with `.Pack`, `.Extract` and `.Parse` methods. + +### tar.Pack([properties]) + +Returns a through stream. Use +[fstream](https://npmjs.org/package/fstream) to write files into the +pack stream and you will receive tar archive data from the pack +stream. + +This only works with directories, it does not work with individual files. + +The optional `properties` object are used to set properties in the tar +'Global Extended Header'. If the `fromBase` property is set to true, +the tar will contain files relative to the path passed, and not with +the path included. + +### tar.Extract([options]) + +Returns a through stream. Write tar data to the stream and the files +in the tarball will be extracted onto the filesystem. + +`options` can be: + +```js +{ + path: '/path/to/extract/tar/into', + strip: 0, // how many path segments to strip from the root when extracting +} +``` + +`options` also get passed to the `fstream.Writer` instance that `tar` +uses internally. + +### tar.Parse() + +Returns a writable stream. Write tar data to it and it will emit +`entry` events for each entry parsed from the tarball. This is used by +`tar.Extract`. 
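A minimal sketch of the two read-side entry points the README above describes, `tar.Parse()` and `tar.Extract()`; the archive name and destination directory here are placeholders rather than files shipped with this package:

```js
var tar = require('tar')
var fs = require('fs')

// List entries without writing anything to disk.
fs.createReadStream('archive.tar')            // placeholder archive
  .pipe(tar.Parse())
  .on('entry', function (entry) {
    console.log(entry.type, entry.path, entry.size)
  })

// Extract the same archive into a directory, dropping the first path
// segment of every entry (comparable to `tar --strip-components=1`).
fs.createReadStream('archive.tar')
  .pipe(tar.Extract({ path: '/tmp/unpacked', strip: 1 }))
  .on('error', function (err) { console.error('extract failed', err) })
  .on('end', function () { console.log('done') })
```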
diff --git a/node_modules/tar/examples/extracter.js b/node_modules/tar/examples/extracter.js new file mode 100644 index 0000000..f6253a7 --- /dev/null +++ b/node_modules/tar/examples/extracter.js @@ -0,0 +1,19 @@ +var tar = require("../tar.js") + , fs = require("fs") + + +function onError(err) { + console.error('An error occurred:', err) +} + +function onEnd() { + console.log('Extracted!') +} + +var extractor = tar.Extract({path: __dirname + "/extract"}) + .on('error', onError) + .on('end', onEnd); + +fs.createReadStream(__dirname + "/../test/fixtures/c.tar") + .on('error', onError) + .pipe(extractor); diff --git a/node_modules/tar/examples/packer.js b/node_modules/tar/examples/packer.js new file mode 100644 index 0000000..039969c --- /dev/null +++ b/node_modules/tar/examples/packer.js @@ -0,0 +1,24 @@ +var tar = require("../tar.js") + , fstream = require("fstream") + , fs = require("fs") + +var dirDest = fs.createWriteStream('dir.tar') + + +function onError(err) { + console.error('An error occurred:', err) +} + +function onEnd() { + console.log('Packed!') +} + +var packer = tar.Pack({ noProprietary: true }) + .on('error', onError) + .on('end', onEnd); + +// This must be a "directory" +fstream.Reader({ path: __dirname, type: "Directory" }) + .on('error', onError) + .pipe(packer) + .pipe(dirDest) diff --git a/node_modules/tar/examples/reader.js b/node_modules/tar/examples/reader.js new file mode 100644 index 0000000..39f3f08 --- /dev/null +++ b/node_modules/tar/examples/reader.js @@ -0,0 +1,36 @@ +var tar = require("../tar.js") + , fs = require("fs") + +fs.createReadStream(__dirname + "/../test/fixtures/c.tar") + .pipe(tar.Parse()) + .on("extendedHeader", function (e) { + console.error("extended pax header", e.props) + e.on("end", function () { + console.error("extended pax fields:", e.fields) + }) + }) + .on("ignoredEntry", function (e) { + console.error("ignoredEntry?!?", e.props) + }) + .on("longLinkpath", function (e) { + console.error("longLinkpath entry", e.props) + e.on("end", function () { + console.error("value=%j", e.body.toString()) + }) + }) + .on("longPath", function (e) { + console.error("longPath entry", e.props) + e.on("end", function () { + console.error("value=%j", e.body.toString()) + }) + }) + .on("entry", function (e) { + console.error("entry", e.props) + e.on("data", function (c) { + console.error(" >>>" + c.toString().replace(/\n/g, "\\n")) + }) + e.on("end", function () { + console.error(" << 0 + return !this._needDrain +} + +EntryWriter.prototype.end = function (c) { + // console.error(".. ew end") + if (c) this._buffer.push(c) + this._buffer.push(EOF) + this._ended = true + this._process() + this._needDrain = this._buffer.length > 0 +} + +EntryWriter.prototype.pause = function () { + // console.error(".. ew pause") + this._paused = true + this.emit("pause") +} + +EntryWriter.prototype.resume = function () { + // console.error(".. ew resume") + this._paused = false + this.emit("resume") + this._process() +} + +EntryWriter.prototype.add = function (entry) { + // console.error(".. ew add") + if (!this.parent) return this.emit("error", new Error("no parent")) + + // make sure that the _header and such is emitted, and clear out + // the _currentEntry link on the parent. + if (!this._ended) this.end() + + return this.parent.add(entry) +} + +EntryWriter.prototype._header = function () { + // console.error(".. 
ew header") + if (this._didHeader) return + this._didHeader = true + + var headerBlock = TarHeader.encode(this.props) + + if (this.props.needExtended && !this._meta) { + var me = this + + ExtendedHeaderWriter = ExtendedHeaderWriter || + require("./extended-header-writer.js") + + ExtendedHeaderWriter(this.props) + .on("data", function (c) { + me.emit("data", c) + }) + .on("error", function (er) { + me.emit("error", er) + }) + .end() + } + + // console.error(".. .. ew headerBlock emitting") + this.emit("data", headerBlock) + this.emit("header") +} + +EntryWriter.prototype._process = function () { + // console.error(".. .. ew process") + if (!this._didHeader && !this._meta) { + this._header() + } + + if (this._paused || this._processing) { + // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing) + return + } + + this._processing = true + + var buf = this._buffer + for (var i = 0; i < buf.length; i ++) { + // console.error(".. .. .. i=%d", i) + + var c = buf[i] + + if (c === EOF) this._stream.end() + else this._stream.write(c) + + if (this._paused) { + // console.error(".. .. .. paused mid-emission") + this._processing = false + if (i < buf.length) { + this._needDrain = true + this._buffer = buf.slice(i + 1) + } + return + } + } + + // console.error(".. .. .. emitted") + this._buffer.length = 0 + this._processing = false + + // console.error(".. .. .. emitting drain") + this.emit("drain") +} + +EntryWriter.prototype.destroy = function () {} diff --git a/node_modules/tar/lib/entry.js b/node_modules/tar/lib/entry.js new file mode 100644 index 0000000..591202b --- /dev/null +++ b/node_modules/tar/lib/entry.js @@ -0,0 +1,220 @@ +// A passthrough read/write stream that sets its properties +// based on a header, extendedHeader, and globalHeader +// +// Can be either a file system object of some sort, or +// a pax/ustar metadata entry. + +module.exports = Entry + +var TarHeader = require("./header.js") + , tar = require("../tar") + , assert = require("assert").ok + , Stream = require("stream").Stream + , inherits = require("inherits") + , fstream = require("fstream").Abstract + +function Entry (header, extended, global) { + Stream.call(this) + this.readable = true + this.writable = true + + this._needDrain = false + this._paused = false + this._reading = false + this._ending = false + this._ended = false + this._remaining = 0 + this._abort = false + this._queue = [] + this._index = 0 + this._queueLen = 0 + + this._read = this._read.bind(this) + + this.props = {} + this._header = header + this._extended = extended || {} + + // globals can change throughout the course of + // a file parse operation. Freeze it at its current state. + this._global = {} + var me = this + Object.keys(global || {}).forEach(function (g) { + me._global[g] = global[g] + }) + + this._setProps() +} + +inherits(Entry, Stream) + +Entry.prototype.write = function (c) { + if (this._ending) this.error("write() after end()", null, true) + if (this._remaining === 0) { + this.error("invalid bytes past eof") + } + + // often we'll get a bunch of \0 at the end of the last write, + // since chunks will always be 512 bytes when reading a tarball. + if (c.length > this._remaining) { + c = c.slice(0, this._remaining) + } + this._remaining -= c.length + + // put it on the stack. 
+ var ql = this._queueLen + this._queue.push(c) + this._queueLen ++ + + this._read() + + // either paused, or buffered + if (this._paused || ql > 0) { + this._needDrain = true + return false + } + + return true +} + +Entry.prototype.end = function (c) { + if (c) this.write(c) + this._ending = true + this._read() +} + +Entry.prototype.pause = function () { + this._paused = true + this.emit("pause") +} + +Entry.prototype.resume = function () { + // console.error(" Tar Entry resume", this.path) + this.emit("resume") + this._paused = false + this._read() + return this._queueLen - this._index > 1 +} + + // This is bound to the instance +Entry.prototype._read = function () { + // console.error(" Tar Entry _read", this.path) + + if (this._paused || this._reading || this._ended) return + + // set this flag so that event handlers don't inadvertently + // get multiple _read() calls running. + this._reading = true + + // have any data to emit? + while (this._index < this._queueLen && !this._paused) { + var chunk = this._queue[this._index ++] + this.emit("data", chunk) + } + + // check if we're drained + if (this._index >= this._queueLen) { + this._queue.length = this._queueLen = this._index = 0 + if (this._needDrain) { + this._needDrain = false + this.emit("drain") + } + if (this._ending) { + this._ended = true + this.emit("end") + } + } + + // if the queue gets too big, then pluck off whatever we can. + // this should be fairly rare. + var mql = this._maxQueueLen + if (this._queueLen > mql && this._index > 0) { + mql = Math.min(this._index, mql) + this._index -= mql + this._queueLen -= mql + this._queue = this._queue.slice(mql) + } + + this._reading = false +} + +Entry.prototype._setProps = function () { + // props = extended->global->header->{} + var header = this._header + , extended = this._extended + , global = this._global + , props = this.props + + // first get the values from the normal header. + var fields = tar.fields + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , val = header[field] + if (typeof val !== "undefined") props[field] = val + } + + // next, the global header for this file. + // numeric values, etc, will have already been parsed. + ;[global, extended].forEach(function (p) { + Object.keys(p).forEach(function (f) { + if (typeof p[f] !== "undefined") props[f] = p[f] + }) + }) + + // no nulls allowed in path or linkpath + ;["path", "linkpath"].forEach(function (p) { + if (props.hasOwnProperty(p)) { + props[p] = props[p].split("\0")[0] + } + }) + + + // set date fields to be a proper date + ;["mtime", "ctime", "atime"].forEach(function (p) { + if (props.hasOwnProperty(p)) { + props[p] = new Date(props[p] * 1000) + } + }) + + // set the type so that we know what kind of file to create + var type + switch (tar.types[props.type]) { + case "OldFile": + case "ContiguousFile": + type = "File" + break + + case "GNUDumpDir": + type = "Directory" + break + + case undefined: + type = "Unknown" + break + + case "Link": + case "SymbolicLink": + case "CharacterDevice": + case "BlockDevice": + case "Directory": + case "FIFO": + default: + type = tar.types[props.type] + } + + this.type = type + this.path = props.path + this.size = props.size + + // size is special, since it signals when the file needs to end. + this._remaining = props.size +} + +// the parser may not call write if _abort is true. +// useful for skipping data from some files quickly. 
+Entry.prototype.abort = function(){ + this._abort = true +} + +Entry.prototype.warn = fstream.warn +Entry.prototype.error = fstream.error diff --git a/node_modules/tar/lib/extended-header-writer.js b/node_modules/tar/lib/extended-header-writer.js new file mode 100644 index 0000000..1728c45 --- /dev/null +++ b/node_modules/tar/lib/extended-header-writer.js @@ -0,0 +1,191 @@ + +module.exports = ExtendedHeaderWriter + +var inherits = require("inherits") + , EntryWriter = require("./entry-writer.js") + +inherits(ExtendedHeaderWriter, EntryWriter) + +var tar = require("../tar.js") + , path = require("path") + , TarHeader = require("./header.js") + +// props is the props of the thing we need to write an +// extended header for. +// Don't be shy with it. Just encode everything. +function ExtendedHeaderWriter (props) { + // console.error(">> ehw ctor") + var me = this + + if (!(me instanceof ExtendedHeaderWriter)) { + return new ExtendedHeaderWriter(props) + } + + me.fields = props + + var p = + { path : ("PaxHeader" + path.join("/", props.path || "")) + .replace(/\\/g, "/").substr(0, 100) + , mode : props.mode || 0666 + , uid : props.uid || 0 + , gid : props.gid || 0 + , size : 0 // will be set later + , mtime : props.mtime || Date.now() / 1000 + , type : "x" + , linkpath : "" + , ustar : "ustar\0" + , ustarver : "00" + , uname : props.uname || "" + , gname : props.gname || "" + , devmaj : props.devmaj || 0 + , devmin : props.devmin || 0 + } + + + EntryWriter.call(me, p) + // console.error(">> ehw props", me.props) + me.props = p + + me._meta = true +} + +ExtendedHeaderWriter.prototype.end = function () { + // console.error(">> ehw end") + var me = this + + if (me._ended) return + me._ended = true + + me._encodeFields() + + if (me.props.size === 0) { + // nothing to write! + me._ready = true + me._stream.end() + return + } + + me._stream.write(TarHeader.encode(me.props)) + me.body.forEach(function (l) { + me._stream.write(l) + }) + me._ready = true + + // console.error(">> ehw _process calling end()", me.props) + this._stream.end() +} + +ExtendedHeaderWriter.prototype._encodeFields = function () { + // console.error(">> ehw _encodeFields") + this.body = [] + if (this.fields.prefix) { + this.fields.path = this.fields.prefix + "/" + this.fields.path + this.fields.prefix = "" + } + encodeFields(this.fields, "", this.body, this.fields.noProprietary) + var me = this + this.body.forEach(function (l) { + me.props.size += l.length + }) +} + +function encodeFields (fields, prefix, body, nop) { + // console.error(">> >> ehw encodeFields") + // "%d %s=%s\n", , , + // The length is a decimal number, and includes itself and the \n + // Numeric values are decimal strings. + + Object.keys(fields).forEach(function (k) { + var val = fields[k] + , numeric = tar.numeric[k] + + if (prefix) k = prefix + "." + k + + // already including NODETAR.type, don't need File=true also + if (k === fields.type && val === true) return + + switch (k) { + // don't include anything that's always handled just fine + // in the normal header, or only meaningful in the context + // of nodetar + case "mode": + case "cksum": + case "ustar": + case "ustarver": + case "prefix": + case "basename": + case "dirname": + case "needExtended": + case "block": + case "filter": + return + + case "rdev": + if (val === 0) return + break + + case "nlink": + case "dev": // Truly a hero among men, Creator of Star! + case "ino": // Speak his name with reverent awe! It is: + k = "SCHILY." 
+ k + break + + default: break + } + + if (val && typeof val === "object" && + !Buffer.isBuffer(val)) encodeFields(val, k, body, nop) + else if (val === null || val === undefined) return + else body.push.apply(body, encodeField(k, val, nop)) + }) + + return body +} + +function encodeField (k, v, nop) { + // lowercase keys must be valid, otherwise prefix with + // "NODETAR." + if (k.charAt(0) === k.charAt(0).toLowerCase()) { + var m = k.split(".")[0] + if (!tar.knownExtended[m]) k = "NODETAR." + k + } + + // no proprietary + if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) { + return [] + } + + if (typeof val === "number") val = val.toString(10) + + var s = new Buffer(" " + k + "=" + v + "\n") + , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1 + + // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) + + // if adding that many digits will make it go over that length, + // then add one to it. For example, if the string is: + // " foo=bar\n" + // then that's 9 characters. With the "9", that bumps the length + // up to 10. However, this is invalid: + // "10 foo=bar\n" + // but, since that's actually 11 characters, since 10 adds another + // character to the length, and the length includes the number + // itself. In that case, just bump it up again. + if (s.length + digits >= Math.pow(10, digits)) digits += 1 + // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length) + + var len = digits + s.length + // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len) + var lenBuf = new Buffer("" + len) + if (lenBuf.length + s.length !== len) { + throw new Error("Bad length calculation\n"+ + "len="+len+"\n"+ + "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+ + "lenBuf.length="+lenBuf.length+"\n"+ + "digits="+digits+"\n"+ + "s="+JSON.stringify(s.toString())+"\n"+ + "s.length="+s.length) + } + + return [lenBuf, s] +} diff --git a/node_modules/tar/lib/extended-header.js b/node_modules/tar/lib/extended-header.js new file mode 100644 index 0000000..74f432c --- /dev/null +++ b/node_modules/tar/lib/extended-header.js @@ -0,0 +1,140 @@ +// An Entry consisting of: +// +// "%d %s=%s\n", , , +// +// The length is a decimal number, and includes itself and the \n +// \0 does not terminate anything. Only the length terminates the string. +// Numeric values are decimal strings. 
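// Illustration (not part of the upstream file): each record has the shape
// "<length> <keyword>=<value>\n", where <length> is the byte count of the
// whole record, including the length digits themselves and the trailing
// newline. For example, "20 path=foo/bar.txt\n" is exactly 20 bytes:
// "20" (2) + " " (1) + "path=foo/bar.txt" (16) + "\n" (1).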
+ +module.exports = ExtendedHeader + +var Entry = require("./entry.js") + , inherits = require("inherits") + , tar = require("../tar.js") + , numeric = tar.numeric + , keyTrans = { "SCHILY.dev": "dev" + , "SCHILY.ino": "ino" + , "SCHILY.nlink": "nlink" } + +function ExtendedHeader () { + Entry.apply(this, arguments) + this.on("data", this._parse) + this.fields = {} + this._position = 0 + this._fieldPos = 0 + this._state = SIZE + this._sizeBuf = [] + this._keyBuf = [] + this._valBuf = [] + this._size = -1 + this._key = "" +} + +inherits(ExtendedHeader, Entry) +ExtendedHeader.prototype._parse = parse + +var s = 0 + , states = ExtendedHeader.states = {} + , SIZE = states.SIZE = s++ + , KEY = states.KEY = s++ + , VAL = states.VAL = s++ + , ERR = states.ERR = s++ + +Object.keys(states).forEach(function (s) { + states[states[s]] = states[s] +}) + +states[s] = null + +// char code values for comparison +var _0 = "0".charCodeAt(0) + , _9 = "9".charCodeAt(0) + , point = ".".charCodeAt(0) + , a = "a".charCodeAt(0) + , Z = "Z".charCodeAt(0) + , a = "a".charCodeAt(0) + , z = "z".charCodeAt(0) + , space = " ".charCodeAt(0) + , eq = "=".charCodeAt(0) + , cr = "\n".charCodeAt(0) + +function parse (c) { + if (this._state === ERR) return + + for ( var i = 0, l = c.length + ; i < l + ; this._position++, this._fieldPos++, i++) { + // console.error("top of loop, size="+this._size) + + var b = c[i] + + if (this._size >= 0 && this._fieldPos > this._size) { + error(this, "field exceeds length="+this._size) + return + } + + switch (this._state) { + case ERR: return + + case SIZE: + // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString()) + if (b === space) { + this._state = KEY + // this._fieldPos = this._sizeBuf.length + this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10) + this._sizeBuf.length = 0 + continue + } + if (b < _0 || b > _9) { + error(this, "expected [" + _0 + ".." + _9 + "], got " + b) + return + } + this._sizeBuf.push(b) + continue + + case KEY: + // can be any char except =, not > size. + if (b === eq) { + this._state = VAL + this._key = new Buffer(this._keyBuf).toString() + if (keyTrans[this._key]) this._key = keyTrans[this._key] + this._keyBuf.length = 0 + continue + } + this._keyBuf.push(b) + continue + + case VAL: + // field must end with cr + if (this._fieldPos === this._size - 1) { + // console.error("finished with "+this._key) + if (b !== cr) { + error(this, "expected \\n at end of field") + return + } + var val = new Buffer(this._valBuf).toString() + if (numeric[this._key]) { + val = parseFloat(val) + } + this.fields[this._key] = val + + this._valBuf.length = 0 + this._state = SIZE + this._size = -1 + this._fieldPos = -1 + continue + } + this._valBuf.push(b) + continue + } + } +} + +function error (me, msg) { + msg = "invalid header: " + msg + + "\nposition=" + me._position + + "\nfield position=" + me._fieldPos + + me.error(msg) + me.state = ERR +} diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js new file mode 100644 index 0000000..fe1bb97 --- /dev/null +++ b/node_modules/tar/lib/extract.js @@ -0,0 +1,94 @@ +// give it a tarball and a path, and it'll dump the contents + +module.exports = Extract + +var tar = require("../tar.js") + , fstream = require("fstream") + , inherits = require("inherits") + , path = require("path") + +function Extract (opts) { + if (!(this instanceof Extract)) return new Extract(opts) + tar.Parse.apply(this) + + if (typeof opts !== "object") { + opts = { path: opts } + } + + // better to drop in cwd? 
seems more standard. + opts.path = opts.path || path.resolve("node-tar-extract") + opts.type = "Directory" + opts.Directory = true + + // similar to --strip or --strip-components + opts.strip = +opts.strip + if (!opts.strip || opts.strip <= 0) opts.strip = 0 + + this._fst = fstream.Writer(opts) + + this.pause() + var me = this + + // Hardlinks in tarballs are relative to the root + // of the tarball. So, they need to be resolved against + // the target directory in order to be created properly. + me.on("entry", function (entry) { + // if there's a "strip" argument, then strip off that many + // path components. + if (opts.strip) { + var p = entry.path.split("/").slice(opts.strip).join("/") + entry.path = entry.props.path = p + if (entry.linkpath) { + var lp = entry.linkpath.split("/").slice(opts.strip).join("/") + entry.linkpath = entry.props.linkpath = lp + } + } + if (entry.type === "Link") { + entry.linkpath = entry.props.linkpath = + path.join(opts.path, path.join("/", entry.props.linkpath)) + } + + if (entry.type === "SymbolicLink") { + var dn = path.dirname(entry.path) || "" + var linkpath = entry.props.linkpath + var target = path.resolve(opts.path, dn, linkpath) + if (target.indexOf(opts.path) !== 0) { + linkpath = path.join(opts.path, path.join("/", linkpath)) + } + entry.linkpath = entry.props.linkpath = linkpath + } + }) + + this._fst.on("ready", function () { + me.pipe(me._fst, { end: false }) + me.resume() + }) + + this._fst.on('error', function(err) { + me.emit('error', err) + }) + + this._fst.on('drain', function() { + me.emit('drain') + }) + + // this._fst.on("end", function () { + // console.error("\nEEEE Extract End", me._fst.path) + // }) + + this._fst.on("close", function () { + // console.error("\nEEEE Extract End", me._fst.path) + me.emit("finish") + me.emit("end") + me.emit("close") + }) +} + +inherits(Extract, tar.Parse) + +Extract.prototype._streamEnd = function () { + var me = this + if (!me._ended || me._entry) me.error("unexpected eof") + me._fst.end() + // my .end() is coming later. +} diff --git a/node_modules/tar/lib/global-header-writer.js b/node_modules/tar/lib/global-header-writer.js new file mode 100644 index 0000000..0bfc7b8 --- /dev/null +++ b/node_modules/tar/lib/global-header-writer.js @@ -0,0 +1,14 @@ +module.exports = GlobalHeaderWriter + +var ExtendedHeaderWriter = require("./extended-header-writer.js") + , inherits = require("inherits") + +inherits(GlobalHeaderWriter, ExtendedHeaderWriter) + +function GlobalHeaderWriter (props) { + if (!(this instanceof GlobalHeaderWriter)) { + return new GlobalHeaderWriter(props) + } + ExtendedHeaderWriter.call(this, props) + this.props.type = "g" +} diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js new file mode 100644 index 0000000..05b237c --- /dev/null +++ b/node_modules/tar/lib/header.js @@ -0,0 +1,385 @@ +// parse a 512-byte header block to a data object, or vice-versa +// If the data won't fit nicely in a simple header, then generate +// the appropriate extended header file, and return that. + +module.exports = TarHeader + +var tar = require("../tar.js") + , fields = tar.fields + , fieldOffs = tar.fieldOffs + , fieldEnds = tar.fieldEnds + , fieldSize = tar.fieldSize + , numeric = tar.numeric + , assert = require("assert").ok + , space = " ".charCodeAt(0) + , slash = "/".charCodeAt(0) + , bslash = process.platform === "win32" ? 
"\\".charCodeAt(0) : null + +function TarHeader (block) { + if (!(this instanceof TarHeader)) return new TarHeader(block) + if (block) this.decode(block) +} + +TarHeader.prototype = + { decode : decode + , encode: encode + , calcSum: calcSum + , checkSum: checkSum + } + +TarHeader.parseNumeric = parseNumeric +TarHeader.encode = encode +TarHeader.decode = decode + +// note that this will only do the normal ustar header, not any kind +// of extended posix header file. If something doesn't fit comfortably, +// then it will set obj.needExtended = true, and set the block to +// the closest approximation. +function encode (obj) { + if (!obj && !(this instanceof TarHeader)) throw new Error( + "encode must be called on a TarHeader, or supplied an object") + + obj = obj || this + var block = obj.block = new Buffer(512) + + // if the object has a "prefix", then that's actually an extension of + // the path field. + if (obj.prefix) { + // console.error("%% header encoding, got a prefix", obj.prefix) + obj.path = obj.prefix + "/" + obj.path + // console.error("%% header encoding, prefixed path", obj.path) + obj.prefix = "" + } + + obj.needExtended = false + + if (obj.mode) { + if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8) + obj.mode = obj.mode & 0777 + } + + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , off = fieldOffs[f] + , end = fieldEnds[f] + , ret + + switch (field) { + case "cksum": + // special, done below, after all the others + break + + case "prefix": + // special, this is an extension of the "path" field. + // console.error("%% header encoding, skip prefix later") + break + + case "type": + // convert from long name to a single char. + var type = obj.type || "0" + if (type.length > 1) { + type = tar.types[obj.type] + if (!type) type = "0" + } + writeText(block, off, end, type) + break + + case "path": + // uses the "prefix" field if > 100 bytes, but <= 255 + var pathLen = Buffer.byteLength(obj.path) + , pathFSize = fieldSize[fields.path] + , prefFSize = fieldSize[fields.prefix] + + // paths between 100 and 255 should use the prefix field. + // longer than 255 + if (pathLen > pathFSize && + pathLen <= pathFSize + prefFSize) { + // need to find a slash somewhere in the middle so that + // path and prefix both fit in their respective fields + var searchStart = pathLen - 1 - pathFSize + , searchEnd = prefFSize + , found = false + , pathBuf = new Buffer(obj.path) + + for ( var s = searchStart + ; (s <= searchEnd) + ; s ++ ) { + if (pathBuf[s] === slash || pathBuf[s] === bslash) { + found = s + break + } + } + + if (found !== false) { + prefix = pathBuf.slice(0, found).toString("utf8") + path = pathBuf.slice(found + 1).toString("utf8") + + ret = writeText(block, off, end, path) + off = fieldOffs[fields.prefix] + end = fieldEnds[fields.prefix] + // console.error("%% header writing prefix", off, end, prefix) + ret = writeText(block, off, end, prefix) || ret + break + } + } + + // paths less than 100 chars don't need a prefix + // and paths longer than 255 need an extended header and will fail + // on old implementations no matter what we do here. + // Null out the prefix, and fallthrough to default. + // console.error("%% header writing no prefix") + var poff = fieldOffs[fields.prefix] + , pend = fieldEnds[fields.prefix] + writeText(block, poff, pend, "") + // fallthrough + + // all other fields are numeric or text + default: + ret = numeric[field] + ? 
writeNumeric(block, off, end, obj[field]) + : writeText(block, off, end, obj[field] || "") + break + } + obj.needExtended = obj.needExtended || ret + } + + var off = fieldOffs[fields.cksum] + , end = fieldEnds[fields.cksum] + + writeNumeric(block, off, end, calcSum.call(this, block)) + + return block +} + +// if it's a negative number, or greater than will fit, +// then use write256. +var MAXNUM = { 12: 077777777777 + , 11: 07777777777 + , 8 : 07777777 + , 7 : 0777777 } +function writeNumeric (block, off, end, num) { + var writeLen = end - off + , maxNum = MAXNUM[writeLen] || 0 + + num = num || 0 + // console.error(" numeric", num) + + if (num instanceof Date || + Object.prototype.toString.call(num) === "[object Date]") { + num = num.getTime() / 1000 + } + + if (num > maxNum || num < 0) { + write256(block, off, end, num) + // need an extended header if negative or too big. + return true + } + + // god, tar is so annoying + // if the string is small enough, you should put a space + // between the octal string and the \0, but if it doesn't + // fit, then don't. + var numStr = Math.floor(num).toString(8) + if (num < MAXNUM[writeLen - 1]) numStr += " " + + // pad with "0" chars + if (numStr.length < writeLen) { + numStr = (new Array(writeLen - numStr.length).join("0")) + numStr + } + + if (numStr.length !== writeLen - 1) { + throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" + + "expected: "+writeLen) + } + block.write(numStr, off, writeLen, "utf8") + block[end - 1] = 0 +} + +function write256 (block, off, end, num) { + var buf = block.slice(off, end) + var positive = num >= 0 + buf[0] = positive ? 0x80 : 0xFF + + // get the number as a base-256 tuple + if (!positive) num *= -1 + var tuple = [] + do { + var n = num % 256 + tuple.push(n) + num = (num - n) / 256 + } while (num) + + var bytes = tuple.length + + var fill = buf.length - bytes + for (var i = 1; i < fill; i ++) { + buf[i] = positive ? 0 : 0xFF + } + + // tuple is a base256 number, with [0] as the *least* significant byte + // if it's negative, then we need to flip all the bits once we hit the + // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's- + // complement is (0xFF - n). 
+ var zero = true + for (i = bytes; i > 0; i --) { + var byte = tuple[bytes - i] + if (positive) buf[fill + i] = byte + else if (zero && byte === 0) buf[fill + i] = 0 + else if (zero) { + zero = false + buf[fill + i] = 0x100 - byte + } else buf[fill + i] = 0xFF - byte + } +} + +function writeText (block, off, end, str) { + // strings are written as utf8, then padded with \0 + var strLen = Buffer.byteLength(str) + , writeLen = Math.min(strLen, end - off) + // non-ascii fields need extended headers + // long fields get truncated + , needExtended = strLen !== str.length || strLen > writeLen + + // write the string, and null-pad + if (writeLen > 0) block.write(str, off, writeLen, "utf8") + for (var i = off + writeLen; i < end; i ++) block[i] = 0 + + return needExtended +} + +function calcSum (block) { + block = block || this.block + assert(Buffer.isBuffer(block) && block.length === 512) + + if (!block) throw new Error("Need block to checksum") + + // now figure out what it would be if the cksum was " " + var sum = 0 + , start = fieldOffs[fields.cksum] + , end = fieldEnds[fields.cksum] + + for (var i = 0; i < fieldOffs[fields.cksum]; i ++) { + sum += block[i] + } + + for (var i = start; i < end; i ++) { + sum += space + } + + for (var i = end; i < 512; i ++) { + sum += block[i] + } + + return sum +} + + +function checkSum (block) { + var sum = calcSum.call(this, block) + block = block || this.block + + var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]) + cksum = parseNumeric(cksum) + + return cksum === sum +} + +function decode (block) { + block = block || this.block + assert(Buffer.isBuffer(block) && block.length === 512) + + this.block = block + this.cksumValid = this.checkSum() + + var prefix = null + + // slice off each field. + for (var f = 0; fields[f] !== null; f ++) { + var field = fields[f] + , val = block.slice(fieldOffs[f], fieldEnds[f]) + + switch (field) { + case "ustar": + // if not ustar, then everything after that is just padding. + if (val.toString() !== "ustar\0") { + this.ustar = false + return + } else { + // console.error("ustar:", val, val.toString()) + this.ustar = val.toString() + } + break + + // prefix is special, since it might signal the xstar header + case "prefix": + var atime = parseNumeric(val.slice(131, 131 + 12)) + , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12)) + if ((val[130] === 0 || val[130] === space) && + typeof atime === "number" && + typeof ctime === "number" && + val[131 + 12] === space && + val[131 + 12 + 12] === space) { + this.atime = atime + this.ctime = ctime + val = val.slice(0, 130) + } + prefix = val.toString("utf8").replace(/\0+$/, "") + // console.error("%% header reading prefix", prefix) + break + + // all other fields are null-padding text + // or a number. + default: + if (numeric[field]) { + this[field] = parseNumeric(val) + } else { + this[field] = val.toString("utf8").replace(/\0+$/, "") + } + break + } + } + + // if we got a prefix, then prepend it to the path. 
+ if (prefix) { + this.path = prefix + "/" + this.path + // console.error("%% header got a prefix", this.path) + } +} + +function parse256 (buf) { + // first byte MUST be either 80 or FF + // 80 for positive, FF for 2's comp + var positive + if (buf[0] === 0x80) positive = true + else if (buf[0] === 0xFF) positive = false + else return null + + // build up a base-256 tuple from the least sig to the highest + var zero = false + , tuple = [] + for (var i = buf.length - 1; i > 0; i --) { + var byte = buf[i] + if (positive) tuple.push(byte) + else if (zero && byte === 0) tuple.push(0) + else if (zero) { + zero = false + tuple.push(0x100 - byte) + } else tuple.push(0xFF - byte) + } + + for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) { + sum += tuple[i] * Math.pow(256, i) + } + + return positive ? sum : -1 * sum +} + +function parseNumeric (f) { + if (f[0] & 0x80) return parse256(f) + + var str = f.toString("utf8").split("\0")[0].trim() + , res = parseInt(str, 8) + + return isNaN(res) ? null : res +} + diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js new file mode 100644 index 0000000..5a3bb95 --- /dev/null +++ b/node_modules/tar/lib/pack.js @@ -0,0 +1,236 @@ +// pipe in an fstream, and it'll make a tarball. +// key-value pair argument is global extended header props. + +module.exports = Pack + +var EntryWriter = require("./entry-writer.js") + , Stream = require("stream").Stream + , path = require("path") + , inherits = require("inherits") + , GlobalHeaderWriter = require("./global-header-writer.js") + , collect = require("fstream").collect + , eof = new Buffer(512) + +for (var i = 0; i < 512; i ++) eof[i] = 0 + +inherits(Pack, Stream) + +function Pack (props) { + // console.error("-- p ctor") + var me = this + if (!(me instanceof Pack)) return new Pack(props) + + if (props) me._noProprietary = props.noProprietary + else me._noProprietary = false + + me._global = props + + me.readable = true + me.writable = true + me._buffer = [] + // console.error("-- -- set current to null in ctor") + me._currentEntry = null + me._processing = false + + me._pipeRoot = null + me.on("pipe", function (src) { + if (src.root === me._pipeRoot) return + me._pipeRoot = src + src.on("end", function () { + me._pipeRoot = null + }) + me.add(src) + }) +} + +Pack.prototype.addGlobal = function (props) { + // console.error("-- p addGlobal") + if (this._didGlobal) return + this._didGlobal = true + + var me = this + GlobalHeaderWriter(props) + .on("data", function (c) { + me.emit("data", c) + }) + .end() +} + +Pack.prototype.add = function (stream) { + if (this._global && !this._didGlobal) this.addGlobal(this._global) + + if (this._ended) return this.emit("error", new Error("add after end")) + + collect(stream) + this._buffer.push(stream) + this._process() + this._needDrain = this._buffer.length > 0 + return !this._needDrain +} + +Pack.prototype.pause = function () { + this._paused = true + if (this._currentEntry) this._currentEntry.pause() + this.emit("pause") +} + +Pack.prototype.resume = function () { + this._paused = false + if (this._currentEntry) this._currentEntry.resume() + this.emit("resume") + this._process() +} + +Pack.prototype.end = function () { + this._ended = true + this._buffer.push(eof) + this._process() +} + +Pack.prototype._process = function () { + var me = this + if (me._paused || me._processing) { + return + } + + var entry = me._buffer.shift() + + if (!entry) { + if (me._needDrain) { + me.emit("drain") + } + return + } + + if (entry.ready === false) { + // 
console.error("-- entry is not ready", entry) + me._buffer.unshift(entry) + entry.on("ready", function () { + // console.error("-- -- ready!", entry) + me._process() + }) + return + } + + me._processing = true + + if (entry === eof) { + // need 2 ending null blocks. + me.emit("data", eof) + me.emit("data", eof) + me.emit("end") + me.emit("close") + return + } + + // Change the path to be relative to the root dir that was + // added to the tarball. + // + // XXX This should be more like how -C works, so you can + // explicitly set a root dir, and also explicitly set a pathname + // in the tarball to use. That way we can skip a lot of extra + // work when resolving symlinks for bundled dependencies in npm. + + var root = path.dirname((entry.root || entry).path); + if (me._global && me._global.fromBase && entry.root && entry.root.path) { + // user set 'fromBase: true' indicating tar root should be directory itself + root = entry.root.path; + } + + var wprops = {} + + Object.keys(entry.props || {}).forEach(function (k) { + wprops[k] = entry.props[k] + }) + + if (me._noProprietary) wprops.noProprietary = true + + wprops.path = path.relative(root, entry.path || '') + + // actually not a matter of opinion or taste. + if (process.platform === "win32") { + wprops.path = wprops.path.replace(/\\/g, "/") + } + + if (!wprops.type) + wprops.type = 'Directory' + + switch (wprops.type) { + // sockets not supported + case "Socket": + return + + case "Directory": + wprops.path += "/" + wprops.size = 0 + break + + case "Link": + var lp = path.resolve(path.dirname(entry.path), entry.linkpath) + wprops.linkpath = path.relative(root, lp) || "." + wprops.size = 0 + break + + case "SymbolicLink": + var lp = path.resolve(path.dirname(entry.path), entry.linkpath) + wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "." + wprops.size = 0 + break + } + + // console.error("-- new writer", wprops) + // if (!wprops.type) { + // // console.error("-- no type?", entry.constructor.name, entry) + // } + + // console.error("-- -- set current to new writer", wprops.path) + var writer = me._currentEntry = EntryWriter(wprops) + + writer.parent = me + + // writer.on("end", function () { + // // console.error("-- -- writer end", writer.path) + // }) + + writer.on("data", function (c) { + me.emit("data", c) + }) + + writer.on("header", function () { + Buffer.prototype.toJSON = function () { + return this.toString().split(/\0/).join(".") + } + // console.error("-- -- writer header %j", writer.props) + if (writer.props.size === 0) nextEntry() + }) + writer.on("close", nextEntry) + + var ended = false + function nextEntry () { + if (ended) return + ended = true + + // console.error("-- -- writer close", writer.path) + // console.error("-- -- set current to null", wprops.path) + me._currentEntry = null + me._processing = false + me._process() + } + + writer.on("error", function (er) { + // console.error("-- -- writer error", writer.path) + me.emit("error", er) + }) + + // if it's the root, then there's no need to add its entries, + // or data, since they'll be added directly. + if (entry === me._pipeRoot) { + // console.error("-- is the root, don't auto-add") + writer.add = null + } + + entry.pipe(writer) +} + +Pack.prototype.destroy = function () {} +Pack.prototype.write = function () {} diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js new file mode 100644 index 0000000..600ad78 --- /dev/null +++ b/node_modules/tar/lib/parse.js @@ -0,0 +1,275 @@ + +// A writable stream. 
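// (Editor's addition -- a minimal, standalone usage sketch for this module.
// It assumes the package is resolvable as "tar" and that "archive.tar" is a
// plain, uncompressed tarball on disk; it is not part of parse.js itself.)

var tarExample = require("tar")
var fsExample = require("fs")

fsExample.createReadStream("archive.tar")
  .pipe(tarExample.Parse())
  .on("entry", function (entry) {
    // each entry is itself a readable stream; header fields live on entry.props
    console.log(entry.path, entry.props.type, entry.props.size)
  })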
+// It emits "entry" events, which provide a readable stream that has +// header info attached. + +module.exports = Parse.create = Parse + +var stream = require("stream") + , Stream = stream.Stream + , BlockStream = require("block-stream") + , tar = require("../tar.js") + , TarHeader = require("./header.js") + , Entry = require("./entry.js") + , BufferEntry = require("./buffer-entry.js") + , ExtendedHeader = require("./extended-header.js") + , assert = require("assert").ok + , inherits = require("inherits") + , fstream = require("fstream") + +// reading a tar is a lot like reading a directory +// However, we're actually not going to run the ctor, +// since it does a stat and various other stuff. +// This inheritance gives us the pause/resume/pipe +// behavior that is desired. +inherits(Parse, fstream.Reader) + +function Parse () { + var me = this + if (!(me instanceof Parse)) return new Parse() + + // doesn't apply fstream.Reader ctor? + // no, becasue we don't want to stat/etc, we just + // want to get the entry/add logic from .pipe() + Stream.apply(me) + + me.writable = true + me.readable = true + me._stream = new BlockStream(512) + me.position = 0 + me._ended = false + + me._stream.on("error", function (e) { + me.emit("error", e) + }) + + me._stream.on("data", function (c) { + me._process(c) + }) + + me._stream.on("end", function () { + me._streamEnd() + }) + + me._stream.on("drain", function () { + me.emit("drain") + }) +} + +// overridden in Extract class, since it needs to +// wait for its DirWriter part to finish before +// emitting "end" +Parse.prototype._streamEnd = function () { + var me = this + if (!me._ended || me._entry) me.error("unexpected eof") + me.emit("end") +} + +// a tar reader is actually a filter, not just a readable stream. +// So, you should pipe a tarball stream into it, and it needs these +// write/end methods to do that. +Parse.prototype.write = function (c) { + if (this._ended) { + // gnutar puts a LOT of nulls at the end. + // you can keep writing these things forever. + // Just ignore them. + for (var i = 0, l = c.length; i > l; i ++) { + if (c[i] !== 0) return this.error("write() after end()") + } + return + } + return this._stream.write(c) +} + +Parse.prototype.end = function (c) { + this._ended = true + return this._stream.end(c) +} + +// don't need to do anything, since we're just +// proxying the data up from the _stream. +// Just need to override the parent's "Not Implemented" +// error-thrower. +Parse.prototype._read = function () {} + +Parse.prototype._process = function (c) { + assert(c && c.length === 512, "block size should be 512") + + // one of three cases. + // 1. A new header + // 2. A part of a file/extended header + // 3. One of two or more EOF null blocks + + if (this._entry) { + var entry = this._entry + if(!entry._abort) entry.write(c) + else { + entry._remaining -= c.length + if(entry._remaining < 0) entry._remaining = 0 + } + if (entry._remaining === 0) { + entry.end() + this._entry = null + } + } else { + // either zeroes or a header + var zero = true + for (var i = 0; i < 512 && zero; i ++) { + zero = c[i] === 0 + } + + // eof is *at least* 2 blocks of nulls, and then the end of the + // file. you can put blocks of nulls between entries anywhere, + // so appending one tarball to another is technically valid. + // ending without the eof null blocks is not allowed, however. 
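    //
    // (Editor's note: concretely, the first all-zero block below only sets
    // _eofStarted; a second consecutive all-zero block sets _ended, which is
    // what _streamEnd() checks before emitting "end". gnutar's extra padding
    // blocks beyond those two are simply ignored.)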
+ if (zero) { + if (this._eofStarted) + this._ended = true + this._eofStarted = true + } else { + this._eofStarted = false + this._startEntry(c) + } + } + + this.position += 512 +} + +// take a header chunk, start the right kind of entry. +Parse.prototype._startEntry = function (c) { + var header = new TarHeader(c) + , self = this + , entry + , ev + , EntryType + , onend + , meta = false + + if (null === header.size || !header.cksumValid) { + var e = new Error("invalid tar file") + e.header = header + e.tar_file_offset = this.position + e.tar_block = this.position / 512 + return this.emit("error", e) + } + + switch (tar.types[header.type]) { + case "File": + case "OldFile": + case "Link": + case "SymbolicLink": + case "CharacterDevice": + case "BlockDevice": + case "Directory": + case "FIFO": + case "ContiguousFile": + case "GNUDumpDir": + // start a file. + // pass in any extended headers + // These ones consumers are typically most interested in. + EntryType = Entry + ev = "entry" + break + + case "GlobalExtendedHeader": + // extended headers that apply to the rest of the tarball + EntryType = ExtendedHeader + onend = function () { + self._global = self._global || {} + Object.keys(entry.fields).forEach(function (k) { + self._global[k] = entry.fields[k] + }) + } + ev = "globalExtendedHeader" + meta = true + break + + case "ExtendedHeader": + case "OldExtendedHeader": + // extended headers that apply to the next entry + EntryType = ExtendedHeader + onend = function () { + self._extended = entry.fields + } + ev = "extendedHeader" + meta = true + break + + case "NextFileHasLongLinkpath": + // set linkpath= in extended header + EntryType = BufferEntry + onend = function () { + self._extended = self._extended || {} + self._extended.linkpath = entry.body + } + ev = "longLinkpath" + meta = true + break + + case "NextFileHasLongPath": + case "OldGnuLongPath": + // set path= in file-extended header + EntryType = BufferEntry + onend = function () { + self._extended = self._extended || {} + self._extended.path = entry.body + } + ev = "longPath" + meta = true + break + + default: + // all the rest we skip, but still set the _entry + // member, so that we can skip over their data appropriately. + // emit an event to say that this is an ignored entry type? + EntryType = Entry + ev = "ignoredEntry" + break + } + + var global, extended + if (meta) { + global = extended = null + } else { + var global = this._global + var extended = this._extended + + // extendedHeader only applies to one entry, so once we start + // an entry, it's over. + this._extended = null + } + entry = new EntryType(header, extended, global) + entry.meta = meta + + // only proxy data events of normal files. + if (!meta) { + entry.on("data", function (c) { + me.emit("data", c) + }) + } + + if (onend) entry.on("end", onend) + + this._entry = entry + var me = this + + entry.on("pause", function () { + me.pause() + }) + + entry.on("resume", function () { + me.resume() + }) + + if (this.listeners("*").length) { + this.emit("*", ev, entry) + } + + this.emit(ev, entry) + + // Zero-byte entry. End immediately. + if (entry.props.size === 0) { + entry.end() + this._entry = null + } +} diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json new file mode 100644 index 0000000..c5b999b --- /dev/null +++ b/node_modules/tar/package.json @@ -0,0 +1,26 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "tar", + "description": "tar for node", + "version": "2.2.1", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-tar.git" + }, + "main": "tar.js", + "scripts": { + "test": "tap test/*.js" + }, + "dependencies": { + "block-stream": "*", + "fstream": "^1.0.2", + "inherits": "2" + }, + "devDependencies": { + "graceful-fs": "^4.1.2", + "rimraf": "1.x", + "tap": "0.x", + "mkdirp": "^0.5.0" + }, + "license": "ISC" +} diff --git a/node_modules/tar/tar.js b/node_modules/tar/tar.js new file mode 100644 index 0000000..a81298b --- /dev/null +++ b/node_modules/tar/tar.js @@ -0,0 +1,173 @@ +// field paths that every tar file must have. +// header is padded to 512 bytes. +var f = 0 + , fields = {} + , path = fields.path = f++ + , mode = fields.mode = f++ + , uid = fields.uid = f++ + , gid = fields.gid = f++ + , size = fields.size = f++ + , mtime = fields.mtime = f++ + , cksum = fields.cksum = f++ + , type = fields.type = f++ + , linkpath = fields.linkpath = f++ + , headerSize = 512 + , blockSize = 512 + , fieldSize = [] + +fieldSize[path] = 100 +fieldSize[mode] = 8 +fieldSize[uid] = 8 +fieldSize[gid] = 8 +fieldSize[size] = 12 +fieldSize[mtime] = 12 +fieldSize[cksum] = 8 +fieldSize[type] = 1 +fieldSize[linkpath] = 100 + +// "ustar\0" may introduce another bunch of headers. +// these are optional, and will be nulled out if not present. + +var ustar = fields.ustar = f++ + , ustarver = fields.ustarver = f++ + , uname = fields.uname = f++ + , gname = fields.gname = f++ + , devmaj = fields.devmaj = f++ + , devmin = fields.devmin = f++ + , prefix = fields.prefix = f++ + , fill = fields.fill = f++ + +// terminate fields. +fields[f] = null + +fieldSize[ustar] = 6 +fieldSize[ustarver] = 2 +fieldSize[uname] = 32 +fieldSize[gname] = 32 +fieldSize[devmaj] = 8 +fieldSize[devmin] = 8 +fieldSize[prefix] = 155 +fieldSize[fill] = 12 + +// nb: prefix field may in fact be 130 bytes of prefix, +// a null char, 12 bytes for atime, 12 bytes for ctime. +// +// To recognize this format: +// 1. prefix[130] === ' ' or '\0' +// 2. atime and ctime are octal numeric values +// 3. atime and ctime have ' ' in their last byte + +var fieldEnds = {} + , fieldOffs = {} + , fe = 0 +for (var i = 0; i < f; i ++) { + fieldOffs[i] = fe + fieldEnds[i] = (fe += fieldSize[i]) +} + +// build a translation table of field paths. 
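// (Editor's reference, derived from the field sizes and the offset loop
// above -- the byte ranges inside each 512-byte header work out to:
//
//   path     [  0, 100)   mode     [100, 108)   uid      [108, 116)
//   gid      [116, 124)   size     [124, 136)   mtime    [136, 148)
//   cksum    [148, 156)   type     [156, 157)   linkpath [157, 257)
//   ustar    [257, 263)   ustarver [263, 265)   uname    [265, 297)
//   gname    [297, 329)   devmaj   [329, 337)   devmin   [337, 345)
//   prefix   [345, 500)   fill     [500, 512)
//
// so, for example, fieldOffs[fields.size] === 124 and
// fieldEnds[fields.size] === 136. In the xstar variant noted above, the
// prefix range is further split into 130 bytes of prefix, a separator byte,
// and 12-byte atime and ctime values.)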
+Object.keys(fields).forEach(function (f) { + if (fields[f] !== null) fields[fields[f]] = f +}) + +// different values of the 'type' field +// paths match the values of Stats.isX() functions, where appropriate +var types = + { 0: "File" + , "\0": "OldFile" // like 0 + , "": "OldFile" + , 1: "Link" + , 2: "SymbolicLink" + , 3: "CharacterDevice" + , 4: "BlockDevice" + , 5: "Directory" + , 6: "FIFO" + , 7: "ContiguousFile" // like 0 + // posix headers + , g: "GlobalExtendedHeader" // k=v for the rest of the archive + , x: "ExtendedHeader" // k=v for the next file + // vendor-specific stuff + , A: "SolarisACL" // skip + , D: "GNUDumpDir" // like 5, but with data, which should be skipped + , I: "Inode" // metadata only, skip + , K: "NextFileHasLongLinkpath" // data = link path of next file + , L: "NextFileHasLongPath" // data = path of next file + , M: "ContinuationFile" // skip + , N: "OldGnuLongPath" // like L + , S: "SparseFile" // skip + , V: "TapeVolumeHeader" // skip + , X: "OldExtendedHeader" // like x + } + +Object.keys(types).forEach(function (t) { + types[types[t]] = types[types[t]] || t +}) + +// values for the mode field +var modes = + { suid: 04000 // set uid on extraction + , sgid: 02000 // set gid on extraction + , svtx: 01000 // set restricted deletion flag on dirs on extraction + , uread: 0400 + , uwrite: 0200 + , uexec: 0100 + , gread: 040 + , gwrite: 020 + , gexec: 010 + , oread: 4 + , owrite: 2 + , oexec: 1 + , all: 07777 + } + +var numeric = + { mode: true + , uid: true + , gid: true + , size: true + , mtime: true + , devmaj: true + , devmin: true + , cksum: true + , atime: true + , ctime: true + , dev: true + , ino: true + , nlink: true + } + +Object.keys(modes).forEach(function (t) { + modes[modes[t]] = modes[modes[t]] || t +}) + +var knownExtended = + { atime: true + , charset: true + , comment: true + , ctime: true + , gid: true + , gname: true + , linkpath: true + , mtime: true + , path: true + , realtime: true + , security: true + , size: true + , uid: true + , uname: true } + + +exports.fields = fields +exports.fieldSize = fieldSize +exports.fieldOffs = fieldOffs +exports.fieldEnds = fieldEnds +exports.types = types +exports.modes = modes +exports.numeric = numeric +exports.headerSize = headerSize +exports.blockSize = blockSize +exports.knownExtended = knownExtended + +exports.Pack = require("./lib/pack.js") +exports.Parse = require("./lib/parse.js") +exports.Extract = require("./lib/extract.js") diff --git a/node_modules/tar/test/00-setup-fixtures.js b/node_modules/tar/test/00-setup-fixtures.js new file mode 100644 index 0000000..1524ff7 --- /dev/null +++ b/node_modules/tar/test/00-setup-fixtures.js @@ -0,0 +1,53 @@ +// the fixtures have some weird stuff that is painful +// to include directly in the repo for various reasons. +// +// So, unpack the fixtures with the system tar first. +// +// This means, of course, that it'll only work if you +// already have a tar implementation, and some of them +// will not properly unpack the fixtures anyway. +// +// But, since usually those tests will fail on Windows +// and other systems with less capable filesystems anyway, +// at least this way we don't cause inconveniences by +// merely cloning the repo or installing the package. 
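// (Editor's addition -- a small standalone sketch of the type and mode
// tables exported from tar.js above; it assumes the package is resolvable
// as "tar" and is not part of this test file.)

var tarTables = require("tar")

console.log(tarTables.types["5"])          // "Directory"
console.log(tarTables.types.SymbolicLink)  // "2" -- the table maps both ways
console.log(tarTables.modes.suid.toString(8))               // "4000"
console.log(!!(parseInt("4755", 8) & tarTables.modes.suid)) // true: setuid bit set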
+ +var tap = require("tap") +, child_process = require("child_process") +, rimraf = require("rimraf") +, test = tap.test +, path = require("path") + +test("clean fixtures", function (t) { + rimraf(path.resolve(__dirname, "fixtures"), function (er) { + t.ifError(er, "rimraf ./fixtures/") + t.end() + }) +}) + +test("clean tmp", function (t) { + rimraf(path.resolve(__dirname, "tmp"), function (er) { + t.ifError(er, "rimraf ./tmp/") + t.end() + }) +}) + +test("extract fixtures", function (t) { + var c = child_process.spawn("tar" + ,["xzvf", "fixtures.tgz"] + ,{ cwd: __dirname }) + + c.stdout.on("data", errwrite) + c.stderr.on("data", errwrite) + function errwrite (chunk) { + process.stderr.write(chunk) + } + + c.on("exit", function (code) { + t.equal(code, 0, "extract fixtures should exit with 0") + if (code) { + t.comment("Note, all tests from here on out will fail because of this.") + } + t.end() + }) +}) diff --git a/node_modules/tar/test/cb-never-called-1.0.1.tgz b/node_modules/tar/test/cb-never-called-1.0.1.tgz new file mode 100644 index 0000000..9e7014d Binary files /dev/null and b/node_modules/tar/test/cb-never-called-1.0.1.tgz differ diff --git a/node_modules/tar/test/dir-normalization.js b/node_modules/tar/test/dir-normalization.js new file mode 100644 index 0000000..9719c42 --- /dev/null +++ b/node_modules/tar/test/dir-normalization.js @@ -0,0 +1,177 @@ +// Set the umask, so that it works the same everywhere. +process.umask(parseInt('22', 8)) + +var fs = require('fs') +var path = require('path') + +var fstream = require('fstream') +var test = require('tap').test + +var tar = require('../tar.js') +var file = path.resolve(__dirname, 'dir-normalization.tar') +var target = path.resolve(__dirname, 'tmp/dir-normalization-test') +var ee = 0 + +var expectEntries = [ + { path: 'fixtures/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/the-chumbler', + mode: '755', + type: '2', + linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'), + }, + { path: 'fixtures/a/b/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/x', + mode: '644', + type: '0', + linkpath: '' + }, + { path: 'fixtures/a/b/c/', + mode: '755', + type: '5', + linkpath: '' + }, + { path: 'fixtures/a/b/c/y', + mode: '755', + type: '2', + linkpath: '../../x', + } +] + +var ef = 0 +var expectFiles = [ + { path: '', + mode: '40755', + type: 'Directory', + depth: 0, + linkpath: undefined + }, + { path: '/fixtures', + mode: '40755', + type: 'Directory', + depth: 1, + linkpath: undefined + }, + { path: '/fixtures/a', + mode: '40755', + type: 'Directory', + depth: 2, + linkpath: undefined + }, + { path: '/fixtures/a/b', + mode: '40755', + type: 'Directory', + depth: 3, + linkpath: undefined + }, + { path: '/fixtures/a/b/c', + mode: '40755', + type: 'Directory', + depth: 4, + linkpath: undefined + }, + { path: '/fixtures/a/b/c/y', + mode: '120755', + type: 'SymbolicLink', + depth: 5, + linkpath: '../../x' + }, + { path: '/fixtures/a/x', + mode: '100644', + type: 'File', + depth: 3, + linkpath: undefined + }, + { path: '/fixtures/the-chumbler', + mode: '120755', + type: 'SymbolicLink', + depth: 2, + linkpath: path.resolve(target, 'a/b/c/d/the-chumbler') + } +] + +test('preclean', function (t) { + require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test')) + t.pass('cleaned!') + t.end() +}) + +test('extract test', function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + 
inp.pipe(extract) + + extract.on('end', function () { + t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries') + + // should get no more entries after end + extract.removeAllListeners('entry') + extract.on('entry', function (e) { + t.fail('Should not get entries after end!') + }) + + next() + }) + + extract.on('entry', function (entry) { + var mode = entry.props.mode & (~parseInt('22', 8)) + var found = { + path: entry.path, + mode: mode.toString(8), + type: entry.props.type, + linkpath: entry.props.linkpath, + } + + var wanted = expectEntries[ee++] + t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path)) + }) + + function next () { + var r = fstream.Reader({ + path: target, + type: 'Directory', + sort: 'alpha' + }) + + r.on('ready', function () { + foundEntry(r) + }) + + r.on('end', finish) + + function foundEntry (entry) { + var p = entry.path.substr(target.length) + var mode = entry.props.mode & (~parseInt('22', 8)) + var found = { + path: p, + mode: mode.toString(8), + type: entry.props.type, + depth: entry.props.depth, + linkpath: entry.props.linkpath + } + + var wanted = expectFiles[ef++] + t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path)) + + entry.on('entry', foundEntry) + } + + function finish () { + t.equal(ef, expectFiles.length, 'should have ' + ef + ' items') + t.end() + } + } +}) diff --git a/node_modules/tar/test/dir-normalization.tar b/node_modules/tar/test/dir-normalization.tar new file mode 100644 index 0000000..3c48453 Binary files /dev/null and b/node_modules/tar/test/dir-normalization.tar differ diff --git a/node_modules/tar/test/error-on-broken.js b/node_modules/tar/test/error-on-broken.js new file mode 100644 index 0000000..e484920 --- /dev/null +++ b/node_modules/tar/test/error-on-broken.js @@ -0,0 +1,33 @@ +var fs = require('fs') +var path = require('path') +var zlib = require('zlib') + +var tap = require('tap') + +var tar = require('../tar.js') + +var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz') +var target = path.join(__dirname, 'tmp/extract-test') + +tap.test('preclean', function (t) { + require('rimraf').sync(__dirname + '/tmp/extract-test') + t.pass('cleaned!') + t.end() +}) + +tap.test('extract test', function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + inp.pipe(zlib.createGunzip()).pipe(extract) + + extract.on('error', function (er) { + t.equal(er.message, 'unexpected eof', 'error noticed') + t.end() + }) + + extract.on('end', function () { + t.fail('shouldn\'t reach this point due to errors') + t.end() + }) +}) diff --git a/node_modules/tar/test/extract-move.js b/node_modules/tar/test/extract-move.js new file mode 100644 index 0000000..45400cd --- /dev/null +++ b/node_modules/tar/test/extract-move.js @@ -0,0 +1,132 @@ +// Set the umask, so that it works the same everywhere. 
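// (Editor's note: 022 here is octal -- parseInt('22', 8) === 18 -- so the
// group and other write bits are cleared; files created as 0666 land as
// 0644 and directories created as 0777 land as 0755.)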
+process.umask(parseInt('22', 8)) + +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , gfs = require("graceful-fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/dir.tar") + , target = path.resolve(__dirname, "tmp/extract-test") + , index = 0 + , fstream = require("fstream") + , rimraf = require("rimraf") + , mkdirp = require("mkdirp") + + , ee = 0 + , expectEntries = [ + { + "path" : "dir/", + "mode" : "750", + "type" : "5", + "depth" : undefined, + "size" : 0, + "linkpath" : "", + "nlink" : undefined, + "dev" : undefined, + "ino" : undefined + }, + { + "path" : "dir/sub/", + "mode" : "750", + "type" : "5", + "depth" : undefined, + "size" : 0, + "linkpath" : "", + "nlink" : undefined, + "dev" : undefined, + "ino" : undefined + } ] + +function slow (fs, method, t1, t2) { + var orig = fs[method] + if (!orig) return null + fs[method] = function () { + var args = [].slice.call(arguments) + console.error("slow", method, args[0]) + var cb = args.pop() + + setTimeout(function () { + orig.apply(fs, args.concat(function(er, data) { + setTimeout(function() { + cb(er, data) + }, t2) + })) + }, t1) + } +} + +// Make sure we get the graceful-fs that fstream is using. +var gfs2 +try { + gfs2 = require("fstream/node_modules/graceful-fs") +} catch (er) {} + +var slowMethods = ["chown", "chmod", "utimes", "lutimes"] +slowMethods.forEach(function (method) { + var t1 = 500 + var t2 = 0 + slow(fs, method, t1, t2) + slow(gfs, method, t1, t2) + if (gfs2) { + slow(gfs2, method, t1, t2) + } +}) + + + +// The extract class basically just pipes the input +// to a Reader, and then to a fstream.DirWriter + +// So, this is as much a test of fstream.Reader and fstream.Writer +// as it is of tar.Extract, but it sort of makes sense. + +tap.test("preclean", function (t) { + rimraf.sync(target) + /mkdirp.sync(target) + t.pass("cleaned!") + t.end() +}) + +tap.test("extract test", function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + // give it a weird buffer size to try to break in odd places + inp.bufferSize = 1234 + + inp.pipe(extract) + + extract.on("end", function () { + rimraf.sync(target) + + t.equal(ee, expectEntries.length, "should see "+ee+" entries") + + // should get no more entries after end + extract.removeAllListeners("entry") + extract.on("entry", function (e) { + t.fail("Should not get entries after end!") + }) + + t.end() + }) + + + extract.on("entry", function (entry) { + var found = + { path: entry.path + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + , dev: entry.props.dev + , ino: entry.props.ino + } + + var wanted = expectEntries[ee ++] + + t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) + }) +}) diff --git a/node_modules/tar/test/extract.js b/node_modules/tar/test/extract.js new file mode 100644 index 0000000..eca4e7c --- /dev/null +++ b/node_modules/tar/test/extract.js @@ -0,0 +1,367 @@ +// Set the umask, so that it works the same everywhere. 
+process.umask(parseInt('22', 8)) + +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + , target = path.resolve(__dirname, "tmp/extract-test") + , index = 0 + , fstream = require("fstream") + + , ee = 0 + , expectEntries = +[ { path: 'c.txt', + mode: '644', + type: '0', + depth: undefined, + size: 513, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'cc.txt', + mode: '644', + type: '0', + depth: undefined, + size: 513, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 100, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'Ω.txt', + mode: '644', + type: '0', + depth: undefined, + size: 2, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: 'Ω.txt', + mode: '644', + type: '0', + depth: undefined, + size: 2, + linkpath: '', + nlink: 1, + dev: 234881026, + ino: 51693379 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 200, + linkpath: '', + nlink: 1, + dev: 234881026, + ino: 51681874 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '0', + depth: undefined, + size: 201, + linkpath: '', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + mode: '777', + type: '2', + depth: undefined, + size: 0, + linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + nlink: undefined, + dev: undefined, + ino: undefined }, + { path: '200-hard', + mode: '644', + type: '0', + depth: undefined, + size: 200, + linkpath: '', + nlink: 2, + dev: 234881026, + ino: 51681874 }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '644', + type: '1', + depth: undefined, + size: 0, + linkpath: path.resolve(target, '200-hard'), + nlink: 2, + dev: 234881026, + ino: 51681874 } ] + + , ef = 0 + , expectFiles = +[ { path: '', + mode: '40755', + type: 'Directory', + depth: 0, + linkpath: undefined }, + { path: '/200-hard', + mode: '100644', + type: 'File', + depth: 1, + size: 200, + linkpath: undefined, + nlink: 2 }, + { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + mode: '120777', + type: 'SymbolicLink', + depth: 1, + size: 200, + linkpath: 
'200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + nlink: 1 }, + { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '100644', + type: 'Link', + depth: 1, + size: 200, + linkpath: path.join(target, '200-hard'), + nlink: 2 }, + { path: '/c.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 513, + linkpath: undefined, + nlink: 1 }, + { path: '/cc.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 513, + linkpath: undefined, + nlink: 1 }, + { path: '/r', + mode: '40755', + type: 'Directory', + depth: 1, + linkpath: undefined }, + { path: '/r/e', + mode: '40755', + type: 'Directory', + depth: 2, + linkpath: undefined }, + { path: '/r/e/a', + mode: '40755', + type: 'Directory', + depth: 3, + linkpath: undefined }, + { path: '/r/e/a/l', + mode: '40755', + type: 'Directory', + depth: 4, + linkpath: undefined }, + { path: '/r/e/a/l/l', + mode: '40755', + type: 'Directory', + depth: 5, + linkpath: undefined }, + { path: '/r/e/a/l/l/y', + mode: '40755', + type: 'Directory', + depth: 6, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-', + mode: '40755', + type: 'Directory', + depth: 7, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d', + mode: '40755', + type: 'Directory', + depth: 8, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e', + mode: '40755', + type: 'Directory', + depth: 9, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e', + mode: '40755', + type: 'Directory', + depth: 10, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p', + mode: '40755', + type: 'Directory', + depth: 11, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-', + mode: '40755', + type: 'Directory', + depth: 12, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f', + mode: '40755', + type: 'Directory', + depth: 13, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o', + mode: '40755', + type: 'Directory', + depth: 14, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l', + mode: '40755', + type: 'Directory', + depth: 15, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d', + mode: '40755', + type: 'Directory', + depth: 16, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e', + mode: '40755', + type: 'Directory', + depth: 17, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r', + mode: '40755', + type: 'Directory', + depth: 18, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-', + mode: '40755', + type: 'Directory', + depth: 19, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p', + mode: '40755', + type: 'Directory', + depth: 20, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a', + mode: '40755', + type: 'Directory', + depth: 21, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t', + mode: '40755', + type: 'Directory', + depth: 22, + linkpath: undefined }, + { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h', + mode: '40755', + type: 'Directory', + depth: 23, + linkpath: undefined }, + { path: 
'/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: '100644', + type: 'File', + depth: 24, + size: 100, + linkpath: undefined, + nlink: 1 }, + { path: '/Ω.txt', + mode: '100644', + type: 'File', + depth: 1, + size: 2, + linkpath: undefined, + nlink: 1 } ] + + + +// The extract class basically just pipes the input +// to a Reader, and then to a fstream.DirWriter + +// So, this is as much a test of fstream.Reader and fstream.Writer +// as it is of tar.Extract, but it sort of makes sense. + +tap.test("preclean", function (t) { + require("rimraf").sync(__dirname + "/tmp/extract-test") + t.pass("cleaned!") + t.end() +}) + +tap.test("extract test", function (t) { + var extract = tar.Extract(target) + var inp = fs.createReadStream(file) + + // give it a weird buffer size to try to break in odd places + inp.bufferSize = 1234 + + inp.pipe(extract) + + extract.on("end", function () { + t.equal(ee, expectEntries.length, "should see "+ee+" entries") + + // should get no more entries after end + extract.removeAllListeners("entry") + extract.on("entry", function (e) { + t.fail("Should not get entries after end!") + }) + + next() + }) + + extract.on("entry", function (entry) { + var found = + { path: entry.path + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + , dev: entry.props.dev + , ino: entry.props.ino + } + + var wanted = expectEntries[ee ++] + + t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path) + }) + + function next () { + var r = fstream.Reader({ path: target + , type: "Directory" + // this is just to encourage consistency + , sort: "alpha" }) + + r.on("ready", function () { + foundEntry(r) + }) + + r.on("end", finish) + + function foundEntry (entry) { + var p = entry.path.substr(target.length) + var found = + { path: p + , mode: entry.props.mode.toString(8) + , type: entry.props.type + , depth: entry.props.depth + , size: entry.props.size + , linkpath: entry.props.linkpath + , nlink: entry.props.nlink + } + + var wanted = expectFiles[ef ++] + + t.has(found, wanted, "unpacked file " + ef + " " + wanted.path) + + entry.on("entry", foundEntry) + } + + function finish () { + t.equal(ef, expectFiles.length, "should have "+ef+" items") + t.end() + } + } +}) diff --git a/node_modules/tar/test/fixtures.tgz b/node_modules/tar/test/fixtures.tgz new file mode 100644 index 0000000..f167602 Binary files /dev/null and b/node_modules/tar/test/fixtures.tgz differ diff --git a/node_modules/tar/test/header.js b/node_modules/tar/test/header.js new file mode 100644 index 0000000..8ea6f79 --- /dev/null +++ b/node_modules/tar/test/header.js @@ -0,0 +1,183 @@ +var tap = require("tap") +var TarHeader = require("../lib/header.js") +var tar = require("../tar.js") +var fs = require("fs") + + +var headers = + { "a.txt file header": + [ 
"612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'a.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 257 + , mtime: 1319493851 + , cksum: 5417 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "omega pax": // the extended header from omega tar. + [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'PaxHeader/Ω.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 120 + , mtime: 1301254537 + , cksum: 6697 + , type: 'x' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } ] + + , "omega file header": + [ 
"cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'Ω.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 2 + , mtime: 1301254537 + , cksum: 5690 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } ] + + , "foo.js file header": + [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'foo.js' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 4 + , mtime: 1301246433 + , cksum: 5519 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "b.txt file header": + [ 
"622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true + , path: 'b.txt' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 512 + , mtime: 1319494079 + , cksum: 5425 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + + , "deep nested file": + [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + , { cksumValid: true, + path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' + , mode: 420 + , uid: 24561 + , gid: 20 + , size: 100 + , mtime: 1319687003 + , cksum: 18124 + , type: '0' + , linkpath: '' + , ustar: 'ustar\0' + , ustarver: '00' + , uname: 'isaacs' + , gname: 'staff' + , devmaj: 0 + , devmin: 0 + , fill: '' } + ] + } + +tap.test("parsing", function (t) { + Object.keys(headers).forEach(function (name) { + var h = headers[name] + , header = new Buffer(h[0], "hex") + , expect = h[1] + , parsed = new TarHeader(header) + + // console.error(parsed) + t.has(parsed, expect, "parse " + name) + }) + t.end() +}) + +tap.test("encoding", function (t) { + Object.keys(headers).forEach(function (name) { + var h = headers[name] + , expect = new Buffer(h[0], "hex") + , encoded = TarHeader.encode(h[1]) + + // might have slightly different bytes, since the standard + // isn't very strict, but should have the same semantics + // checkSum will be different, but cksumValid will be true + + var th = new TarHeader(encoded) + delete h[1].block + delete h[1].needExtended + 
delete h[1].cksum + t.has(th, h[1], "fields "+name) + }) + t.end() +}) + +// test these manually. they're a bit rare to find in the wild +tap.test("parseNumeric tests", function (t) { + var parseNumeric = TarHeader.parseNumeric + , numbers = + { "303737373737373700": 2097151 + , "30373737373737373737373700": 8589934591 + , "303030303036343400": 420 + , "800000ffffffffffff": 281474976710655 + , "ffffff000000000001": -281474976710654 + , "ffffff000000000000": -281474976710655 + , "800000000000200000": 2097152 + , "8000000000001544c5": 1393861 + , "ffffffffffff1544c5": -15383354 } + Object.keys(numbers).forEach(function (n) { + var b = new Buffer(n, "hex") + t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n]) + }) + t.end() +}) diff --git a/node_modules/tar/test/pack-no-proprietary.js b/node_modules/tar/test/pack-no-proprietary.js new file mode 100644 index 0000000..d4b03a1 --- /dev/null +++ b/node_modules/tar/test/pack-no-proprietary.js @@ -0,0 +1,886 @@ +// This is exactly like test/pack.js, except that it's excluding +// any proprietary headers. +// +// This loses some information about the filesystem, but creates +// tarballs that are supported by more versions of tar, especially +// old non-spec-compliant copies of gnutar. + +// the symlink file is excluded from git, because it makes +// windows freak the hell out. +var fs = require("fs") + , path = require("path") + , symlink = path.resolve(__dirname, "fixtures/symlink") +try { fs.unlinkSync(symlink) } catch (e) {} +fs.symlinkSync("./hardlink-1", symlink) +process.on("exit", function () { + fs.unlinkSync(symlink) +}) + +var tap = require("tap") + , tar = require("../tar.js") + , pkg = require("../package.json") + , Pack = tar.Pack + , fstream = require("fstream") + , Reader = fstream.Reader + , Writer = fstream.Writer + , input = path.resolve(__dirname, "fixtures/") + , target = path.resolve(__dirname, "tmp/pack.tar") + , uid = process.getuid ? process.getuid() : 0 + , gid = process.getgid ? process.getgid() : 0 + + , entries = + + // the global header and root fixtures/ dir are going to get + // a different date each time, so omit that bit. + // Also, dev/ino values differ across machines, so that's not + // included. 
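// (Editor's note on the shape of this fixture: each element pairs an event
// name with the header props expected for it -- [ 'entry', props ] for an
// ordinary header, and [ 'extendedHeader', props, extendedFields ] where a
// pax extended header has to accompany the entry, e.g. for the
// 200-character path and the non-ASCII "Ω.txt" names below. The modes are
// plain decimal numbers: 493 === 0755, 488 === 0750, 420 === 0644.)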
+ [ [ 'entry', + { path: 'fixtures/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + uid: uid, + gid: gid, + size: 200 } ] + + , [ 'entry', + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/a.txt', + mode: 420, + uid: uid, + gid: gid, + size: 257, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/b.txt', + mode: 420, + uid: uid, + gid: gid, + size: 512, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/c.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/cc.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/sub/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/foo.js', + mode: 420, + uid: uid, + gid: gid, + size: 4, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-1', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-2', + mode: 420, + uid: uid, + gid: gid, + size: 0, + type: '1', + linkpath: 'fixtures/hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 
0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/star.4.html', + mode: 420, + uid: uid, + gid: gid, + size: 54081, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/packtest/Ω.txt', + uid: uid, + gid: gid, + size: 2 } ] + + , [ 'entry', + { path: 'fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, 
+ type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 
'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 100, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/symlink', + uid: uid, + gid: gid, + size: 0, + type: '2', + linkpath: 'hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: "fixtures/Ω.txt" + , uid: uid + , gid: gid + , size: 2 } ] + + , [ 'entry', + { path: 'fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + ] + + +// first, make sure that the hardlinks are actually hardlinks, or this +// won't work. Git has a way of replacing them with a copy. +var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") + , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") + , fs = require("fs") + +try { fs.unlinkSync(hard2) } catch (e) {} +fs.linkSync(hard1, hard2) + +tap.test("with global header", { timeout: 10000 }, function (t) { + runTest(t, true) +}) + +tap.test("without global header", { timeout: 10000 }, function (t) { + runTest(t, false) +}) + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} + + +function runTest (t, doGH) { + var reader = Reader({ path: input + , filter: function () { + return !this.path.match(/\.(tar|hex)$/) + } + , sort: alphasort + }) + + var props = doGH ? pkg : {} + props.noProprietary = true + var pack = Pack(props) + var writer = Writer(target) + + // global header should be skipped regardless, since it has no content. 
+ var entry = 0 + + t.ok(reader, "reader ok") + t.ok(pack, "pack ok") + t.ok(writer, "writer ok") + + pack.pipe(writer) + + var parse = tar.Parse() + t.ok(parse, "parser should be ok") + + pack.on("data", function (c) { + // console.error("PACK DATA") + if (c.length !== 512) { + // this one is too noisy, only assert if it'll be relevant + t.equal(c.length, 512, "parser should emit data in 512byte blocks") + } + parse.write(c) + }) + + pack.on("end", function () { + // console.error("PACK END") + t.pass("parser ends") + parse.end() + }) + + pack.on("error", function (er) { + t.fail("pack error", er) + }) + + parse.on("error", function (er) { + t.fail("parse error", er) + }) + + writer.on("error", function (er) { + t.fail("writer error", er) + }) + + reader.on("error", function (er) { + t.fail("reader error", er) + }) + + parse.on("*", function (ev, e) { + var wanted = entries[entry++] + if (!wanted) { + t.fail("unexpected event: "+ev) + return + } + t.equal(ev, wanted[0], "event type should be "+wanted[0]) + + if (ev !== wanted[0] || e.path !== wanted[1].path) { + console.error("wanted", wanted) + console.error([ev, e.props]) + e.on("end", function () { + console.error(e.fields) + throw "break" + }) + } + + t.has(e.props, wanted[1], "properties "+wanted[1].path) + if (wanted[2]) { + e.on("end", function () { + if (!e.fields) { + t.ok(e.fields, "should get fields") + } else { + t.has(e.fields, wanted[2], "should get expected fields") + } + }) + } + }) + + reader.pipe(pack) + + writer.on("close", function () { + t.equal(entry, entries.length, "should get all expected entries") + t.pass("it finished") + t.end() + }) + +} diff --git a/node_modules/tar/test/pack.js b/node_modules/tar/test/pack.js new file mode 100644 index 0000000..0f16c07 --- /dev/null +++ b/node_modules/tar/test/pack.js @@ -0,0 +1,952 @@ + +// the symlink file is excluded from git, because it makes +// windows freak the hell out. +var fs = require("fs") + , path = require("path") + , symlink = path.resolve(__dirname, "fixtures/symlink") +try { fs.unlinkSync(symlink) } catch (e) {} +fs.symlinkSync("./hardlink-1", symlink) +process.on("exit", function () { + fs.unlinkSync(symlink) +}) + + +var tap = require("tap") + , tar = require("../tar.js") + , pkg = require("../package.json") + , Pack = tar.Pack + , fstream = require("fstream") + , Reader = fstream.Reader + , Writer = fstream.Writer + , input = path.resolve(__dirname, "fixtures/") + , target = path.resolve(__dirname, "tmp/pack.tar") + , uid = process.getuid ? process.getuid() : 0 + , gid = process.getgid ? process.getgid() : 0 + + , entries = + + // the global header and root fixtures/ dir are going to get + // a different date each time, so omit that bit. + // Also, dev/ino values differ across machines, so that's not + // included. 
+ [ [ 'globalExtendedHeader', + { path: 'PaxHeader/', + mode: 438, + uid: 0, + gid: 0, + type: 'g', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { "NODETAR.author": pkg.author, + "NODETAR.name": pkg.name, + "NODETAR.description": pkg.description, + "NODETAR.version": pkg.version, + "NODETAR.repository.type": pkg.repository.type, + "NODETAR.repository.url": pkg.repository.url, + "NODETAR.main": pkg.main, + "NODETAR.scripts.test": pkg.scripts.test } ] + + , [ 'entry', + { path: 'fixtures/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + uid: uid, + gid: gid, + size: 200, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/a.txt', + mode: 420, + uid: uid, + gid: gid, + size: 257, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/b.txt', + mode: 420, + uid: uid, + gid: gid, + size: 512, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/c.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/cc.txt', + mode: 420, + uid: uid, + gid: gid, + size: 513, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/dir/sub/', + mode: 488, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + + , [ 'entry', + { path: 'fixtures/foo.js', + mode: 420, + uid: uid, + gid: gid, + size: 4, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + 
ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-1', + mode: 420, + uid: uid, + gid: gid, + size: 200, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/hardlink-2', + mode: 420, + uid: uid, + gid: gid, + size: 0, + type: '1', + linkpath: 'fixtures/hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/omega.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/packtest/star.4.html', + mode: 420, + uid: uid, + gid: gid, + size: 54081, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'fixtures/packtest/Ω.txt', + 'NODETAR.depth': '2', + 'NODETAR.type': 'File', + nlink: 1, + uid: uid, + gid: gid, + size: 2, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/packtest/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '2', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + + , [ 'entry', + { path: 'fixtures/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/', + mode: 
493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/', + mode: 493, + uid: 
uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/', + mode: 493, + uid: uid, + gid: gid, + size: 0, + type: '5', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: uid, + gid: gid, + size: 100, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'entry', + { path: 'fixtures/symlink', + uid: uid, + gid: gid, + size: 0, + type: '2', + linkpath: 'hardlink-1', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' } ] + + , [ 'extendedHeader', + { path: 'PaxHeader/fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + type: 'x', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: "fixtures/Ω.txt" + , "NODETAR.depth": "1" + , "NODETAR.type": "File" + , nlink: 1 + , uid: uid + , gid: gid + , size: 2 + , "NODETAR.blksize": "4096" + , "NODETAR.blocks": "8" } ] + + , [ 'entry', + { path: 'fixtures/Ω.txt', + mode: 420, + uid: uid, + gid: gid, + size: 2, + type: '0', + linkpath: '', + ustar: 'ustar\u0000', + ustarver: '00', + uname: '', + gname: '', + devmaj: 0, + devmin: 0, + fill: '', + 'NODETAR.depth': '1', + 'NODETAR.type': 'File', + nlink: 1, + 'NODETAR.blksize': '4096', + 'NODETAR.blocks': '8' } ] + ] + + +// first, make sure that the hardlinks are actually hardlinks, or this +// won't work. Git has a way of replacing them with a copy. +var hard1 = path.resolve(__dirname, "fixtures/hardlink-1") + , hard2 = path.resolve(__dirname, "fixtures/hardlink-2") + , fs = require("fs") + +try { fs.unlinkSync(hard2) } catch (e) {} +fs.linkSync(hard1, hard2) + +tap.test("with global header", { timeout: 10000 }, function (t) { + runTest(t, true) +}) + +tap.test("without global header", { timeout: 10000 }, function (t) { + runTest(t, false) +}) + +tap.test("with from base", { timeout: 10000 }, function (t) { + runTest(t, true, true) +}) + +function alphasort (a, b) { + return a === b ? 0 + : a.toLowerCase() > b.toLowerCase() ? 1 + : a.toLowerCase() < b.toLowerCase() ? -1 + : a > b ? 1 + : -1 +} + + +function runTest (t, doGH, doFromBase) { + var reader = Reader({ path: input + , filter: function () { + return !this.path.match(/\.(tar|hex)$/) + } + , sort: alphasort + }) + + var props = doGH ? pkg : {} + if(doFromBase) props.fromBase = true; + + var pack = Pack(props) + var writer = Writer(target) + + // skip the global header if we're not doing that. + var entry = doGH ? 
0 : 1 + + t.ok(reader, "reader ok") + t.ok(pack, "pack ok") + t.ok(writer, "writer ok") + + pack.pipe(writer) + + var parse = tar.Parse() + t.ok(parse, "parser should be ok") + + pack.on("data", function (c) { + // console.error("PACK DATA") + if (c.length !== 512) { + // this one is too noisy, only assert if it'll be relevant + t.equal(c.length, 512, "parser should emit data in 512byte blocks") + } + parse.write(c) + }) + + pack.on("end", function () { + // console.error("PACK END") + t.pass("parser ends") + parse.end() + }) + + pack.on("error", function (er) { + t.fail("pack error", er) + }) + + parse.on("error", function (er) { + t.fail("parse error", er) + }) + + writer.on("error", function (er) { + t.fail("writer error", er) + }) + + reader.on("error", function (er) { + t.fail("reader error", er) + }) + + parse.on("*", function (ev, e) { + var wanted = entries[entry++] + if (!wanted) { + t.fail("unexpected event: "+ev) + return + } + t.equal(ev, wanted[0], "event type should be "+wanted[0]) + + if(doFromBase) { + if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100) + wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc' + + if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/') + if(wanted[1].path == '') wanted[1].path = '/' + if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '') + + wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '') + } + + if (ev !== wanted[0] || e.path !== wanted[1].path) { + console.error("wanted", wanted) + console.error([ev, e.props]) + e.on("end", function () { + console.error(e.fields) + throw "break" + }) + } + + + t.has(e.props, wanted[1], "properties "+wanted[1].path) + if (wanted[2]) { + e.on("end", function () { + if (!e.fields) { + t.ok(e.fields, "should get fields") + } else { + t.has(e.fields, wanted[2], "should get expected fields") + } + }) + } + }) + + reader.pipe(pack) + + writer.on("close", function () { + t.equal(entry, entries.length, "should get all expected entries") + t.pass("it finished") + t.end() + }) + +} diff --git a/node_modules/tar/test/parse-discard.js b/node_modules/tar/test/parse-discard.js new file mode 100644 index 0000000..da01a65 --- /dev/null +++ b/node_modules/tar/test/parse-discard.js @@ -0,0 +1,29 @@ +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + +tap.test("parser test", function (t) { + var parser = tar.Parse() + var total = 0 + var dataTotal = 0 + + parser.on("end", function () { + + t.equals(total-513,dataTotal,'should have discarded only c.txt') + + t.end() + }) + + fs.createReadStream(file) + .pipe(parser) + .on('entry',function(entry){ + if(entry.path === 'c.txt') entry.abort() + + total += entry.size; + entry.on('data',function(data){ + dataTotal += data.length + }) + }) +}) diff --git a/node_modules/tar/test/parse.js b/node_modules/tar/test/parse.js new file mode 100644 index 0000000..f765a50 --- /dev/null +++ b/node_modules/tar/test/parse.js @@ -0,0 +1,359 @@ +var tap = require("tap") + , tar = require("../tar.js") + , fs = require("fs") + , path = require("path") + , file = path.resolve(__dirname, "fixtures/c.tar") + , index = 0 + + , expect = +[ [ 'entry', + { path: 'c.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 513, + mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'), + cksum: 5422, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 
'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'cc.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 513, + mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'), + cksum: 5525, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 100, + mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'), + cksum: 18124, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'entry', + { path: 'Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 2, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 5695, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 120, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 6702, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 'Ω.txt', + ctime: 1319737909, + atime: 1319739061, + dev: 234881026, + ino: 51693379, + nlink: 1 } ], + [ 'entry', + { path: 'Ω.txt', + mode: 420, + uid: 24561, + gid: 20, + size: 2, + mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + cksum: 5695, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'), + atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'), + dev: 234881026, + ino: 51693379, + nlink: 1 }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 353, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14488, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ctime: 1319686868, + atime: 1319741254, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 1 } ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 200, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14570, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 1 }, + undefined ], + [ 'longPath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, 
+ size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4976, + type: 'L', + linkpath: '', + ustar: false }, + '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 1000, + gid: 1000, + size: 201, + mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'), + cksum: 14086, + type: '0', + linkpath: '', + ustar: false }, + undefined ], + [ 'longLinkpath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, + size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4975, + type: 'K', + linkpath: '', + ustar: false }, + '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ], + [ 'longPath', + { path: '././@LongLink', + mode: 0, + uid: 0, + gid: 0, + size: 201, + mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'), + cksum: 4976, + type: 'L', + linkpath: '', + ustar: false }, + '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ], + [ 'entry', + { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL', + mode: 511, + uid: 1000, + gid: 1000, + size: 0, + mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'), + cksum: 21603, + type: '2', + linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ustar: false }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200-hard', + mode: 420, + uid: 24561, + gid: 20, + size: 143, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 6533, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { ctime: 1320617144, + atime: 1320617232, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 } ], + [ 'entry', + { path: '200-hard', + mode: 420, + uid: 24561, + gid: 20, + size: 200, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 5526, + type: '0', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), + atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 }, + undefined ], + [ 'extendedHeader', + { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 353, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 14488, + type: 'x', + linkpath: '', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '' }, + { path: 
'200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + ctime: 1320617144, + atime: 1320617406, + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 } ], + [ 'entry', + { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', + mode: 420, + uid: 24561, + gid: 20, + size: 0, + mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'), + cksum: 15173, + type: '1', + linkpath: '200-hard', + ustar: 'ustar\0', + ustarver: '00', + uname: 'isaacs', + gname: 'staff', + devmaj: 0, + devmin: 0, + fill: '', + ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'), + atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'), + 'LIBARCHIVE.creationtime': '1319686852', + dev: 234881026, + ino: 51681874, + nlink: 2 }, + undefined ] ] + + +tap.test("parser test", function (t) { + var parser = tar.Parse() + + parser.on("end", function () { + t.equal(index, expect.length, "saw all expected events") + t.end() + }) + + fs.createReadStream(file) + .pipe(parser) + .on("*", function (ev, entry) { + var wanted = expect[index] + if (!wanted) { + return t.fail("Unexpected event: " + ev) + } + var result = [ev, entry.props] + entry.on("end", function () { + result.push(entry.fields || entry.body) + + t.equal(ev, wanted[0], index + " event type") + t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties") + if (wanted[2]) { + t.equivalent(result[2], wanted[2], "metadata values") + } + index ++ + }) + }) +}) diff --git a/node_modules/tar/test/zz-cleanup.js b/node_modules/tar/test/zz-cleanup.js new file mode 100644 index 0000000..a00ff7f --- /dev/null +++ b/node_modules/tar/test/zz-cleanup.js @@ -0,0 +1,20 @@ +// clean up the fixtures + +var tap = require("tap") +, rimraf = require("rimraf") +, test = tap.test +, path = require("path") + +test("clean fixtures", function (t) { + rimraf(path.resolve(__dirname, "fixtures"), function (er) { + t.ifError(er, "rimraf ./fixtures/") + t.end() + }) +}) + +test("clean tmp", function (t) { + rimraf(path.resolve(__dirname, "tmp"), function (er) { + t.ifError(er, "rimraf ./tmp/") + t.end() + }) +}) diff --git a/node_modules/timers-browserify/.npmignore b/node_modules/timers-browserify/.npmignore new file mode 100644 index 0000000..03e05e4 --- /dev/null +++ b/node_modules/timers-browserify/.npmignore @@ -0,0 +1,2 @@ +.DS_Store +/node_modules diff --git a/node_modules/timers-browserify/CHANGELOG.md b/node_modules/timers-browserify/CHANGELOG.md new file mode 100644 index 0000000..a7cd8bc --- /dev/null +++ b/node_modules/timers-browserify/CHANGELOG.md @@ -0,0 +1,58 @@ +# Change Log +All notable changes to this project will be documented in this file. +This project adheres to [Semantic Versioning](http://semver.org/). 
+ +## 1.4.0 - 2015-02-23 + +### Added +* Link to `timers-browserify-full`, which offers a larger, but much more exact, + version of Node's `timers` library + +### Changed +* `setTimeout` and `setInterval` return objects with the same API as the Node + implementation, instead of just IDs + +### Fixed +* `active` implementation actually has an effect, as in Node +* Replaced usages of `apply` that break in IE 8 + +## 1.3.0 - 2015-02-04 + +### Changed +* Prefer native versions of `setImmediate` and `clearImmediate` if they exist + +## 1.2.0 - 2015-01-02 + +### Changed +* Update `process` dependency + +## 1.1.0 - 2014-08-26 + +### Added +* `clearImmediate` available to undo `setImmediate` + +## 1.0.3 - 2014-06-30 + +### Fixed +* Resume returning opaque IDs from `setTimeout` and `setInterval` + +## 1.0.2 - 2014-06-30 + +### Fixed +* Pass `window` explicitly to `setTimeout` and others to resolve an error in + Chrome + +## 1.0.1 - 2013-12-28 + +### Changed +* Replaced `setimmediate` dependency with `process` for the `nextTick` shim + +## 1.0.0 - 2013-12-10 + +### Added +* Guard against undefined globals like `setTimeout` in some environments + +## 0.0.0 - 2012-05-30 + +### Added +* Basic functionality for initial release diff --git a/node_modules/timers-browserify/LICENSE.md b/node_modules/timers-browserify/LICENSE.md new file mode 100644 index 0000000..940ec90 --- /dev/null +++ b/node_modules/timers-browserify/LICENSE.md @@ -0,0 +1,46 @@ +# timers-browserify + +This project uses the [MIT](http://jryans.mit-license.org/) license: + + Copyright © 2012 J. Ryan Stinnett + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the “Software”), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +# lib/node + +The `lib/node` directory borrows files from joyent/node which uses the following license: + + Copyright Joyent, Inc. and other Node contributors. All rights reserved. + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal in the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. diff --git a/node_modules/timers-browserify/README.md b/node_modules/timers-browserify/README.md new file mode 100644 index 0000000..c7efa19 --- /dev/null +++ b/node_modules/timers-browserify/README.md @@ -0,0 +1,40 @@ +# Overview + +Adds support for the `timers` module to browserify. + +## Wait, isn't it already supported in the browser? + +The public methods of the `timers` module are: + +* `setTimeout(callback, delay, [arg], [...])` +* `clearTimeout(timeoutId)` +* `setInterval(callback, delay, [arg], [...])` +* `clearInterval(intervalId)` + +and indeed, browsers support these already. + +## So, why does this exist? + +The `timers` module also includes some private methods used in other built-in +Node.js modules: + +* `enroll(item, delay)` +* `unenroll(item)` +* `active(item)` + +These are used to efficiently support a large quantity of timers with the same +timeouts by creating only a few timers under the covers. + +Node.js also offers the `immediate` APIs, which aren't yet available cross-browser, so we polyfill those: + +* `setImmediate(callback, [arg], [...])` +* `clearImmediate(immediateId)` + +## I need lots of timers and want to use linked list timers as Node.js does. + +Linked lists are efficient when you have thousands (millions?) of timers with the same delay. +Take a look at [timers-browserify-full](https://www.npmjs.com/package/timers-browserify-full) in this case. + +# License + +[MIT](http://jryans.mit-license.org/) diff --git a/node_modules/timers-browserify/example/enroll/build.sh b/node_modules/timers-browserify/example/enroll/build.sh new file mode 100755 index 0000000..d276735 --- /dev/null +++ b/node_modules/timers-browserify/example/enroll/build.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +browserify --debug -o js/browserify.js js/main.js diff --git a/node_modules/timers-browserify/example/enroll/index.html b/node_modules/timers-browserify/example/enroll/index.html new file mode 100644 index 0000000..9cc1140 --- /dev/null +++ b/node_modules/timers-browserify/example/enroll/index.html @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/node_modules/timers-browserify/example/enroll/js/browserify.js b/node_modules/timers-browserify/example/enroll/js/browserify.js new file mode 100644 index 0000000..c2d0821 --- /dev/null +++ b/node_modules/timers-browserify/example/enroll/js/browserify.js @@ -0,0 +1,233 @@ +var require = function (file, cwd) { + var resolved = require.resolve(file, cwd || '/'); + var mod = require.modules[resolved]; + if (!mod) throw new Error( + 'Failed to resolve module ' + file + ', tried ' + resolved + ); + var res = mod._cached ? 
mod._cached : mod(); + return res; +} + +require.paths = []; +require.modules = {}; +require.extensions = [".js",".coffee"]; + +require._core = { + 'assert': true, + 'events': true, + 'fs': true, + 'path': true, + 'vm': true +}; + +require.resolve = (function () { + return function (x, cwd) { + if (!cwd) cwd = '/'; + + if (require._core[x]) return x; + var path = require.modules.path(); + cwd = path.resolve('/', cwd); + var y = cwd || '/'; + + if (x.match(/^(?:\.\.?\/|\/)/)) { + var m = loadAsFileSync(path.resolve(y, x)) + || loadAsDirectorySync(path.resolve(y, x)); + if (m) return m; + } + + var n = loadNodeModulesSync(x, y); + if (n) return n; + + throw new Error("Cannot find module '" + x + "'"); + + function loadAsFileSync (x) { + if (require.modules[x]) { + return x; + } + + for (var i = 0; i < require.extensions.length; i++) { + var ext = require.extensions[i]; + if (require.modules[x + ext]) return x + ext; + } + } + + function loadAsDirectorySync (x) { + x = x.replace(/\/+$/, ''); + var pkgfile = x + '/package.json'; + if (require.modules[pkgfile]) { + var pkg = require.modules[pkgfile](); + var b = pkg.browserify; + if (typeof b === 'object' && b.main) { + var m = loadAsFileSync(path.resolve(x, b.main)); + if (m) return m; + } + else if (typeof b === 'string') { + var m = loadAsFileSync(path.resolve(x, b)); + if (m) return m; + } + else if (pkg.main) { + var m = loadAsFileSync(path.resolve(x, pkg.main)); + if (m) return m; + } + } + + return loadAsFileSync(x + '/index'); + } + + function loadNodeModulesSync (x, start) { + var dirs = nodeModulesPathsSync(start); + for (var i = 0; i < dirs.length; i++) { + var dir = dirs[i]; + var m = loadAsFileSync(dir + '/' + x); + if (m) return m; + var n = loadAsDirectorySync(dir + '/' + x); + if (n) return n; + } + + var m = loadAsFileSync(x); + if (m) return m; + } + + function nodeModulesPathsSync (start) { + var parts; + if (start === '/') parts = [ '' ]; + else parts = path.normalize(start).split('/'); + + var dirs = []; + for (var i = parts.length - 1; i >= 0; i--) { + if (parts[i] === 'node_modules') continue; + var dir = parts.slice(0, i + 1).join('/') + '/node_modules'; + dirs.push(dir); + } + + return dirs; + } + }; +})(); + +require.alias = function (from, to) { + var path = require.modules.path(); + var res = null; + try { + res = require.resolve(from + '/package.json', '/'); + } + catch (err) { + res = require.resolve(from, '/'); + } + var basedir = path.dirname(res); + + var keys = (Object.keys || function (obj) { + var res = []; + for (var key in obj) res.push(key) + return res; + })(require.modules); + + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (key.slice(0, basedir.length + 1) === basedir + '/') { + var f = key.slice(basedir.length); + require.modules[to + f] = require.modules[basedir + f]; + } + else if (key === basedir) { + require.modules[to] = require.modules[basedir]; + } + } +}; + +require.define = function (filename, fn) { + var dirname = require._core[filename] + ? 
'' + : require.modules.path().dirname(filename) + ; + + var require_ = function (file) { + return require(file, dirname) + }; + require_.resolve = function (name) { + return require.resolve(name, dirname); + }; + require_.modules = require.modules; + require_.define = require.define; + var module_ = { exports : {} }; + + require.modules[filename] = function () { + require.modules[filename]._cached = module_.exports; + fn.call( + module_.exports, + require_, + module_, + module_.exports, + dirname, + filename + ); + require.modules[filename]._cached = module_.exports; + return module_.exports; + }; +}; + +if (typeof process === 'undefined') process = {}; + +if (!process.nextTick) process.nextTick = (function () { + var queue = []; + var canPost = typeof window !== 'undefined' + && window.postMessage && window.addEventListener + ; + + if (canPost) { + window.addEventListener('message', function (ev) { + if (ev.source === window && ev.data === 'browserify-tick') { + ev.stopPropagation(); + if (queue.length > 0) { + var fn = queue.shift(); + fn(); + } + } + }, true); + } + + return function (fn) { + if (canPost) { + queue.push(fn); + window.postMessage('browserify-tick', '*'); + } + else setTimeout(fn, 0); + }; +})(); + +if (!process.title) process.title = 'browser'; + +if (!process.binding) process.binding = function (name) { + if (name === 'evals') return require('vm') + else throw new Error('No such module') +}; + +if (!process.cwd) process.cwd = function () { return '.' }; + +if (!process.env) process.env = {}; +if (!process.argv) process.argv = []; + +require.define("path", Function( + [ 'require', 'module', 'exports', '__dirname', '__filename' ], + "function filter (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n if (fn(xs[i], i, xs)) res.push(xs[i]);\n }\n return res;\n}\n\n// resolves . and .. elements in a path array with directory names there\n// must be no slashes, empty elements, or device names (c:\\) in the array\n// (so also no leading and trailing slashes - it does not distinguish\n// relative and absolute paths)\nfunction normalizeArray(parts, allowAboveRoot) {\n // if the path tries to go above the root, `up` ends up > 0\n var up = 0;\n for (var i = parts.length; i >= 0; i--) {\n var last = parts[i];\n if (last == '.') {\n parts.splice(i, 1);\n } else if (last === '..') {\n parts.splice(i, 1);\n up++;\n } else if (up) {\n parts.splice(i, 1);\n up--;\n }\n }\n\n // if the path is allowed to go above the root, restore leading ..s\n if (allowAboveRoot) {\n for (; up--; up) {\n parts.unshift('..');\n }\n }\n\n return parts;\n}\n\n// Regex to split a filename into [*, dir, basename, ext]\n// posix version\nvar splitPathRe = /^(.+\\/(?!$)|\\/)?((?:.+?)?(\\.[^.]*)?)$/;\n\n// path.resolve([from ...], to)\n// posix version\nexports.resolve = function() {\nvar resolvedPath = '',\n resolvedAbsolute = false;\n\nfor (var i = arguments.length; i >= -1 && !resolvedAbsolute; i--) {\n var path = (i >= 0)\n ? arguments[i]\n : process.cwd();\n\n // Skip empty and invalid entries\n if (typeof path !== 'string' || !path) {\n continue;\n }\n\n resolvedPath = path + '/' + resolvedPath;\n resolvedAbsolute = path.charAt(0) === '/';\n}\n\n// At this point the path should be resolved to a full absolute path, but\n// handle relative paths to be safe (might happen when process.cwd() fails)\n\n// Normalize the path\nresolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) {\n return !!p;\n }), !resolvedAbsolute).join('/');\n\n return ((resolvedAbsolute ? 
'/' : '') + resolvedPath) || '.';\n};\n\n// path.normalize(path)\n// posix version\nexports.normalize = function(path) {\nvar isAbsolute = path.charAt(0) === '/',\n trailingSlash = path.slice(-1) === '/';\n\n// Normalize the path\npath = normalizeArray(filter(path.split('/'), function(p) {\n return !!p;\n }), !isAbsolute).join('/');\n\n if (!path && !isAbsolute) {\n path = '.';\n }\n if (path && trailingSlash) {\n path += '/';\n }\n \n return (isAbsolute ? '/' : '') + path;\n};\n\n\n// posix version\nexports.join = function() {\n var paths = Array.prototype.slice.call(arguments, 0);\n return exports.normalize(filter(paths, function(p, index) {\n return p && typeof p === 'string';\n }).join('/'));\n};\n\n\nexports.dirname = function(path) {\n var dir = splitPathRe.exec(path)[1] || '';\n var isWindows = false;\n if (!dir) {\n // No dirname\n return '.';\n } else if (dir.length === 1 ||\n (isWindows && dir.length <= 3 && dir.charAt(1) === ':')) {\n // It is just a slash or a drive letter with a slash\n return dir;\n } else {\n // It is a full dirname, strip trailing slash\n return dir.substring(0, dir.length - 1);\n }\n};\n\n\nexports.basename = function(path, ext) {\n var f = splitPathRe.exec(path)[2] || '';\n // TODO: make this comparison case-insensitive on windows?\n if (ext && f.substr(-1 * ext.length) === ext) {\n f = f.substr(0, f.length - ext.length);\n }\n return f;\n};\n\n\nexports.extname = function(path) {\n return splitPathRe.exec(path)[3] || '';\n};\n\n//@ sourceURL=path" +)); + +require.define("timers", Function( + [ 'require', 'module', 'exports', '__dirname', '__filename' ], + "module.exports = require(\"timers-browserify\")\n//@ sourceURL=timers" +)); + +require.define("/node_modules/timers-browserify/package.json", Function( + [ 'require', 'module', 'exports', '__dirname', '__filename' ], + "module.exports = {\"main\":\"main.js\"}\n//@ sourceURL=/node_modules/timers-browserify/package.json" +)); + +require.define("/node_modules/timers-browserify/main.js", Function( + [ 'require', 'module', 'exports', '__dirname', '__filename' ], + "// DOM APIs, for completeness\n\nexports.setTimeout = setTimeout;\nexports.clearTimeout = clearTimeout;\nexports.setInterval = setInterval;\nexports.clearInterval = clearInterval;\n\n// TODO: Change to more effiecient list approach used in Node.js\n// For now, we just implement the APIs using the primitives above.\n\nexports.enroll = function(item, delay) {\n item._timeoutID = setTimeout(item._onTimeout, delay);\n};\n\nexports.unenroll = function(item) {\n clearTimeout(item._timeoutID);\n};\n\nexports.active = function(item) {\n // our naive impl doesn't care (correctness is still preserved)\n};\n\n//@ sourceURL=/node_modules/timers-browserify/main.js" +)); + +require.define("/main.js", Function( + [ 'require', 'module', 'exports', '__dirname', '__filename' ], + "var timers = require('timers');\n\nvar obj = {\n _onTimeout: function() {\n console.log('Timer ran for: ' + (new Date().getTime() - obj.now) + ' ms');\n },\n start: function() {\n console.log('Timer should run for 100 ms');\n this.now = new Date().getTime();\n timers.enroll(this, 100);\n }\n};\n\nobj.start();\n\n//@ sourceURL=/main.js" +)); +require("/main.js"); diff --git a/node_modules/timers-browserify/example/enroll/js/main.js b/node_modules/timers-browserify/example/enroll/js/main.js new file mode 100644 index 0000000..0007df8 --- /dev/null +++ b/node_modules/timers-browserify/example/enroll/js/main.js @@ -0,0 +1,14 @@ +var timers = require('timers'); + +var obj = { + _onTimeout: 
function() { + console.log('Timer ran for: ' + (new Date().getTime() - obj.now) + ' ms'); + }, + start: function() { + console.log('Timer should run for 100 ms'); + this.now = new Date().getTime(); + timers.enroll(this, 100); + } +}; + +obj.start(); diff --git a/node_modules/timers-browserify/example/enroll/server.js b/node_modules/timers-browserify/example/enroll/server.js new file mode 100644 index 0000000..37b1a0a --- /dev/null +++ b/node_modules/timers-browserify/example/enroll/server.js @@ -0,0 +1,11 @@ +var connect = require('connect'); +var server = connect.createServer(); +server.use(connect.static(__dirname)); + +var browserify = require('browserify'); +var bundle = browserify(__dirname + '/js/main.js', { mount: '/js/browserify.js' }); +server.use(bundle); + +var port = parseInt(process.argv[2] || 8080, 10); +server.listen(port); +console.log('Listening on :' + port); diff --git a/node_modules/timers-browserify/main.js b/node_modules/timers-browserify/main.js new file mode 100644 index 0000000..38c058f --- /dev/null +++ b/node_modules/timers-browserify/main.js @@ -0,0 +1,76 @@ +var nextTick = require('process/browser.js').nextTick; +var apply = Function.prototype.apply; +var slice = Array.prototype.slice; +var immediateIds = {}; +var nextImmediateId = 0; + +// DOM APIs, for completeness + +exports.setTimeout = function() { + return new Timeout(apply.call(setTimeout, window, arguments), clearTimeout); +}; +exports.setInterval = function() { + return new Timeout(apply.call(setInterval, window, arguments), clearInterval); +}; +exports.clearTimeout = +exports.clearInterval = function(timeout) { timeout.close(); }; + +function Timeout(id, clearFn) { + this._id = id; + this._clearFn = clearFn; +} +Timeout.prototype.unref = Timeout.prototype.ref = function() {}; +Timeout.prototype.close = function() { + this._clearFn.call(window, this._id); +}; + +// Does not start the time, just sets up the members needed. +exports.enroll = function(item, msecs) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = msecs; +}; + +exports.unenroll = function(item) { + clearTimeout(item._idleTimeoutId); + item._idleTimeout = -1; +}; + +exports._unrefActive = exports.active = function(item) { + clearTimeout(item._idleTimeoutId); + + var msecs = item._idleTimeout; + if (msecs >= 0) { + item._idleTimeoutId = setTimeout(function onTimeout() { + if (item._onTimeout) + item._onTimeout(); + }, msecs); + } +}; + +// That's not how node.js implements it but the exposed api is the same. +exports.setImmediate = typeof setImmediate === "function" ? setImmediate : function(fn) { + var id = nextImmediateId++; + var args = arguments.length < 2 ? false : slice.call(arguments, 1); + + immediateIds[id] = true; + + nextTick(function onNextTick() { + if (immediateIds[id]) { + // fn.call() is faster so we optimize for the common use-case + // @see http://jsperf.com/call-apply-segu + if (args) { + fn.apply(null, args); + } else { + fn.call(null); + } + // Prevent ids from leaking + exports.clearImmediate(id); + } + }); + + return id; +}; + +exports.clearImmediate = typeof clearImmediate === "function" ? clearImmediate : function(id) { + delete immediateIds[id]; +}; \ No newline at end of file diff --git a/node_modules/timers-browserify/package.json b/node_modules/timers-browserify/package.json new file mode 100644 index 0000000..040f224 --- /dev/null +++ b/node_modules/timers-browserify/package.json @@ -0,0 +1,51 @@ +{ + "author": "J. 
Ryan Stinnett (http://convolv.es/)", + "name": "timers-browserify", + "description": "timers module for browserify", + "version": "1.4.2", + "homepage": "https://github.com/jryans/timers-browserify", + "bugs": "https://github.com/jryans/timers-browserify/issues", + "repository": { + "type": "git", + "url": "git://github.com/jryans/timers-browserify.git" + }, + "contributors": [ + "Guy Bedford ", + "Ionut-Cristian Florescu ", + "James Halliday ", + "Jan Schär ", + "Johannes Ewald ", + "Jonathan Prins ", + "Matt Esch " + ], + "main": "main.js", + "dependencies": { + "process": "~0.11.0" + }, + "devDependencies": { + "connect": "~2.3.0", + "browserify": "~1.10.16" + }, + "optionalDependencies": {}, + "engines": { + "node": ">=0.6.0" + }, + "keywords": [ + "timers", + "browserify", + "browser" + ], + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/jryans/timers-browserify/blob/master/LICENSE.md" + } + ], + "jspm": { + "map": { + "./main.js": { + "node": "@node/timers" + } + } + } +} diff --git a/node_modules/to-fast-properties/index.js b/node_modules/to-fast-properties/index.js new file mode 100644 index 0000000..33e594d --- /dev/null +++ b/node_modules/to-fast-properties/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = function toFastProperties(obj) { + function f() {} + f.prototype = obj; + new f(); + return; + eval(obj); +}; diff --git a/node_modules/to-fast-properties/license b/node_modules/to-fast-properties/license new file mode 100644 index 0000000..55cc204 --- /dev/null +++ b/node_modules/to-fast-properties/license @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Petka Antonov + 2015 Sindre Sorhus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/to-fast-properties/package.json b/node_modules/to-fast-properties/package.json new file mode 100644 index 0000000..8dbae5c --- /dev/null +++ b/node_modules/to-fast-properties/package.json @@ -0,0 +1,35 @@ +{ + "name": "to-fast-properties", + "version": "1.0.2", + "description": "Force V8 to use fast properties for an object", + "license": "MIT", + "repository": "sindresorhus/to-fast-properties", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node --allow-natives-syntax test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "obj", + "properties", + "props", + "v8", + "optimize", + "fast", + "convert", + "mode" + ], + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/to-fast-properties/readme.md b/node_modules/to-fast-properties/readme.md new file mode 100644 index 0000000..e298c20 --- /dev/null +++ b/node_modules/to-fast-properties/readme.md @@ -0,0 +1,37 @@ +# to-fast-properties [![Build Status](https://travis-ci.org/sindresorhus/to-fast-properties.svg?branch=master)](https://travis-ci.org/sindresorhus/to-fast-properties) + +> Force V8 to use fast properties for an object + +[Read more.](http://stackoverflow.com/questions/24987896/) + +Use `%HasFastProperties(object)` and `--allow-natives-syntax` to check whether an object already has fast properties. + + +## Install + +``` +$ npm install --save to-fast-properties +``` + + +## Usage + +```js +const toFastProperties = require('to-fast-properties'); + +const obj = { + foo: true, + bar: true +}; + +delete obj.foo; +// `obj` now has slow properties + +toFastProperties(obj); +// `obj` now has fast properties +``` + + +## License + +MIT © Petka Antonov, Sindre Sorhus diff --git a/node_modules/tough-cookie/LICENSE b/node_modules/tough-cookie/LICENSE new file mode 100644 index 0000000..1bc286f --- /dev/null +++ b/node_modules/tough-cookie/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015, Salesforce.com, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +=== + +The following exceptions apply: + +=== + +`public_suffix_list.dat` was obtained from +<https://publicsuffix.org/list/public_suffix_list.dat> via +<http://publicsuffix.org>. The license for this file is MPL/2.0. The header of +that file reads as follows: + + // This Source Code Form is subject to the terms of the Mozilla Public + // License, v. 2.0. If a copy of the MPL was not distributed with this + // file, You can obtain one at http://mozilla.org/MPL/2.0/. diff --git a/node_modules/tough-cookie/README.md b/node_modules/tough-cookie/README.md new file mode 100644 index 0000000..ba2bdbb --- /dev/null +++ b/node_modules/tough-cookie/README.md @@ -0,0 +1,506 @@ +[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js + +[![npm package](https://nodei.co/npm/tough-cookie.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/tough-cookie/) + +[![Build Status](https://travis-ci.org/salesforce/tough-cookie.png?branch=master)](https://travis-ci.org/salesforce/tough-cookie) + +# Synopsis + +``` javascript +var tough = require('tough-cookie'); +var Cookie = tough.Cookie; +var cookie = Cookie.parse(header); +cookie.value = 'somethingdifferent'; +header = cookie.toString(); + +var cookiejar = new tough.CookieJar(); +cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb); +// ... +cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) { + res.headers['cookie'] = cookies.join('; '); +}); +``` + +# Installation + +It's _so_ easy! + +`npm install tough-cookie` + +Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken. + +## Version Support + +Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module. + +# API + +## tough + +Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound". + +**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary. + +### `parseDate(string)` + +Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. + +### `formatDate(date)` + +Format a Date into a RFC1123 string (the RFC6265-recommended format). + +### `canonicalDomain(str)` + +Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects). + +### `domainMatch(str,domStr[,canonicalize=true])` + +Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". + +The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not. + +### `defaultPath(path)` + +Given a current request/response path, gives the Path appropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. + +The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `url.parse()` output. + +### `pathMatch(reqPath,cookiePath)` + +Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean.
+ +This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`. + +### `parse(cookieString[, options])` + +alias for `Cookie.parse(cookieString[, options])` + +### `fromJSON(string)` + +alias for `Cookie.fromJSON(string)` + +### `getPublicSuffix(hostname)` + +Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it. + +For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`. + +For further information, see http://publicsuffix.org/. This module derives its list from that site. + +### `cookieCompare(a,b)` + +For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence: + +* Longest `.path` +* oldest `.creation` (which has a 1ms precision, same as `Date`) +* lowest `.creationIndex` (to get beyond the 1ms precision) + +``` javascript +var cookies = [ /* unsorted array of Cookie objects */ ]; +cookies = cookies.sort(cookieCompare); +``` + +**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`. + +### `permuteDomain(domain)` + +Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores. + +### `permutePath(path)` + +Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores. + + +## Cookie + +Exported via `tough.Cookie`. + +### `Cookie.parse(cookieString[, options])` + +Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. + +The options parameter is not required and currently has only one property: + + * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant. + +If options is not an object, it is ignored, which means you can use `Array#map` with it. + +Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response: + +``` javascript +if (res.headers['set-cookie'] instanceof Array) + cookies = res.headers['set-cookie'].map(Cookie.parse); +else + cookies = [Cookie.parse(res.headers['set-cookie'])]; +``` + +### Properties + +Cookie object properties: + + * _key_ - string - the name or key of the cookie (default "") + * _value_ - string - the value of the cookie (default "") + * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()` + * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. 
May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()` + * _domain_ - string - the `Domain=` attribute of the cookie + * _path_ - string - the `Path=` of the cookie + * _secure_ - boolean - the `Secure` cookie flag + * _httpOnly_ - boolean - the `HttpOnly` cookie flag + * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside) + * _creation_ - `Date` - when this cookie was constructed + * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation) + +After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes: + + * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied) + * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one. + * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar + * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute. + +### `Cookie([{properties}])` + +Receives an options object that can contain any of the above Cookie properties; defaults are used for unspecified properties. + +### `.toString()` + +encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`. + +### `.cookieString()` + +encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '='). + +### `.setExpires(String)` + +sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string). + +### `.setMaxAge(number)` + +sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly. + +### `.expiryTime([now=Date.now()])` + +### `.expiryDate([now=Date.now()])` + +expiryTime() computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds. + +Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute. + +If Expires (`.expires`) is set, that's returned. + +Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents). + +### `.TTL([now=Date.now()])` + +compute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply. + +The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned. + +### `.canonicalizedDomain()` + +### `.cdomain()` + +return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters. + +### `.toJSON()` + +For convenience in using `JSON.stringify(cookie)`. Returns a plain-old `Object` that can be JSON-serialized.
+ +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`). + +**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +### `Cookie.fromJSON(strOrObj)` + +Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer. + +Returns `null` upon JSON parsing error. + +### `.clone()` + +Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`. + +### `.validate()` + +Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive. + +validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct: + +``` javascript +if (cookie.validate() === true) { + // it's tasty +} else { + // yuck! +} +``` + + +## CookieJar + +Exported via `tough.CookieJar`. + +### `CookieJar([store],[options])` + +Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used. + +The `options` object can be omitted and can have the following properties: + + * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk" + * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name. + This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers. + +Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods. + +### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))` + +Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option. + +As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual). 
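+As a minimal sketch of the options described above (the URL and cookie string are only examples), setting a host-only cookie and inspecting the stored `Cookie` might look like this:
+
+``` javascript
+var tough = require('tough-cookie');
+var jar = new tough.CookieJar(); // default MemoryCookieStore, rejectPublicSuffixes: true
+
+// No "Domain=" attribute, so the cookie becomes host-only for app.example.com:
+jar.setCookie('sid=abc123; Path=/; HttpOnly', 'https://app.example.com/login',
+  { http: true, ignoreError: false }, function(err, cookie) {
+    if (err) { throw err; }
+    console.log(cookie.hostOnly); // true
+    console.log(cookie.domain);   // 'app.example.com'
+  });
+```
+
+Because the default `MemoryCookieStore` is synchronous, `setCookieSync()` (below) would work equally well here.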
+ +### `.setCookieSync(cookieOrString, currentUrl, [{options}])` + +Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookies(currentUrl, [{options},] cb(err,cookies))` + +Retrieve the list of cookies that can be sent in a Cookie header for the current url. + +If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially). + * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it). + +The `.lastAccessed` property of the returned cookies will have been updated. + +### `.getCookiesSync(currentUrl, [{options}])` + +Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookieString(...)` + +Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`. + +### `.getCookieStringSync(...)` + +Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getSetCookieStrings(...)` + +Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`. + +### `.getSetCookieStringsSync(...)` + +Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.serialize(cb(err,serializedObject))` + +Serialize the Jar if the underlying store supports `.getAllCookies`. + +**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +See [Serialization Format]. + +### `.serializeSync()` + +Sync version of .serialize + +### `.toJSON()` + +Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`. + +### `CookieJar.deserialize(serialized, [store], cb(err,object))` + +A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization. + +The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. + +As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback. 
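+For illustration, a minimal serialization round trip (the cookie and URLs are only examples, and the jar uses the default `MemoryCookieStore`) might look like this:
+
+``` javascript
+var tough = require('tough-cookie');
+var jar = new tough.CookieJar();
+jar.setCookieSync('token=xyz; Domain=example.com; Path=/', 'http://www.example.com/');
+
+// Serialize the jar (the underlying store must implement getAllCookies):
+jar.serialize(function(err, serialized) {
+  if (err) { throw err; }
+  var json = JSON.stringify(serialized);
+
+  // Later, rebuild an equivalent jar from the JSON string:
+  tough.CookieJar.deserialize(json, function(err, restoredJar) {
+    if (err) { throw err; }
+    restoredJar.getCookies('http://www.example.com/', function(err, cookies) {
+      console.log(cookies.map(String)); // the same 'token' cookie
+    });
+  });
+});
+```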
+ +### `CookieJar.deserializeSync(serialized, [store])` + +Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work. + +### `CookieJar.fromJSON(string)` + +Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`. + +### `.clone([store,]cb(err,newJar))` + +Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa. + +The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`. + +### `.cloneSync([store])` + +Synchronous version of `.clone`, returning a new `CookieJar` instance. + +The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used. + +The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls. + +## Store + +Base class for CookieJar stores. Available as `tough.Store`. + +## Store API + +The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores. + +Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`. + +Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (however, the continuation-passing-style methods will continue to work as well). + +All `domain` parameters will have been normalized before calling. + +The Cookie store must have all of the following methods. + +### `store.findCookie(domain, path, key, cb(err,cookie))` + +Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned. + +Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error). + +### `store.findCookies(domain, path, cb(err,cookies))` + +Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above. + +If no cookies are found, the callback MUST be passed an empty array. + +The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done. + +As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only). + +### `store.putCookie(cookie, cb(err))` + +Adds a new cookie to the store.
The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur. + +The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties. + +Pass an error if the cookie cannot be stored. + +### `store.updateCookie(oldCookie, newCookie, cb(err))` + +Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store. + +The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement). + +Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object. + +The `newCookie` and `oldCookie` objects MUST NOT be modified. + +Pass an error if the newCookie cannot be stored. + +### `store.removeCookie(domain, path, key, cb(err))` + +Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint). + +The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie. + +### `store.removeCookies(domain, path, cb(err))` + +Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed. + +Pass an error ONLY if removing any existing cookies failed. + +### `store.getAllCookies(cb(err, cookies))` + +Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure. + +Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail. + +Pass an error if retrieval fails. + +## MemoryCookieStore + +Inherits from `Store`. + +A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. + +## Community Cookie Stores + +These are some Store implementations authored and maintained by the community. 
They aren't official and we don't vouch for them but you may be interested to have a look: + +- [`db-cookie-store`](https://github.com/JSBizon/db-cookie-store): SQL including SQLite-based databases +- [`file-cookie-store`](https://github.com/JSBizon/file-cookie-store): Netscape cookie file format on disk +- [`redis-cookie-store`](https://github.com/benkroeger/redis-cookie-store): Redis +- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk +- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage + + +# Serialization Format + +**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`. + +```js + { + // The version of tough-cookie that serialized this jar. + version: 'tough-cookie@1.x.y', + + // add the store type, to make humans happy: + storeType: 'MemoryCookieStore', + + // CookieJar configuration: + rejectPublicSuffixes: true, + // ... future items go here + + // Gets filled from jar.store.getAllCookies(): + cookies: [ + { + key: 'string', + value: 'string', + // ... + /* other Cookie.serializableProperties go here */ + } + ] + } +``` + +# Copyright and License + +(tl;dr: BSD-3-Clause with some MPL/2.0) + +```text + Copyright (c) 2015, Salesforce.com, Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of Salesforce.com nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +``` + +Portions may be licensed under different licenses (in particular `public_suffix_list.dat` is MPL/2.0); please read that file and the LICENSE file for full details. diff --git a/node_modules/tough-cookie/lib/cookie.js b/node_modules/tough-cookie/lib/cookie.js new file mode 100644 index 0000000..c3dacfe --- /dev/null +++ b/node_modules/tough-cookie/lib/cookie.js @@ -0,0 +1,1336 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. 
Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +'use strict'; +var net = require('net'); +var urlParse = require('url').parse; +var pubsuffix = require('./pubsuffix'); +var Store = require('./store').Store; +var MemoryCookieStore = require('./memstore').MemoryCookieStore; +var pathMatch = require('./pathMatch').pathMatch; +var VERSION = require('../package.json').version; + +var punycode; +try { + punycode = require('punycode'); +} catch(e) { + console.warn("cookie: can't load punycode; won't use punycode for domain normalization"); +} + +var DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +var COOKIE_OCTET = /[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]/; +var COOKIE_OCTETS = new RegExp('^'+COOKIE_OCTET.source+'+$'); + +var CONTROL_CHARS = /[\x00-\x1F]/; + +// Double quotes are part of the value (see: S4.1.1). 
+// '\r', '\n' and '\0' should be treated as a terminator in the "relaxed" mode +// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60) +// '=' and ';' are attribute/values separators +// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L64) +var COOKIE_PAIR = /^(([^=;]+))\s*=\s*([^\n\r\0]*)/; + +// Used to parse non-RFC-compliant cookies like '=abc' when given the `loose` +// option in Cookie.parse: +var LOOSE_COOKIE_PAIR = /^((?:=)?([^=;]*)\s*=\s*)?([^\n\r\0]*)/; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +var PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +var DAY_OF_MONTH = /^(\d{1,2})[^\d]*$/; +var TIME = /^(\d{1,2})[^\d]*:(\d{1,2})[^\d]*:(\d{1,2})[^\d]*$/; +var MONTH = /^(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)/i; + +var MONTH_TO_NUM = { + jan:0, feb:1, mar:2, apr:3, may:4, jun:5, + jul:6, aug:7, sep:8, oct:9, nov:10, dec:11 +}; +var NUM_TO_MONTH = [ + 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec' +]; +var NUM_TO_DAY = [ + 'Sun','Mon','Tue','Wed','Thu','Fri','Sat' +]; + +var YEAR = /^(\d{2}|\d{4})$/; // 2 to 4 digits + +var MAX_TIME = 2147483647000; // 31-bit max +var MIN_TIME = 0; // 31-bit min + + +// RFC6265 S5.1.1 date parser: +function parseDate(str) { + if (!str) { + return; + } + + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + var tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + var hour = null; + var minutes = null; + var seconds = null; + var day = null; + var month = null; + var year = null; + + for (var i=0; i<tokens.length; i++) { + var token = tokens[i].trim(); + if (!token.length) { + continue; + } + + var result; + + /* 2.1. If the found-time flag is not set and the token matches the time + * production, set the found-time flag and set the hour-value, + * minute-value, and second-value to the numbers denoted by the digits in + * the date-token, respectively. Skip the remaining sub-steps and continue + * to the next date-token. + */ + if (seconds === null) { + result = TIME.exec(token); + if (result) { + hour = parseInt(result[1], 10); + minutes = parseInt(result[2], 10); + seconds = parseInt(result[3], 10); + /* RFC6265 S5.1.1.5: + * [fail if] the hour-value is greater than 23, the minute-value is + * greater than 59, or the second-value is greater than 59. + */ + if (hour > 23 || minutes > 59 || seconds > 59) { + return; + } + + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (day === null) { + result = DAY_OF_MONTH.exec(token); + if (result) { + day = parseInt(result, 10); + /* RFC6265 S5.1.1.5: + * [fail if] the day-of-month-value is less than 1 or greater than 31 + */ + if(day < 1 || day > 31) { + return; + } + continue; + } + } + + /* 2.3. If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = MONTH.exec(token); + if (result) { + month = MONTH_TO_NUM[result[1].toLowerCase()]; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the year + * production, set the found-year flag and set the year-value to the number + * denoted by the date-token. Skip the remaining sub-steps and continue to + * the next date-token. + */ + if (year === null) { + result = YEAR.exec(token); + if (result) { + year = parseInt(result[0], 10); + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000.
+ */ + if (70 <= year && year <= 99) { + year += 1900; + } else if (0 <= year && year <= 69) { + year += 2000; + } + + if (year < 1601) { + return; // 5. ... the year-value is less than 1601 + } + } + } + } + + if (seconds === null || day === null || month === null || year === null) { + return; // 5. ... at least one of the found-day-of-month, found-month, found- + // year, or found-time flags is not set, + } + + return new Date(Date.UTC(year, month, day, hour, minutes, seconds)); +} + +function formatDate(date) { + var d = date.getUTCDate(); d = d >= 10 ? d : '0'+d; + var h = date.getUTCHours(); h = h >= 10 ? h : '0'+h; + var m = date.getUTCMinutes(); m = m >= 10 ? m : '0'+m; + var s = date.getUTCSeconds(); s = s >= 10 ? s : '0'+s; + return NUM_TO_DAY[date.getUTCDay()] + ', ' + + d+' '+ NUM_TO_MONTH[date.getUTCMonth()] +' '+ date.getUTCFullYear() +' '+ + h+':'+m+':'+s+' GMT'; +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./,''); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * "The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* "All of the following [three] conditions hold:" (order adjusted from the RFC) */ + + /* "* The string is a host name (i.e., not an IP address)." */ + if (net.isIP(str)) { + return false; + } + + /* "* The domain string is a suffix of the string" */ + var idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // e.g "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { // it's not a suffix + return false; + } + + /* "* The last character of the string that is not included in the domain + * string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; + } + + return true; +} + + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0,1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + var rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." 
+ return path.slice(0, rightSlash); +} + + +function parse(str, options) { + if (!options || typeof options !== 'object') { + options = {}; + } + str = str.trim(); + + // We use a regex to parse the "name-value-pair" part of S5.2 + var firstSemi = str.indexOf(';'); // S5.2 step 1 + var pairRe = options.loose ? LOOSE_COOKIE_PAIR : COOKIE_PAIR; + var result = pairRe.exec(firstSemi === -1 ? str : str.substr(0,firstSemi)); + + // Rx satisfies the "the name string is empty" and "lacks a %x3D ("=")" + // constraints as well as trimming any whitespace. + if (!result) { + return; + } + + var c = new Cookie(); + if (result[1]) { + c.key = result[2].trim(); + } else { + c.key = ''; + } + c.value = result[3].trim(); + if (CONTROL_CHARS.test(c.key) || CONTROL_CHARS.test(c.value)) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + var unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. + */ + var cookie_avs = unparsed.split(';'); + while (cookie_avs.length) { + var av = cookie_avs.shift().trim(); + if (av.length === 0) { // happens if ";;" appears + continue; + } + var av_sep = av.indexOf('='); + var av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0,av_sep); + av_value = av.substr(av_sep+1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch(av_key) { + case 'expires': // S5.2.1 + if (av_value) { + var exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case 'max-age': // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + var delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case 'domain': // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + var domain = av_value.trim().replace(/^\./, ''); + if (domain) { + // "Convert the cookie-domain to lower case." 
+ c.domain = domain.toLowerCase(); + } + } + break; + + case 'path': // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case 'secure': // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case 'httponly': // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + var obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + var obj; + if (typeof str === 'string') { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + var c = new Cookie(); + for (var i=0; i 1) { + var lindex = path.lastIndexOf('/'); + if (lindex === 0) { + break; + } + path = path.substr(0,lindex); + permutations.push(path); + } + permutations.push('/'); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } + catch(err) { + // Silently swallow error + } + + return urlParse(url); +} + +function Cookie(options) { + options = options || {}; + + Object.keys(options).forEach(function(prop) { + if (Cookie.prototype.hasOwnProperty(prop) && + Cookie.prototype[prop] !== options[prop] && + prop.substr(0,1) !== '_') + { + this[prop] = options[prop]; + } + }, this); + + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, 'creationIndex', { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); +} + +Cookie.cookiesCreated = 0; // incremented each time a cookie is created + +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; + +Cookie.prototype.key = ""; +Cookie.prototype.value = ""; + +// the order in which the RFC has them: +Cookie.prototype.expires = "Infinity"; // coerces to literal Infinity +Cookie.prototype.maxAge = null; // takes precedence over expires for TTL +Cookie.prototype.domain = null; +Cookie.prototype.path = null; +Cookie.prototype.secure = false; +Cookie.prototype.httpOnly = false; +Cookie.prototype.extensions = null; + +// set by the CookieJar: +Cookie.prototype.hostOnly = null; // boolean when set +Cookie.prototype.pathIsDefault = null; // boolean when set +Cookie.prototype.creation = null; // Date when set; defaulted by Cookie.parse +Cookie.prototype.lastAccessed = null; // Date when set +Object.defineProperty(Cookie.prototype, 'creationIndex', { + configurable: true, + enumerable: false, + writable: true, + value: 0 +}); + +Cookie.serializableProperties = Object.keys(Cookie.prototype) + .filter(function(prop) { + return !( + Cookie.prototype[prop] instanceof Function || + prop === 'creationIndex' || + prop.substr(0,1) === '_' + ); + }); + +Cookie.prototype.inspect = function inspect() { + var now = Date.now(); + return 'Cookie="'+this.toString() + + '; hostOnly='+(this.hostOnly != null ? this.hostOnly : '?') + + '; aAge='+(this.lastAccessed ? (now-this.lastAccessed.getTime())+'ms' : '?') + + '; cAge='+(this.creation ? (now-this.creation.getTime())+'ms' : '?') + + '"'; +}; + +Cookie.prototype.toJSON = function() { + var obj = {}; + + var props = Cookie.serializableProperties; + for (var i=0; i suffixLen) { + var publicSuffix = parts.slice(0,suffixLen+1).reverse().join('.'); + return converted ? 
punycode.toUnicode(publicSuffix) : publicSuffix; + } + + return null; +}; + +// The following generated structure is used under the MPL version 2.0 +// See public-suffix.txt for more information + +var index = module.exports.index = Object.freeze( +{"ac":true,"com.ac":true,"edu.ac":true,"gov.ac":true,"net.ac":true,"mil.ac":true,"org.ac":true,"ad":true,"nom.ad":true,"ae":true,"co.ae":true,"net.ae":true,"org.ae":true,"sch.ae":true,"ac.ae":true,"gov.ae":true,"mil.ae":true,"aero":true,"accident-investigation.aero":true,"accident-prevention.aero":true,"aerobatic.aero":true,"aeroclub.aero":true,"aerodrome.aero":true,"agents.aero":true,"aircraft.aero":true,"airline.aero":true,"airport.aero":true,"air-surveillance.aero":true,"airtraffic.aero":true,"air-traffic-control.aero":true,"ambulance.aero":true,"amusement.aero":true,"association.aero":true,"author.aero":true,"ballooning.aero":true,"broker.aero":true,"caa.aero":true,"cargo.aero":true,"catering.aero":true,"certification.aero":true,"championship.aero":true,"charter.aero":true,"civilaviation.aero":true,"club.aero":true,"conference.aero":true,"consultant.aero":true,"consulting.aero":true,"control.aero":true,"council.aero":true,"crew.aero":true,"design.aero":true,"dgca.aero":true,"educator.aero":true,"emergency.aero":true,"engine.aero":true,"engineer.aero":true,"entertainment.aero":true,"equipment.aero":true,"exchange.aero":true,"express.aero":true,"federation.aero":true,"flight.aero":true,"freight.aero":true,"fuel.aero":true,"gliding.aero":true,"government.aero":true,"groundhandling.aero":true,"group.aero":true,"hanggliding.aero":true,"homebuilt.aero":true,"insurance.aero":true,"journal.aero":true,"journalist.aero":true,"leasing.aero":true,"logistics.aero":true,"magazine.aero":true,"maintenance.aero":true,"marketplace.aero":true,"media.aero":true,"microlight.aero":true,"modelling.aero":true,"navigation.aero":true,"parachuting.aero":true,"paragliding.aero":true,"passenger-association.aero":true,"pilot.aero":true,"press.aero":true,"production.aero":true,"recreation.aero":true,"repbody.aero":true,"res.aero":true,"research.aero":true,"rotorcraft.aero":true,"safety.aero":true,"scientist.aero":true,"services.aero":true,"show.aero":true,"skydiving.aero":true,"software.aero":true,"student.aero":true,"taxi.aero":true,"trader.aero":true,"trading.aero":true,"trainer.aero":true,"union.aero":true,"workinggroup.aero":true,"works.aero":true,"af":true,"gov.af":true,"com.af":true,"org.af":true,"net.af":true,"edu.af":true,"ag":true,"com.ag":true,"org.ag":true,"net.ag":true,"co.ag":true,"nom.ag":true,"ai":true,"off.ai":true,"com.ai":true,"net.ai":true,"org.ai":true,"al":true,"com.al":true,"edu.al":true,"gov.al":true,"mil.al":true,"net.al":true,"org.al":true,"am":true,"an":true,"com.an":true,"net.an":true,"org.an":true,"edu.an":true,"ao":true,"ed.ao":true,"gv.ao":true,"og.ao":true,"co.ao":true,"pb.ao":true,"it.ao":true,"aq":true,"ar":true,"com.ar":true,"edu.ar":true,"gob.ar":true,"gov.ar":true,"int.ar":true,"mil.ar":true,"net.ar":true,"org.ar":true,"tur.ar":true,"arpa":true,"e164.arpa":true,"in-addr.arpa":true,"ip6.arpa":true,"iris.arpa":true,"uri.arpa":true,"urn.arpa":true,"as":true,"gov.as":true,"asia":true,"at":true,"ac.at":true,"co.at":true,"gv.at":true,"or.at":true,"au":true,"com.au":true,"net.au":true,"org.au":true,"edu.au":true,"gov.au":true,"asn.au":true,"id.au":true,"info.au":true,"conf.au":true,"oz.au":true,"act.au":true,"nsw.au":true,"nt.au":true,"qld.au":true,"sa.au":true,"tas.au":true,"vic.au":true,"wa.au":true,"act.edu.au":true,"nsw.edu.au":true,"nt.edu
.au":true,"qld.edu.au":true,"sa.edu.au":true,"tas.edu.au":true,"vic.edu.au":true,"wa.edu.au":true,"qld.gov.au":true,"sa.gov.au":true,"tas.gov.au":true,"vic.gov.au":true,"wa.gov.au":true,"aw":true,"com.aw":true,"ax":true,"az":true,"com.az":true,"net.az":true,"int.az":true,"gov.az":true,"org.az":true,"edu.az":true,"info.az":true,"pp.az":true,"mil.az":true,"name.az":true,"pro.az":true,"biz.az":true,"ba":true,"org.ba":true,"net.ba":true,"edu.ba":true,"gov.ba":true,"mil.ba":true,"unsa.ba":true,"unbi.ba":true,"co.ba":true,"com.ba":true,"rs.ba":true,"bb":true,"biz.bb":true,"co.bb":true,"com.bb":true,"edu.bb":true,"gov.bb":true,"info.bb":true,"net.bb":true,"org.bb":true,"store.bb":true,"tv.bb":true,"*.bd":true,"be":true,"ac.be":true,"bf":true,"gov.bf":true,"bg":true,"a.bg":true,"b.bg":true,"c.bg":true,"d.bg":true,"e.bg":true,"f.bg":true,"g.bg":true,"h.bg":true,"i.bg":true,"j.bg":true,"k.bg":true,"l.bg":true,"m.bg":true,"n.bg":true,"o.bg":true,"p.bg":true,"q.bg":true,"r.bg":true,"s.bg":true,"t.bg":true,"u.bg":true,"v.bg":true,"w.bg":true,"x.bg":true,"y.bg":true,"z.bg":true,"0.bg":true,"1.bg":true,"2.bg":true,"3.bg":true,"4.bg":true,"5.bg":true,"6.bg":true,"7.bg":true,"8.bg":true,"9.bg":true,"bh":true,"com.bh":true,"edu.bh":true,"net.bh":true,"org.bh":true,"gov.bh":true,"bi":true,"co.bi":true,"com.bi":true,"edu.bi":true,"or.bi":true,"org.bi":true,"biz":true,"bj":true,"asso.bj":true,"barreau.bj":true,"gouv.bj":true,"bm":true,"com.bm":true,"edu.bm":true,"gov.bm":true,"net.bm":true,"org.bm":true,"*.bn":true,"bo":true,"com.bo":true,"edu.bo":true,"gov.bo":true,"gob.bo":true,"int.bo":true,"org.bo":true,"net.bo":true,"mil.bo":true,"tv.bo":true,"br":true,"adm.br":true,"adv.br":true,"agr.br":true,"am.br":true,"arq.br":true,"art.br":true,"ato.br":true,"b.br":true,"bio.br":true,"blog.br":true,"bmd.br":true,"cim.br":true,"cng.br":true,"cnt.br":true,"com.br":true,"coop.br":true,"ecn.br":true,"eco.br":true,"edu.br":true,"emp.br":true,"eng.br":true,"esp.br":true,"etc.br":true,"eti.br":true,"far.br":true,"flog.br":true,"fm.br":true,"fnd.br":true,"fot.br":true,"fst.br":true,"g12.br":true,"ggf.br":true,"gov.br":true,"imb.br":true,"ind.br":true,"inf.br":true,"jor.br":true,"jus.br":true,"leg.br":true,"lel.br":true,"mat.br":true,"med.br":true,"mil.br":true,"mp.br":true,"mus.br":true,"net.br":true,"*.nom.br":true,"not.br":true,"ntr.br":true,"odo.br":true,"org.br":true,"ppg.br":true,"pro.br":true,"psc.br":true,"psi.br":true,"qsl.br":true,"radio.br":true,"rec.br":true,"slg.br":true,"srv.br":true,"taxi.br":true,"teo.br":true,"tmp.br":true,"trd.br":true,"tur.br":true,"tv.br":true,"vet.br":true,"vlog.br":true,"wiki.br":true,"zlg.br":true,"bs":true,"com.bs":true,"net.bs":true,"org.bs":true,"edu.bs":true,"gov.bs":true,"bt":true,"com.bt":true,"edu.bt":true,"gov.bt":true,"net.bt":true,"org.bt":true,"bv":true,"bw":true,"co.bw":true,"org.bw":true,"by":true,"gov.by":true,"mil.by":true,"com.by":true,"of.by":true,"bz":true,"com.bz":true,"net.bz":true,"org.bz":true,"edu.bz":true,"gov.bz":true,"ca":true,"ab.ca":true,"bc.ca":true,"mb.ca":true,"nb.ca":true,"nf.ca":true,"nl.ca":true,"ns.ca":true,"nt.ca":true,"nu.ca":true,"on.ca":true,"pe.ca":true,"qc.ca":true,"sk.ca":true,"yk.ca":true,"gc.ca":true,"cat":true,"cc":true,"cd":true,"gov.cd":true,"cf":true,"cg":true,"ch":true,"ci":true,"org.ci":true,"or.ci":true,"com.ci":true,"co.ci":true,"edu.ci":true,"ed.ci":true,"ac.ci":true,"net.ci":true,"go.ci":true,"asso.ci":true,"xn--aroport-bya.ci":true,"int.ci":true,"presse.ci":true,"md.ci":true,"gouv.ci":true,"*.ck":true,"www.ck":false,"cl":true,"gov.c
l":true,"gob.cl":true,"co.cl":true,"mil.cl":true,"cm":true,"co.cm":true,"com.cm":true,"gov.cm":true,"net.cm":true,"cn":true,"ac.cn":true,"com.cn":true,"edu.cn":true,"gov.cn":true,"net.cn":true,"org.cn":true,"mil.cn":true,"xn--55qx5d.cn":true,"xn--io0a7i.cn":true,"xn--od0alg.cn":true,"ah.cn":true,"bj.cn":true,"cq.cn":true,"fj.cn":true,"gd.cn":true,"gs.cn":true,"gz.cn":true,"gx.cn":true,"ha.cn":true,"hb.cn":true,"he.cn":true,"hi.cn":true,"hl.cn":true,"hn.cn":true,"jl.cn":true,"js.cn":true,"jx.cn":true,"ln.cn":true,"nm.cn":true,"nx.cn":true,"qh.cn":true,"sc.cn":true,"sd.cn":true,"sh.cn":true,"sn.cn":true,"sx.cn":true,"tj.cn":true,"xj.cn":true,"xz.cn":true,"yn.cn":true,"zj.cn":true,"hk.cn":true,"mo.cn":true,"tw.cn":true,"co":true,"arts.co":true,"com.co":true,"edu.co":true,"firm.co":true,"gov.co":true,"info.co":true,"int.co":true,"mil.co":true,"net.co":true,"nom.co":true,"org.co":true,"rec.co":true,"web.co":true,"com":true,"coop":true,"cr":true,"ac.cr":true,"co.cr":true,"ed.cr":true,"fi.cr":true,"go.cr":true,"or.cr":true,"sa.cr":true,"cu":true,"com.cu":true,"edu.cu":true,"org.cu":true,"net.cu":true,"gov.cu":true,"inf.cu":true,"cv":true,"cw":true,"com.cw":true,"edu.cw":true,"net.cw":true,"org.cw":true,"cx":true,"gov.cx":true,"ac.cy":true,"biz.cy":true,"com.cy":true,"ekloges.cy":true,"gov.cy":true,"ltd.cy":true,"name.cy":true,"net.cy":true,"org.cy":true,"parliament.cy":true,"press.cy":true,"pro.cy":true,"tm.cy":true,"cz":true,"de":true,"dj":true,"dk":true,"dm":true,"com.dm":true,"net.dm":true,"org.dm":true,"edu.dm":true,"gov.dm":true,"do":true,"art.do":true,"com.do":true,"edu.do":true,"gob.do":true,"gov.do":true,"mil.do":true,"net.do":true,"org.do":true,"sld.do":true,"web.do":true,"dz":true,"com.dz":true,"org.dz":true,"net.dz":true,"gov.dz":true,"edu.dz":true,"asso.dz":true,"pol.dz":true,"art.dz":true,"ec":true,"com.ec":true,"info.ec":true,"net.ec":true,"fin.ec":true,"k12.ec":true,"med.ec":true,"pro.ec":true,"org.ec":true,"edu.ec":true,"gov.ec":true,"gob.ec":true,"mil.ec":true,"edu":true,"ee":true,"edu.ee":true,"gov.ee":true,"riik.ee":true,"lib.ee":true,"med.ee":true,"com.ee":true,"pri.ee":true,"aip.ee":true,"org.ee":true,"fie.ee":true,"eg":true,"com.eg":true,"edu.eg":true,"eun.eg":true,"gov.eg":true,"mil.eg":true,"name.eg":true,"net.eg":true,"org.eg":true,"sci.eg":true,"*.er":true,"es":true,"com.es":true,"nom.es":true,"org.es":true,"gob.es":true,"edu.es":true,"et":true,"com.et":true,"gov.et":true,"org.et":true,"edu.et":true,"biz.et":true,"name.et":true,"info.et":true,"net.et":true,"eu":true,"fi":true,"aland.fi":true,"*.fj":true,"*.fk":true,"fm":true,"fo":true,"fr":true,"com.fr":true,"asso.fr":true,"nom.fr":true,"prd.fr":true,"presse.fr":true,"tm.fr":true,"aeroport.fr":true,"assedic.fr":true,"avocat.fr":true,"avoues.fr":true,"cci.fr":true,"chambagri.fr":true,"chirurgiens-dentistes.fr":true,"experts-comptables.fr":true,"geometre-expert.fr":true,"gouv.fr":true,"greta.fr":true,"huissier-justice.fr":true,"medecin.fr":true,"notaires.fr":true,"pharmacien.fr":true,"port.fr":true,"veterinaire.fr":true,"ga":true,"gb":true,"gd":true,"ge":true,"com.ge":true,"edu.ge":true,"gov.ge":true,"org.ge":true,"mil.ge":true,"net.ge":true,"pvt.ge":true,"gf":true,"gg":true,"co.gg":true,"net.gg":true,"org.gg":true,"gh":true,"com.gh":true,"edu.gh":true,"gov.gh":true,"org.gh":true,"mil.gh":true,"gi":true,"com.gi":true,"ltd.gi":true,"gov.gi":true,"mod.gi":true,"edu.gi":true,"org.gi":true,"gl":true,"co.gl":true,"com.gl":true,"edu.gl":true,"net.gl":true,"org.gl":true,"gm":true,"gn":true,"ac.gn":true,"com.gn":true,"edu.gn":true,
"gov.gn":true,"org.gn":true,"net.gn":true,"gov":true,"gp":true,"com.gp":true,"net.gp":true,"mobi.gp":true,"edu.gp":true,"org.gp":true,"asso.gp":true,"gq":true,"gr":true,"com.gr":true,"edu.gr":true,"net.gr":true,"org.gr":true,"gov.gr":true,"gs":true,"gt":true,"com.gt":true,"edu.gt":true,"gob.gt":true,"ind.gt":true,"mil.gt":true,"net.gt":true,"org.gt":true,"*.gu":true,"gw":true,"gy":true,"co.gy":true,"com.gy":true,"net.gy":true,"hk":true,"com.hk":true,"edu.hk":true,"gov.hk":true,"idv.hk":true,"net.hk":true,"org.hk":true,"xn--55qx5d.hk":true,"xn--wcvs22d.hk":true,"xn--lcvr32d.hk":true,"xn--mxtq1m.hk":true,"xn--gmqw5a.hk":true,"xn--ciqpn.hk":true,"xn--gmq050i.hk":true,"xn--zf0avx.hk":true,"xn--io0a7i.hk":true,"xn--mk0axi.hk":true,"xn--od0alg.hk":true,"xn--od0aq3b.hk":true,"xn--tn0ag.hk":true,"xn--uc0atv.hk":true,"xn--uc0ay4a.hk":true,"hm":true,"hn":true,"com.hn":true,"edu.hn":true,"org.hn":true,"net.hn":true,"mil.hn":true,"gob.hn":true,"hr":true,"iz.hr":true,"from.hr":true,"name.hr":true,"com.hr":true,"ht":true,"com.ht":true,"shop.ht":true,"firm.ht":true,"info.ht":true,"adult.ht":true,"net.ht":true,"pro.ht":true,"org.ht":true,"med.ht":true,"art.ht":true,"coop.ht":true,"pol.ht":true,"asso.ht":true,"edu.ht":true,"rel.ht":true,"gouv.ht":true,"perso.ht":true,"hu":true,"co.hu":true,"info.hu":true,"org.hu":true,"priv.hu":true,"sport.hu":true,"tm.hu":true,"2000.hu":true,"agrar.hu":true,"bolt.hu":true,"casino.hu":true,"city.hu":true,"erotica.hu":true,"erotika.hu":true,"film.hu":true,"forum.hu":true,"games.hu":true,"hotel.hu":true,"ingatlan.hu":true,"jogasz.hu":true,"konyvelo.hu":true,"lakas.hu":true,"media.hu":true,"news.hu":true,"reklam.hu":true,"sex.hu":true,"shop.hu":true,"suli.hu":true,"szex.hu":true,"tozsde.hu":true,"utazas.hu":true,"video.hu":true,"id":true,"ac.id":true,"biz.id":true,"co.id":true,"desa.id":true,"go.id":true,"mil.id":true,"my.id":true,"net.id":true,"or.id":true,"sch.id":true,"web.id":true,"ie":true,"gov.ie":true,"il":true,"ac.il":true,"co.il":true,"gov.il":true,"idf.il":true,"k12.il":true,"muni.il":true,"net.il":true,"org.il":true,"im":true,"ac.im":true,"co.im":true,"com.im":true,"ltd.co.im":true,"net.im":true,"org.im":true,"plc.co.im":true,"tt.im":true,"tv.im":true,"in":true,"co.in":true,"firm.in":true,"net.in":true,"org.in":true,"gen.in":true,"ind.in":true,"nic.in":true,"ac.in":true,"edu.in":true,"res.in":true,"gov.in":true,"mil.in":true,"info":true,"int":true,"eu.int":true,"io":true,"com.io":true,"iq":true,"gov.iq":true,"edu.iq":true,"mil.iq":true,"com.iq":true,"org.iq":true,"net.iq":true,"ir":true,"ac.ir":true,"co.ir":true,"gov.ir":true,"id.ir":true,"net.ir":true,"org.ir":true,"sch.ir":true,"xn--mgba3a4f16a.ir":true,"xn--mgba3a4fra.ir":true,"is":true,"net.is":true,"com.is":true,"edu.is":true,"gov.is":true,"org.is":true,"int.is":true,"it":true,"gov.it":true,"edu.it":true,"abr.it":true,"abruzzo.it":true,"aosta-valley.it":true,"aostavalley.it":true,"bas.it":true,"basilicata.it":true,"cal.it":true,"calabria.it":true,"cam.it":true,"campania.it":true,"emilia-romagna.it":true,"emiliaromagna.it":true,"emr.it":true,"friuli-v-giulia.it":true,"friuli-ve-giulia.it":true,"friuli-vegiulia.it":true,"friuli-venezia-giulia.it":true,"friuli-veneziagiulia.it":true,"friuli-vgiulia.it":true,"friuliv-giulia.it":true,"friulive-giulia.it":true,"friulivegiulia.it":true,"friulivenezia-giulia.it":true,"friuliveneziagiulia.it":true,"friulivgiulia.it":true,"fvg.it":true,"laz.it":true,"lazio.it":true,"lig.it":true,"liguria.it":true,"lom.it":true,"lombardia.it":true,"lombardy.it":true,"lucania.it":true,"mar
.it":true,"marche.it":true,"mol.it":true,"molise.it":true,"piedmont.it":true,"piemonte.it":true,"pmn.it":true,"pug.it":true,"puglia.it":true,"sar.it":true,"sardegna.it":true,"sardinia.it":true,"sic.it":true,"sicilia.it":true,"sicily.it":true,"taa.it":true,"tos.it":true,"toscana.it":true,"trentino-a-adige.it":true,"trentino-aadige.it":true,"trentino-alto-adige.it":true,"trentino-altoadige.it":true,"trentino-s-tirol.it":true,"trentino-stirol.it":true,"trentino-sud-tirol.it":true,"trentino-sudtirol.it":true,"trentino-sued-tirol.it":true,"trentino-suedtirol.it":true,"trentinoa-adige.it":true,"trentinoaadige.it":true,"trentinoalto-adige.it":true,"trentinoaltoadige.it":true,"trentinos-tirol.it":true,"trentinostirol.it":true,"trentinosud-tirol.it":true,"trentinosudtirol.it":true,"trentinosued-tirol.it":true,"trentinosuedtirol.it":true,"tuscany.it":true,"umb.it":true,"umbria.it":true,"val-d-aosta.it":true,"val-daosta.it":true,"vald-aosta.it":true,"valdaosta.it":true,"valle-aosta.it":true,"valle-d-aosta.it":true,"valle-daosta.it":true,"valleaosta.it":true,"valled-aosta.it":true,"valledaosta.it":true,"vallee-aoste.it":true,"valleeaoste.it":true,"vao.it":true,"vda.it":true,"ven.it":true,"veneto.it":true,"ag.it":true,"agrigento.it":true,"al.it":true,"alessandria.it":true,"alto-adige.it":true,"altoadige.it":true,"an.it":true,"ancona.it":true,"andria-barletta-trani.it":true,"andria-trani-barletta.it":true,"andriabarlettatrani.it":true,"andriatranibarletta.it":true,"ao.it":true,"aosta.it":true,"aoste.it":true,"ap.it":true,"aq.it":true,"aquila.it":true,"ar.it":true,"arezzo.it":true,"ascoli-piceno.it":true,"ascolipiceno.it":true,"asti.it":true,"at.it":true,"av.it":true,"avellino.it":true,"ba.it":true,"balsan.it":true,"bari.it":true,"barletta-trani-andria.it":true,"barlettatraniandria.it":true,"belluno.it":true,"benevento.it":true,"bergamo.it":true,"bg.it":true,"bi.it":true,"biella.it":true,"bl.it":true,"bn.it":true,"bo.it":true,"bologna.it":true,"bolzano.it":true,"bozen.it":true,"br.it":true,"brescia.it":true,"brindisi.it":true,"bs.it":true,"bt.it":true,"bz.it":true,"ca.it":true,"cagliari.it":true,"caltanissetta.it":true,"campidano-medio.it":true,"campidanomedio.it":true,"campobasso.it":true,"carbonia-iglesias.it":true,"carboniaiglesias.it":true,"carrara-massa.it":true,"carraramassa.it":true,"caserta.it":true,"catania.it":true,"catanzaro.it":true,"cb.it":true,"ce.it":true,"cesena-forli.it":true,"cesenaforli.it":true,"ch.it":true,"chieti.it":true,"ci.it":true,"cl.it":true,"cn.it":true,"co.it":true,"como.it":true,"cosenza.it":true,"cr.it":true,"cremona.it":true,"crotone.it":true,"cs.it":true,"ct.it":true,"cuneo.it":true,"cz.it":true,"dell-ogliastra.it":true,"dellogliastra.it":true,"en.it":true,"enna.it":true,"fc.it":true,"fe.it":true,"fermo.it":true,"ferrara.it":true,"fg.it":true,"fi.it":true,"firenze.it":true,"florence.it":true,"fm.it":true,"foggia.it":true,"forli-cesena.it":true,"forlicesena.it":true,"fr.it":true,"frosinone.it":true,"ge.it":true,"genoa.it":true,"genova.it":true,"go.it":true,"gorizia.it":true,"gr.it":true,"grosseto.it":true,"iglesias-carbonia.it":true,"iglesiascarbonia.it":true,"im.it":true,"imperia.it":true,"is.it":true,"isernia.it":true,"kr.it":true,"la-spezia.it":true,"laquila.it":true,"laspezia.it":true,"latina.it":true,"lc.it":true,"le.it":true,"lecce.it":true,"lecco.it":true,"li.it":true,"livorno.it":true,"lo.it":true,"lodi.it":true,"lt.it":true,"lu.it":true,"lucca.it":true,"macerata.it":true,"mantova.it":true,"massa-carrara.it":true,"massacarrara.it":true,"matera.it":true,"mb.it":true
,"mc.it":true,"me.it":true,"medio-campidano.it":true,"mediocampidano.it":true,"messina.it":true,"mi.it":true,"milan.it":true,"milano.it":true,"mn.it":true,"mo.it":true,"modena.it":true,"monza-brianza.it":true,"monza-e-della-brianza.it":true,"monza.it":true,"monzabrianza.it":true,"monzaebrianza.it":true,"monzaedellabrianza.it":true,"ms.it":true,"mt.it":true,"na.it":true,"naples.it":true,"napoli.it":true,"no.it":true,"novara.it":true,"nu.it":true,"nuoro.it":true,"og.it":true,"ogliastra.it":true,"olbia-tempio.it":true,"olbiatempio.it":true,"or.it":true,"oristano.it":true,"ot.it":true,"pa.it":true,"padova.it":true,"padua.it":true,"palermo.it":true,"parma.it":true,"pavia.it":true,"pc.it":true,"pd.it":true,"pe.it":true,"perugia.it":true,"pesaro-urbino.it":true,"pesarourbino.it":true,"pescara.it":true,"pg.it":true,"pi.it":true,"piacenza.it":true,"pisa.it":true,"pistoia.it":true,"pn.it":true,"po.it":true,"pordenone.it":true,"potenza.it":true,"pr.it":true,"prato.it":true,"pt.it":true,"pu.it":true,"pv.it":true,"pz.it":true,"ra.it":true,"ragusa.it":true,"ravenna.it":true,"rc.it":true,"re.it":true,"reggio-calabria.it":true,"reggio-emilia.it":true,"reggiocalabria.it":true,"reggioemilia.it":true,"rg.it":true,"ri.it":true,"rieti.it":true,"rimini.it":true,"rm.it":true,"rn.it":true,"ro.it":true,"roma.it":true,"rome.it":true,"rovigo.it":true,"sa.it":true,"salerno.it":true,"sassari.it":true,"savona.it":true,"si.it":true,"siena.it":true,"siracusa.it":true,"so.it":true,"sondrio.it":true,"sp.it":true,"sr.it":true,"ss.it":true,"suedtirol.it":true,"sv.it":true,"ta.it":true,"taranto.it":true,"te.it":true,"tempio-olbia.it":true,"tempioolbia.it":true,"teramo.it":true,"terni.it":true,"tn.it":true,"to.it":true,"torino.it":true,"tp.it":true,"tr.it":true,"trani-andria-barletta.it":true,"trani-barletta-andria.it":true,"traniandriabarletta.it":true,"tranibarlettaandria.it":true,"trapani.it":true,"trentino.it":true,"trento.it":true,"treviso.it":true,"trieste.it":true,"ts.it":true,"turin.it":true,"tv.it":true,"ud.it":true,"udine.it":true,"urbino-pesaro.it":true,"urbinopesaro.it":true,"va.it":true,"varese.it":true,"vb.it":true,"vc.it":true,"ve.it":true,"venezia.it":true,"venice.it":true,"verbania.it":true,"vercelli.it":true,"verona.it":true,"vi.it":true,"vibo-valentia.it":true,"vibovalentia.it":true,"vicenza.it":true,"viterbo.it":true,"vr.it":true,"vs.it":true,"vt.it":true,"vv.it":true,"je":true,"co.je":true,"net.je":true,"org.je":true,"*.jm":true,"jo":true,"com.jo":true,"org.jo":true,"net.jo":true,"edu.jo":true,"sch.jo":true,"gov.jo":true,"mil.jo":true,"name.jo":true,"jobs":true,"jp":true,"ac.jp":true,"ad.jp":true,"co.jp":true,"ed.jp":true,"go.jp":true,"gr.jp":true,"lg.jp":true,"ne.jp":true,"or.jp":true,"aichi.jp":true,"akita.jp":true,"aomori.jp":true,"chiba.jp":true,"ehime.jp":true,"fukui.jp":true,"fukuoka.jp":true,"fukushima.jp":true,"gifu.jp":true,"gunma.jp":true,"hiroshima.jp":true,"hokkaido.jp":true,"hyogo.jp":true,"ibaraki.jp":true,"ishikawa.jp":true,"iwate.jp":true,"kagawa.jp":true,"kagoshima.jp":true,"kanagawa.jp":true,"kochi.jp":true,"kumamoto.jp":true,"kyoto.jp":true,"mie.jp":true,"miyagi.jp":true,"miyazaki.jp":true,"nagano.jp":true,"nagasaki.jp":true,"nara.jp":true,"niigata.jp":true,"oita.jp":true,"okayama.jp":true,"okinawa.jp":true,"osaka.jp":true,"saga.jp":true,"saitama.jp":true,"shiga.jp":true,"shimane.jp":true,"shizuoka.jp":true,"tochigi.jp":true,"tokushima.jp":true,"tokyo.jp":true,"tottori.jp":true,"toyama.jp":true,"wakayama.jp":true,"yamagata.jp":true,"yamaguchi.jp":true,"yamanashi.jp":true,"xn--4pvxs.jp":tr
ue,"xn--vgu402c.jp":true,"xn--c3s14m.jp":true,"xn--f6qx53a.jp":true,"xn--8pvr4u.jp":true,"xn--uist22h.jp":true,"xn--djrs72d6uy.jp":true,"xn--mkru45i.jp":true,"xn--0trq7p7nn.jp":true,"xn--8ltr62k.jp":true,"xn--2m4a15e.jp":true,"xn--efvn9s.jp":true,"xn--32vp30h.jp":true,"xn--4it797k.jp":true,"xn--1lqs71d.jp":true,"xn--5rtp49c.jp":true,"xn--5js045d.jp":true,"xn--ehqz56n.jp":true,"xn--1lqs03n.jp":true,"xn--qqqt11m.jp":true,"xn--kbrq7o.jp":true,"xn--pssu33l.jp":true,"xn--ntsq17g.jp":true,"xn--uisz3g.jp":true,"xn--6btw5a.jp":true,"xn--1ctwo.jp":true,"xn--6orx2r.jp":true,"xn--rht61e.jp":true,"xn--rht27z.jp":true,"xn--djty4k.jp":true,"xn--nit225k.jp":true,"xn--rht3d.jp":true,"xn--klty5x.jp":true,"xn--kltx9a.jp":true,"xn--kltp7d.jp":true,"xn--uuwu58a.jp":true,"xn--zbx025d.jp":true,"xn--ntso0iqx3a.jp":true,"xn--elqq16h.jp":true,"xn--4it168d.jp":true,"xn--klt787d.jp":true,"xn--rny31h.jp":true,"xn--7t0a264c.jp":true,"xn--5rtq34k.jp":true,"xn--k7yn95e.jp":true,"xn--tor131o.jp":true,"xn--d5qv7z876c.jp":true,"*.kawasaki.jp":true,"*.kitakyushu.jp":true,"*.kobe.jp":true,"*.nagoya.jp":true,"*.sapporo.jp":true,"*.sendai.jp":true,"*.yokohama.jp":true,"city.kawasaki.jp":false,"city.kitakyushu.jp":false,"city.kobe.jp":false,"city.nagoya.jp":false,"city.sapporo.jp":false,"city.sendai.jp":false,"city.yokohama.jp":false,"aisai.aichi.jp":true,"ama.aichi.jp":true,"anjo.aichi.jp":true,"asuke.aichi.jp":true,"chiryu.aichi.jp":true,"chita.aichi.jp":true,"fuso.aichi.jp":true,"gamagori.aichi.jp":true,"handa.aichi.jp":true,"hazu.aichi.jp":true,"hekinan.aichi.jp":true,"higashiura.aichi.jp":true,"ichinomiya.aichi.jp":true,"inazawa.aichi.jp":true,"inuyama.aichi.jp":true,"isshiki.aichi.jp":true,"iwakura.aichi.jp":true,"kanie.aichi.jp":true,"kariya.aichi.jp":true,"kasugai.aichi.jp":true,"kira.aichi.jp":true,"kiyosu.aichi.jp":true,"komaki.aichi.jp":true,"konan.aichi.jp":true,"kota.aichi.jp":true,"mihama.aichi.jp":true,"miyoshi.aichi.jp":true,"nishio.aichi.jp":true,"nisshin.aichi.jp":true,"obu.aichi.jp":true,"oguchi.aichi.jp":true,"oharu.aichi.jp":true,"okazaki.aichi.jp":true,"owariasahi.aichi.jp":true,"seto.aichi.jp":true,"shikatsu.aichi.jp":true,"shinshiro.aichi.jp":true,"shitara.aichi.jp":true,"tahara.aichi.jp":true,"takahama.aichi.jp":true,"tobishima.aichi.jp":true,"toei.aichi.jp":true,"togo.aichi.jp":true,"tokai.aichi.jp":true,"tokoname.aichi.jp":true,"toyoake.aichi.jp":true,"toyohashi.aichi.jp":true,"toyokawa.aichi.jp":true,"toyone.aichi.jp":true,"toyota.aichi.jp":true,"tsushima.aichi.jp":true,"yatomi.aichi.jp":true,"akita.akita.jp":true,"daisen.akita.jp":true,"fujisato.akita.jp":true,"gojome.akita.jp":true,"hachirogata.akita.jp":true,"happou.akita.jp":true,"higashinaruse.akita.jp":true,"honjo.akita.jp":true,"honjyo.akita.jp":true,"ikawa.akita.jp":true,"kamikoani.akita.jp":true,"kamioka.akita.jp":true,"katagami.akita.jp":true,"kazuno.akita.jp":true,"kitaakita.akita.jp":true,"kosaka.akita.jp":true,"kyowa.akita.jp":true,"misato.akita.jp":true,"mitane.akita.jp":true,"moriyoshi.akita.jp":true,"nikaho.akita.jp":true,"noshiro.akita.jp":true,"odate.akita.jp":true,"oga.akita.jp":true,"ogata.akita.jp":true,"semboku.akita.jp":true,"yokote.akita.jp":true,"yurihonjo.akita.jp":true,"aomori.aomori.jp":true,"gonohe.aomori.jp":true,"hachinohe.aomori.jp":true,"hashikami.aomori.jp":true,"hiranai.aomori.jp":true,"hirosaki.aomori.jp":true,"itayanagi.aomori.jp":true,"kuroishi.aomori.jp":true,"misawa.aomori.jp":true,"mutsu.aomori.jp":true,"nakadomari.aomori.jp":true,"noheji.aomori.jp":true,"oirase.aomori.jp":true,"owani.aomori.jp":true,"rokunohe.
aomori.jp":true,"sannohe.aomori.jp":true,"shichinohe.aomori.jp":true,"shingo.aomori.jp":true,"takko.aomori.jp":true,"towada.aomori.jp":true,"tsugaru.aomori.jp":true,"tsuruta.aomori.jp":true,"abiko.chiba.jp":true,"asahi.chiba.jp":true,"chonan.chiba.jp":true,"chosei.chiba.jp":true,"choshi.chiba.jp":true,"chuo.chiba.jp":true,"funabashi.chiba.jp":true,"futtsu.chiba.jp":true,"hanamigawa.chiba.jp":true,"ichihara.chiba.jp":true,"ichikawa.chiba.jp":true,"ichinomiya.chiba.jp":true,"inzai.chiba.jp":true,"isumi.chiba.jp":true,"kamagaya.chiba.jp":true,"kamogawa.chiba.jp":true,"kashiwa.chiba.jp":true,"katori.chiba.jp":true,"katsuura.chiba.jp":true,"kimitsu.chiba.jp":true,"kisarazu.chiba.jp":true,"kozaki.chiba.jp":true,"kujukuri.chiba.jp":true,"kyonan.chiba.jp":true,"matsudo.chiba.jp":true,"midori.chiba.jp":true,"mihama.chiba.jp":true,"minamiboso.chiba.jp":true,"mobara.chiba.jp":true,"mutsuzawa.chiba.jp":true,"nagara.chiba.jp":true,"nagareyama.chiba.jp":true,"narashino.chiba.jp":true,"narita.chiba.jp":true,"noda.chiba.jp":true,"oamishirasato.chiba.jp":true,"omigawa.chiba.jp":true,"onjuku.chiba.jp":true,"otaki.chiba.jp":true,"sakae.chiba.jp":true,"sakura.chiba.jp":true,"shimofusa.chiba.jp":true,"shirako.chiba.jp":true,"shiroi.chiba.jp":true,"shisui.chiba.jp":true,"sodegaura.chiba.jp":true,"sosa.chiba.jp":true,"tako.chiba.jp":true,"tateyama.chiba.jp":true,"togane.chiba.jp":true,"tohnosho.chiba.jp":true,"tomisato.chiba.jp":true,"urayasu.chiba.jp":true,"yachimata.chiba.jp":true,"yachiyo.chiba.jp":true,"yokaichiba.chiba.jp":true,"yokoshibahikari.chiba.jp":true,"yotsukaido.chiba.jp":true,"ainan.ehime.jp":true,"honai.ehime.jp":true,"ikata.ehime.jp":true,"imabari.ehime.jp":true,"iyo.ehime.jp":true,"kamijima.ehime.jp":true,"kihoku.ehime.jp":true,"kumakogen.ehime.jp":true,"masaki.ehime.jp":true,"matsuno.ehime.jp":true,"matsuyama.ehime.jp":true,"namikata.ehime.jp":true,"niihama.ehime.jp":true,"ozu.ehime.jp":true,"saijo.ehime.jp":true,"seiyo.ehime.jp":true,"shikokuchuo.ehime.jp":true,"tobe.ehime.jp":true,"toon.ehime.jp":true,"uchiko.ehime.jp":true,"uwajima.ehime.jp":true,"yawatahama.ehime.jp":true,"echizen.fukui.jp":true,"eiheiji.fukui.jp":true,"fukui.fukui.jp":true,"ikeda.fukui.jp":true,"katsuyama.fukui.jp":true,"mihama.fukui.jp":true,"minamiechizen.fukui.jp":true,"obama.fukui.jp":true,"ohi.fukui.jp":true,"ono.fukui.jp":true,"sabae.fukui.jp":true,"sakai.fukui.jp":true,"takahama.fukui.jp":true,"tsuruga.fukui.jp":true,"wakasa.fukui.jp":true,"ashiya.fukuoka.jp":true,"buzen.fukuoka.jp":true,"chikugo.fukuoka.jp":true,"chikuho.fukuoka.jp":true,"chikujo.fukuoka.jp":true,"chikushino.fukuoka.jp":true,"chikuzen.fukuoka.jp":true,"chuo.fukuoka.jp":true,"dazaifu.fukuoka.jp":true,"fukuchi.fukuoka.jp":true,"hakata.fukuoka.jp":true,"higashi.fukuoka.jp":true,"hirokawa.fukuoka.jp":true,"hisayama.fukuoka.jp":true,"iizuka.fukuoka.jp":true,"inatsuki.fukuoka.jp":true,"kaho.fukuoka.jp":true,"kasuga.fukuoka.jp":true,"kasuya.fukuoka.jp":true,"kawara.fukuoka.jp":true,"keisen.fukuoka.jp":true,"koga.fukuoka.jp":true,"kurate.fukuoka.jp":true,"kurogi.fukuoka.jp":true,"kurume.fukuoka.jp":true,"minami.fukuoka.jp":true,"miyako.fukuoka.jp":true,"miyama.fukuoka.jp":true,"miyawaka.fukuoka.jp":true,"mizumaki.fukuoka.jp":true,"munakata.fukuoka.jp":true,"nakagawa.fukuoka.jp":true,"nakama.fukuoka.jp":true,"nishi.fukuoka.jp":true,"nogata.fukuoka.jp":true,"ogori.fukuoka.jp":true,"okagaki.fukuoka.jp":true,"okawa.fukuoka.jp":true,"oki.fukuoka.jp":true,"omuta.fukuoka.jp":true,"onga.fukuoka.jp":true,"onojo.fukuoka.jp":true,"oto.fukuoka.jp":true,"saigawa.fukuok
a.jp":true,"sasaguri.fukuoka.jp":true,"shingu.fukuoka.jp":true,"shinyoshitomi.fukuoka.jp":true,"shonai.fukuoka.jp":true,"soeda.fukuoka.jp":true,"sue.fukuoka.jp":true,"tachiarai.fukuoka.jp":true,"tagawa.fukuoka.jp":true,"takata.fukuoka.jp":true,"toho.fukuoka.jp":true,"toyotsu.fukuoka.jp":true,"tsuiki.fukuoka.jp":true,"ukiha.fukuoka.jp":true,"umi.fukuoka.jp":true,"usui.fukuoka.jp":true,"yamada.fukuoka.jp":true,"yame.fukuoka.jp":true,"yanagawa.fukuoka.jp":true,"yukuhashi.fukuoka.jp":true,"aizubange.fukushima.jp":true,"aizumisato.fukushima.jp":true,"aizuwakamatsu.fukushima.jp":true,"asakawa.fukushima.jp":true,"bandai.fukushima.jp":true,"date.fukushima.jp":true,"fukushima.fukushima.jp":true,"furudono.fukushima.jp":true,"futaba.fukushima.jp":true,"hanawa.fukushima.jp":true,"higashi.fukushima.jp":true,"hirata.fukushima.jp":true,"hirono.fukushima.jp":true,"iitate.fukushima.jp":true,"inawashiro.fukushima.jp":true,"ishikawa.fukushima.jp":true,"iwaki.fukushima.jp":true,"izumizaki.fukushima.jp":true,"kagamiishi.fukushima.jp":true,"kaneyama.fukushima.jp":true,"kawamata.fukushima.jp":true,"kitakata.fukushima.jp":true,"kitashiobara.fukushima.jp":true,"koori.fukushima.jp":true,"koriyama.fukushima.jp":true,"kunimi.fukushima.jp":true,"miharu.fukushima.jp":true,"mishima.fukushima.jp":true,"namie.fukushima.jp":true,"nango.fukushima.jp":true,"nishiaizu.fukushima.jp":true,"nishigo.fukushima.jp":true,"okuma.fukushima.jp":true,"omotego.fukushima.jp":true,"ono.fukushima.jp":true,"otama.fukushima.jp":true,"samegawa.fukushima.jp":true,"shimogo.fukushima.jp":true,"shirakawa.fukushima.jp":true,"showa.fukushima.jp":true,"soma.fukushima.jp":true,"sukagawa.fukushima.jp":true,"taishin.fukushima.jp":true,"tamakawa.fukushima.jp":true,"tanagura.fukushima.jp":true,"tenei.fukushima.jp":true,"yabuki.fukushima.jp":true,"yamato.fukushima.jp":true,"yamatsuri.fukushima.jp":true,"yanaizu.fukushima.jp":true,"yugawa.fukushima.jp":true,"anpachi.gifu.jp":true,"ena.gifu.jp":true,"gifu.gifu.jp":true,"ginan.gifu.jp":true,"godo.gifu.jp":true,"gujo.gifu.jp":true,"hashima.gifu.jp":true,"hichiso.gifu.jp":true,"hida.gifu.jp":true,"higashishirakawa.gifu.jp":true,"ibigawa.gifu.jp":true,"ikeda.gifu.jp":true,"kakamigahara.gifu.jp":true,"kani.gifu.jp":true,"kasahara.gifu.jp":true,"kasamatsu.gifu.jp":true,"kawaue.gifu.jp":true,"kitagata.gifu.jp":true,"mino.gifu.jp":true,"minokamo.gifu.jp":true,"mitake.gifu.jp":true,"mizunami.gifu.jp":true,"motosu.gifu.jp":true,"nakatsugawa.gifu.jp":true,"ogaki.gifu.jp":true,"sakahogi.gifu.jp":true,"seki.gifu.jp":true,"sekigahara.gifu.jp":true,"shirakawa.gifu.jp":true,"tajimi.gifu.jp":true,"takayama.gifu.jp":true,"tarui.gifu.jp":true,"toki.gifu.jp":true,"tomika.gifu.jp":true,"wanouchi.gifu.jp":true,"yamagata.gifu.jp":true,"yaotsu.gifu.jp":true,"yoro.gifu.jp":true,"annaka.gunma.jp":true,"chiyoda.gunma.jp":true,"fujioka.gunma.jp":true,"higashiagatsuma.gunma.jp":true,"isesaki.gunma.jp":true,"itakura.gunma.jp":true,"kanna.gunma.jp":true,"kanra.gunma.jp":true,"katashina.gunma.jp":true,"kawaba.gunma.jp":true,"kiryu.gunma.jp":true,"kusatsu.gunma.jp":true,"maebashi.gunma.jp":true,"meiwa.gunma.jp":true,"midori.gunma.jp":true,"minakami.gunma.jp":true,"naganohara.gunma.jp":true,"nakanojo.gunma.jp":true,"nanmoku.gunma.jp":true,"numata.gunma.jp":true,"oizumi.gunma.jp":true,"ora.gunma.jp":true,"ota.gunma.jp":true,"shibukawa.gunma.jp":true,"shimonita.gunma.jp":true,"shinto.gunma.jp":true,"showa.gunma.jp":true,"takasaki.gunma.jp":true,"takayama.gunma.jp":true,"tamamura.gunma.jp":true,"tatebayashi.gunma.jp":true,"tomioka.gunma.jp":true
,"tsukiyono.gunma.jp":true,"tsumagoi.gunma.jp":true,"ueno.gunma.jp":true,"yoshioka.gunma.jp":true,"asaminami.hiroshima.jp":true,"daiwa.hiroshima.jp":true,"etajima.hiroshima.jp":true,"fuchu.hiroshima.jp":true,"fukuyama.hiroshima.jp":true,"hatsukaichi.hiroshima.jp":true,"higashihiroshima.hiroshima.jp":true,"hongo.hiroshima.jp":true,"jinsekikogen.hiroshima.jp":true,"kaita.hiroshima.jp":true,"kui.hiroshima.jp":true,"kumano.hiroshima.jp":true,"kure.hiroshima.jp":true,"mihara.hiroshima.jp":true,"miyoshi.hiroshima.jp":true,"naka.hiroshima.jp":true,"onomichi.hiroshima.jp":true,"osakikamijima.hiroshima.jp":true,"otake.hiroshima.jp":true,"saka.hiroshima.jp":true,"sera.hiroshima.jp":true,"seranishi.hiroshima.jp":true,"shinichi.hiroshima.jp":true,"shobara.hiroshima.jp":true,"takehara.hiroshima.jp":true,"abashiri.hokkaido.jp":true,"abira.hokkaido.jp":true,"aibetsu.hokkaido.jp":true,"akabira.hokkaido.jp":true,"akkeshi.hokkaido.jp":true,"asahikawa.hokkaido.jp":true,"ashibetsu.hokkaido.jp":true,"ashoro.hokkaido.jp":true,"assabu.hokkaido.jp":true,"atsuma.hokkaido.jp":true,"bibai.hokkaido.jp":true,"biei.hokkaido.jp":true,"bifuka.hokkaido.jp":true,"bihoro.hokkaido.jp":true,"biratori.hokkaido.jp":true,"chippubetsu.hokkaido.jp":true,"chitose.hokkaido.jp":true,"date.hokkaido.jp":true,"ebetsu.hokkaido.jp":true,"embetsu.hokkaido.jp":true,"eniwa.hokkaido.jp":true,"erimo.hokkaido.jp":true,"esan.hokkaido.jp":true,"esashi.hokkaido.jp":true,"fukagawa.hokkaido.jp":true,"fukushima.hokkaido.jp":true,"furano.hokkaido.jp":true,"furubira.hokkaido.jp":true,"haboro.hokkaido.jp":true,"hakodate.hokkaido.jp":true,"hamatonbetsu.hokkaido.jp":true,"hidaka.hokkaido.jp":true,"higashikagura.hokkaido.jp":true,"higashikawa.hokkaido.jp":true,"hiroo.hokkaido.jp":true,"hokuryu.hokkaido.jp":true,"hokuto.hokkaido.jp":true,"honbetsu.hokkaido.jp":true,"horokanai.hokkaido.jp":true,"horonobe.hokkaido.jp":true,"ikeda.hokkaido.jp":true,"imakane.hokkaido.jp":true,"ishikari.hokkaido.jp":true,"iwamizawa.hokkaido.jp":true,"iwanai.hokkaido.jp":true,"kamifurano.hokkaido.jp":true,"kamikawa.hokkaido.jp":true,"kamishihoro.hokkaido.jp":true,"kamisunagawa.hokkaido.jp":true,"kamoenai.hokkaido.jp":true,"kayabe.hokkaido.jp":true,"kembuchi.hokkaido.jp":true,"kikonai.hokkaido.jp":true,"kimobetsu.hokkaido.jp":true,"kitahiroshima.hokkaido.jp":true,"kitami.hokkaido.jp":true,"kiyosato.hokkaido.jp":true,"koshimizu.hokkaido.jp":true,"kunneppu.hokkaido.jp":true,"kuriyama.hokkaido.jp":true,"kuromatsunai.hokkaido.jp":true,"kushiro.hokkaido.jp":true,"kutchan.hokkaido.jp":true,"kyowa.hokkaido.jp":true,"mashike.hokkaido.jp":true,"matsumae.hokkaido.jp":true,"mikasa.hokkaido.jp":true,"minamifurano.hokkaido.jp":true,"mombetsu.hokkaido.jp":true,"moseushi.hokkaido.jp":true,"mukawa.hokkaido.jp":true,"muroran.hokkaido.jp":true,"naie.hokkaido.jp":true,"nakagawa.hokkaido.jp":true,"nakasatsunai.hokkaido.jp":true,"nakatombetsu.hokkaido.jp":true,"nanae.hokkaido.jp":true,"nanporo.hokkaido.jp":true,"nayoro.hokkaido.jp":true,"nemuro.hokkaido.jp":true,"niikappu.hokkaido.jp":true,"niki.hokkaido.jp":true,"nishiokoppe.hokkaido.jp":true,"noboribetsu.hokkaido.jp":true,"numata.hokkaido.jp":true,"obihiro.hokkaido.jp":true,"obira.hokkaido.jp":true,"oketo.hokkaido.jp":true,"okoppe.hokkaido.jp":true,"otaru.hokkaido.jp":true,"otobe.hokkaido.jp":true,"otofuke.hokkaido.jp":true,"otoineppu.hokkaido.jp":true,"oumu.hokkaido.jp":true,"ozora.hokkaido.jp":true,"pippu.hokkaido.jp":true,"rankoshi.hokkaido.jp":true,"rebun.hokkaido.jp":true,"rikubetsu.hokkaido.jp":true,"rishiri.hokkaido.jp":true,"rishirifuji.hokka
ido.jp":true,"saroma.hokkaido.jp":true,"sarufutsu.hokkaido.jp":true,"shakotan.hokkaido.jp":true,"shari.hokkaido.jp":true,"shibecha.hokkaido.jp":true,"shibetsu.hokkaido.jp":true,"shikabe.hokkaido.jp":true,"shikaoi.hokkaido.jp":true,"shimamaki.hokkaido.jp":true,"shimizu.hokkaido.jp":true,"shimokawa.hokkaido.jp":true,"shinshinotsu.hokkaido.jp":true,"shintoku.hokkaido.jp":true,"shiranuka.hokkaido.jp":true,"shiraoi.hokkaido.jp":true,"shiriuchi.hokkaido.jp":true,"sobetsu.hokkaido.jp":true,"sunagawa.hokkaido.jp":true,"taiki.hokkaido.jp":true,"takasu.hokkaido.jp":true,"takikawa.hokkaido.jp":true,"takinoue.hokkaido.jp":true,"teshikaga.hokkaido.jp":true,"tobetsu.hokkaido.jp":true,"tohma.hokkaido.jp":true,"tomakomai.hokkaido.jp":true,"tomari.hokkaido.jp":true,"toya.hokkaido.jp":true,"toyako.hokkaido.jp":true,"toyotomi.hokkaido.jp":true,"toyoura.hokkaido.jp":true,"tsubetsu.hokkaido.jp":true,"tsukigata.hokkaido.jp":true,"urakawa.hokkaido.jp":true,"urausu.hokkaido.jp":true,"uryu.hokkaido.jp":true,"utashinai.hokkaido.jp":true,"wakkanai.hokkaido.jp":true,"wassamu.hokkaido.jp":true,"yakumo.hokkaido.jp":true,"yoichi.hokkaido.jp":true,"aioi.hyogo.jp":true,"akashi.hyogo.jp":true,"ako.hyogo.jp":true,"amagasaki.hyogo.jp":true,"aogaki.hyogo.jp":true,"asago.hyogo.jp":true,"ashiya.hyogo.jp":true,"awaji.hyogo.jp":true,"fukusaki.hyogo.jp":true,"goshiki.hyogo.jp":true,"harima.hyogo.jp":true,"himeji.hyogo.jp":true,"ichikawa.hyogo.jp":true,"inagawa.hyogo.jp":true,"itami.hyogo.jp":true,"kakogawa.hyogo.jp":true,"kamigori.hyogo.jp":true,"kamikawa.hyogo.jp":true,"kasai.hyogo.jp":true,"kasuga.hyogo.jp":true,"kawanishi.hyogo.jp":true,"miki.hyogo.jp":true,"minamiawaji.hyogo.jp":true,"nishinomiya.hyogo.jp":true,"nishiwaki.hyogo.jp":true,"ono.hyogo.jp":true,"sanda.hyogo.jp":true,"sannan.hyogo.jp":true,"sasayama.hyogo.jp":true,"sayo.hyogo.jp":true,"shingu.hyogo.jp":true,"shinonsen.hyogo.jp":true,"shiso.hyogo.jp":true,"sumoto.hyogo.jp":true,"taishi.hyogo.jp":true,"taka.hyogo.jp":true,"takarazuka.hyogo.jp":true,"takasago.hyogo.jp":true,"takino.hyogo.jp":true,"tamba.hyogo.jp":true,"tatsuno.hyogo.jp":true,"toyooka.hyogo.jp":true,"yabu.hyogo.jp":true,"yashiro.hyogo.jp":true,"yoka.hyogo.jp":true,"yokawa.hyogo.jp":true,"ami.ibaraki.jp":true,"asahi.ibaraki.jp":true,"bando.ibaraki.jp":true,"chikusei.ibaraki.jp":true,"daigo.ibaraki.jp":true,"fujishiro.ibaraki.jp":true,"hitachi.ibaraki.jp":true,"hitachinaka.ibaraki.jp":true,"hitachiomiya.ibaraki.jp":true,"hitachiota.ibaraki.jp":true,"ibaraki.ibaraki.jp":true,"ina.ibaraki.jp":true,"inashiki.ibaraki.jp":true,"itako.ibaraki.jp":true,"iwama.ibaraki.jp":true,"joso.ibaraki.jp":true,"kamisu.ibaraki.jp":true,"kasama.ibaraki.jp":true,"kashima.ibaraki.jp":true,"kasumigaura.ibaraki.jp":true,"koga.ibaraki.jp":true,"miho.ibaraki.jp":true,"mito.ibaraki.jp":true,"moriya.ibaraki.jp":true,"naka.ibaraki.jp":true,"namegata.ibaraki.jp":true,"oarai.ibaraki.jp":true,"ogawa.ibaraki.jp":true,"omitama.ibaraki.jp":true,"ryugasaki.ibaraki.jp":true,"sakai.ibaraki.jp":true,"sakuragawa.ibaraki.jp":true,"shimodate.ibaraki.jp":true,"shimotsuma.ibaraki.jp":true,"shirosato.ibaraki.jp":true,"sowa.ibaraki.jp":true,"suifu.ibaraki.jp":true,"takahagi.ibaraki.jp":true,"tamatsukuri.ibaraki.jp":true,"tokai.ibaraki.jp":true,"tomobe.ibaraki.jp":true,"tone.ibaraki.jp":true,"toride.ibaraki.jp":true,"tsuchiura.ibaraki.jp":true,"tsukuba.ibaraki.jp":true,"uchihara.ibaraki.jp":true,"ushiku.ibaraki.jp":true,"yachiyo.ibaraki.jp":true,"yamagata.ibaraki.jp":true,"yawara.ibaraki.jp":true,"yuki.ibaraki.jp":true,"anamizu.ishikawa.jp":true,"hakui.
ishikawa.jp":true,"hakusan.ishikawa.jp":true,"kaga.ishikawa.jp":true,"kahoku.ishikawa.jp":true,"kanazawa.ishikawa.jp":true,"kawakita.ishikawa.jp":true,"komatsu.ishikawa.jp":true,"nakanoto.ishikawa.jp":true,"nanao.ishikawa.jp":true,"nomi.ishikawa.jp":true,"nonoichi.ishikawa.jp":true,"noto.ishikawa.jp":true,"shika.ishikawa.jp":true,"suzu.ishikawa.jp":true,"tsubata.ishikawa.jp":true,"tsurugi.ishikawa.jp":true,"uchinada.ishikawa.jp":true,"wajima.ishikawa.jp":true,"fudai.iwate.jp":true,"fujisawa.iwate.jp":true,"hanamaki.iwate.jp":true,"hiraizumi.iwate.jp":true,"hirono.iwate.jp":true,"ichinohe.iwate.jp":true,"ichinoseki.iwate.jp":true,"iwaizumi.iwate.jp":true,"iwate.iwate.jp":true,"joboji.iwate.jp":true,"kamaishi.iwate.jp":true,"kanegasaki.iwate.jp":true,"karumai.iwate.jp":true,"kawai.iwate.jp":true,"kitakami.iwate.jp":true,"kuji.iwate.jp":true,"kunohe.iwate.jp":true,"kuzumaki.iwate.jp":true,"miyako.iwate.jp":true,"mizusawa.iwate.jp":true,"morioka.iwate.jp":true,"ninohe.iwate.jp":true,"noda.iwate.jp":true,"ofunato.iwate.jp":true,"oshu.iwate.jp":true,"otsuchi.iwate.jp":true,"rikuzentakata.iwate.jp":true,"shiwa.iwate.jp":true,"shizukuishi.iwate.jp":true,"sumita.iwate.jp":true,"tanohata.iwate.jp":true,"tono.iwate.jp":true,"yahaba.iwate.jp":true,"yamada.iwate.jp":true,"ayagawa.kagawa.jp":true,"higashikagawa.kagawa.jp":true,"kanonji.kagawa.jp":true,"kotohira.kagawa.jp":true,"manno.kagawa.jp":true,"marugame.kagawa.jp":true,"mitoyo.kagawa.jp":true,"naoshima.kagawa.jp":true,"sanuki.kagawa.jp":true,"tadotsu.kagawa.jp":true,"takamatsu.kagawa.jp":true,"tonosho.kagawa.jp":true,"uchinomi.kagawa.jp":true,"utazu.kagawa.jp":true,"zentsuji.kagawa.jp":true,"akune.kagoshima.jp":true,"amami.kagoshima.jp":true,"hioki.kagoshima.jp":true,"isa.kagoshima.jp":true,"isen.kagoshima.jp":true,"izumi.kagoshima.jp":true,"kagoshima.kagoshima.jp":true,"kanoya.kagoshima.jp":true,"kawanabe.kagoshima.jp":true,"kinko.kagoshima.jp":true,"kouyama.kagoshima.jp":true,"makurazaki.kagoshima.jp":true,"matsumoto.kagoshima.jp":true,"minamitane.kagoshima.jp":true,"nakatane.kagoshima.jp":true,"nishinoomote.kagoshima.jp":true,"satsumasendai.kagoshima.jp":true,"soo.kagoshima.jp":true,"tarumizu.kagoshima.jp":true,"yusui.kagoshima.jp":true,"aikawa.kanagawa.jp":true,"atsugi.kanagawa.jp":true,"ayase.kanagawa.jp":true,"chigasaki.kanagawa.jp":true,"ebina.kanagawa.jp":true,"fujisawa.kanagawa.jp":true,"hadano.kanagawa.jp":true,"hakone.kanagawa.jp":true,"hiratsuka.kanagawa.jp":true,"isehara.kanagawa.jp":true,"kaisei.kanagawa.jp":true,"kamakura.kanagawa.jp":true,"kiyokawa.kanagawa.jp":true,"matsuda.kanagawa.jp":true,"minamiashigara.kanagawa.jp":true,"miura.kanagawa.jp":true,"nakai.kanagawa.jp":true,"ninomiya.kanagawa.jp":true,"odawara.kanagawa.jp":true,"oi.kanagawa.jp":true,"oiso.kanagawa.jp":true,"sagamihara.kanagawa.jp":true,"samukawa.kanagawa.jp":true,"tsukui.kanagawa.jp":true,"yamakita.kanagawa.jp":true,"yamato.kanagawa.jp":true,"yokosuka.kanagawa.jp":true,"yugawara.kanagawa.jp":true,"zama.kanagawa.jp":true,"zushi.kanagawa.jp":true,"aki.kochi.jp":true,"geisei.kochi.jp":true,"hidaka.kochi.jp":true,"higashitsuno.kochi.jp":true,"ino.kochi.jp":true,"kagami.kochi.jp":true,"kami.kochi.jp":true,"kitagawa.kochi.jp":true,"kochi.kochi.jp":true,"mihara.kochi.jp":true,"motoyama.kochi.jp":true,"muroto.kochi.jp":true,"nahari.kochi.jp":true,"nakamura.kochi.jp":true,"nankoku.kochi.jp":true,"nishitosa.kochi.jp":true,"niyodogawa.kochi.jp":true,"ochi.kochi.jp":true,"okawa.kochi.jp":true,"otoyo.kochi.jp":true,"otsuki.kochi.jp":true,"sakawa.kochi.jp":true,"sukumo.kochi.jp"
:true,"susaki.kochi.jp":true,"tosa.kochi.jp":true,"tosashimizu.kochi.jp":true,"toyo.kochi.jp":true,"tsuno.kochi.jp":true,"umaji.kochi.jp":true,"yasuda.kochi.jp":true,"yusuhara.kochi.jp":true,"amakusa.kumamoto.jp":true,"arao.kumamoto.jp":true,"aso.kumamoto.jp":true,"choyo.kumamoto.jp":true,"gyokuto.kumamoto.jp":true,"hitoyoshi.kumamoto.jp":true,"kamiamakusa.kumamoto.jp":true,"kashima.kumamoto.jp":true,"kikuchi.kumamoto.jp":true,"kosa.kumamoto.jp":true,"kumamoto.kumamoto.jp":true,"mashiki.kumamoto.jp":true,"mifune.kumamoto.jp":true,"minamata.kumamoto.jp":true,"minamioguni.kumamoto.jp":true,"nagasu.kumamoto.jp":true,"nishihara.kumamoto.jp":true,"oguni.kumamoto.jp":true,"ozu.kumamoto.jp":true,"sumoto.kumamoto.jp":true,"takamori.kumamoto.jp":true,"uki.kumamoto.jp":true,"uto.kumamoto.jp":true,"yamaga.kumamoto.jp":true,"yamato.kumamoto.jp":true,"yatsushiro.kumamoto.jp":true,"ayabe.kyoto.jp":true,"fukuchiyama.kyoto.jp":true,"higashiyama.kyoto.jp":true,"ide.kyoto.jp":true,"ine.kyoto.jp":true,"joyo.kyoto.jp":true,"kameoka.kyoto.jp":true,"kamo.kyoto.jp":true,"kita.kyoto.jp":true,"kizu.kyoto.jp":true,"kumiyama.kyoto.jp":true,"kyotamba.kyoto.jp":true,"kyotanabe.kyoto.jp":true,"kyotango.kyoto.jp":true,"maizuru.kyoto.jp":true,"minami.kyoto.jp":true,"minamiyamashiro.kyoto.jp":true,"miyazu.kyoto.jp":true,"muko.kyoto.jp":true,"nagaokakyo.kyoto.jp":true,"nakagyo.kyoto.jp":true,"nantan.kyoto.jp":true,"oyamazaki.kyoto.jp":true,"sakyo.kyoto.jp":true,"seika.kyoto.jp":true,"tanabe.kyoto.jp":true,"uji.kyoto.jp":true,"ujitawara.kyoto.jp":true,"wazuka.kyoto.jp":true,"yamashina.kyoto.jp":true,"yawata.kyoto.jp":true,"asahi.mie.jp":true,"inabe.mie.jp":true,"ise.mie.jp":true,"kameyama.mie.jp":true,"kawagoe.mie.jp":true,"kiho.mie.jp":true,"kisosaki.mie.jp":true,"kiwa.mie.jp":true,"komono.mie.jp":true,"kumano.mie.jp":true,"kuwana.mie.jp":true,"matsusaka.mie.jp":true,"meiwa.mie.jp":true,"mihama.mie.jp":true,"minamiise.mie.jp":true,"misugi.mie.jp":true,"miyama.mie.jp":true,"nabari.mie.jp":true,"shima.mie.jp":true,"suzuka.mie.jp":true,"tado.mie.jp":true,"taiki.mie.jp":true,"taki.mie.jp":true,"tamaki.mie.jp":true,"toba.mie.jp":true,"tsu.mie.jp":true,"udono.mie.jp":true,"ureshino.mie.jp":true,"watarai.mie.jp":true,"yokkaichi.mie.jp":true,"furukawa.miyagi.jp":true,"higashimatsushima.miyagi.jp":true,"ishinomaki.miyagi.jp":true,"iwanuma.miyagi.jp":true,"kakuda.miyagi.jp":true,"kami.miyagi.jp":true,"kawasaki.miyagi.jp":true,"kesennuma.miyagi.jp":true,"marumori.miyagi.jp":true,"matsushima.miyagi.jp":true,"minamisanriku.miyagi.jp":true,"misato.miyagi.jp":true,"murata.miyagi.jp":true,"natori.miyagi.jp":true,"ogawara.miyagi.jp":true,"ohira.miyagi.jp":true,"onagawa.miyagi.jp":true,"osaki.miyagi.jp":true,"rifu.miyagi.jp":true,"semine.miyagi.jp":true,"shibata.miyagi.jp":true,"shichikashuku.miyagi.jp":true,"shikama.miyagi.jp":true,"shiogama.miyagi.jp":true,"shiroishi.miyagi.jp":true,"tagajo.miyagi.jp":true,"taiwa.miyagi.jp":true,"tome.miyagi.jp":true,"tomiya.miyagi.jp":true,"wakuya.miyagi.jp":true,"watari.miyagi.jp":true,"yamamoto.miyagi.jp":true,"zao.miyagi.jp":true,"aya.miyazaki.jp":true,"ebino.miyazaki.jp":true,"gokase.miyazaki.jp":true,"hyuga.miyazaki.jp":true,"kadogawa.miyazaki.jp":true,"kawaminami.miyazaki.jp":true,"kijo.miyazaki.jp":true,"kitagawa.miyazaki.jp":true,"kitakata.miyazaki.jp":true,"kitaura.miyazaki.jp":true,"kobayashi.miyazaki.jp":true,"kunitomi.miyazaki.jp":true,"kushima.miyazaki.jp":true,"mimata.miyazaki.jp":true,"miyakonojo.miyazaki.jp":true,"miyazaki.miyazaki.jp":true,"morotsuka.miyazaki.jp":true,"nichinan.miyazaki.j
p":true,"nishimera.miyazaki.jp":true,"nobeoka.miyazaki.jp":true,"saito.miyazaki.jp":true,"shiiba.miyazaki.jp":true,"shintomi.miyazaki.jp":true,"takaharu.miyazaki.jp":true,"takanabe.miyazaki.jp":true,"takazaki.miyazaki.jp":true,"tsuno.miyazaki.jp":true,"achi.nagano.jp":true,"agematsu.nagano.jp":true,"anan.nagano.jp":true,"aoki.nagano.jp":true,"asahi.nagano.jp":true,"azumino.nagano.jp":true,"chikuhoku.nagano.jp":true,"chikuma.nagano.jp":true,"chino.nagano.jp":true,"fujimi.nagano.jp":true,"hakuba.nagano.jp":true,"hara.nagano.jp":true,"hiraya.nagano.jp":true,"iida.nagano.jp":true,"iijima.nagano.jp":true,"iiyama.nagano.jp":true,"iizuna.nagano.jp":true,"ikeda.nagano.jp":true,"ikusaka.nagano.jp":true,"ina.nagano.jp":true,"karuizawa.nagano.jp":true,"kawakami.nagano.jp":true,"kiso.nagano.jp":true,"kisofukushima.nagano.jp":true,"kitaaiki.nagano.jp":true,"komagane.nagano.jp":true,"komoro.nagano.jp":true,"matsukawa.nagano.jp":true,"matsumoto.nagano.jp":true,"miasa.nagano.jp":true,"minamiaiki.nagano.jp":true,"minamimaki.nagano.jp":true,"minamiminowa.nagano.jp":true,"minowa.nagano.jp":true,"miyada.nagano.jp":true,"miyota.nagano.jp":true,"mochizuki.nagano.jp":true,"nagano.nagano.jp":true,"nagawa.nagano.jp":true,"nagiso.nagano.jp":true,"nakagawa.nagano.jp":true,"nakano.nagano.jp":true,"nozawaonsen.nagano.jp":true,"obuse.nagano.jp":true,"ogawa.nagano.jp":true,"okaya.nagano.jp":true,"omachi.nagano.jp":true,"omi.nagano.jp":true,"ookuwa.nagano.jp":true,"ooshika.nagano.jp":true,"otaki.nagano.jp":true,"otari.nagano.jp":true,"sakae.nagano.jp":true,"sakaki.nagano.jp":true,"saku.nagano.jp":true,"sakuho.nagano.jp":true,"shimosuwa.nagano.jp":true,"shinanomachi.nagano.jp":true,"shiojiri.nagano.jp":true,"suwa.nagano.jp":true,"suzaka.nagano.jp":true,"takagi.nagano.jp":true,"takamori.nagano.jp":true,"takayama.nagano.jp":true,"tateshina.nagano.jp":true,"tatsuno.nagano.jp":true,"togakushi.nagano.jp":true,"togura.nagano.jp":true,"tomi.nagano.jp":true,"ueda.nagano.jp":true,"wada.nagano.jp":true,"yamagata.nagano.jp":true,"yamanouchi.nagano.jp":true,"yasaka.nagano.jp":true,"yasuoka.nagano.jp":true,"chijiwa.nagasaki.jp":true,"futsu.nagasaki.jp":true,"goto.nagasaki.jp":true,"hasami.nagasaki.jp":true,"hirado.nagasaki.jp":true,"iki.nagasaki.jp":true,"isahaya.nagasaki.jp":true,"kawatana.nagasaki.jp":true,"kuchinotsu.nagasaki.jp":true,"matsuura.nagasaki.jp":true,"nagasaki.nagasaki.jp":true,"obama.nagasaki.jp":true,"omura.nagasaki.jp":true,"oseto.nagasaki.jp":true,"saikai.nagasaki.jp":true,"sasebo.nagasaki.jp":true,"seihi.nagasaki.jp":true,"shimabara.nagasaki.jp":true,"shinkamigoto.nagasaki.jp":true,"togitsu.nagasaki.jp":true,"tsushima.nagasaki.jp":true,"unzen.nagasaki.jp":true,"ando.nara.jp":true,"gose.nara.jp":true,"heguri.nara.jp":true,"higashiyoshino.nara.jp":true,"ikaruga.nara.jp":true,"ikoma.nara.jp":true,"kamikitayama.nara.jp":true,"kanmaki.nara.jp":true,"kashiba.nara.jp":true,"kashihara.nara.jp":true,"katsuragi.nara.jp":true,"kawai.nara.jp":true,"kawakami.nara.jp":true,"kawanishi.nara.jp":true,"koryo.nara.jp":true,"kurotaki.nara.jp":true,"mitsue.nara.jp":true,"miyake.nara.jp":true,"nara.nara.jp":true,"nosegawa.nara.jp":true,"oji.nara.jp":true,"ouda.nara.jp":true,"oyodo.nara.jp":true,"sakurai.nara.jp":true,"sango.nara.jp":true,"shimoichi.nara.jp":true,"shimokitayama.nara.jp":true,"shinjo.nara.jp":true,"soni.nara.jp":true,"takatori.nara.jp":true,"tawaramoto.nara.jp":true,"tenkawa.nara.jp":true,"tenri.nara.jp":true,"uda.nara.jp":true,"yamatokoriyama.nara.jp":true,"yamatotakada.nara.jp":true,"yamazoe.nara.jp":true,"yoshino.nara.j
p":true,"aga.niigata.jp":true,"agano.niigata.jp":true,"gosen.niigata.jp":true,"itoigawa.niigata.jp":true,"izumozaki.niigata.jp":true,"joetsu.niigata.jp":true,"kamo.niigata.jp":true,"kariwa.niigata.jp":true,"kashiwazaki.niigata.jp":true,"minamiuonuma.niigata.jp":true,"mitsuke.niigata.jp":true,"muika.niigata.jp":true,"murakami.niigata.jp":true,"myoko.niigata.jp":true,"nagaoka.niigata.jp":true,"niigata.niigata.jp":true,"ojiya.niigata.jp":true,"omi.niigata.jp":true,"sado.niigata.jp":true,"sanjo.niigata.jp":true,"seiro.niigata.jp":true,"seirou.niigata.jp":true,"sekikawa.niigata.jp":true,"shibata.niigata.jp":true,"tagami.niigata.jp":true,"tainai.niigata.jp":true,"tochio.niigata.jp":true,"tokamachi.niigata.jp":true,"tsubame.niigata.jp":true,"tsunan.niigata.jp":true,"uonuma.niigata.jp":true,"yahiko.niigata.jp":true,"yoita.niigata.jp":true,"yuzawa.niigata.jp":true,"beppu.oita.jp":true,"bungoono.oita.jp":true,"bungotakada.oita.jp":true,"hasama.oita.jp":true,"hiji.oita.jp":true,"himeshima.oita.jp":true,"hita.oita.jp":true,"kamitsue.oita.jp":true,"kokonoe.oita.jp":true,"kuju.oita.jp":true,"kunisaki.oita.jp":true,"kusu.oita.jp":true,"oita.oita.jp":true,"saiki.oita.jp":true,"taketa.oita.jp":true,"tsukumi.oita.jp":true,"usa.oita.jp":true,"usuki.oita.jp":true,"yufu.oita.jp":true,"akaiwa.okayama.jp":true,"asakuchi.okayama.jp":true,"bizen.okayama.jp":true,"hayashima.okayama.jp":true,"ibara.okayama.jp":true,"kagamino.okayama.jp":true,"kasaoka.okayama.jp":true,"kibichuo.okayama.jp":true,"kumenan.okayama.jp":true,"kurashiki.okayama.jp":true,"maniwa.okayama.jp":true,"misaki.okayama.jp":true,"nagi.okayama.jp":true,"niimi.okayama.jp":true,"nishiawakura.okayama.jp":true,"okayama.okayama.jp":true,"satosho.okayama.jp":true,"setouchi.okayama.jp":true,"shinjo.okayama.jp":true,"shoo.okayama.jp":true,"soja.okayama.jp":true,"takahashi.okayama.jp":true,"tamano.okayama.jp":true,"tsuyama.okayama.jp":true,"wake.okayama.jp":true,"yakage.okayama.jp":true,"aguni.okinawa.jp":true,"ginowan.okinawa.jp":true,"ginoza.okinawa.jp":true,"gushikami.okinawa.jp":true,"haebaru.okinawa.jp":true,"higashi.okinawa.jp":true,"hirara.okinawa.jp":true,"iheya.okinawa.jp":true,"ishigaki.okinawa.jp":true,"ishikawa.okinawa.jp":true,"itoman.okinawa.jp":true,"izena.okinawa.jp":true,"kadena.okinawa.jp":true,"kin.okinawa.jp":true,"kitadaito.okinawa.jp":true,"kitanakagusuku.okinawa.jp":true,"kumejima.okinawa.jp":true,"kunigami.okinawa.jp":true,"minamidaito.okinawa.jp":true,"motobu.okinawa.jp":true,"nago.okinawa.jp":true,"naha.okinawa.jp":true,"nakagusuku.okinawa.jp":true,"nakijin.okinawa.jp":true,"nanjo.okinawa.jp":true,"nishihara.okinawa.jp":true,"ogimi.okinawa.jp":true,"okinawa.okinawa.jp":true,"onna.okinawa.jp":true,"shimoji.okinawa.jp":true,"taketomi.okinawa.jp":true,"tarama.okinawa.jp":true,"tokashiki.okinawa.jp":true,"tomigusuku.okinawa.jp":true,"tonaki.okinawa.jp":true,"urasoe.okinawa.jp":true,"uruma.okinawa.jp":true,"yaese.okinawa.jp":true,"yomitan.okinawa.jp":true,"yonabaru.okinawa.jp":true,"yonaguni.okinawa.jp":true,"zamami.okinawa.jp":true,"abeno.osaka.jp":true,"chihayaakasaka.osaka.jp":true,"chuo.osaka.jp":true,"daito.osaka.jp":true,"fujiidera.osaka.jp":true,"habikino.osaka.jp":true,"hannan.osaka.jp":true,"higashiosaka.osaka.jp":true,"higashisumiyoshi.osaka.jp":true,"higashiyodogawa.osaka.jp":true,"hirakata.osaka.jp":true,"ibaraki.osaka.jp":true,"ikeda.osaka.jp":true,"izumi.osaka.jp":true,"izumiotsu.osaka.jp":true,"izumisano.osaka.jp":true,"kadoma.osaka.jp":true,"kaizuka.osaka.jp":true,"kanan.osaka.jp":true,"kashiwara.osaka.jp":true,"katano.osak
a.jp":true,"kawachinagano.osaka.jp":true,"kishiwada.osaka.jp":true,"kita.osaka.jp":true,"kumatori.osaka.jp":true,"matsubara.osaka.jp":true,"minato.osaka.jp":true,"minoh.osaka.jp":true,"misaki.osaka.jp":true,"moriguchi.osaka.jp":true,"neyagawa.osaka.jp":true,"nishi.osaka.jp":true,"nose.osaka.jp":true,"osakasayama.osaka.jp":true,"sakai.osaka.jp":true,"sayama.osaka.jp":true,"sennan.osaka.jp":true,"settsu.osaka.jp":true,"shijonawate.osaka.jp":true,"shimamoto.osaka.jp":true,"suita.osaka.jp":true,"tadaoka.osaka.jp":true,"taishi.osaka.jp":true,"tajiri.osaka.jp":true,"takaishi.osaka.jp":true,"takatsuki.osaka.jp":true,"tondabayashi.osaka.jp":true,"toyonaka.osaka.jp":true,"toyono.osaka.jp":true,"yao.osaka.jp":true,"ariake.saga.jp":true,"arita.saga.jp":true,"fukudomi.saga.jp":true,"genkai.saga.jp":true,"hamatama.saga.jp":true,"hizen.saga.jp":true,"imari.saga.jp":true,"kamimine.saga.jp":true,"kanzaki.saga.jp":true,"karatsu.saga.jp":true,"kashima.saga.jp":true,"kitagata.saga.jp":true,"kitahata.saga.jp":true,"kiyama.saga.jp":true,"kouhoku.saga.jp":true,"kyuragi.saga.jp":true,"nishiarita.saga.jp":true,"ogi.saga.jp":true,"omachi.saga.jp":true,"ouchi.saga.jp":true,"saga.saga.jp":true,"shiroishi.saga.jp":true,"taku.saga.jp":true,"tara.saga.jp":true,"tosu.saga.jp":true,"yoshinogari.saga.jp":true,"arakawa.saitama.jp":true,"asaka.saitama.jp":true,"chichibu.saitama.jp":true,"fujimi.saitama.jp":true,"fujimino.saitama.jp":true,"fukaya.saitama.jp":true,"hanno.saitama.jp":true,"hanyu.saitama.jp":true,"hasuda.saitama.jp":true,"hatogaya.saitama.jp":true,"hatoyama.saitama.jp":true,"hidaka.saitama.jp":true,"higashichichibu.saitama.jp":true,"higashimatsuyama.saitama.jp":true,"honjo.saitama.jp":true,"ina.saitama.jp":true,"iruma.saitama.jp":true,"iwatsuki.saitama.jp":true,"kamiizumi.saitama.jp":true,"kamikawa.saitama.jp":true,"kamisato.saitama.jp":true,"kasukabe.saitama.jp":true,"kawagoe.saitama.jp":true,"kawaguchi.saitama.jp":true,"kawajima.saitama.jp":true,"kazo.saitama.jp":true,"kitamoto.saitama.jp":true,"koshigaya.saitama.jp":true,"kounosu.saitama.jp":true,"kuki.saitama.jp":true,"kumagaya.saitama.jp":true,"matsubushi.saitama.jp":true,"minano.saitama.jp":true,"misato.saitama.jp":true,"miyashiro.saitama.jp":true,"miyoshi.saitama.jp":true,"moroyama.saitama.jp":true,"nagatoro.saitama.jp":true,"namegawa.saitama.jp":true,"niiza.saitama.jp":true,"ogano.saitama.jp":true,"ogawa.saitama.jp":true,"ogose.saitama.jp":true,"okegawa.saitama.jp":true,"omiya.saitama.jp":true,"otaki.saitama.jp":true,"ranzan.saitama.jp":true,"ryokami.saitama.jp":true,"saitama.saitama.jp":true,"sakado.saitama.jp":true,"satte.saitama.jp":true,"sayama.saitama.jp":true,"shiki.saitama.jp":true,"shiraoka.saitama.jp":true,"soka.saitama.jp":true,"sugito.saitama.jp":true,"toda.saitama.jp":true,"tokigawa.saitama.jp":true,"tokorozawa.saitama.jp":true,"tsurugashima.saitama.jp":true,"urawa.saitama.jp":true,"warabi.saitama.jp":true,"yashio.saitama.jp":true,"yokoze.saitama.jp":true,"yono.saitama.jp":true,"yorii.saitama.jp":true,"yoshida.saitama.jp":true,"yoshikawa.saitama.jp":true,"yoshimi.saitama.jp":true,"aisho.shiga.jp":true,"gamo.shiga.jp":true,"higashiomi.shiga.jp":true,"hikone.shiga.jp":true,"koka.shiga.jp":true,"konan.shiga.jp":true,"kosei.shiga.jp":true,"koto.shiga.jp":true,"kusatsu.shiga.jp":true,"maibara.shiga.jp":true,"moriyama.shiga.jp":true,"nagahama.shiga.jp":true,"nishiazai.shiga.jp":true,"notogawa.shiga.jp":true,"omihachiman.shiga.jp":true,"otsu.shiga.jp":true,"ritto.shiga.jp":true,"ryuoh.shiga.jp":true,"takashima.shiga.jp":true,"takatsuki.shiga.jp":tru
e,"torahime.shiga.jp":true,"toyosato.shiga.jp":true,"yasu.shiga.jp":true,"akagi.shimane.jp":true,"ama.shimane.jp":true,"gotsu.shimane.jp":true,"hamada.shimane.jp":true,"higashiizumo.shimane.jp":true,"hikawa.shimane.jp":true,"hikimi.shimane.jp":true,"izumo.shimane.jp":true,"kakinoki.shimane.jp":true,"masuda.shimane.jp":true,"matsue.shimane.jp":true,"misato.shimane.jp":true,"nishinoshima.shimane.jp":true,"ohda.shimane.jp":true,"okinoshima.shimane.jp":true,"okuizumo.shimane.jp":true,"shimane.shimane.jp":true,"tamayu.shimane.jp":true,"tsuwano.shimane.jp":true,"unnan.shimane.jp":true,"yakumo.shimane.jp":true,"yasugi.shimane.jp":true,"yatsuka.shimane.jp":true,"arai.shizuoka.jp":true,"atami.shizuoka.jp":true,"fuji.shizuoka.jp":true,"fujieda.shizuoka.jp":true,"fujikawa.shizuoka.jp":true,"fujinomiya.shizuoka.jp":true,"fukuroi.shizuoka.jp":true,"gotemba.shizuoka.jp":true,"haibara.shizuoka.jp":true,"hamamatsu.shizuoka.jp":true,"higashiizu.shizuoka.jp":true,"ito.shizuoka.jp":true,"iwata.shizuoka.jp":true,"izu.shizuoka.jp":true,"izunokuni.shizuoka.jp":true,"kakegawa.shizuoka.jp":true,"kannami.shizuoka.jp":true,"kawanehon.shizuoka.jp":true,"kawazu.shizuoka.jp":true,"kikugawa.shizuoka.jp":true,"kosai.shizuoka.jp":true,"makinohara.shizuoka.jp":true,"matsuzaki.shizuoka.jp":true,"minamiizu.shizuoka.jp":true,"mishima.shizuoka.jp":true,"morimachi.shizuoka.jp":true,"nishiizu.shizuoka.jp":true,"numazu.shizuoka.jp":true,"omaezaki.shizuoka.jp":true,"shimada.shizuoka.jp":true,"shimizu.shizuoka.jp":true,"shimoda.shizuoka.jp":true,"shizuoka.shizuoka.jp":true,"susono.shizuoka.jp":true,"yaizu.shizuoka.jp":true,"yoshida.shizuoka.jp":true,"ashikaga.tochigi.jp":true,"bato.tochigi.jp":true,"haga.tochigi.jp":true,"ichikai.tochigi.jp":true,"iwafune.tochigi.jp":true,"kaminokawa.tochigi.jp":true,"kanuma.tochigi.jp":true,"karasuyama.tochigi.jp":true,"kuroiso.tochigi.jp":true,"mashiko.tochigi.jp":true,"mibu.tochigi.jp":true,"moka.tochigi.jp":true,"motegi.tochigi.jp":true,"nasu.tochigi.jp":true,"nasushiobara.tochigi.jp":true,"nikko.tochigi.jp":true,"nishikata.tochigi.jp":true,"nogi.tochigi.jp":true,"ohira.tochigi.jp":true,"ohtawara.tochigi.jp":true,"oyama.tochigi.jp":true,"sakura.tochigi.jp":true,"sano.tochigi.jp":true,"shimotsuke.tochigi.jp":true,"shioya.tochigi.jp":true,"takanezawa.tochigi.jp":true,"tochigi.tochigi.jp":true,"tsuga.tochigi.jp":true,"ujiie.tochigi.jp":true,"utsunomiya.tochigi.jp":true,"yaita.tochigi.jp":true,"aizumi.tokushima.jp":true,"anan.tokushima.jp":true,"ichiba.tokushima.jp":true,"itano.tokushima.jp":true,"kainan.tokushima.jp":true,"komatsushima.tokushima.jp":true,"matsushige.tokushima.jp":true,"mima.tokushima.jp":true,"minami.tokushima.jp":true,"miyoshi.tokushima.jp":true,"mugi.tokushima.jp":true,"nakagawa.tokushima.jp":true,"naruto.tokushima.jp":true,"sanagochi.tokushima.jp":true,"shishikui.tokushima.jp":true,"tokushima.tokushima.jp":true,"wajiki.tokushima.jp":true,"adachi.tokyo.jp":true,"akiruno.tokyo.jp":true,"akishima.tokyo.jp":true,"aogashima.tokyo.jp":true,"arakawa.tokyo.jp":true,"bunkyo.tokyo.jp":true,"chiyoda.tokyo.jp":true,"chofu.tokyo.jp":true,"chuo.tokyo.jp":true,"edogawa.tokyo.jp":true,"fuchu.tokyo.jp":true,"fussa.tokyo.jp":true,"hachijo.tokyo.jp":true,"hachioji.tokyo.jp":true,"hamura.tokyo.jp":true,"higashikurume.tokyo.jp":true,"higashimurayama.tokyo.jp":true,"higashiyamato.tokyo.jp":true,"hino.tokyo.jp":true,"hinode.tokyo.jp":true,"hinohara.tokyo.jp":true,"inagi.tokyo.jp":true,"itabashi.tokyo.jp":true,"katsushika.tokyo.jp":true,"kita.tokyo.jp":true,"kiyose.tokyo.jp":true,"kodaira.tokyo.jp":tr
ue,"koganei.tokyo.jp":true,"kokubunji.tokyo.jp":true,"komae.tokyo.jp":true,"koto.tokyo.jp":true,"kouzushima.tokyo.jp":true,"kunitachi.tokyo.jp":true,"machida.tokyo.jp":true,"meguro.tokyo.jp":true,"minato.tokyo.jp":true,"mitaka.tokyo.jp":true,"mizuho.tokyo.jp":true,"musashimurayama.tokyo.jp":true,"musashino.tokyo.jp":true,"nakano.tokyo.jp":true,"nerima.tokyo.jp":true,"ogasawara.tokyo.jp":true,"okutama.tokyo.jp":true,"ome.tokyo.jp":true,"oshima.tokyo.jp":true,"ota.tokyo.jp":true,"setagaya.tokyo.jp":true,"shibuya.tokyo.jp":true,"shinagawa.tokyo.jp":true,"shinjuku.tokyo.jp":true,"suginami.tokyo.jp":true,"sumida.tokyo.jp":true,"tachikawa.tokyo.jp":true,"taito.tokyo.jp":true,"tama.tokyo.jp":true,"toshima.tokyo.jp":true,"chizu.tottori.jp":true,"hino.tottori.jp":true,"kawahara.tottori.jp":true,"koge.tottori.jp":true,"kotoura.tottori.jp":true,"misasa.tottori.jp":true,"nanbu.tottori.jp":true,"nichinan.tottori.jp":true,"sakaiminato.tottori.jp":true,"tottori.tottori.jp":true,"wakasa.tottori.jp":true,"yazu.tottori.jp":true,"yonago.tottori.jp":true,"asahi.toyama.jp":true,"fuchu.toyama.jp":true,"fukumitsu.toyama.jp":true,"funahashi.toyama.jp":true,"himi.toyama.jp":true,"imizu.toyama.jp":true,"inami.toyama.jp":true,"johana.toyama.jp":true,"kamiichi.toyama.jp":true,"kurobe.toyama.jp":true,"nakaniikawa.toyama.jp":true,"namerikawa.toyama.jp":true,"nanto.toyama.jp":true,"nyuzen.toyama.jp":true,"oyabe.toyama.jp":true,"taira.toyama.jp":true,"takaoka.toyama.jp":true,"tateyama.toyama.jp":true,"toga.toyama.jp":true,"tonami.toyama.jp":true,"toyama.toyama.jp":true,"unazuki.toyama.jp":true,"uozu.toyama.jp":true,"yamada.toyama.jp":true,"arida.wakayama.jp":true,"aridagawa.wakayama.jp":true,"gobo.wakayama.jp":true,"hashimoto.wakayama.jp":true,"hidaka.wakayama.jp":true,"hirogawa.wakayama.jp":true,"inami.wakayama.jp":true,"iwade.wakayama.jp":true,"kainan.wakayama.jp":true,"kamitonda.wakayama.jp":true,"katsuragi.wakayama.jp":true,"kimino.wakayama.jp":true,"kinokawa.wakayama.jp":true,"kitayama.wakayama.jp":true,"koya.wakayama.jp":true,"koza.wakayama.jp":true,"kozagawa.wakayama.jp":true,"kudoyama.wakayama.jp":true,"kushimoto.wakayama.jp":true,"mihama.wakayama.jp":true,"misato.wakayama.jp":true,"nachikatsuura.wakayama.jp":true,"shingu.wakayama.jp":true,"shirahama.wakayama.jp":true,"taiji.wakayama.jp":true,"tanabe.wakayama.jp":true,"wakayama.wakayama.jp":true,"yuasa.wakayama.jp":true,"yura.wakayama.jp":true,"asahi.yamagata.jp":true,"funagata.yamagata.jp":true,"higashine.yamagata.jp":true,"iide.yamagata.jp":true,"kahoku.yamagata.jp":true,"kaminoyama.yamagata.jp":true,"kaneyama.yamagata.jp":true,"kawanishi.yamagata.jp":true,"mamurogawa.yamagata.jp":true,"mikawa.yamagata.jp":true,"murayama.yamagata.jp":true,"nagai.yamagata.jp":true,"nakayama.yamagata.jp":true,"nanyo.yamagata.jp":true,"nishikawa.yamagata.jp":true,"obanazawa.yamagata.jp":true,"oe.yamagata.jp":true,"oguni.yamagata.jp":true,"ohkura.yamagata.jp":true,"oishida.yamagata.jp":true,"sagae.yamagata.jp":true,"sakata.yamagata.jp":true,"sakegawa.yamagata.jp":true,"shinjo.yamagata.jp":true,"shirataka.yamagata.jp":true,"shonai.yamagata.jp":true,"takahata.yamagata.jp":true,"tendo.yamagata.jp":true,"tozawa.yamagata.jp":true,"tsuruoka.yamagata.jp":true,"yamagata.yamagata.jp":true,"yamanobe.yamagata.jp":true,"yonezawa.yamagata.jp":true,"yuza.yamagata.jp":true,"abu.yamaguchi.jp":true,"hagi.yamaguchi.jp":true,"hikari.yamaguchi.jp":true,"hofu.yamaguchi.jp":true,"iwakuni.yamaguchi.jp":true,"kudamatsu.yamaguchi.jp":true,"mitou.yamaguchi.jp":true,"nagato.yamaguchi.jp":true,"oshima.yamaguch
i.jp":true,"shimonoseki.yamaguchi.jp":true,"shunan.yamaguchi.jp":true,"tabuse.yamaguchi.jp":true,"tokuyama.yamaguchi.jp":true,"toyota.yamaguchi.jp":true,"ube.yamaguchi.jp":true,"yuu.yamaguchi.jp":true,"chuo.yamanashi.jp":true,"doshi.yamanashi.jp":true,"fuefuki.yamanashi.jp":true,"fujikawa.yamanashi.jp":true,"fujikawaguchiko.yamanashi.jp":true,"fujiyoshida.yamanashi.jp":true,"hayakawa.yamanashi.jp":true,"hokuto.yamanashi.jp":true,"ichikawamisato.yamanashi.jp":true,"kai.yamanashi.jp":true,"kofu.yamanashi.jp":true,"koshu.yamanashi.jp":true,"kosuge.yamanashi.jp":true,"minami-alps.yamanashi.jp":true,"minobu.yamanashi.jp":true,"nakamichi.yamanashi.jp":true,"nanbu.yamanashi.jp":true,"narusawa.yamanashi.jp":true,"nirasaki.yamanashi.jp":true,"nishikatsura.yamanashi.jp":true,"oshino.yamanashi.jp":true,"otsuki.yamanashi.jp":true,"showa.yamanashi.jp":true,"tabayama.yamanashi.jp":true,"tsuru.yamanashi.jp":true,"uenohara.yamanashi.jp":true,"yamanakako.yamanashi.jp":true,"yamanashi.yamanashi.jp":true,"*.ke":true,"kg":true,"org.kg":true,"net.kg":true,"com.kg":true,"edu.kg":true,"gov.kg":true,"mil.kg":true,"*.kh":true,"ki":true,"edu.ki":true,"biz.ki":true,"net.ki":true,"org.ki":true,"gov.ki":true,"info.ki":true,"com.ki":true,"km":true,"org.km":true,"nom.km":true,"gov.km":true,"prd.km":true,"tm.km":true,"edu.km":true,"mil.km":true,"ass.km":true,"com.km":true,"coop.km":true,"asso.km":true,"presse.km":true,"medecin.km":true,"notaires.km":true,"pharmaciens.km":true,"veterinaire.km":true,"gouv.km":true,"kn":true,"net.kn":true,"org.kn":true,"edu.kn":true,"gov.kn":true,"kp":true,"com.kp":true,"edu.kp":true,"gov.kp":true,"org.kp":true,"rep.kp":true,"tra.kp":true,"kr":true,"ac.kr":true,"co.kr":true,"es.kr":true,"go.kr":true,"hs.kr":true,"kg.kr":true,"mil.kr":true,"ms.kr":true,"ne.kr":true,"or.kr":true,"pe.kr":true,"re.kr":true,"sc.kr":true,"busan.kr":true,"chungbuk.kr":true,"chungnam.kr":true,"daegu.kr":true,"daejeon.kr":true,"gangwon.kr":true,"gwangju.kr":true,"gyeongbuk.kr":true,"gyeonggi.kr":true,"gyeongnam.kr":true,"incheon.kr":true,"jeju.kr":true,"jeonbuk.kr":true,"jeonnam.kr":true,"seoul.kr":true,"ulsan.kr":true,"*.kw":true,"ky":true,"edu.ky":true,"gov.ky":true,"com.ky":true,"org.ky":true,"net.ky":true,"kz":true,"org.kz":true,"edu.kz":true,"net.kz":true,"gov.kz":true,"mil.kz":true,"com.kz":true,"la":true,"int.la":true,"net.la":true,"info.la":true,"edu.la":true,"gov.la":true,"per.la":true,"com.la":true,"org.la":true,"lb":true,"com.lb":true,"edu.lb":true,"gov.lb":true,"net.lb":true,"org.lb":true,"lc":true,"com.lc":true,"net.lc":true,"co.lc":true,"org.lc":true,"edu.lc":true,"gov.lc":true,"li":true,"lk":true,"gov.lk":true,"sch.lk":true,"net.lk":true,"int.lk":true,"com.lk":true,"org.lk":true,"edu.lk":true,"ngo.lk":true,"soc.lk":true,"web.lk":true,"ltd.lk":true,"assn.lk":true,"grp.lk":true,"hotel.lk":true,"ac.lk":true,"lr":true,"com.lr":true,"edu.lr":true,"gov.lr":true,"org.lr":true,"net.lr":true,"ls":true,"co.ls":true,"org.ls":true,"lt":true,"gov.lt":true,"lu":true,"lv":true,"com.lv":true,"edu.lv":true,"gov.lv":true,"org.lv":true,"mil.lv":true,"id.lv":true,"net.lv":true,"asn.lv":true,"conf.lv":true,"ly":true,"com.ly":true,"net.ly":true,"gov.ly":true,"plc.ly":true,"edu.ly":true,"sch.ly":true,"med.ly":true,"org.ly":true,"id.ly":true,"ma":true,"co.ma":true,"net.ma":true,"gov.ma":true,"org.ma":true,"ac.ma":true,"press.ma":true,"mc":true,"tm.mc":true,"asso.mc":true,"md":true,"me":true,"co.me":true,"net.me":true,"org.me":true,"edu.me":true,"ac.me":true,"gov.me":true,"its.me":true,"priv.me":true,"mg":true,"org.mg":true,"n
om.mg":true,"gov.mg":true,"prd.mg":true,"tm.mg":true,"edu.mg":true,"mil.mg":true,"com.mg":true,"co.mg":true,"mh":true,"mil":true,"mk":true,"com.mk":true,"org.mk":true,"net.mk":true,"edu.mk":true,"gov.mk":true,"inf.mk":true,"name.mk":true,"ml":true,"com.ml":true,"edu.ml":true,"gouv.ml":true,"gov.ml":true,"net.ml":true,"org.ml":true,"presse.ml":true,"*.mm":true,"mn":true,"gov.mn":true,"edu.mn":true,"org.mn":true,"mo":true,"com.mo":true,"net.mo":true,"org.mo":true,"edu.mo":true,"gov.mo":true,"mobi":true,"mp":true,"mq":true,"mr":true,"gov.mr":true,"ms":true,"com.ms":true,"edu.ms":true,"gov.ms":true,"net.ms":true,"org.ms":true,"mt":true,"com.mt":true,"edu.mt":true,"net.mt":true,"org.mt":true,"mu":true,"com.mu":true,"net.mu":true,"org.mu":true,"gov.mu":true,"ac.mu":true,"co.mu":true,"or.mu":true,"museum":true,"academy.museum":true,"agriculture.museum":true,"air.museum":true,"airguard.museum":true,"alabama.museum":true,"alaska.museum":true,"amber.museum":true,"ambulance.museum":true,"american.museum":true,"americana.museum":true,"americanantiques.museum":true,"americanart.museum":true,"amsterdam.museum":true,"and.museum":true,"annefrank.museum":true,"anthro.museum":true,"anthropology.museum":true,"antiques.museum":true,"aquarium.museum":true,"arboretum.museum":true,"archaeological.museum":true,"archaeology.museum":true,"architecture.museum":true,"art.museum":true,"artanddesign.museum":true,"artcenter.museum":true,"artdeco.museum":true,"arteducation.museum":true,"artgallery.museum":true,"arts.museum":true,"artsandcrafts.museum":true,"asmatart.museum":true,"assassination.museum":true,"assisi.museum":true,"association.museum":true,"astronomy.museum":true,"atlanta.museum":true,"austin.museum":true,"australia.museum":true,"automotive.museum":true,"aviation.museum":true,"axis.museum":true,"badajoz.museum":true,"baghdad.museum":true,"bahn.museum":true,"bale.museum":true,"baltimore.museum":true,"barcelona.museum":true,"baseball.museum":true,"basel.museum":true,"baths.museum":true,"bauern.museum":true,"beauxarts.museum":true,"beeldengeluid.museum":true,"bellevue.museum":true,"bergbau.museum":true,"berkeley.museum":true,"berlin.museum":true,"bern.museum":true,"bible.museum":true,"bilbao.museum":true,"bill.museum":true,"birdart.museum":true,"birthplace.museum":true,"bonn.museum":true,"boston.museum":true,"botanical.museum":true,"botanicalgarden.museum":true,"botanicgarden.museum":true,"botany.museum":true,"brandywinevalley.museum":true,"brasil.museum":true,"bristol.museum":true,"british.museum":true,"britishcolumbia.museum":true,"broadcast.museum":true,"brunel.museum":true,"brussel.museum":true,"brussels.museum":true,"bruxelles.museum":true,"building.museum":true,"burghof.museum":true,"bus.museum":true,"bushey.museum":true,"cadaques.museum":true,"california.museum":true,"cambridge.museum":true,"can.museum":true,"canada.museum":true,"capebreton.museum":true,"carrier.museum":true,"cartoonart.museum":true,"casadelamoneda.museum":true,"castle.museum":true,"castres.museum":true,"celtic.museum":true,"center.museum":true,"chattanooga.museum":true,"cheltenham.museum":true,"chesapeakebay.museum":true,"chicago.museum":true,"children.museum":true,"childrens.museum":true,"childrensgarden.museum":true,"chiropractic.museum":true,"chocolate.museum":true,"christiansburg.museum":true,"cincinnati.museum":true,"cinema.museum":true,"circus.museum":true,"civilisation.museum":true,"civilization.museum":true,"civilwar.museum":true,"clinton.museum":true,"clock.museum":true,"coal.museum":true,"coastaldefence.museum":true,"cody.museu
m":true,"coldwar.museum":true,"collection.museum":true,"colonialwilliamsburg.museum":true,"coloradoplateau.museum":true,"columbia.museum":true,"columbus.museum":true,"communication.museum":true,"communications.museum":true,"community.museum":true,"computer.museum":true,"computerhistory.museum":true,"xn--comunicaes-v6a2o.museum":true,"contemporary.museum":true,"contemporaryart.museum":true,"convent.museum":true,"copenhagen.museum":true,"corporation.museum":true,"xn--correios-e-telecomunicaes-ghc29a.museum":true,"corvette.museum":true,"costume.museum":true,"countryestate.museum":true,"county.museum":true,"crafts.museum":true,"cranbrook.museum":true,"creation.museum":true,"cultural.museum":true,"culturalcenter.museum":true,"culture.museum":true,"cyber.museum":true,"cymru.museum":true,"dali.museum":true,"dallas.museum":true,"database.museum":true,"ddr.museum":true,"decorativearts.museum":true,"delaware.museum":true,"delmenhorst.museum":true,"denmark.museum":true,"depot.museum":true,"design.museum":true,"detroit.museum":true,"dinosaur.museum":true,"discovery.museum":true,"dolls.museum":true,"donostia.museum":true,"durham.museum":true,"eastafrica.museum":true,"eastcoast.museum":true,"education.museum":true,"educational.museum":true,"egyptian.museum":true,"eisenbahn.museum":true,"elburg.museum":true,"elvendrell.museum":true,"embroidery.museum":true,"encyclopedic.museum":true,"england.museum":true,"entomology.museum":true,"environment.museum":true,"environmentalconservation.museum":true,"epilepsy.museum":true,"essex.museum":true,"estate.museum":true,"ethnology.museum":true,"exeter.museum":true,"exhibition.museum":true,"family.museum":true,"farm.museum":true,"farmequipment.museum":true,"farmers.museum":true,"farmstead.museum":true,"field.museum":true,"figueres.museum":true,"filatelia.museum":true,"film.museum":true,"fineart.museum":true,"finearts.museum":true,"finland.museum":true,"flanders.museum":true,"florida.museum":true,"force.museum":true,"fortmissoula.museum":true,"fortworth.museum":true,"foundation.museum":true,"francaise.museum":true,"frankfurt.museum":true,"franziskaner.museum":true,"freemasonry.museum":true,"freiburg.museum":true,"fribourg.museum":true,"frog.museum":true,"fundacio.museum":true,"furniture.museum":true,"gallery.museum":true,"garden.museum":true,"gateway.museum":true,"geelvinck.museum":true,"gemological.museum":true,"geology.museum":true,"georgia.museum":true,"giessen.museum":true,"glas.museum":true,"glass.museum":true,"gorge.museum":true,"grandrapids.museum":true,"graz.museum":true,"guernsey.museum":true,"halloffame.museum":true,"hamburg.museum":true,"handson.museum":true,"harvestcelebration.museum":true,"hawaii.museum":true,"health.museum":true,"heimatunduhren.museum":true,"hellas.museum":true,"helsinki.museum":true,"hembygdsforbund.museum":true,"heritage.museum":true,"histoire.museum":true,"historical.museum":true,"historicalsociety.museum":true,"historichouses.museum":true,"historisch.museum":true,"historisches.museum":true,"history.museum":true,"historyofscience.museum":true,"horology.museum":true,"house.museum":true,"humanities.museum":true,"illustration.museum":true,"imageandsound.museum":true,"indian.museum":true,"indiana.museum":true,"indianapolis.museum":true,"indianmarket.museum":true,"intelligence.museum":true,"interactive.museum":true,"iraq.museum":true,"iron.museum":true,"isleofman.museum":true,"jamison.museum":true,"jefferson.museum":true,"jerusalem.museum":true,"jewelry.museum":true,"jewish.museum":true,"jewishart.museum":true,"jfk.museum":true,"journalism.mu
seum":true,"judaica.museum":true,"judygarland.museum":true,"juedisches.museum":true,"juif.museum":true,"karate.museum":true,"karikatur.museum":true,"kids.museum":true,"koebenhavn.museum":true,"koeln.museum":true,"kunst.museum":true,"kunstsammlung.museum":true,"kunstunddesign.museum":true,"labor.museum":true,"labour.museum":true,"lajolla.museum":true,"lancashire.museum":true,"landes.museum":true,"lans.museum":true,"xn--lns-qla.museum":true,"larsson.museum":true,"lewismiller.museum":true,"lincoln.museum":true,"linz.museum":true,"living.museum":true,"livinghistory.museum":true,"localhistory.museum":true,"london.museum":true,"losangeles.museum":true,"louvre.museum":true,"loyalist.museum":true,"lucerne.museum":true,"luxembourg.museum":true,"luzern.museum":true,"mad.museum":true,"madrid.museum":true,"mallorca.museum":true,"manchester.museum":true,"mansion.museum":true,"mansions.museum":true,"manx.museum":true,"marburg.museum":true,"maritime.museum":true,"maritimo.museum":true,"maryland.museum":true,"marylhurst.museum":true,"media.museum":true,"medical.museum":true,"medizinhistorisches.museum":true,"meeres.museum":true,"memorial.museum":true,"mesaverde.museum":true,"michigan.museum":true,"midatlantic.museum":true,"military.museum":true,"mill.museum":true,"miners.museum":true,"mining.museum":true,"minnesota.museum":true,"missile.museum":true,"missoula.museum":true,"modern.museum":true,"moma.museum":true,"money.museum":true,"monmouth.museum":true,"monticello.museum":true,"montreal.museum":true,"moscow.museum":true,"motorcycle.museum":true,"muenchen.museum":true,"muenster.museum":true,"mulhouse.museum":true,"muncie.museum":true,"museet.museum":true,"museumcenter.museum":true,"museumvereniging.museum":true,"music.museum":true,"national.museum":true,"nationalfirearms.museum":true,"nationalheritage.museum":true,"nativeamerican.museum":true,"naturalhistory.museum":true,"naturalhistorymuseum.museum":true,"naturalsciences.museum":true,"nature.museum":true,"naturhistorisches.museum":true,"natuurwetenschappen.museum":true,"naumburg.museum":true,"naval.museum":true,"nebraska.museum":true,"neues.museum":true,"newhampshire.museum":true,"newjersey.museum":true,"newmexico.museum":true,"newport.museum":true,"newspaper.museum":true,"newyork.museum":true,"niepce.museum":true,"norfolk.museum":true,"north.museum":true,"nrw.museum":true,"nuernberg.museum":true,"nuremberg.museum":true,"nyc.museum":true,"nyny.museum":true,"oceanographic.museum":true,"oceanographique.museum":true,"omaha.museum":true,"online.museum":true,"ontario.museum":true,"openair.museum":true,"oregon.museum":true,"oregontrail.museum":true,"otago.museum":true,"oxford.museum":true,"pacific.museum":true,"paderborn.museum":true,"palace.museum":true,"paleo.museum":true,"palmsprings.museum":true,"panama.museum":true,"paris.museum":true,"pasadena.museum":true,"pharmacy.museum":true,"philadelphia.museum":true,"philadelphiaarea.museum":true,"philately.museum":true,"phoenix.museum":true,"photography.museum":true,"pilots.museum":true,"pittsburgh.museum":true,"planetarium.museum":true,"plantation.museum":true,"plants.museum":true,"plaza.museum":true,"portal.museum":true,"portland.museum":true,"portlligat.museum":true,"posts-and-telecommunications.museum":true,"preservation.museum":true,"presidio.museum":true,"press.museum":true,"project.museum":true,"public.museum":true,"pubol.museum":true,"quebec.museum":true,"railroad.museum":true,"railway.museum":true,"research.museum":true,"resistance.museum":true,"riodejaneiro.museum":true,"rochester.museum":true,"rockart.mu
seum":true,"roma.museum":true,"russia.museum":true,"saintlouis.museum":true,"salem.museum":true,"salvadordali.museum":true,"salzburg.museum":true,"sandiego.museum":true,"sanfrancisco.museum":true,"santabarbara.museum":true,"santacruz.museum":true,"santafe.museum":true,"saskatchewan.museum":true,"satx.museum":true,"savannahga.museum":true,"schlesisches.museum":true,"schoenbrunn.museum":true,"schokoladen.museum":true,"school.museum":true,"schweiz.museum":true,"science.museum":true,"scienceandhistory.museum":true,"scienceandindustry.museum":true,"sciencecenter.museum":true,"sciencecenters.museum":true,"science-fiction.museum":true,"sciencehistory.museum":true,"sciences.museum":true,"sciencesnaturelles.museum":true,"scotland.museum":true,"seaport.museum":true,"settlement.museum":true,"settlers.museum":true,"shell.museum":true,"sherbrooke.museum":true,"sibenik.museum":true,"silk.museum":true,"ski.museum":true,"skole.museum":true,"society.museum":true,"sologne.museum":true,"soundandvision.museum":true,"southcarolina.museum":true,"southwest.museum":true,"space.museum":true,"spy.museum":true,"square.museum":true,"stadt.museum":true,"stalbans.museum":true,"starnberg.museum":true,"state.museum":true,"stateofdelaware.museum":true,"station.museum":true,"steam.museum":true,"steiermark.museum":true,"stjohn.museum":true,"stockholm.museum":true,"stpetersburg.museum":true,"stuttgart.museum":true,"suisse.museum":true,"surgeonshall.museum":true,"surrey.museum":true,"svizzera.museum":true,"sweden.museum":true,"sydney.museum":true,"tank.museum":true,"tcm.museum":true,"technology.museum":true,"telekommunikation.museum":true,"television.museum":true,"texas.museum":true,"textile.museum":true,"theater.museum":true,"time.museum":true,"timekeeping.museum":true,"topology.museum":true,"torino.museum":true,"touch.museum":true,"town.museum":true,"transport.museum":true,"tree.museum":true,"trolley.museum":true,"trust.museum":true,"trustee.museum":true,"uhren.museum":true,"ulm.museum":true,"undersea.museum":true,"university.museum":true,"usa.museum":true,"usantiques.museum":true,"usarts.museum":true,"uscountryestate.museum":true,"usculture.museum":true,"usdecorativearts.museum":true,"usgarden.museum":true,"ushistory.museum":true,"ushuaia.museum":true,"uslivinghistory.museum":true,"utah.museum":true,"uvic.museum":true,"valley.museum":true,"vantaa.museum":true,"versailles.museum":true,"viking.museum":true,"village.museum":true,"virginia.museum":true,"virtual.museum":true,"virtuel.museum":true,"vlaanderen.museum":true,"volkenkunde.museum":true,"wales.museum":true,"wallonie.museum":true,"war.museum":true,"washingtondc.museum":true,"watchandclock.museum":true,"watch-and-clock.museum":true,"western.museum":true,"westfalen.museum":true,"whaling.museum":true,"wildlife.museum":true,"williamsburg.museum":true,"windmill.museum":true,"workshop.museum":true,"york.museum":true,"yorkshire.museum":true,"yosemite.museum":true,"youth.museum":true,"zoological.museum":true,"zoology.museum":true,"xn--9dbhblg6di.museum":true,"xn--h1aegh.museum":true,"mv":true,"aero.mv":true,"biz.mv":true,"com.mv":true,"coop.mv":true,"edu.mv":true,"gov.mv":true,"info.mv":true,"int.mv":true,"mil.mv":true,"museum.mv":true,"name.mv":true,"net.mv":true,"org.mv":true,"pro.mv":true,"mw":true,"ac.mw":true,"biz.mw":true,"co.mw":true,"com.mw":true,"coop.mw":true,"edu.mw":true,"gov.mw":true,"int.mw":true,"museum.mw":true,"net.mw":true,"org.mw":true,"mx":true,"com.mx":true,"org.mx":true,"gob.mx":true,"edu.mx":true,"net.mx":true,"my":true,"com.my":true,"net.my":true,"org.my
":true,"gov.my":true,"edu.my":true,"mil.my":true,"name.my":true,"*.mz":true,"teledata.mz":false,"na":true,"info.na":true,"pro.na":true,"name.na":true,"school.na":true,"or.na":true,"dr.na":true,"us.na":true,"mx.na":true,"ca.na":true,"in.na":true,"cc.na":true,"tv.na":true,"ws.na":true,"mobi.na":true,"co.na":true,"com.na":true,"org.na":true,"name":true,"nc":true,"asso.nc":true,"ne":true,"net":true,"nf":true,"com.nf":true,"net.nf":true,"per.nf":true,"rec.nf":true,"web.nf":true,"arts.nf":true,"firm.nf":true,"info.nf":true,"other.nf":true,"store.nf":true,"ng":true,"com.ng":true,"edu.ng":true,"name.ng":true,"net.ng":true,"org.ng":true,"sch.ng":true,"gov.ng":true,"mil.ng":true,"mobi.ng":true,"*.ni":true,"nl":true,"bv.nl":true,"no":true,"fhs.no":true,"vgs.no":true,"fylkesbibl.no":true,"folkebibl.no":true,"museum.no":true,"idrett.no":true,"priv.no":true,"mil.no":true,"stat.no":true,"dep.no":true,"kommune.no":true,"herad.no":true,"aa.no":true,"ah.no":true,"bu.no":true,"fm.no":true,"hl.no":true,"hm.no":true,"jan-mayen.no":true,"mr.no":true,"nl.no":true,"nt.no":true,"of.no":true,"ol.no":true,"oslo.no":true,"rl.no":true,"sf.no":true,"st.no":true,"svalbard.no":true,"tm.no":true,"tr.no":true,"va.no":true,"vf.no":true,"gs.aa.no":true,"gs.ah.no":true,"gs.bu.no":true,"gs.fm.no":true,"gs.hl.no":true,"gs.hm.no":true,"gs.jan-mayen.no":true,"gs.mr.no":true,"gs.nl.no":true,"gs.nt.no":true,"gs.of.no":true,"gs.ol.no":true,"gs.oslo.no":true,"gs.rl.no":true,"gs.sf.no":true,"gs.st.no":true,"gs.svalbard.no":true,"gs.tm.no":true,"gs.tr.no":true,"gs.va.no":true,"gs.vf.no":true,"akrehamn.no":true,"xn--krehamn-dxa.no":true,"algard.no":true,"xn--lgrd-poac.no":true,"arna.no":true,"brumunddal.no":true,"bryne.no":true,"bronnoysund.no":true,"xn--brnnysund-m8ac.no":true,"drobak.no":true,"xn--drbak-wua.no":true,"egersund.no":true,"fetsund.no":true,"floro.no":true,"xn--flor-jra.no":true,"fredrikstad.no":true,"hokksund.no":true,"honefoss.no":true,"xn--hnefoss-q1a.no":true,"jessheim.no":true,"jorpeland.no":true,"xn--jrpeland-54a.no":true,"kirkenes.no":true,"kopervik.no":true,"krokstadelva.no":true,"langevag.no":true,"xn--langevg-jxa.no":true,"leirvik.no":true,"mjondalen.no":true,"xn--mjndalen-64a.no":true,"mo-i-rana.no":true,"mosjoen.no":true,"xn--mosjen-eya.no":true,"nesoddtangen.no":true,"orkanger.no":true,"osoyro.no":true,"xn--osyro-wua.no":true,"raholt.no":true,"xn--rholt-mra.no":true,"sandnessjoen.no":true,"xn--sandnessjen-ogb.no":true,"skedsmokorset.no":true,"slattum.no":true,"spjelkavik.no":true,"stathelle.no":true,"stavern.no":true,"stjordalshalsen.no":true,"xn--stjrdalshalsen-sqb.no":true,"tananger.no":true,"tranby.no":true,"vossevangen.no":true,"afjord.no":true,"xn--fjord-lra.no":true,"agdenes.no":true,"al.no":true,"xn--l-1fa.no":true,"alesund.no":true,"xn--lesund-hua.no":true,"alstahaug.no":true,"alta.no":true,"xn--lt-liac.no":true,"alaheadju.no":true,"xn--laheadju-7ya.no":true,"alvdal.no":true,"amli.no":true,"xn--mli-tla.no":true,"amot.no":true,"xn--mot-tla.no":true,"andebu.no":true,"andoy.no":true,"xn--andy-ira.no":true,"andasuolo.no":true,"ardal.no":true,"xn--rdal-poa.no":true,"aremark.no":true,"arendal.no":true,"xn--s-1fa.no":true,"aseral.no":true,"xn--seral-lra.no":true,"asker.no":true,"askim.no":true,"askvoll.no":true,"askoy.no":true,"xn--asky-ira.no":true,"asnes.no":true,"xn--snes-poa.no":true,"audnedaln.no":true,"aukra.no":true,"aure.no":true,"aurland.no":true,"aurskog-holand.no":true,"xn--aurskog-hland-jnb.no":true,"austevoll.no":true,"austrheim.no":true,"averoy.no":true,"xn--avery-yua.no":true,"balestrand.no":tru
e,"ballangen.no":true,"balat.no":true,"xn--blt-elab.no":true,"balsfjord.no":true,"bahccavuotna.no":true,"xn--bhccavuotna-k7a.no":true,"bamble.no":true,"bardu.no":true,"beardu.no":true,"beiarn.no":true,"bajddar.no":true,"xn--bjddar-pta.no":true,"baidar.no":true,"xn--bidr-5nac.no":true,"berg.no":true,"bergen.no":true,"berlevag.no":true,"xn--berlevg-jxa.no":true,"bearalvahki.no":true,"xn--bearalvhki-y4a.no":true,"bindal.no":true,"birkenes.no":true,"bjarkoy.no":true,"xn--bjarky-fya.no":true,"bjerkreim.no":true,"bjugn.no":true,"bodo.no":true,"xn--bod-2na.no":true,"badaddja.no":true,"xn--bdddj-mrabd.no":true,"budejju.no":true,"bokn.no":true,"bremanger.no":true,"bronnoy.no":true,"xn--brnny-wuac.no":true,"bygland.no":true,"bykle.no":true,"barum.no":true,"xn--brum-voa.no":true,"bo.telemark.no":true,"xn--b-5ga.telemark.no":true,"bo.nordland.no":true,"xn--b-5ga.nordland.no":true,"bievat.no":true,"xn--bievt-0qa.no":true,"bomlo.no":true,"xn--bmlo-gra.no":true,"batsfjord.no":true,"xn--btsfjord-9za.no":true,"bahcavuotna.no":true,"xn--bhcavuotna-s4a.no":true,"dovre.no":true,"drammen.no":true,"drangedal.no":true,"dyroy.no":true,"xn--dyry-ira.no":true,"donna.no":true,"xn--dnna-gra.no":true,"eid.no":true,"eidfjord.no":true,"eidsberg.no":true,"eidskog.no":true,"eidsvoll.no":true,"eigersund.no":true,"elverum.no":true,"enebakk.no":true,"engerdal.no":true,"etne.no":true,"etnedal.no":true,"evenes.no":true,"evenassi.no":true,"xn--eveni-0qa01ga.no":true,"evje-og-hornnes.no":true,"farsund.no":true,"fauske.no":true,"fuossko.no":true,"fuoisku.no":true,"fedje.no":true,"fet.no":true,"finnoy.no":true,"xn--finny-yua.no":true,"fitjar.no":true,"fjaler.no":true,"fjell.no":true,"flakstad.no":true,"flatanger.no":true,"flekkefjord.no":true,"flesberg.no":true,"flora.no":true,"fla.no":true,"xn--fl-zia.no":true,"folldal.no":true,"forsand.no":true,"fosnes.no":true,"frei.no":true,"frogn.no":true,"froland.no":true,"frosta.no":true,"frana.no":true,"xn--frna-woa.no":true,"froya.no":true,"xn--frya-hra.no":true,"fusa.no":true,"fyresdal.no":true,"forde.no":true,"xn--frde-gra.no":true,"gamvik.no":true,"gangaviika.no":true,"xn--ggaviika-8ya47h.no":true,"gaular.no":true,"gausdal.no":true,"gildeskal.no":true,"xn--gildeskl-g0a.no":true,"giske.no":true,"gjemnes.no":true,"gjerdrum.no":true,"gjerstad.no":true,"gjesdal.no":true,"gjovik.no":true,"xn--gjvik-wua.no":true,"gloppen.no":true,"gol.no":true,"gran.no":true,"grane.no":true,"granvin.no":true,"gratangen.no":true,"grimstad.no":true,"grong.no":true,"kraanghke.no":true,"xn--kranghke-b0a.no":true,"grue.no":true,"gulen.no":true,"hadsel.no":true,"halden.no":true,"halsa.no":true,"hamar.no":true,"hamaroy.no":true,"habmer.no":true,"xn--hbmer-xqa.no":true,"hapmir.no":true,"xn--hpmir-xqa.no":true,"hammerfest.no":true,"hammarfeasta.no":true,"xn--hmmrfeasta-s4ac.no":true,"haram.no":true,"hareid.no":true,"harstad.no":true,"hasvik.no":true,"aknoluokta.no":true,"xn--koluokta-7ya57h.no":true,"hattfjelldal.no":true,"aarborte.no":true,"haugesund.no":true,"hemne.no":true,"hemnes.no":true,"hemsedal.no":true,"heroy.more-og-romsdal.no":true,"xn--hery-ira.xn--mre-og-romsdal-qqb.no":true,"heroy.nordland.no":true,"xn--hery-ira.nordland.no":true,"hitra.no":true,"hjartdal.no":true,"hjelmeland.no":true,"hobol.no":true,"xn--hobl-ira.no":true,"hof.no":true,"hol.no":true,"hole.no":true,"holmestrand.no":true,"holtalen.no":true,"xn--holtlen-hxa.no":true,"hornindal.no":true,"horten.no":true,"hurdal.no":true,"hurum.no":true,"hvaler.no":true,"hyllestad.no":true,"hagebostad.no":true,"xn--hgebostad-g3a.no":true,"hoyanger.no":true,"
xn--hyanger-q1a.no":true,"hoylandet.no":true,"xn--hylandet-54a.no":true,"ha.no":true,"xn--h-2fa.no":true,"ibestad.no":true,"inderoy.no":true,"xn--indery-fya.no":true,"iveland.no":true,"jevnaker.no":true,"jondal.no":true,"jolster.no":true,"xn--jlster-bya.no":true,"karasjok.no":true,"karasjohka.no":true,"xn--krjohka-hwab49j.no":true,"karlsoy.no":true,"galsa.no":true,"xn--gls-elac.no":true,"karmoy.no":true,"xn--karmy-yua.no":true,"kautokeino.no":true,"guovdageaidnu.no":true,"klepp.no":true,"klabu.no":true,"xn--klbu-woa.no":true,"kongsberg.no":true,"kongsvinger.no":true,"kragero.no":true,"xn--krager-gya.no":true,"kristiansand.no":true,"kristiansund.no":true,"krodsherad.no":true,"xn--krdsherad-m8a.no":true,"kvalsund.no":true,"rahkkeravju.no":true,"xn--rhkkervju-01af.no":true,"kvam.no":true,"kvinesdal.no":true,"kvinnherad.no":true,"kviteseid.no":true,"kvitsoy.no":true,"xn--kvitsy-fya.no":true,"kvafjord.no":true,"xn--kvfjord-nxa.no":true,"giehtavuoatna.no":true,"kvanangen.no":true,"xn--kvnangen-k0a.no":true,"navuotna.no":true,"xn--nvuotna-hwa.no":true,"kafjord.no":true,"xn--kfjord-iua.no":true,"gaivuotna.no":true,"xn--givuotna-8ya.no":true,"larvik.no":true,"lavangen.no":true,"lavagis.no":true,"loabat.no":true,"xn--loabt-0qa.no":true,"lebesby.no":true,"davvesiida.no":true,"leikanger.no":true,"leirfjord.no":true,"leka.no":true,"leksvik.no":true,"lenvik.no":true,"leangaviika.no":true,"xn--leagaviika-52b.no":true,"lesja.no":true,"levanger.no":true,"lier.no":true,"lierne.no":true,"lillehammer.no":true,"lillesand.no":true,"lindesnes.no":true,"lindas.no":true,"xn--linds-pra.no":true,"lom.no":true,"loppa.no":true,"lahppi.no":true,"xn--lhppi-xqa.no":true,"lund.no":true,"lunner.no":true,"luroy.no":true,"xn--lury-ira.no":true,"luster.no":true,"lyngdal.no":true,"lyngen.no":true,"ivgu.no":true,"lardal.no":true,"lerdal.no":true,"xn--lrdal-sra.no":true,"lodingen.no":true,"xn--ldingen-q1a.no":true,"lorenskog.no":true,"xn--lrenskog-54a.no":true,"loten.no":true,"xn--lten-gra.no":true,"malvik.no":true,"masoy.no":true,"xn--msy-ula0h.no":true,"muosat.no":true,"xn--muost-0qa.no":true,"mandal.no":true,"marker.no":true,"marnardal.no":true,"masfjorden.no":true,"meland.no":true,"meldal.no":true,"melhus.no":true,"meloy.no":true,"xn--mely-ira.no":true,"meraker.no":true,"xn--merker-kua.no":true,"moareke.no":true,"xn--moreke-jua.no":true,"midsund.no":true,"midtre-gauldal.no":true,"modalen.no":true,"modum.no":true,"molde.no":true,"moskenes.no":true,"moss.no":true,"mosvik.no":true,"malselv.no":true,"xn--mlselv-iua.no":true,"malatvuopmi.no":true,"xn--mlatvuopmi-s4a.no":true,"namdalseid.no":true,"aejrie.no":true,"namsos.no":true,"namsskogan.no":true,"naamesjevuemie.no":true,"xn--nmesjevuemie-tcba.no":true,"laakesvuemie.no":true,"nannestad.no":true,"narvik.no":true,"narviika.no":true,"naustdal.no":true,"nedre-eiker.no":true,"nes.akershus.no":true,"nes.buskerud.no":true,"nesna.no":true,"nesodden.no":true,"nesseby.no":true,"unjarga.no":true,"xn--unjrga-rta.no":true,"nesset.no":true,"nissedal.no":true,"nittedal.no":true,"nord-aurdal.no":true,"nord-fron.no":true,"nord-odal.no":true,"norddal.no":true,"nordkapp.no":true,"davvenjarga.no":true,"xn--davvenjrga-y4a.no":true,"nordre-land.no":true,"nordreisa.no":true,"raisa.no":true,"xn--risa-5na.no":true,"nore-og-uvdal.no":true,"notodden.no":true,"naroy.no":true,"xn--nry-yla5g.no":true,"notteroy.no":true,"xn--nttery-byae.no":true,"odda.no":true,"oksnes.no":true,"xn--ksnes-uua.no":true,"oppdal.no":true,"oppegard.no":true,"xn--oppegrd-ixa.no":true,"orkdal.no":true,"orland.no":true,"xn--rland-uu
a.no":true,"orskog.no":true,"xn--rskog-uua.no":true,"orsta.no":true,"xn--rsta-fra.no":true,"os.hedmark.no":true,"os.hordaland.no":true,"osen.no":true,"osteroy.no":true,"xn--ostery-fya.no":true,"ostre-toten.no":true,"xn--stre-toten-zcb.no":true,"overhalla.no":true,"ovre-eiker.no":true,"xn--vre-eiker-k8a.no":true,"oyer.no":true,"xn--yer-zna.no":true,"oygarden.no":true,"xn--ygarden-p1a.no":true,"oystre-slidre.no":true,"xn--ystre-slidre-ujb.no":true,"porsanger.no":true,"porsangu.no":true,"xn--porsgu-sta26f.no":true,"porsgrunn.no":true,"radoy.no":true,"xn--rady-ira.no":true,"rakkestad.no":true,"rana.no":true,"ruovat.no":true,"randaberg.no":true,"rauma.no":true,"rendalen.no":true,"rennebu.no":true,"rennesoy.no":true,"xn--rennesy-v1a.no":true,"rindal.no":true,"ringebu.no":true,"ringerike.no":true,"ringsaker.no":true,"rissa.no":true,"risor.no":true,"xn--risr-ira.no":true,"roan.no":true,"rollag.no":true,"rygge.no":true,"ralingen.no":true,"xn--rlingen-mxa.no":true,"rodoy.no":true,"xn--rdy-0nab.no":true,"romskog.no":true,"xn--rmskog-bya.no":true,"roros.no":true,"xn--rros-gra.no":true,"rost.no":true,"xn--rst-0na.no":true,"royken.no":true,"xn--ryken-vua.no":true,"royrvik.no":true,"xn--ryrvik-bya.no":true,"rade.no":true,"xn--rde-ula.no":true,"salangen.no":true,"siellak.no":true,"saltdal.no":true,"salat.no":true,"xn--slt-elab.no":true,"xn--slat-5na.no":true,"samnanger.no":true,"sande.more-og-romsdal.no":true,"sande.xn--mre-og-romsdal-qqb.no":true,"sande.vestfold.no":true,"sandefjord.no":true,"sandnes.no":true,"sandoy.no":true,"xn--sandy-yua.no":true,"sarpsborg.no":true,"sauda.no":true,"sauherad.no":true,"sel.no":true,"selbu.no":true,"selje.no":true,"seljord.no":true,"sigdal.no":true,"siljan.no":true,"sirdal.no":true,"skaun.no":true,"skedsmo.no":true,"ski.no":true,"skien.no":true,"skiptvet.no":true,"skjervoy.no":true,"xn--skjervy-v1a.no":true,"skierva.no":true,"xn--skierv-uta.no":true,"skjak.no":true,"xn--skjk-soa.no":true,"skodje.no":true,"skanland.no":true,"xn--sknland-fxa.no":true,"skanit.no":true,"xn--sknit-yqa.no":true,"smola.no":true,"xn--smla-hra.no":true,"snillfjord.no":true,"snasa.no":true,"xn--snsa-roa.no":true,"snoasa.no":true,"snaase.no":true,"xn--snase-nra.no":true,"sogndal.no":true,"sokndal.no":true,"sola.no":true,"solund.no":true,"songdalen.no":true,"sortland.no":true,"spydeberg.no":true,"stange.no":true,"stavanger.no":true,"steigen.no":true,"steinkjer.no":true,"stjordal.no":true,"xn--stjrdal-s1a.no":true,"stokke.no":true,"stor-elvdal.no":true,"stord.no":true,"stordal.no":true,"storfjord.no":true,"omasvuotna.no":true,"strand.no":true,"stranda.no":true,"stryn.no":true,"sula.no":true,"suldal.no":true,"sund.no":true,"sunndal.no":true,"surnadal.no":true,"sveio.no":true,"svelvik.no":true,"sykkylven.no":true,"sogne.no":true,"xn--sgne-gra.no":true,"somna.no":true,"xn--smna-gra.no":true,"sondre-land.no":true,"xn--sndre-land-0cb.no":true,"sor-aurdal.no":true,"xn--sr-aurdal-l8a.no":true,"sor-fron.no":true,"xn--sr-fron-q1a.no":true,"sor-odal.no":true,"xn--sr-odal-q1a.no":true,"sor-varanger.no":true,"xn--sr-varanger-ggb.no":true,"matta-varjjat.no":true,"xn--mtta-vrjjat-k7af.no":true,"sorfold.no":true,"xn--srfold-bya.no":true,"sorreisa.no":true,"xn--srreisa-q1a.no":true,"sorum.no":true,"xn--srum-gra.no":true,"tana.no":true,"deatnu.no":true,"time.no":true,"tingvoll.no":true,"tinn.no":true,"tjeldsund.no":true,"dielddanuorri.no":true,"tjome.no":true,"xn--tjme-hra.no":true,"tokke.no":true,"tolga.no":true,"torsken.no":true,"tranoy.no":true,"xn--trany-yua.no":true,"tromso.no":true,"xn--troms-zua.no":true,"trom
sa.no":true,"romsa.no":true,"trondheim.no":true,"troandin.no":true,"trysil.no":true,"trana.no":true,"xn--trna-woa.no":true,"trogstad.no":true,"xn--trgstad-r1a.no":true,"tvedestrand.no":true,"tydal.no":true,"tynset.no":true,"tysfjord.no":true,"divtasvuodna.no":true,"divttasvuotna.no":true,"tysnes.no":true,"tysvar.no":true,"xn--tysvr-vra.no":true,"tonsberg.no":true,"xn--tnsberg-q1a.no":true,"ullensaker.no":true,"ullensvang.no":true,"ulvik.no":true,"utsira.no":true,"vadso.no":true,"xn--vads-jra.no":true,"cahcesuolo.no":true,"xn--hcesuolo-7ya35b.no":true,"vaksdal.no":true,"valle.no":true,"vang.no":true,"vanylven.no":true,"vardo.no":true,"xn--vard-jra.no":true,"varggat.no":true,"xn--vrggt-xqad.no":true,"vefsn.no":true,"vaapste.no":true,"vega.no":true,"vegarshei.no":true,"xn--vegrshei-c0a.no":true,"vennesla.no":true,"verdal.no":true,"verran.no":true,"vestby.no":true,"vestnes.no":true,"vestre-slidre.no":true,"vestre-toten.no":true,"vestvagoy.no":true,"xn--vestvgy-ixa6o.no":true,"vevelstad.no":true,"vik.no":true,"vikna.no":true,"vindafjord.no":true,"volda.no":true,"voss.no":true,"varoy.no":true,"xn--vry-yla5g.no":true,"vagan.no":true,"xn--vgan-qoa.no":true,"voagat.no":true,"vagsoy.no":true,"xn--vgsy-qoa0j.no":true,"vaga.no":true,"xn--vg-yiab.no":true,"valer.ostfold.no":true,"xn--vler-qoa.xn--stfold-9xa.no":true,"valer.hedmark.no":true,"xn--vler-qoa.hedmark.no":true,"*.np":true,"nr":true,"biz.nr":true,"info.nr":true,"gov.nr":true,"edu.nr":true,"org.nr":true,"net.nr":true,"com.nr":true,"nu":true,"nz":true,"ac.nz":true,"co.nz":true,"cri.nz":true,"geek.nz":true,"gen.nz":true,"govt.nz":true,"health.nz":true,"iwi.nz":true,"kiwi.nz":true,"maori.nz":true,"mil.nz":true,"xn--mori-qsa.nz":true,"net.nz":true,"org.nz":true,"parliament.nz":true,"school.nz":true,"om":true,"co.om":true,"com.om":true,"edu.om":true,"gov.om":true,"med.om":true,"museum.om":true,"net.om":true,"org.om":true,"pro.om":true,"org":true,"pa":true,"ac.pa":true,"gob.pa":true,"com.pa":true,"org.pa":true,"sld.pa":true,"edu.pa":true,"net.pa":true,"ing.pa":true,"abo.pa":true,"med.pa":true,"nom.pa":true,"pe":true,"edu.pe":true,"gob.pe":true,"nom.pe":true,"mil.pe":true,"org.pe":true,"com.pe":true,"net.pe":true,"pf":true,"com.pf":true,"org.pf":true,"edu.pf":true,"*.pg":true,"ph":true,"com.ph":true,"net.ph":true,"org.ph":true,"gov.ph":true,"edu.ph":true,"ngo.ph":true,"mil.ph":true,"i.ph":true,"pk":true,"com.pk":true,"net.pk":true,"edu.pk":true,"org.pk":true,"fam.pk":true,"biz.pk":true,"web.pk":true,"gov.pk":true,"gob.pk":true,"gok.pk":true,"gon.pk":true,"gop.pk":true,"gos.pk":true,"info.pk":true,"pl":true,"com.pl":true,"net.pl":true,"org.pl":true,"aid.pl":true,"agro.pl":true,"atm.pl":true,"auto.pl":true,"biz.pl":true,"edu.pl":true,"gmina.pl":true,"gsm.pl":true,"info.pl":true,"mail.pl":true,"miasta.pl":true,"media.pl":true,"mil.pl":true,"nieruchomosci.pl":true,"nom.pl":true,"pc.pl":true,"powiat.pl":true,"priv.pl":true,"realestate.pl":true,"rel.pl":true,"sex.pl":true,"shop.pl":true,"sklep.pl":true,"sos.pl":true,"szkola.pl":true,"targi.pl":true,"tm.pl":true,"tourism.pl":true,"travel.pl":true,"turystyka.pl":true,"gov.pl":true,"ap.gov.pl":true,"ic.gov.pl":true,"is.gov.pl":true,"us.gov.pl":true,"kmpsp.gov.pl":true,"kppsp.gov.pl":true,"kwpsp.gov.pl":true,"psp.gov.pl":true,"wskr.gov.pl":true,"kwp.gov.pl":true,"mw.gov.pl":true,"ug.gov.pl":true,"um.gov.pl":true,"umig.gov.pl":true,"ugim.gov.pl":true,"upow.gov.pl":true,"uw.gov.pl":true,"starostwo.gov.pl":true,"pa.gov.pl":true,"po.gov.pl":true,"psse.gov.pl":true,"pup.gov.pl":true,"rzgw.gov.pl":true,"sa.gov.pl":tru
e,"so.gov.pl":true,"sr.gov.pl":true,"wsa.gov.pl":true,"sko.gov.pl":true,"uzs.gov.pl":true,"wiih.gov.pl":true,"winb.gov.pl":true,"pinb.gov.pl":true,"wios.gov.pl":true,"witd.gov.pl":true,"wzmiuw.gov.pl":true,"piw.gov.pl":true,"wiw.gov.pl":true,"griw.gov.pl":true,"wif.gov.pl":true,"oum.gov.pl":true,"sdn.gov.pl":true,"zp.gov.pl":true,"uppo.gov.pl":true,"mup.gov.pl":true,"wuoz.gov.pl":true,"konsulat.gov.pl":true,"oirm.gov.pl":true,"augustow.pl":true,"babia-gora.pl":true,"bedzin.pl":true,"beskidy.pl":true,"bialowieza.pl":true,"bialystok.pl":true,"bielawa.pl":true,"bieszczady.pl":true,"boleslawiec.pl":true,"bydgoszcz.pl":true,"bytom.pl":true,"cieszyn.pl":true,"czeladz.pl":true,"czest.pl":true,"dlugoleka.pl":true,"elblag.pl":true,"elk.pl":true,"glogow.pl":true,"gniezno.pl":true,"gorlice.pl":true,"grajewo.pl":true,"ilawa.pl":true,"jaworzno.pl":true,"jelenia-gora.pl":true,"jgora.pl":true,"kalisz.pl":true,"kazimierz-dolny.pl":true,"karpacz.pl":true,"kartuzy.pl":true,"kaszuby.pl":true,"katowice.pl":true,"kepno.pl":true,"ketrzyn.pl":true,"klodzko.pl":true,"kobierzyce.pl":true,"kolobrzeg.pl":true,"konin.pl":true,"konskowola.pl":true,"kutno.pl":true,"lapy.pl":true,"lebork.pl":true,"legnica.pl":true,"lezajsk.pl":true,"limanowa.pl":true,"lomza.pl":true,"lowicz.pl":true,"lubin.pl":true,"lukow.pl":true,"malbork.pl":true,"malopolska.pl":true,"mazowsze.pl":true,"mazury.pl":true,"mielec.pl":true,"mielno.pl":true,"mragowo.pl":true,"naklo.pl":true,"nowaruda.pl":true,"nysa.pl":true,"olawa.pl":true,"olecko.pl":true,"olkusz.pl":true,"olsztyn.pl":true,"opoczno.pl":true,"opole.pl":true,"ostroda.pl":true,"ostroleka.pl":true,"ostrowiec.pl":true,"ostrowwlkp.pl":true,"pila.pl":true,"pisz.pl":true,"podhale.pl":true,"podlasie.pl":true,"polkowice.pl":true,"pomorze.pl":true,"pomorskie.pl":true,"prochowice.pl":true,"pruszkow.pl":true,"przeworsk.pl":true,"pulawy.pl":true,"radom.pl":true,"rawa-maz.pl":true,"rybnik.pl":true,"rzeszow.pl":true,"sanok.pl":true,"sejny.pl":true,"slask.pl":true,"slupsk.pl":true,"sosnowiec.pl":true,"stalowa-wola.pl":true,"skoczow.pl":true,"starachowice.pl":true,"stargard.pl":true,"suwalki.pl":true,"swidnica.pl":true,"swiebodzin.pl":true,"swinoujscie.pl":true,"szczecin.pl":true,"szczytno.pl":true,"tarnobrzeg.pl":true,"tgory.pl":true,"turek.pl":true,"tychy.pl":true,"ustka.pl":true,"walbrzych.pl":true,"warmia.pl":true,"warszawa.pl":true,"waw.pl":true,"wegrow.pl":true,"wielun.pl":true,"wlocl.pl":true,"wloclawek.pl":true,"wodzislaw.pl":true,"wolomin.pl":true,"wroclaw.pl":true,"zachpomor.pl":true,"zagan.pl":true,"zarow.pl":true,"zgora.pl":true,"zgorzelec.pl":true,"pm":true,"pn":true,"gov.pn":true,"co.pn":true,"org.pn":true,"edu.pn":true,"net.pn":true,"post":true,"pr":true,"com.pr":true,"net.pr":true,"org.pr":true,"gov.pr":true,"edu.pr":true,"isla.pr":true,"pro.pr":true,"biz.pr":true,"info.pr":true,"name.pr":true,"est.pr":true,"prof.pr":true,"ac.pr":true,"pro":true,"aca.pro":true,"bar.pro":true,"cpa.pro":true,"jur.pro":true,"law.pro":true,"med.pro":true,"eng.pro":true,"ps":true,"edu.ps":true,"gov.ps":true,"sec.ps":true,"plo.ps":true,"com.ps":true,"org.ps":true,"net.ps":true,"pt":true,"net.pt":true,"gov.pt":true,"org.pt":true,"edu.pt":true,"int.pt":true,"publ.pt":true,"com.pt":true,"nome.pt":true,"pw":true,"co.pw":true,"ne.pw":true,"or.pw":true,"ed.pw":true,"go.pw":true,"belau.pw":true,"py":true,"com.py":true,"coop.py":true,"edu.py":true,"gov.py":true,"mil.py":true,"net.py":true,"org.py":true,"qa":true,"com.qa":true,"edu.qa":true,"gov.qa":true,"mil.qa":true,"name.qa":true,"net.qa":true,"org.qa":true,"sch.qa":tru
e,"re":true,"com.re":true,"asso.re":true,"nom.re":true,"ro":true,"com.ro":true,"org.ro":true,"tm.ro":true,"nt.ro":true,"nom.ro":true,"info.ro":true,"rec.ro":true,"arts.ro":true,"firm.ro":true,"store.ro":true,"www.ro":true,"rs":true,"co.rs":true,"org.rs":true,"edu.rs":true,"ac.rs":true,"gov.rs":true,"in.rs":true,"ru":true,"ac.ru":true,"com.ru":true,"edu.ru":true,"int.ru":true,"net.ru":true,"org.ru":true,"pp.ru":true,"adygeya.ru":true,"altai.ru":true,"amur.ru":true,"arkhangelsk.ru":true,"astrakhan.ru":true,"bashkiria.ru":true,"belgorod.ru":true,"bir.ru":true,"bryansk.ru":true,"buryatia.ru":true,"cbg.ru":true,"chel.ru":true,"chelyabinsk.ru":true,"chita.ru":true,"chukotka.ru":true,"chuvashia.ru":true,"dagestan.ru":true,"dudinka.ru":true,"e-burg.ru":true,"grozny.ru":true,"irkutsk.ru":true,"ivanovo.ru":true,"izhevsk.ru":true,"jar.ru":true,"joshkar-ola.ru":true,"kalmykia.ru":true,"kaluga.ru":true,"kamchatka.ru":true,"karelia.ru":true,"kazan.ru":true,"kchr.ru":true,"kemerovo.ru":true,"khabarovsk.ru":true,"khakassia.ru":true,"khv.ru":true,"kirov.ru":true,"koenig.ru":true,"komi.ru":true,"kostroma.ru":true,"krasnoyarsk.ru":true,"kuban.ru":true,"kurgan.ru":true,"kursk.ru":true,"lipetsk.ru":true,"magadan.ru":true,"mari.ru":true,"mari-el.ru":true,"marine.ru":true,"mordovia.ru":true,"msk.ru":true,"murmansk.ru":true,"nalchik.ru":true,"nnov.ru":true,"nov.ru":true,"novosibirsk.ru":true,"nsk.ru":true,"omsk.ru":true,"orenburg.ru":true,"oryol.ru":true,"palana.ru":true,"penza.ru":true,"perm.ru":true,"ptz.ru":true,"rnd.ru":true,"ryazan.ru":true,"sakhalin.ru":true,"samara.ru":true,"saratov.ru":true,"simbirsk.ru":true,"smolensk.ru":true,"spb.ru":true,"stavropol.ru":true,"stv.ru":true,"surgut.ru":true,"tambov.ru":true,"tatarstan.ru":true,"tom.ru":true,"tomsk.ru":true,"tsaritsyn.ru":true,"tsk.ru":true,"tula.ru":true,"tuva.ru":true,"tver.ru":true,"tyumen.ru":true,"udm.ru":true,"udmurtia.ru":true,"ulan-ude.ru":true,"vladikavkaz.ru":true,"vladimir.ru":true,"vladivostok.ru":true,"volgograd.ru":true,"vologda.ru":true,"voronezh.ru":true,"vrn.ru":true,"vyatka.ru":true,"yakutia.ru":true,"yamal.ru":true,"yaroslavl.ru":true,"yekaterinburg.ru":true,"yuzhno-sakhalinsk.ru":true,"amursk.ru":true,"baikal.ru":true,"cmw.ru":true,"fareast.ru":true,"jamal.ru":true,"kms.ru":true,"k-uralsk.ru":true,"kustanai.ru":true,"kuzbass.ru":true,"magnitka.ru":true,"mytis.ru":true,"nakhodka.ru":true,"nkz.ru":true,"norilsk.ru":true,"oskol.ru":true,"pyatigorsk.ru":true,"rubtsovsk.ru":true,"snz.ru":true,"syzran.ru":true,"vdonsk.ru":true,"zgrad.ru":true,"gov.ru":true,"mil.ru":true,"test.ru":true,"rw":true,"gov.rw":true,"net.rw":true,"edu.rw":true,"ac.rw":true,"com.rw":true,"co.rw":true,"int.rw":true,"mil.rw":true,"gouv.rw":true,"sa":true,"com.sa":true,"net.sa":true,"org.sa":true,"gov.sa":true,"med.sa":true,"pub.sa":true,"edu.sa":true,"sch.sa":true,"sb":true,"com.sb":true,"edu.sb":true,"gov.sb":true,"net.sb":true,"org.sb":true,"sc":true,"com.sc":true,"gov.sc":true,"net.sc":true,"org.sc":true,"edu.sc":true,"sd":true,"com.sd":true,"net.sd":true,"org.sd":true,"edu.sd":true,"med.sd":true,"tv.sd":true,"gov.sd":true,"info.sd":true,"se":true,"a.se":true,"ac.se":true,"b.se":true,"bd.se":true,"brand.se":true,"c.se":true,"d.se":true,"e.se":true,"f.se":true,"fh.se":true,"fhsk.se":true,"fhv.se":true,"g.se":true,"h.se":true,"i.se":true,"k.se":true,"komforb.se":true,"kommunalforbund.se":true,"komvux.se":true,"l.se":true,"lanbib.se":true,"m.se":true,"n.se":true,"naturbruksgymn.se":true,"o.se":true,"org.se":true,"p.se":true,"parti.se":true,"pp.se":true,"press.se":true,"
r.se":true,"s.se":true,"t.se":true,"tm.se":true,"u.se":true,"w.se":true,"x.se":true,"y.se":true,"z.se":true,"sg":true,"com.sg":true,"net.sg":true,"org.sg":true,"gov.sg":true,"edu.sg":true,"per.sg":true,"sh":true,"com.sh":true,"net.sh":true,"gov.sh":true,"org.sh":true,"mil.sh":true,"si":true,"sj":true,"sk":true,"sl":true,"com.sl":true,"net.sl":true,"edu.sl":true,"gov.sl":true,"org.sl":true,"sm":true,"sn":true,"art.sn":true,"com.sn":true,"edu.sn":true,"gouv.sn":true,"org.sn":true,"perso.sn":true,"univ.sn":true,"so":true,"com.so":true,"net.so":true,"org.so":true,"sr":true,"st":true,"co.st":true,"com.st":true,"consulado.st":true,"edu.st":true,"embaixada.st":true,"gov.st":true,"mil.st":true,"net.st":true,"org.st":true,"principe.st":true,"saotome.st":true,"store.st":true,"su":true,"adygeya.su":true,"arkhangelsk.su":true,"balashov.su":true,"bashkiria.su":true,"bryansk.su":true,"dagestan.su":true,"grozny.su":true,"ivanovo.su":true,"kalmykia.su":true,"kaluga.su":true,"karelia.su":true,"khakassia.su":true,"krasnodar.su":true,"kurgan.su":true,"lenug.su":true,"mordovia.su":true,"msk.su":true,"murmansk.su":true,"nalchik.su":true,"nov.su":true,"obninsk.su":true,"penza.su":true,"pokrovsk.su":true,"sochi.su":true,"spb.su":true,"togliatti.su":true,"troitsk.su":true,"tula.su":true,"tuva.su":true,"vladikavkaz.su":true,"vladimir.su":true,"vologda.su":true,"sv":true,"com.sv":true,"edu.sv":true,"gob.sv":true,"org.sv":true,"red.sv":true,"sx":true,"gov.sx":true,"sy":true,"edu.sy":true,"gov.sy":true,"net.sy":true,"mil.sy":true,"com.sy":true,"org.sy":true,"sz":true,"co.sz":true,"ac.sz":true,"org.sz":true,"tc":true,"td":true,"tel":true,"tf":true,"tg":true,"th":true,"ac.th":true,"co.th":true,"go.th":true,"in.th":true,"mi.th":true,"net.th":true,"or.th":true,"tj":true,"ac.tj":true,"biz.tj":true,"co.tj":true,"com.tj":true,"edu.tj":true,"go.tj":true,"gov.tj":true,"int.tj":true,"mil.tj":true,"name.tj":true,"net.tj":true,"nic.tj":true,"org.tj":true,"test.tj":true,"web.tj":true,"tk":true,"tl":true,"gov.tl":true,"tm":true,"com.tm":true,"co.tm":true,"org.tm":true,"net.tm":true,"nom.tm":true,"gov.tm":true,"mil.tm":true,"edu.tm":true,"tn":true,"com.tn":true,"ens.tn":true,"fin.tn":true,"gov.tn":true,"ind.tn":true,"intl.tn":true,"nat.tn":true,"net.tn":true,"org.tn":true,"info.tn":true,"perso.tn":true,"tourism.tn":true,"edunet.tn":true,"rnrt.tn":true,"rns.tn":true,"rnu.tn":true,"mincom.tn":true,"agrinet.tn":true,"defense.tn":true,"turen.tn":true,"to":true,"com.to":true,"gov.to":true,"net.to":true,"org.to":true,"edu.to":true,"mil.to":true,"tp":true,"tr":true,"com.tr":true,"info.tr":true,"biz.tr":true,"net.tr":true,"org.tr":true,"web.tr":true,"gen.tr":true,"tv.tr":true,"av.tr":true,"dr.tr":true,"bbs.tr":true,"name.tr":true,"tel.tr":true,"gov.tr":true,"bel.tr":true,"pol.tr":true,"mil.tr":true,"k12.tr":true,"edu.tr":true,"kep.tr":true,"nc.tr":true,"gov.nc.tr":true,"travel":true,"tt":true,"co.tt":true,"com.tt":true,"org.tt":true,"net.tt":true,"biz.tt":true,"info.tt":true,"pro.tt":true,"int.tt":true,"coop.tt":true,"jobs.tt":true,"mobi.tt":true,"travel.tt":true,"museum.tt":true,"aero.tt":true,"name.tt":true,"gov.tt":true,"edu.tt":true,"tv":true,"tw":true,"edu.tw":true,"gov.tw":true,"mil.tw":true,"com.tw":true,"net.tw":true,"org.tw":true,"idv.tw":true,"game.tw":true,"ebiz.tw":true,"club.tw":true,"xn--zf0ao64a.tw":true,"xn--uc0atv.tw":true,"xn--czrw28b.tw":true,"tz":true,"ac.tz":true,"co.tz":true,"go.tz":true,"hotel.tz":true,"info.tz":true,"me.tz":true,"mil.tz":true,"mobi.tz":true,"ne.tz":true,"or.tz":true,"sc.tz":true,"tv.tz":true,"ua":tru
e,"com.ua":true,"edu.ua":true,"gov.ua":true,"in.ua":true,"net.ua":true,"org.ua":true,"cherkassy.ua":true,"cherkasy.ua":true,"chernigov.ua":true,"chernihiv.ua":true,"chernivtsi.ua":true,"chernovtsy.ua":true,"ck.ua":true,"cn.ua":true,"cr.ua":true,"crimea.ua":true,"cv.ua":true,"dn.ua":true,"dnepropetrovsk.ua":true,"dnipropetrovsk.ua":true,"dominic.ua":true,"donetsk.ua":true,"dp.ua":true,"if.ua":true,"ivano-frankivsk.ua":true,"kh.ua":true,"kharkiv.ua":true,"kharkov.ua":true,"kherson.ua":true,"khmelnitskiy.ua":true,"khmelnytskyi.ua":true,"kiev.ua":true,"kirovograd.ua":true,"km.ua":true,"kr.ua":true,"krym.ua":true,"ks.ua":true,"kv.ua":true,"kyiv.ua":true,"lg.ua":true,"lt.ua":true,"lugansk.ua":true,"lutsk.ua":true,"lv.ua":true,"lviv.ua":true,"mk.ua":true,"mykolaiv.ua":true,"nikolaev.ua":true,"od.ua":true,"odesa.ua":true,"odessa.ua":true,"pl.ua":true,"poltava.ua":true,"rivne.ua":true,"rovno.ua":true,"rv.ua":true,"sb.ua":true,"sebastopol.ua":true,"sevastopol.ua":true,"sm.ua":true,"sumy.ua":true,"te.ua":true,"ternopil.ua":true,"uz.ua":true,"uzhgorod.ua":true,"vinnica.ua":true,"vinnytsia.ua":true,"vn.ua":true,"volyn.ua":true,"yalta.ua":true,"zaporizhzhe.ua":true,"zaporizhzhia.ua":true,"zhitomir.ua":true,"zhytomyr.ua":true,"zp.ua":true,"zt.ua":true,"ug":true,"co.ug":true,"or.ug":true,"ac.ug":true,"sc.ug":true,"go.ug":true,"ne.ug":true,"com.ug":true,"org.ug":true,"uk":true,"ac.uk":true,"co.uk":true,"gov.uk":true,"ltd.uk":true,"me.uk":true,"net.uk":true,"nhs.uk":true,"org.uk":true,"plc.uk":true,"police.uk":true,"*.sch.uk":true,"us":true,"dni.us":true,"fed.us":true,"isa.us":true,"kids.us":true,"nsn.us":true,"ak.us":true,"al.us":true,"ar.us":true,"as.us":true,"az.us":true,"ca.us":true,"co.us":true,"ct.us":true,"dc.us":true,"de.us":true,"fl.us":true,"ga.us":true,"gu.us":true,"hi.us":true,"ia.us":true,"id.us":true,"il.us":true,"in.us":true,"ks.us":true,"ky.us":true,"la.us":true,"ma.us":true,"md.us":true,"me.us":true,"mi.us":true,"mn.us":true,"mo.us":true,"ms.us":true,"mt.us":true,"nc.us":true,"nd.us":true,"ne.us":true,"nh.us":true,"nj.us":true,"nm.us":true,"nv.us":true,"ny.us":true,"oh.us":true,"ok.us":true,"or.us":true,"pa.us":true,"pr.us":true,"ri.us":true,"sc.us":true,"sd.us":true,"tn.us":true,"tx.us":true,"ut.us":true,"vi.us":true,"vt.us":true,"va.us":true,"wa.us":true,"wi.us":true,"wv.us":true,"wy.us":true,"k12.ak.us":true,"k12.al.us":true,"k12.ar.us":true,"k12.as.us":true,"k12.az.us":true,"k12.ca.us":true,"k12.co.us":true,"k12.ct.us":true,"k12.dc.us":true,"k12.de.us":true,"k12.fl.us":true,"k12.ga.us":true,"k12.gu.us":true,"k12.ia.us":true,"k12.id.us":true,"k12.il.us":true,"k12.in.us":true,"k12.ks.us":true,"k12.ky.us":true,"k12.la.us":true,"k12.ma.us":true,"k12.md.us":true,"k12.me.us":true,"k12.mi.us":true,"k12.mn.us":true,"k12.mo.us":true,"k12.ms.us":true,"k12.mt.us":true,"k12.nc.us":true,"k12.ne.us":true,"k12.nh.us":true,"k12.nj.us":true,"k12.nm.us":true,"k12.nv.us":true,"k12.ny.us":true,"k12.oh.us":true,"k12.ok.us":true,"k12.or.us":true,"k12.pa.us":true,"k12.pr.us":true,"k12.ri.us":true,"k12.sc.us":true,"k12.tn.us":true,"k12.tx.us":true,"k12.ut.us":true,"k12.vi.us":true,"k12.vt.us":true,"k12.va.us":true,"k12.wa.us":true,"k12.wi.us":true,"k12.wy.us":true,"cc.ak.us":true,"cc.al.us":true,"cc.ar.us":true,"cc.as.us":true,"cc.az.us":true,"cc.ca.us":true,"cc.co.us":true,"cc.ct.us":true,"cc.dc.us":true,"cc.de.us":true,"cc.fl.us":true,"cc.ga.us":true,"cc.gu.us":true,"cc.hi.us":true,"cc.ia.us":true,"cc.id.us":true,"cc.il.us":true,"cc.in.us":true,"cc.ks.us":true,"cc.ky.us":true,"cc.la.us":true,"cc.ma.us":true,"
cc.md.us":true,"cc.me.us":true,"cc.mi.us":true,"cc.mn.us":true,"cc.mo.us":true,"cc.ms.us":true,"cc.mt.us":true,"cc.nc.us":true,"cc.nd.us":true,"cc.ne.us":true,"cc.nh.us":true,"cc.nj.us":true,"cc.nm.us":true,"cc.nv.us":true,"cc.ny.us":true,"cc.oh.us":true,"cc.ok.us":true,"cc.or.us":true,"cc.pa.us":true,"cc.pr.us":true,"cc.ri.us":true,"cc.sc.us":true,"cc.sd.us":true,"cc.tn.us":true,"cc.tx.us":true,"cc.ut.us":true,"cc.vi.us":true,"cc.vt.us":true,"cc.va.us":true,"cc.wa.us":true,"cc.wi.us":true,"cc.wv.us":true,"cc.wy.us":true,"lib.ak.us":true,"lib.al.us":true,"lib.ar.us":true,"lib.as.us":true,"lib.az.us":true,"lib.ca.us":true,"lib.co.us":true,"lib.ct.us":true,"lib.dc.us":true,"lib.de.us":true,"lib.fl.us":true,"lib.ga.us":true,"lib.gu.us":true,"lib.hi.us":true,"lib.ia.us":true,"lib.id.us":true,"lib.il.us":true,"lib.in.us":true,"lib.ks.us":true,"lib.ky.us":true,"lib.la.us":true,"lib.ma.us":true,"lib.md.us":true,"lib.me.us":true,"lib.mi.us":true,"lib.mn.us":true,"lib.mo.us":true,"lib.ms.us":true,"lib.mt.us":true,"lib.nc.us":true,"lib.nd.us":true,"lib.ne.us":true,"lib.nh.us":true,"lib.nj.us":true,"lib.nm.us":true,"lib.nv.us":true,"lib.ny.us":true,"lib.oh.us":true,"lib.ok.us":true,"lib.or.us":true,"lib.pa.us":true,"lib.pr.us":true,"lib.ri.us":true,"lib.sc.us":true,"lib.sd.us":true,"lib.tn.us":true,"lib.tx.us":true,"lib.ut.us":true,"lib.vi.us":true,"lib.vt.us":true,"lib.va.us":true,"lib.wa.us":true,"lib.wi.us":true,"lib.wy.us":true,"pvt.k12.ma.us":true,"chtr.k12.ma.us":true,"paroch.k12.ma.us":true,"uy":true,"com.uy":true,"edu.uy":true,"gub.uy":true,"mil.uy":true,"net.uy":true,"org.uy":true,"uz":true,"co.uz":true,"com.uz":true,"net.uz":true,"org.uz":true,"va":true,"vc":true,"com.vc":true,"net.vc":true,"org.vc":true,"gov.vc":true,"mil.vc":true,"edu.vc":true,"ve":true,"arts.ve":true,"co.ve":true,"com.ve":true,"e12.ve":true,"edu.ve":true,"firm.ve":true,"gob.ve":true,"gov.ve":true,"info.ve":true,"int.ve":true,"mil.ve":true,"net.ve":true,"org.ve":true,"rec.ve":true,"store.ve":true,"tec.ve":true,"web.ve":true,"vg":true,"vi":true,"co.vi":true,"com.vi":true,"k12.vi":true,"net.vi":true,"org.vi":true,"vn":true,"com.vn":true,"net.vn":true,"org.vn":true,"edu.vn":true,"gov.vn":true,"int.vn":true,"ac.vn":true,"biz.vn":true,"info.vn":true,"name.vn":true,"pro.vn":true,"health.vn":true,"vu":true,"com.vu":true,"edu.vu":true,"net.vu":true,"org.vu":true,"wf":true,"ws":true,"com.ws":true,"net.ws":true,"org.ws":true,"gov.ws":true,"edu.ws":true,"yt":true,"xn--mgbaam7a8h":true,"xn--y9a3aq":true,"xn--54b7fta0cc":true,"xn--90ais":true,"xn--fiqs8s":true,"xn--fiqz9s":true,"xn--lgbbat1ad8j":true,"xn--wgbh1c":true,"xn--node":true,"xn--qxam":true,"xn--j6w193g":true,"xn--h2brj9c":true,"xn--mgbbh1a71e":true,"xn--fpcrj9c3d":true,"xn--gecrj9c":true,"xn--s9brj9c":true,"xn--45brj9c":true,"xn--xkc2dl3a5ee0h":true,"xn--mgba3a4f16a":true,"xn--mgba3a4fra":true,"xn--mgbtx2b":true,"xn--mgbayh7gpa":true,"xn--3e0b707e":true,"xn--80ao21a":true,"xn--fzc2c9e2c":true,"xn--xkc2al3hye2a":true,"xn--mgbc0a9azcg":true,"xn--d1alf":true,"xn--l1acc":true,"xn--mix891f":true,"xn--mix082f":true,"xn--mgbx4cd0ab":true,"xn--mgb9awbf":true,"xn--mgbai9azgqp6j":true,"xn--mgbai9a5eva00b":true,"xn--ygbi2ammx":true,"xn--90a3ac":true,"xn--o1ac.xn--90a3ac":true,"xn--c1avg.xn--90a3ac":true,"xn--90azh.xn--90a3ac":true,"xn--d1at.xn--90a3ac":true,"xn--o1ach.xn--90a3ac":true,"xn--80au.xn--90a3ac":true,"xn--p1ai":true,"xn--wgbl6a":true,"xn--mgberp4a5d4ar":true,"xn--mgberp4a5d4a87g":true,"xn--mgbqly7c0a67fbc":true,"xn--mgbqly7cvafr":true,"xn--mgbpl2fh":true,"xn--yfro4i67o":true,
"xn--clchc0ea0b2g2a9gcd":true,"xn--ogbpf8fl":true,"xn--mgbtf8fl":true,"xn--o3cw4h":true,"xn--pgbs0dh":true,"xn--kpry57d":true,"xn--kprw13d":true,"xn--nnx388a":true,"xn--j1amh":true,"xn--mgb2ddes":true,"xxx":true,"*.ye":true,"ac.za":true,"agrica.za":true,"alt.za":true,"co.za":true,"edu.za":true,"gov.za":true,"grondar.za":true,"law.za":true,"mil.za":true,"net.za":true,"ngo.za":true,"nis.za":true,"nom.za":true,"org.za":true,"school.za":true,"tm.za":true,"web.za":true,"*.zm":true,"*.zw":true,"aaa":true,"aarp":true,"abarth":true,"abb":true,"abbott":true,"abbvie":true,"abc":true,"able":true,"abogado":true,"abudhabi":true,"academy":true,"accenture":true,"accountant":true,"accountants":true,"aco":true,"active":true,"actor":true,"adac":true,"ads":true,"adult":true,"aeg":true,"aetna":true,"afamilycompany":true,"afl":true,"africa":true,"africamagic":true,"agakhan":true,"agency":true,"aig":true,"aigo":true,"airbus":true,"airforce":true,"airtel":true,"akdn":true,"alfaromeo":true,"alibaba":true,"alipay":true,"allfinanz":true,"allstate":true,"ally":true,"alsace":true,"alstom":true,"americanexpress":true,"americanfamily":true,"amex":true,"amfam":true,"amica":true,"amsterdam":true,"analytics":true,"android":true,"anquan":true,"anz":true,"aol":true,"apartments":true,"app":true,"apple":true,"aquarelle":true,"aramco":true,"archi":true,"army":true,"arte":true,"asda":true,"associates":true,"athleta":true,"attorney":true,"auction":true,"audi":true,"audible":true,"audio":true,"auspost":true,"author":true,"auto":true,"autos":true,"avianca":true,"aws":true,"axa":true,"azure":true,"baby":true,"baidu":true,"banamex":true,"bananarepublic":true,"band":true,"bank":true,"bar":true,"barcelona":true,"barclaycard":true,"barclays":true,"barefoot":true,"bargains":true,"basketball":true,"bauhaus":true,"bayern":true,"bbc":true,"bbt":true,"bbva":true,"bcg":true,"bcn":true,"beats":true,"beer":true,"bentley":true,"berlin":true,"best":true,"bestbuy":true,"bet":true,"bharti":true,"bible":true,"bid":true,"bike":true,"bing":true,"bingo":true,"bio":true,"black":true,"blackfriday":true,"blanco":true,"blockbuster":true,"blog":true,"bloomberg":true,"blue":true,"bms":true,"bmw":true,"bnl":true,"bnpparibas":true,"boats":true,"boehringer":true,"bofa":true,"bom":true,"bond":true,"boo":true,"book":true,"booking":true,"boots":true,"bosch":true,"bostik":true,"bot":true,"boutique":true,"bradesco":true,"bridgestone":true,"broadway":true,"broker":true,"brother":true,"brussels":true,"budapest":true,"bugatti":true,"build":true,"builders":true,"business":true,"buy":true,"buzz":true,"bzh":true,"cab":true,"cafe":true,"cal":true,"call":true,"calvinklein":true,"camera":true,"camp":true,"cancerresearch":true,"canon":true,"capetown":true,"capital":true,"capitalone":true,"car":true,"caravan":true,"cards":true,"care":true,"career":true,"careers":true,"cars":true,"cartier":true,"casa":true,"case":true,"caseih":true,"cash":true,"casino":true,"catering":true,"cba":true,"cbn":true,"cbre":true,"cbs":true,"ceb":true,"center":true,"ceo":true,"cern":true,"cfa":true,"cfd":true,"chanel":true,"channel":true,"chase":true,"chat":true,"cheap":true,"chintai":true,"chloe":true,"christmas":true,"chrome":true,"chrysler":true,"church":true,"cipriani":true,"circle":true,"cisco":true,"citadel":true,"citi":true,"citic":true,"city":true,"cityeats":true,"claims":true,"cleaning":true,"click":true,"clinic":true,"clothing":true,"cloud":true,"club":true,"clubmed":true,"coach":true,"codes":true,"coffee":true,"college":true,"cologne":true,"comcast":true,"commbank":true,"community":true,"co
mpany":true,"computer":true,"comsec":true,"condos":true,"construction":true,"consulting":true,"contact":true,"contractors":true,"cooking":true,"cookingchannel":true,"cool":true,"corsica":true,"country":true,"coupon":true,"coupons":true,"courses":true,"credit":true,"creditcard":true,"creditunion":true,"cricket":true,"crown":true,"crs":true,"cruises":true,"csc":true,"cuisinella":true,"cymru":true,"cyou":true,"dabur":true,"dad":true,"dance":true,"date":true,"dating":true,"datsun":true,"day":true,"dclk":true,"dds":true,"deal":true,"dealer":true,"deals":true,"degree":true,"delivery":true,"dell":true,"deloitte":true,"delta":true,"democrat":true,"dental":true,"dentist":true,"desi":true,"design":true,"dev":true,"dhl":true,"diamonds":true,"diet":true,"digital":true,"direct":true,"directory":true,"discount":true,"discover":true,"dish":true,"dnp":true,"docs":true,"dodge":true,"dog":true,"doha":true,"domains":true,"doosan":true,"dot":true,"download":true,"drive":true,"dstv":true,"dtv":true,"dubai":true,"duck":true,"dunlop":true,"duns":true,"dupont":true,"durban":true,"dvag":true,"dwg":true,"earth":true,"eat":true,"edeka":true,"education":true,"email":true,"emerck":true,"emerson":true,"energy":true,"engineer":true,"engineering":true,"enterprises":true,"epost":true,"epson":true,"equipment":true,"ericsson":true,"erni":true,"esq":true,"estate":true,"esurance":true,"etisalat":true,"eurovision":true,"eus":true,"events":true,"everbank":true,"exchange":true,"expert":true,"exposed":true,"express":true,"extraspace":true,"fage":true,"fail":true,"fairwinds":true,"faith":true,"family":true,"fan":true,"fans":true,"farm":true,"farmers":true,"fashion":true,"fast":true,"fedex":true,"feedback":true,"ferrari":true,"ferrero":true,"fiat":true,"fidelity":true,"fido":true,"film":true,"final":true,"finance":true,"financial":true,"fire":true,"firestone":true,"firmdale":true,"fish":true,"fishing":true,"fit":true,"fitness":true,"flickr":true,"flights":true,"flir":true,"florist":true,"flowers":true,"flsmidth":true,"fly":true,"foo":true,"foodnetwork":true,"football":true,"ford":true,"forex":true,"forsale":true,"forum":true,"foundation":true,"fox":true,"fresenius":true,"frl":true,"frogans":true,"frontdoor":true,"frontier":true,"ftr":true,"fujitsu":true,"fujixerox":true,"fund":true,"furniture":true,"futbol":true,"fyi":true,"gal":true,"gallery":true,"gallo":true,"gallup":true,"game":true,"games":true,"gap":true,"garden":true,"gbiz":true,"gdn":true,"gea":true,"gent":true,"genting":true,"george":true,"ggee":true,"gift":true,"gifts":true,"gives":true,"giving":true,"glade":true,"glass":true,"gle":true,"global":true,"globo":true,"gmail":true,"gmo":true,"gmx":true,"godaddy":true,"gold":true,"goldpoint":true,"golf":true,"goo":true,"goodhands":true,"goodyear":true,"goog":true,"google":true,"gop":true,"got":true,"gotv":true,"grainger":true,"graphics":true,"gratis":true,"green":true,"gripe":true,"group":true,"guardian":true,"gucci":true,"guge":true,"guide":true,"guitars":true,"guru":true,"hamburg":true,"hangout":true,"haus":true,"hbo":true,"hdfc":true,"hdfcbank":true,"health":true,"healthcare":true,"help":true,"helsinki":true,"here":true,"hermes":true,"hgtv":true,"hiphop":true,"hisamitsu":true,"hitachi":true,"hiv":true,"hkt":true,"hockey":true,"holdings":true,"holiday":true,"homedepot":true,"homegoods":true,"homes":true,"homesense":true,"honda":true,"honeywell":true,"horse":true,"host":true,"hosting":true,"hot":true,"hoteles":true,"hotmail":true,"house":true,"how":true,"hsbc":true,"htc":true,"hughes":true,"hyatt":true,"hyundai":true,"ibm":true
,"icbc":true,"ice":true,"icu":true,"ieee":true,"ifm":true,"iinet":true,"ikano":true,"imamat":true,"imdb":true,"immo":true,"immobilien":true,"industries":true,"infiniti":true,"ing":true,"ink":true,"institute":true,"insurance":true,"insure":true,"intel":true,"international":true,"intuit":true,"investments":true,"ipiranga":true,"irish":true,"iselect":true,"ismaili":true,"ist":true,"istanbul":true,"itau":true,"itv":true,"iveco":true,"iwc":true,"jaguar":true,"java":true,"jcb":true,"jcp":true,"jeep":true,"jetzt":true,"jewelry":true,"jio":true,"jlc":true,"jll":true,"jmp":true,"jnj":true,"joburg":true,"jot":true,"joy":true,"jpmorgan":true,"jprs":true,"juegos":true,"juniper":true,"kaufen":true,"kddi":true,"kerryhotels":true,"kerrylogistics":true,"kerryproperties":true,"kfh":true,"kia":true,"kim":true,"kinder":true,"kindle":true,"kitchen":true,"kiwi":true,"koeln":true,"komatsu":true,"kosher":true,"kpmg":true,"kpn":true,"krd":true,"kred":true,"kuokgroup":true,"kyknet":true,"kyoto":true,"lacaixa":true,"ladbrokes":true,"lamborghini":true,"lancaster":true,"lancia":true,"lancome":true,"land":true,"landrover":true,"lanxess":true,"lasalle":true,"lat":true,"latino":true,"latrobe":true,"law":true,"lawyer":true,"lds":true,"lease":true,"leclerc":true,"lefrak":true,"legal":true,"lego":true,"lexus":true,"lgbt":true,"liaison":true,"lidl":true,"life":true,"lifeinsurance":true,"lifestyle":true,"lighting":true,"like":true,"lilly":true,"limited":true,"limo":true,"lincoln":true,"linde":true,"link":true,"lipsy":true,"live":true,"living":true,"lixil":true,"loan":true,"loans":true,"locker":true,"locus":true,"loft":true,"lol":true,"london":true,"lotte":true,"lotto":true,"love":true,"lpl":true,"lplfinancial":true,"ltd":true,"ltda":true,"lundbeck":true,"lupin":true,"luxe":true,"luxury":true,"macys":true,"madrid":true,"maif":true,"maison":true,"makeup":true,"man":true,"management":true,"mango":true,"market":true,"marketing":true,"markets":true,"marriott":true,"marshalls":true,"maserati":true,"mattel":true,"mba":true,"mcd":true,"mcdonalds":true,"mckinsey":true,"med":true,"media":true,"meet":true,"melbourne":true,"meme":true,"memorial":true,"men":true,"menu":true,"meo":true,"metlife":true,"miami":true,"microsoft":true,"mini":true,"mint":true,"mit":true,"mitsubishi":true,"mlb":true,"mls":true,"mma":true,"mnet":true,"mobily":true,"moda":true,"moe":true,"moi":true,"mom":true,"monash":true,"money":true,"monster":true,"montblanc":true,"mopar":true,"mormon":true,"mortgage":true,"moscow":true,"moto":true,"motorcycles":true,"mov":true,"movie":true,"movistar":true,"msd":true,"mtn":true,"mtpc":true,"mtr":true,"multichoice":true,"mutual":true,"mutuelle":true,"mzansimagic":true,"nab":true,"nadex":true,"nagoya":true,"naspers":true,"nationwide":true,"natura":true,"navy":true,"nba":true,"nec":true,"netbank":true,"netflix":true,"network":true,"neustar":true,"new":true,"newholland":true,"news":true,"next":true,"nextdirect":true,"nexus":true,"nfl":true,"ngo":true,"nhk":true,"nico":true,"nike":true,"nikon":true,"ninja":true,"nissan":true,"nokia":true,"northwesternmutual":true,"norton":true,"now":true,"nowruz":true,"nowtv":true,"nra":true,"nrw":true,"ntt":true,"nyc":true,"obi":true,"observer":true,"off":true,"office":true,"okinawa":true,"olayan":true,"olayangroup":true,"oldnavy":true,"ollo":true,"omega":true,"one":true,"ong":true,"onl":true,"online":true,"onyourside":true,"ooo":true,"open":true,"oracle":true,"orange":true,"organic":true,"orientexpress":true,"osaka":true,"otsuka":true,"ott":true,"ovh":true,"page":true,"pamperedchef":true,"panasonic
":true,"panerai":true,"paris":true,"pars":true,"partners":true,"parts":true,"party":true,"passagens":true,"pay":true,"payu":true,"pccw":true,"pet":true,"pfizer":true,"pharmacy":true,"philips":true,"photo":true,"photography":true,"photos":true,"physio":true,"piaget":true,"pics":true,"pictet":true,"pictures":true,"pid":true,"pin":true,"ping":true,"pink":true,"pioneer":true,"pizza":true,"place":true,"play":true,"playstation":true,"plumbing":true,"plus":true,"pnc":true,"pohl":true,"poker":true,"politie":true,"porn":true,"pramerica":true,"praxi":true,"press":true,"prime":true,"prod":true,"productions":true,"prof":true,"progressive":true,"promo":true,"properties":true,"property":true,"protection":true,"pru":true,"prudential":true,"pub":true,"qpon":true,"quebec":true,"quest":true,"qvc":true,"racing":true,"raid":true,"read":true,"realestate":true,"realtor":true,"realty":true,"recipes":true,"red":true,"redstone":true,"redumbrella":true,"rehab":true,"reise":true,"reisen":true,"reit":true,"reliance":true,"ren":true,"rent":true,"rentals":true,"repair":true,"report":true,"republican":true,"rest":true,"restaurant":true,"review":true,"reviews":true,"rexroth":true,"rich":true,"richardli":true,"ricoh":true,"rightathome":true,"ril":true,"rio":true,"rip":true,"rocher":true,"rocks":true,"rodeo":true,"rogers":true,"room":true,"rsvp":true,"ruhr":true,"run":true,"rwe":true,"ryukyu":true,"saarland":true,"safe":true,"safety":true,"sakura":true,"sale":true,"salon":true,"samsclub":true,"samsung":true,"sandvik":true,"sandvikcoromant":true,"sanofi":true,"sap":true,"sapo":true,"sarl":true,"sas":true,"save":true,"saxo":true,"sbi":true,"sbs":true,"sca":true,"scb":true,"schaeffler":true,"schmidt":true,"scholarships":true,"school":true,"schule":true,"schwarz":true,"science":true,"scjohnson":true,"scor":true,"scot":true,"seat":true,"secure":true,"security":true,"seek":true,"sener":true,"services":true,"ses":true,"seven":true,"sew":true,"sex":true,"sexy":true,"sfr":true,"shangrila":true,"sharp":true,"shaw":true,"shell":true,"shia":true,"shiksha":true,"shoes":true,"shouji":true,"show":true,"showtime":true,"shriram":true,"silk":true,"sina":true,"singles":true,"site":true,"ski":true,"skin":true,"sky":true,"skype":true,"sling":true,"smart":true,"smile":true,"sncf":true,"soccer":true,"social":true,"softbank":true,"software":true,"sohu":true,"solar":true,"solutions":true,"song":true,"sony":true,"soy":true,"space":true,"spiegel":true,"spot":true,"spreadbetting":true,"srl":true,"srt":true,"stada":true,"staples":true,"star":true,"starhub":true,"statebank":true,"statefarm":true,"statoil":true,"stc":true,"stcgroup":true,"stockholm":true,"storage":true,"store":true,"studio":true,"study":true,"style":true,"sucks":true,"supersport":true,"supplies":true,"supply":true,"support":true,"surf":true,"surgery":true,"suzuki":true,"swatch":true,"swiftcover":true,"swiss":true,"sydney":true,"symantec":true,"systems":true,"tab":true,"taipei":true,"talk":true,"taobao":true,"target":true,"tatamotors":true,"tatar":true,"tattoo":true,"tax":true,"taxi":true,"tci":true,"tdk":true,"team":true,"tech":true,"technology":true,"telecity":true,"telefonica":true,"temasek":true,"tennis":true,"teva":true,"thd":true,"theater":true,"theatre":true,"theguardian":true,"tiaa":true,"tickets":true,"tienda":true,"tiffany":true,"tips":true,"tires":true,"tirol":true,"tjmaxx":true,"tjx":true,"tkmaxx":true,"tmall":true,"today":true,"tokyo":true,"tools":true,"top":true,"toray":true,"toshiba":true,"total":true,"tours":true,"town":true,"toyota":true,"toys":true,"trade":true,"trading"
:true,"training":true,"travelchannel":true,"travelers":true,"travelersinsurance":true,"trust":true,"trv":true,"tube":true,"tui":true,"tunes":true,"tushu":true,"tvs":true,"ubank":true,"ubs":true,"uconnect":true,"university":true,"uno":true,"uol":true,"ups":true,"vacations":true,"vana":true,"vanguard":true,"vegas":true,"ventures":true,"verisign":true,"versicherung":true,"vet":true,"viajes":true,"video":true,"vig":true,"viking":true,"villas":true,"vin":true,"vip":true,"virgin":true,"visa":true,"vision":true,"vista":true,"vistaprint":true,"viva":true,"vivo":true,"vlaanderen":true,"vodka":true,"volkswagen":true,"vote":true,"voting":true,"voto":true,"voyage":true,"vuelos":true,"wales":true,"walmart":true,"walter":true,"wang":true,"wanggou":true,"warman":true,"watch":true,"watches":true,"weather":true,"weatherchannel":true,"webcam":true,"weber":true,"website":true,"wed":true,"wedding":true,"weibo":true,"weir":true,"whoswho":true,"wien":true,"wiki":true,"williamhill":true,"win":true,"windows":true,"wine":true,"winners":true,"wme":true,"wolterskluwer":true,"woodside":true,"work":true,"works":true,"world":true,"wtc":true,"wtf":true,"xbox":true,"xerox":true,"xfinity":true,"xihuan":true,"xin":true,"xn--11b4c3d":true,"xn--1ck2e1b":true,"xn--1qqw23a":true,"xn--30rr7y":true,"xn--3bst00m":true,"xn--3ds443g":true,"xn--3oq18vl8pn36a":true,"xn--3pxu8k":true,"xn--42c2d9a":true,"xn--45q11c":true,"xn--4gbrim":true,"xn--4gq48lf9j":true,"xn--55qw42g":true,"xn--55qx5d":true,"xn--5su34j936bgsg":true,"xn--5tzm5g":true,"xn--6frz82g":true,"xn--6qq986b3xl":true,"xn--80adxhks":true,"xn--80asehdb":true,"xn--80aswg":true,"xn--8y0a063a":true,"xn--9dbq2a":true,"xn--9et52u":true,"xn--9krt00a":true,"xn--b4w605ferd":true,"xn--bck1b9a5dre4c":true,"xn--c1avg":true,"xn--c2br7g":true,"xn--cck2b3b":true,"xn--cg4bki":true,"xn--czr694b":true,"xn--czrs0t":true,"xn--czru2d":true,"xn--d1acj3b":true,"xn--eckvdtc9d":true,"xn--efvy88h":true,"xn--estv75g":true,"xn--fct429k":true,"xn--fhbei":true,"xn--fiq228c5hs":true,"xn--fiq64b":true,"xn--fjq720a":true,"xn--flw351e":true,"xn--fzys8d69uvgm":true,"xn--g2xx48c":true,"xn--gckr3f0f":true,"xn--hxt814e":true,"xn--i1b6b1a6a2e":true,"xn--imr513n":true,"xn--io0a7i":true,"xn--j1aef":true,"xn--jlq61u9w7b":true,"xn--jvr189m":true,"xn--kcrx77d1x4a":true,"xn--kpu716f":true,"xn--kput3i":true,"xn--mgba3a3ejt":true,"xn--mgba7c0bbn0a":true,"xn--mgbaakc7dvf":true,"xn--mgbab2bd":true,"xn--mgbb9fbpob":true,"xn--mgbca7dzdo":true,"xn--mgbt3dhd":true,"xn--mk1bu44c":true,"xn--mxtq1m":true,"xn--ngbc5azd":true,"xn--ngbe9e0a":true,"xn--nqv7f":true,"xn--nqv7fs00ema":true,"xn--nyqy26a":true,"xn--p1acf":true,"xn--pbt977c":true,"xn--pssy2u":true,"xn--q9jyb4c":true,"xn--qcka1pmc":true,"xn--rhqv96g":true,"xn--rovu88b":true,"xn--ses554g":true,"xn--t60b56a":true,"xn--tckwe":true,"xn--unup4y":true,"xn--vermgensberater-ctb":true,"xn--vermgensberatung-pwb":true,"xn--vhquv":true,"xn--vuq861b":true,"xn--w4r85el8fhu5dnra":true,"xn--w4rs40l":true,"xn--xhq521b":true,"xn--zfr164b":true,"xperia":true,"xyz":true,"yachts":true,"yahoo":true,"yamaxun":true,"yandex":true,"yodobashi":true,"yoga":true,"yokohama":true,"you":true,"youtube":true,"yun":true,"zappos":true,"zara":true,"zero":true,"zip":true,"zippo":true,"zone":true,"zuerich":true,"cloudfront.net":true,"ap-northeast-1.compute.amazonaws.com":true,"ap-southeast-1.compute.amazonaws.com":true,"ap-southeast-2.compute.amazonaws.com":true,"cn-north-1.compute.amazonaws.cn":true,"compute.amazonaws.cn":true,"compute.amazonaws.com":true,"compute-1.amazonaws.com":true,"eu-west-1.compute.amazonaw
s.com":true,"eu-central-1.compute.amazonaws.com":true,"sa-east-1.compute.amazonaws.com":true,"us-east-1.amazonaws.com":true,"us-gov-west-1.compute.amazonaws.com":true,"us-west-1.compute.amazonaws.com":true,"us-west-2.compute.amazonaws.com":true,"z-1.compute-1.amazonaws.com":true,"z-2.compute-1.amazonaws.com":true,"elasticbeanstalk.com":true,"elb.amazonaws.com":true,"s3.amazonaws.com":true,"s3-ap-northeast-1.amazonaws.com":true,"s3-ap-southeast-1.amazonaws.com":true,"s3-ap-southeast-2.amazonaws.com":true,"s3-external-1.amazonaws.com":true,"s3-external-2.amazonaws.com":true,"s3-fips-us-gov-west-1.amazonaws.com":true,"s3-eu-central-1.amazonaws.com":true,"s3-eu-west-1.amazonaws.com":true,"s3-sa-east-1.amazonaws.com":true,"s3-us-gov-west-1.amazonaws.com":true,"s3-us-west-1.amazonaws.com":true,"s3-us-west-2.amazonaws.com":true,"s3.cn-north-1.amazonaws.com.cn":true,"s3.eu-central-1.amazonaws.com":true,"betainabox.com":true,"ae.org":true,"ar.com":true,"br.com":true,"cn.com":true,"com.de":true,"com.se":true,"de.com":true,"eu.com":true,"gb.com":true,"gb.net":true,"hu.com":true,"hu.net":true,"jp.net":true,"jpn.com":true,"kr.com":true,"mex.com":true,"no.com":true,"qc.com":true,"ru.com":true,"sa.com":true,"se.com":true,"se.net":true,"uk.com":true,"uk.net":true,"us.com":true,"uy.com":true,"za.bz":true,"za.com":true,"africa.com":true,"gr.com":true,"in.net":true,"us.org":true,"co.com":true,"c.la":true,"cloudcontrolled.com":true,"cloudcontrolapp.com":true,"co.ca":true,"c.cdn77.org":true,"cdn77-ssl.net":true,"r.cdn77.net":true,"rsc.cdn77.org":true,"ssl.origin.cdn77-secure.org":true,"co.nl":true,"co.no":true,"*.platform.sh":true,"cupcake.is":true,"dreamhosters.com":true,"duckdns.org":true,"dyndns-at-home.com":true,"dyndns-at-work.com":true,"dyndns-blog.com":true,"dyndns-free.com":true,"dyndns-home.com":true,"dyndns-ip.com":true,"dyndns-mail.com":true,"dyndns-office.com":true,"dyndns-pics.com":true,"dyndns-remote.com":true,"dyndns-server.com":true,"dyndns-web.com":true,"dyndns-wiki.com":true,"dyndns-work.com":true,"dyndns.biz":true,"dyndns.info":true,"dyndns.org":true,"dyndns.tv":true,"at-band-camp.net":true,"ath.cx":true,"barrel-of-knowledge.info":true,"barrell-of-knowledge.info":true,"better-than.tv":true,"blogdns.com":true,"blogdns.net":true,"blogdns.org":true,"blogsite.org":true,"boldlygoingnowhere.org":true,"broke-it.net":true,"buyshouses.net":true,"cechire.com":true,"dnsalias.com":true,"dnsalias.net":true,"dnsalias.org":true,"dnsdojo.com":true,"dnsdojo.net":true,"dnsdojo.org":true,"does-it.net":true,"doesntexist.com":true,"doesntexist.org":true,"dontexist.com":true,"dontexist.net":true,"dontexist.org":true,"doomdns.com":true,"doomdns.org":true,"dvrdns.org":true,"dyn-o-saur.com":true,"dynalias.com":true,"dynalias.net":true,"dynalias.org":true,"dynathome.net":true,"dyndns.ws":true,"endofinternet.net":true,"endofinternet.org":true,"endoftheinternet.org":true,"est-a-la-maison.com":true,"est-a-la-masion.com":true,"est-le-patron.com":true,"est-mon-blogueur.com":true,"for-better.biz":true,"for-more.biz":true,"for-our.info":true,"for-some.biz":true,"for-the.biz":true,"forgot.her.name":true,"forgot.his.name":true,"from-ak.com":true,"from-al.com":true,"from-ar.com":true,"from-az.net":true,"from-ca.com":true,"from-co.net":true,"from-ct.com":true,"from-dc.com":true,"from-de.com":true,"from-fl.com":true,"from-ga.com":true,"from-hi.com":true,"from-ia.com":true,"from-id.com":true,"from-il.com":true,"from-in.com":true,"from-ks.com":true,"from-ky.com":true,"from-la.net":true,"from-ma.com":true,"from-md.com":true,"from-me
.org":true,"from-mi.com":true,"from-mn.com":true,"from-mo.com":true,"from-ms.com":true,"from-mt.com":true,"from-nc.com":true,"from-nd.com":true,"from-ne.com":true,"from-nh.com":true,"from-nj.com":true,"from-nm.com":true,"from-nv.com":true,"from-ny.net":true,"from-oh.com":true,"from-ok.com":true,"from-or.com":true,"from-pa.com":true,"from-pr.com":true,"from-ri.com":true,"from-sc.com":true,"from-sd.com":true,"from-tn.com":true,"from-tx.com":true,"from-ut.com":true,"from-va.com":true,"from-vt.com":true,"from-wa.com":true,"from-wi.com":true,"from-wv.com":true,"from-wy.com":true,"ftpaccess.cc":true,"fuettertdasnetz.de":true,"game-host.org":true,"game-server.cc":true,"getmyip.com":true,"gets-it.net":true,"go.dyndns.org":true,"gotdns.com":true,"gotdns.org":true,"groks-the.info":true,"groks-this.info":true,"ham-radio-op.net":true,"here-for-more.info":true,"hobby-site.com":true,"hobby-site.org":true,"home.dyndns.org":true,"homedns.org":true,"homeftp.net":true,"homeftp.org":true,"homeip.net":true,"homelinux.com":true,"homelinux.net":true,"homelinux.org":true,"homeunix.com":true,"homeunix.net":true,"homeunix.org":true,"iamallama.com":true,"in-the-band.net":true,"is-a-anarchist.com":true,"is-a-blogger.com":true,"is-a-bookkeeper.com":true,"is-a-bruinsfan.org":true,"is-a-bulls-fan.com":true,"is-a-candidate.org":true,"is-a-caterer.com":true,"is-a-celticsfan.org":true,"is-a-chef.com":true,"is-a-chef.net":true,"is-a-chef.org":true,"is-a-conservative.com":true,"is-a-cpa.com":true,"is-a-cubicle-slave.com":true,"is-a-democrat.com":true,"is-a-designer.com":true,"is-a-doctor.com":true,"is-a-financialadvisor.com":true,"is-a-geek.com":true,"is-a-geek.net":true,"is-a-geek.org":true,"is-a-green.com":true,"is-a-guru.com":true,"is-a-hard-worker.com":true,"is-a-hunter.com":true,"is-a-knight.org":true,"is-a-landscaper.com":true,"is-a-lawyer.com":true,"is-a-liberal.com":true,"is-a-libertarian.com":true,"is-a-linux-user.org":true,"is-a-llama.com":true,"is-a-musician.com":true,"is-a-nascarfan.com":true,"is-a-nurse.com":true,"is-a-painter.com":true,"is-a-patsfan.org":true,"is-a-personaltrainer.com":true,"is-a-photographer.com":true,"is-a-player.com":true,"is-a-republican.com":true,"is-a-rockstar.com":true,"is-a-socialist.com":true,"is-a-soxfan.org":true,"is-a-student.com":true,"is-a-teacher.com":true,"is-a-techie.com":true,"is-a-therapist.com":true,"is-an-accountant.com":true,"is-an-actor.com":true,"is-an-actress.com":true,"is-an-anarchist.com":true,"is-an-artist.com":true,"is-an-engineer.com":true,"is-an-entertainer.com":true,"is-by.us":true,"is-certified.com":true,"is-found.org":true,"is-gone.com":true,"is-into-anime.com":true,"is-into-cars.com":true,"is-into-cartoons.com":true,"is-into-games.com":true,"is-leet.com":true,"is-lost.org":true,"is-not-certified.com":true,"is-saved.org":true,"is-slick.com":true,"is-uberleet.com":true,"is-very-bad.org":true,"is-very-evil.org":true,"is-very-good.org":true,"is-very-nice.org":true,"is-very-sweet.org":true,"is-with-theband.com":true,"isa-geek.com":true,"isa-geek.net":true,"isa-geek.org":true,"isa-hockeynut.com":true,"issmarterthanyou.com":true,"isteingeek.de":true,"istmein.de":true,"kicks-ass.net":true,"kicks-ass.org":true,"knowsitall.info":true,"land-4-sale.us":true,"lebtimnetz.de":true,"leitungsen.de":true,"likes-pie.com":true,"likescandy.com":true,"merseine.nu":true,"mine.nu":true,"misconfused.org":true,"mypets.ws":true,"myphotos.cc":true,"neat-url.com":true,"office-on-the.net":true,"on-the-web.tv":true,"podzone.net":true,"podzone.org":true,"readmyblog.org":true,"saves-the-whale
s.com":true,"scrapper-site.net":true,"scrapping.cc":true,"selfip.biz":true,"selfip.com":true,"selfip.info":true,"selfip.net":true,"selfip.org":true,"sells-for-less.com":true,"sells-for-u.com":true,"sells-it.net":true,"sellsyourhome.org":true,"servebbs.com":true,"servebbs.net":true,"servebbs.org":true,"serveftp.net":true,"serveftp.org":true,"servegame.org":true,"shacknet.nu":true,"simple-url.com":true,"space-to-rent.com":true,"stuff-4-sale.org":true,"stuff-4-sale.us":true,"teaches-yoga.com":true,"thruhere.net":true,"traeumtgerade.de":true,"webhop.biz":true,"webhop.info":true,"webhop.net":true,"webhop.org":true,"worse-than.tv":true,"writesthisblog.com":true,"eu.org":true,"al.eu.org":true,"asso.eu.org":true,"at.eu.org":true,"au.eu.org":true,"be.eu.org":true,"bg.eu.org":true,"ca.eu.org":true,"cd.eu.org":true,"ch.eu.org":true,"cn.eu.org":true,"cy.eu.org":true,"cz.eu.org":true,"de.eu.org":true,"dk.eu.org":true,"edu.eu.org":true,"ee.eu.org":true,"es.eu.org":true,"fi.eu.org":true,"fr.eu.org":true,"gr.eu.org":true,"hr.eu.org":true,"hu.eu.org":true,"ie.eu.org":true,"il.eu.org":true,"in.eu.org":true,"int.eu.org":true,"is.eu.org":true,"it.eu.org":true,"jp.eu.org":true,"kr.eu.org":true,"lt.eu.org":true,"lu.eu.org":true,"lv.eu.org":true,"mc.eu.org":true,"me.eu.org":true,"mk.eu.org":true,"mt.eu.org":true,"my.eu.org":true,"net.eu.org":true,"ng.eu.org":true,"nl.eu.org":true,"no.eu.org":true,"nz.eu.org":true,"paris.eu.org":true,"pl.eu.org":true,"pt.eu.org":true,"q-a.eu.org":true,"ro.eu.org":true,"ru.eu.org":true,"se.eu.org":true,"si.eu.org":true,"sk.eu.org":true,"tr.eu.org":true,"uk.eu.org":true,"us.eu.org":true,"a.ssl.fastly.net":true,"b.ssl.fastly.net":true,"global.ssl.fastly.net":true,"a.prod.fastly.net":true,"global.prod.fastly.net":true,"firebaseapp.com":true,"flynnhub.com":true,"service.gov.uk":true,"github.io":true,"githubusercontent.com":true,"ro.com":true,"appspot.com":true,"blogspot.ae":true,"blogspot.al":true,"blogspot.am":true,"blogspot.ba":true,"blogspot.be":true,"blogspot.bg":true,"blogspot.bj":true,"blogspot.ca":true,"blogspot.cf":true,"blogspot.ch":true,"blogspot.cl":true,"blogspot.co.at":true,"blogspot.co.id":true,"blogspot.co.il":true,"blogspot.co.ke":true,"blogspot.co.nz":true,"blogspot.co.uk":true,"blogspot.co.za":true,"blogspot.com":true,"blogspot.com.ar":true,"blogspot.com.au":true,"blogspot.com.br":true,"blogspot.com.by":true,"blogspot.com.co":true,"blogspot.com.cy":true,"blogspot.com.ee":true,"blogspot.com.eg":true,"blogspot.com.es":true,"blogspot.com.mt":true,"blogspot.com.ng":true,"blogspot.com.tr":true,"blogspot.com.uy":true,"blogspot.cv":true,"blogspot.cz":true,"blogspot.de":true,"blogspot.dk":true,"blogspot.fi":true,"blogspot.fr":true,"blogspot.gr":true,"blogspot.hk":true,"blogspot.hr":true,"blogspot.hu":true,"blogspot.ie":true,"blogspot.in":true,"blogspot.is":true,"blogspot.it":true,"blogspot.jp":true,"blogspot.kr":true,"blogspot.li":true,"blogspot.lt":true,"blogspot.lu":true,"blogspot.md":true,"blogspot.mk":true,"blogspot.mr":true,"blogspot.mx":true,"blogspot.my":true,"blogspot.nl":true,"blogspot.no":true,"blogspot.pe":true,"blogspot.pt":true,"blogspot.qa":true,"blogspot.re":true,"blogspot.ro":true,"blogspot.rs":true,"blogspot.ru":true,"blogspot.se":true,"blogspot.sg":true,"blogspot.si":true,"blogspot.sk":true,"blogspot.sn":true,"blogspot.td":true,"blogspot.tw":true,"blogspot.ug":true,"blogspot.vn":true,"codespot.com":true,"googleapis.com":true,"googlecode.com":true,"pagespeedmobilizer.com":true,"withgoogle.com":true,"withyoutube.com":true,"herokuapp.com":true,"herokussl.com":t
rue,"iki.fi":true,"biz.at":true,"info.at":true,"co.pl":true,"azurewebsites.net":true,"azure-mobile.net":true,"cloudapp.net":true,"bmoattachments.org":true,"4u.com":true,"nfshost.com":true,"nyc.mn":true,"nid.io":true,"operaunite.com":true,"outsystemscloud.com":true,"art.pl":true,"gliwice.pl":true,"krakow.pl":true,"poznan.pl":true,"wroc.pl":true,"zakopane.pl":true,"pantheon.io":true,"gotpantheon.com":true,"priv.at":true,"qa2.com":true,"rhcloud.com":true,"sandcats.io":true,"biz.ua":true,"co.ua":true,"pp.ua":true,"sinaapp.com":true,"vipsinaapp.com":true,"1kapp.com":true,"gda.pl":true,"gdansk.pl":true,"gdynia.pl":true,"med.pl":true,"sopot.pl":true,"hk.com":true,"hk.org":true,"ltd.hk":true,"inc.hk":true,"yolasite.com":true,"za.net":true,"za.org":true}); + +// END of automatically generated file diff --git a/node_modules/tough-cookie/lib/store.js b/node_modules/tough-cookie/lib/store.js new file mode 100644 index 0000000..bce5292 --- /dev/null +++ b/node_modules/tough-cookie/lib/store.js @@ -0,0 +1,71 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +'use strict'; +/*jshint unused:false */ + +function Store() { +} +exports.Store = Store; + +// Stores may be synchronous, but are still required to use a +// Continuation-Passing Style API. The CookieJar itself will expose a "*Sync" +// API that converts from synchronous-callbacks to imperative style. 
+Store.prototype.synchronous = false; + +Store.prototype.findCookie = function(domain, path, key, cb) { + throw new Error('findCookie is not implemented'); +}; + +Store.prototype.findCookies = function(domain, path, cb) { + throw new Error('findCookies is not implemented'); +}; + +Store.prototype.putCookie = function(cookie, cb) { + throw new Error('putCookie is not implemented'); +}; + +Store.prototype.updateCookie = function(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error('updateCookie is not implemented'); +}; + +Store.prototype.removeCookie = function(domain, path, key, cb) { + throw new Error('removeCookie is not implemented'); +}; + +Store.prototype.removeCookies = function(domain, path, cb) { + throw new Error('removeCookies is not implemented'); +}; + +Store.prototype.getAllCookies = function(cb) { + throw new Error('getAllCookies is not implemented (therefore jar cannot be serialized)'); +}; diff --git a/node_modules/tough-cookie/package.json b/node_modules/tough-cookie/package.json new file mode 100644 index 0000000..29c0361 --- /dev/null +++ b/node_modules/tough-cookie/package.json @@ -0,0 +1,74 @@ +{ + "author": { + "name": "Jeremy Stashewsky", + "email": "jstashewsky@salesforce.com", + "website": "https://github.com/stash" + }, + "contributors": [ + { + "name": "Alexander Savin", + "website": "https://github.com/apsavin" + }, + { + "name": "Ian Livingstone", + "website": "https://github.com/ianlivingstone" + }, + { + "name": "Ivan Nikulin", + "website": "https://github.com/inikulin" + }, + { + "name": "Lalit Kapoor", + "website": "https://github.com/lalitkapoor" + }, + { + "name": "Sam Thompson", + "website": "https://github.com/sambthompson" + }, + { + "name": "Sebastian Mayr", + "website": "https://github.com/Sebmaster" + } + ], + "license": "BSD-3-Clause", + "name": "tough-cookie", + "description": "RFC6265 Cookies and Cookie Jar for node.js", + "keywords": [ + "HTTP", + "cookie", + "cookies", + "set-cookie", + "cookiejar", + "jar", + "RFC6265", + "RFC2965" + ], + "version": "2.3.2", + "homepage": "https://github.com/salesforce/tough-cookie", + "repository": { + "type": "git", + "url": "git://github.com/salesforce/tough-cookie.git" + }, + "bugs": { + "url": "https://github.com/salesforce/tough-cookie/issues" + }, + "main": "./lib/cookie", + "files": [ + "lib" + ], + "scripts": { + "suffixup": "curl -o public_suffix_list.dat https://publicsuffix.org/list/public_suffix_list.dat && ./generate-pubsuffix.js", + "test": "vows test/*_test.js" + }, + "engines": { + "node": ">=0.8" + }, + "devDependencies": { + "async": "^1.4.2", + "string.prototype.repeat": "^0.2.0", + "vows": "^0.8.1" + }, + "dependencies": { + "punycode": "^1.4.1" + } +} diff --git a/node_modules/tty-browserify/LICENSE b/node_modules/tty-browserify/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/tty-browserify/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all 
+copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/tty-browserify/index.js b/node_modules/tty-browserify/index.js new file mode 100644 index 0000000..2b5f04c --- /dev/null +++ b/node_modules/tty-browserify/index.js @@ -0,0 +1,11 @@ +exports.isatty = function () { return false; }; + +function ReadStream() { + throw new Error('tty.ReadStream is not implemented'); +} +exports.ReadStream = ReadStream; + +function WriteStream() { + throw new Error('tty.ReadStream is not implemented'); +} +exports.WriteStream = WriteStream; diff --git a/node_modules/tty-browserify/package.json b/node_modules/tty-browserify/package.json new file mode 100644 index 0000000..c5d50e0 --- /dev/null +++ b/node_modules/tty-browserify/package.json @@ -0,0 +1,29 @@ +{ + "name": "tty-browserify", + "version": "0.0.0", + "description": "the tty module from node core for browsers", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~1.0.4" + }, + "scripts": { + "test": "tape test/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/tty-browserify.git" + }, + "homepage": "https://github.com/substack/tty-browserify", + "keywords": [ + "tty", + "browser", + "browserify" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT" +} diff --git a/node_modules/tty-browserify/readme.markdown b/node_modules/tty-browserify/readme.markdown new file mode 100644 index 0000000..91a2051 --- /dev/null +++ b/node_modules/tty-browserify/readme.markdown @@ -0,0 +1 @@ +# tty-browserify diff --git a/node_modules/tunnel-agent/LICENSE b/node_modules/tunnel-agent/LICENSE new file mode 100644 index 0000000..a4a9aee --- /dev/null +++ b/node_modules/tunnel-agent/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
+ +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/node_modules/tunnel-agent/README.md b/node_modules/tunnel-agent/README.md new file mode 100644 index 0000000..bb533d5 --- /dev/null +++ b/node_modules/tunnel-agent/README.md @@ -0,0 +1,4 @@ +tunnel-agent +============ + +HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module. diff --git a/node_modules/tunnel-agent/index.js b/node_modules/tunnel-agent/index.js new file mode 100644 index 0000000..68013ac --- /dev/null +++ b/node_modules/tunnel-agent/index.js @@ -0,0 +1,243 @@ +'use strict' + +var net = require('net') + , tls = require('tls') + , http = require('http') + , https = require('https') + , events = require('events') + , assert = require('assert') + , util = require('util') + ; + +exports.httpOverHttp = httpOverHttp +exports.httpsOverHttp = httpsOverHttp +exports.httpOverHttps = httpOverHttps +exports.httpsOverHttps = httpsOverHttps + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + return agent +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + return agent +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + + +function TunnelingAgent(options) { + var self = this + self.options = options || {} + self.proxyOptions = self.options.proxy || {} + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets + self.requests = [] + self.sockets = [] + + self.on('free', function onFree(socket, host, port) { + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i] + if (pending.host === host && pending.port === port) { + // Detect the request to connect same origin server, + // reuse the connection. 
+ self.requests.splice(i, 1) + pending.request.onSocket(socket) + return + } + } + socket.destroy() + self.removeSocket(socket) + }) +} +util.inherits(TunnelingAgent, events.EventEmitter) + +TunnelingAgent.prototype.addRequest = function addRequest(req, options) { + var self = this + + // Legacy API: addRequest(req, host, port, path) + if (typeof options === 'string') { + options = { + host: options, + port: arguments[2], + path: arguments[3] + }; + } + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push({host: options.host, port: options.port, request: req}) + return + } + + // If we are under maxSockets create a new one. + self.createConnection({host: options.host, port: options.port, request: req}) +} + +TunnelingAgent.prototype.createConnection = function createConnection(pending) { + var self = this + + self.createSocket(pending, function(socket) { + socket.on('free', onFree) + socket.on('close', onCloseOrRemove) + socket.on('agentRemove', onCloseOrRemove) + pending.request.onSocket(socket) + + function onFree() { + self.emit('free', socket, pending.host, pending.port) + } + + function onCloseOrRemove(err) { + self.removeSocket(socket) + socket.removeListener('free', onFree) + socket.removeListener('close', onCloseOrRemove) + socket.removeListener('agentRemove', onCloseOrRemove) + } + }) +} + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this + var placeholder = {} + self.sockets.push(placeholder) + + var connectOptions = mergeOptions({}, self.proxyOptions, + { method: 'CONNECT' + , path: options.host + ':' + options.port + , agent: false + } + ) + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {} + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64') + } + + debug('making CONNECT request') + var connectReq = self.request(connectOptions) + connectReq.useChunkedEncodingByDefault = false // for v0.6 + connectReq.once('response', onResponse) // for v0.6 + connectReq.once('upgrade', onUpgrade) // for v0.6 + connectReq.once('connect', onConnect) // for v0.7 or later + connectReq.once('error', onError) + connectReq.end() + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true + } + + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head) + }) + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners() + socket.removeAllListeners() + + if (res.statusCode === 200) { + assert.equal(head.length, 0) + debug('tunneling connection has established') + self.sockets[self.sockets.indexOf(placeholder)] = socket + cb(socket) + } else { + debug('tunneling socket could not be established, statusCode=%d', res.statusCode) + var error = new Error('tunneling socket could not be established, ' + 'statusCode=' + res.statusCode) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } + } + + function onError(cause) { + connectReq.removeAllListeners() + + debug('tunneling socket could not be established, cause=%s\n', cause.message, cause.stack) + var error = new Error('tunneling socket could not be established, ' + 'cause=' + cause.message) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } +} + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) return + + this.sockets.splice(pos, 1) + + var pending = this.requests.shift() + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createConnection(pending) + } +} + +function createSecureSocket(options, cb) { + var self = this + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, mergeOptions({}, self.options, + { servername: options.host + , socket: socket + } + )) + self.sockets[self.sockets.indexOf(socket)] = secureSocket + cb(secureSocket) + }) +} + + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i] + if (typeof overrides === 'object') { + var keys = Object.keys(overrides) + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j] + if (overrides[k] !== undefined) { + target[k] = overrides[k] + } + } + } + } + return target +} + + +var debug +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments) + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0] + } else { + args.unshift('TUNNEL:') + } + console.error.apply(console, args) + } +} else { + debug = function() {} +} +exports.debug = debug // for test diff --git a/node_modules/tunnel-agent/package.json b/node_modules/tunnel-agent/package.json new file mode 100644 index 0000000..a503283 --- /dev/null +++ b/node_modules/tunnel-agent/package.json @@ -0,0 +1,20 @@ +{ + "author": "Mikeal Rogers (http://www.futurealoof.com)", + "name": "tunnel-agent", + "license": "Apache-2.0", + "description": "HTTP proxy tunneling agent. 
Formerly part of mikeal/request, now a standalone module.", + "version": "0.4.3", + "repository": { + "url": "https://github.com/mikeal/tunnel-agent" + }, + "main": "index.js", + "files": [ + "index.js" + ], + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + } +} diff --git a/node_modules/tweetnacl/.npmignore b/node_modules/tweetnacl/.npmignore new file mode 100644 index 0000000..7d98dcb --- /dev/null +++ b/node_modules/tweetnacl/.npmignore @@ -0,0 +1,4 @@ +.eslintrc +.travis.yml +bower.json +test diff --git a/node_modules/tweetnacl/CHANGELOG.md b/node_modules/tweetnacl/CHANGELOG.md new file mode 100644 index 0000000..9debcb4 --- /dev/null +++ b/node_modules/tweetnacl/CHANGELOG.md @@ -0,0 +1,181 @@ +TweetNaCl.js Changelog +====================== + + +v0.14.1 +------- + +No code changes, just tweaked packaging and added COPYING.txt. + + +v0.14.0 +------- + +* **Breaking change!** All functions from `nacl.util` have been removed. These + functions are no longer available: + + nacl.util.decodeUTF8 + nacl.util.encodeUTF8 + nacl.util.decodeBase64 + nacl.util.encodeBase64 + + If want to continue using them, you can include + package: + + + + + or + + var nacl = require('tweetnacl'); + nacl.util = require('tweetnacl-util'); + + However it is recommended to use better packages that have wider + compatibility and better performance. Functions from `nacl.util` were never + intended to be robust solution for string conversion and were included for + convenience: cryptography library is not the right place for them. + + Currently calling these functions will throw error pointing to + `tweetnacl-util-js` (in the next version this error message will be removed). + +* Improved detection of available random number generators, making it possible + to use `nacl.randomBytes` and related functions in Web Workers without + changes. + +* Changes to testing (see README). + + +v0.13.3 +------- + +No code changes. + +* Reverted license field in package.json to "Public domain". + +* Fixed typo in README. + + +v0.13.2 +------- + +* Fixed undefined variable bug in fast version of Poly1305. No worries, this + bug was *never* triggered. + +* Specified CC0 public domain dedication. + +* Updated development dependencies. + + +v0.13.1 +------- + +* Exclude `crypto` and `buffer` modules from browserify builds. + + +v0.13.0 +------- + +* Made `nacl-fast` the default version in NPM package. Now + `require("tweetnacl")` will use fast version; to get the original version, + use `require("tweetnacl/nacl.js")`. + +* Cleanup temporary array after generating random bytes. + + +v0.12.2 +------- + +* Improved performance of curve operations, making `nacl.scalarMult`, `nacl.box`, + `nacl.sign` and related functions up to 3x faster in `nacl-fast` version. + + +v0.12.1 +------- + +* Significantly improved performance of Salsa20 (~1.5x faster) and + Poly1305 (~3.5x faster) in `nacl-fast` version. + + +v0.12.0 +------- + +* Instead of using the given secret key directly, TweetNaCl.js now copies it to + a new array in `nacl.box.keyPair.fromSecretKey` and + `nacl.sign.keyPair.fromSecretKey`. + + +v0.11.2 +------- + +* Added new constant: `nacl.sign.seedLength`. + + +v0.11.1 +------- + +* Even faster hash for both short and long inputs (in `nacl-fast`). + + +v0.11.0 +------- + +* Implement `nacl.sign.keyPair.fromSeed` to enable creation of sign key pairs + deterministically from a 32-byte seed. 
(It behaves like + [libsodium's](http://doc.libsodium.org/public-key_cryptography/public-key_signatures.html) + `crypto_sign_seed_keypair`: the seed becomes a secret part of the secret key.) + +* Fast version now has an improved hash implementation that is 2x-5x faster. + +* Fixed benchmarks, which may have produced incorrect measurements. + + +v0.10.1 +------- + +* Exported undocumented `nacl.lowlevel.crypto_core_hsalsa20`. + + +v0.10.0 +------- + +* **Signature API breaking change!** `nacl.sign` and `nacl.sign.open` now deal + with signed messages, and new `nacl.sign.detached` and + `nacl.sign.detached.verify` are available. + + Previously, `nacl.sign` returned a signature, and `nacl.sign.open` accepted a + message and "detached" signature. This was unlike NaCl's API, which dealt with + signed messages (concatenation of signature and message). + + The new API is: + + nacl.sign(message, secretKey) -> signedMessage + nacl.sign.open(signedMessage, publicKey) -> message | null + + Since detached signatures are common, two new API functions were introduced: + + nacl.sign.detached(message, secretKey) -> signature + nacl.sign.detached.verify(message, signature, publicKey) -> true | false + + (Note that it's `verify`, not `open`, and it returns a boolean value, unlike + `open`, which returns an "unsigned" message.) + +* NPM package now comes without `test` directory to keep it small. + + +v0.9.2 +------ + +* Improved documentation. +* Fast version: increased theoretical message size limit from 2^32-1 to 2^52 + bytes in Poly1305 (and thus, secretbox and box). However this has no impact + in practice since JavaScript arrays or ArrayBuffers are limited to 32-bit + indexes, and most implementations won't allocate more than a gigabyte or so. + (Obviously, there are no tests for the correctness of implementation.) Also, + it's not recommended to use messages that large without splitting them into + smaller packets anyway. + + +v0.9.1 +------ + +* Initial release diff --git a/node_modules/tweetnacl/COPYING.txt b/node_modules/tweetnacl/COPYING.txt new file mode 100644 index 0000000..c2bd1e5 --- /dev/null +++ b/node_modules/tweetnacl/COPYING.txt @@ -0,0 +1,9 @@ +Public Domain + +The person who associated a work with this deed has dedicated the work to the +public domain by waiving all of his or her rights to the work worldwide under +copyright law, including all related and neighboring rights, to the extent +allowed by law. + +You can copy, modify, distribute and perform the work, even for commercial +purposes, all without asking permission. diff --git a/node_modules/tweetnacl/README.md b/node_modules/tweetnacl/README.md new file mode 100644 index 0000000..c80bbed --- /dev/null +++ b/node_modules/tweetnacl/README.md @@ -0,0 +1,445 @@ +TweetNaCl.js +============ + +Port of [TweetNaCl](http://tweetnacl.cr.yp.to) / [NaCl](http://nacl.cr.yp.to/) +to JavaScript for modern browsers and Node.js. Public domain. + +[![Build Status](https://travis-ci.org/dchest/tweetnacl-js.svg?branch=master) +](https://travis-ci.org/dchest/tweetnacl-js) + +Demo: + +**:warning: Beta version. The library is stable and API is frozen, however +it has not been independently reviewed. 
If you can help reviewing it, please +[contact me](mailto:dmitry@codingrobots.com).** + +Documentation +============= + +* [Overview](#overview) +* [Installation](#installation) +* [Usage](#usage) + * [Public-key authenticated encryption (box)](#public-key-authenticated-encryption-box) + * [Secret-key authenticated encryption (secretbox)](#secret-key-authenticated-encryption-secretbox) + * [Scalar multiplication](#scalar-multiplication) + * [Signatures](#signatures) + * [Hashing](#hashing) + * [Random bytes generation](#random-bytes-generation) + * [Constant-time comparison](#constant-time-comparison) +* [System requirements](#system-requirements) +* [Development and testing](#development-and-testing) +* [Contributors](#contributors) +* [Who uses it](#who-uses-it) + + +Overview +-------- + +The primary goal of this project is to produce a translation of TweetNaCl to +JavaScript which is as close as possible to the original C implementation, plus +a thin layer of idiomatic high-level API on top of it. + +There are two versions, you can use either of them: + +* `nacl.js` is the port of TweetNaCl with minimum differences from the + original + high-level API. + +* `nacl-fast.js` is like `nacl.js`, but with some functions replaced with + faster versions. + + +Installation +------------ + +You can install TweetNaCl.js via a package manager: + +[Bower](http://bower.io): + + $ bower install tweetnacl + +[NPM](https://www.npmjs.org/): + + $ npm install tweetnacl + +or [download source code](https://github.com/dchest/tweetnacl-js/releases). + + +Usage +----- + +All API functions accept and return bytes as `Uint8Array`s. If you need to +encode or decode strings, use functions from + or one of the more robust codec +packages. + +In Node.js v4 and later `Buffer` objects are backed by `Uint8Array`s, so you +can freely pass them to TweetNaCl.js functions as arguments. The returned +objects are still `Uint8Array`s, so if you need `Buffer`s, you'll have to +convert them manually; make sure to convert using copying: `new Buffer(array)`, +instead of sharing: `new Buffer(array.buffer)`, because some functions return +subarrays of their buffers. + + +### Public-key authenticated encryption (box) + +Implements *curve25519-xsalsa20-poly1305*. + +#### nacl.box.keyPair() + +Generates a new random key pair for box and returns it as an object with +`publicKey` and `secretKey` members: + + { + publicKey: ..., // Uint8Array with 32-byte public key + secretKey: ... // Uint8Array with 32-byte secret key + } + + +#### nacl.box.keyPair.fromSecretKey(secretKey) + +Returns a key pair for box with public key corresponding to the given secret +key. + +#### nacl.box(message, nonce, theirPublicKey, mySecretKey) + +Encrypt and authenticates message using peer's public key, our secret key, and +the given nonce, which must be unique for each distinct message for a key pair. + +Returns an encrypted and authenticated message, which is +`nacl.box.overheadLength` longer than the original message. + +#### nacl.box.open(box, nonce, theirPublicKey, mySecretKey) + +Authenticates and decrypts the given box with peer's public key, our secret +key, and the given nonce. + +Returns the original message, or `false` if authentication fails. + +#### nacl.box.before(theirPublicKey, mySecretKey) + +Returns a precomputed shared key which can be used in `nacl.box.after` and +`nacl.box.open.after`. + +#### nacl.box.after(message, nonce, sharedKey) + +Same as `nacl.box`, but uses a shared key precomputed with `nacl.box.before`. 
+ +#### nacl.box.open.after(box, nonce, sharedKey) + +Same as `nacl.box.open`, but uses a shared key precomputed with `nacl.box.before`. + +#### nacl.box.publicKeyLength = 32 + +Length of public key in bytes. + +#### nacl.box.secretKeyLength = 32 + +Length of secret key in bytes. + +#### nacl.box.sharedKeyLength = 32 + +Length of precomputed shared key in bytes. + +#### nacl.box.nonceLength = 24 + +Length of nonce in bytes. + +#### nacl.box.overheadLength = 16 + +Length of overhead added to box compared to original message. + + +### Secret-key authenticated encryption (secretbox) + +Implements *xsalsa20-poly1305*. + +#### nacl.secretbox(message, nonce, key) + +Encrypt and authenticates message using the key and the nonce. The nonce must +be unique for each distinct message for this key. + +Returns an encrypted and authenticated message, which is +`nacl.secretbox.overheadLength` longer than the original message. + +#### nacl.secretbox.open(box, nonce, key) + +Authenticates and decrypts the given secret box using the key and the nonce. + +Returns the original message, or `false` if authentication fails. + +#### nacl.secretbox.keyLength = 32 + +Length of key in bytes. + +#### nacl.secretbox.nonceLength = 24 + +Length of nonce in bytes. + +#### nacl.secretbox.overheadLength = 16 + +Length of overhead added to secret box compared to original message. + + +### Scalar multiplication + +Implements *curve25519*. + +#### nacl.scalarMult(n, p) + +Multiplies an integer `n` by a group element `p` and returns the resulting +group element. + +#### nacl.scalarMult.base(n) + +Multiplies an integer `n` by a standard group element and returns the resulting +group element. + +#### nacl.scalarMult.scalarLength = 32 + +Length of scalar in bytes. + +#### nacl.scalarMult.groupElementLength = 32 + +Length of group element in bytes. + + +### Signatures + +Implements [ed25519](http://ed25519.cr.yp.to). + +#### nacl.sign.keyPair() + +Generates new random key pair for signing and returns it as an object with +`publicKey` and `secretKey` members: + + { + publicKey: ..., // Uint8Array with 32-byte public key + secretKey: ... // Uint8Array with 64-byte secret key + } + +#### nacl.sign.keyPair.fromSecretKey(secretKey) + +Returns a signing key pair with public key corresponding to the given +64-byte secret key. The secret key must have been generated by +`nacl.sign.keyPair` or `nacl.sign.keyPair.fromSeed`. + +#### nacl.sign.keyPair.fromSeed(seed) + +Returns a new signing key pair generated deterministically from a 32-byte seed. +The seed must contain enough entropy to be secure. This method is not +recommended for general use: instead, use `nacl.sign.keyPair` to generate a new +key pair from a random seed. + +#### nacl.sign(message, secretKey) + +Signs the message using the secret key and returns a signed message. + +#### nacl.sign.open(signedMessage, publicKey) + +Verifies the signed message and returns the message without signature. + +Returns `null` if verification failed. + +#### nacl.sign.detached(message, secretKey) + +Signs the message using the secret key and returns a signature. + +#### nacl.sign.detached.verify(message, signature, publicKey) + +Verifies the signature for the message and returns `true` if verification +succeeded or `false` if it failed. + +#### nacl.sign.publicKeyLength = 32 + +Length of signing public key in bytes. + +#### nacl.sign.secretKeyLength = 64 + +Length of signing secret key in bytes. + +#### nacl.sign.seedLength = 32 + +Length of seed for `nacl.sign.keyPair.fromSeed` in bytes. 
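+As a short illustration of both signature forms described above (a minimal
+sketch; the variable names are illustrative only):
+
+    var nacl = require('tweetnacl');
+
+    var keys = nacl.sign.keyPair();
+    var message = new Uint8Array([1, 2, 3]);
+
+    // Signed-message form: the concatenation of signature and message.
+    var signedMessage = nacl.sign(message, keys.secretKey);
+    var verified = nacl.sign.open(signedMessage, keys.publicKey); // message or null
+
+    // Detached form: the signature is returned separately.
+    var signature = nacl.sign.detached(message, keys.secretKey);
+    var ok = nacl.sign.detached.verify(message, signature, keys.publicKey); // true or false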
+ +#### nacl.sign.signatureLength = 64 + +Length of signature in bytes. + + +### Hashing + +Implements *SHA-512*. + +#### nacl.hash(message) + +Returns SHA-512 hash of the message. + +#### nacl.hash.hashLength = 64 + +Length of hash in bytes. + + +### Random bytes generation + +#### nacl.randomBytes(length) + +Returns a `Uint8Array` of the given length containing random bytes of +cryptographic quality. + +**Implementation note** + +TweetNaCl.js uses the following methods to generate random bytes, +depending on the platform it runs on: + +* `window.crypto.getRandomValues` (WebCrypto standard) +* `window.msCrypto.getRandomValues` (Internet Explorer 11) +* `crypto.randomBytes` (Node.js) + +If the platform doesn't provide a suitable PRNG, the following functions, +which require random numbers, will throw exception: + +* `nacl.randomBytes` +* `nacl.box.keyPair` +* `nacl.sign.keyPair` + +Other functions are deterministic and will continue working. + +If a platform you are targeting doesn't implement secure random number +generator, but you somehow have a cryptographically-strong source of entropy +(not `Math.random`!), and you know what you are doing, you can plug it into +TweetNaCl.js like this: + + nacl.setPRNG(function(x, n) { + // ... copy n random bytes into x ... + }); + +Note that `nacl.setPRNG` *completely replaces* internal random byte generator +with the one provided. + + +### Constant-time comparison + +#### nacl.verify(x, y) + +Compares `x` and `y` in constant time and returns `true` if their lengths are +non-zero and equal, and their contents are equal. + +Returns `false` if either of the arguments has zero length, or arguments have +different lengths, or their contents differ. + + +System requirements +------------------- + +TweetNaCl.js supports modern browsers that have a cryptographically secure +pseudorandom number generator and typed arrays, including the latest versions +of: + +* Chrome +* Firefox +* Safari (Mac, iOS) +* Internet Explorer 11 + +Other systems: + +* Node.js + + +Development and testing +------------------------ + +Install NPM modules needed for development: + + $ npm install + +To build minified versions: + + $ npm run build + +Tests use minified version, so make sure to rebuild it every time you change +`nacl.js` or `nacl-fast.js`. + +### Testing + +To run tests in Node.js: + + $ npm run test-node + +By default all tests described here work on `nacl.min.js`. To test other +versions, set environment variable `NACL_SRC` to the file name you want to test. +For example, the following command will test fast minified version: + + $ NACL_SRC=nacl-fast.min.js npm run test-node + +To run full suite of tests in Node.js, including comparing outputs of +JavaScript port to outputs of the original C version: + + $ npm run test-node-all + +To prepare tests for browsers: + + $ npm run build-test-browser + +and then open `test/browser/test.html` (or `test/browser/test-fast.html`) to +run them. + +To run headless browser tests with `tape-run` (powered by Electron): + + $ npm run test-browser + +(If you get `Error: spawn ENOENT`, install *xvfb*: `sudo apt-get install xvfb`.) + +To run tests in both Node and Electron: + + $ npm test + +### Benchmarking + +To run benchmarks in Node.js: + + $ npm run bench + $ NACL_SRC=nacl-fast.min.js npm run bench + +To run benchmarks in a browser, open `test/benchmark/bench.html` (or +`test/benchmark/bench-fast.html`). 
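+As a small illustration of the hashing and constant-time comparison functions
+documented above (a minimal sketch for Node.js; the variable names are
+illustrative only):
+
+    var nacl = require('tweetnacl');
+
+    var message = new Uint8Array([104, 101, 108, 108, 111]); // "hello" as bytes
+    var digest = nacl.hash(message); // Uint8Array of nacl.hash.hashLength (64) bytes
+
+    // Compare two digests without leaking timing information.
+    var same = nacl.verify(digest, nacl.hash(message)); // true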
+ + +Contributors +------------ + +JavaScript port: + + * [Dmitry Chestnykh](http://github.com/dchest) (ported xsalsa20, poly1305, curve25519) + * [Devi Mandiri](https://github.com/devi) (ported curve25519, ed25519, sha512) + +Original authors of [NaCl](http://nacl.cr.yp.to), [TweetNaCl](http://tweetnacl.cr.yp.to) +and [Poly1305-donna](https://github.com/floodyberry/poly1305-donna) +(who are *not* responsible for any errors in this implementation): + + * [Daniel J. Bernstein](http://cr.yp.to/djb.html) + * Wesley Janssen + * [Tanja Lange](http://hyperelliptic.org/tanja) + * [Peter Schwabe](http://www.cryptojedi.org/users/peter/) + * [Matthew Dempsky](https://github.com/mdempsky) + * [Andrew Moon](https://github.com/floodyberry) + +Contributors have dedicated their work to the public domain. + +This software is distributed without any warranty. + + +Third-party libraries based on TweetNaCl.js +------------------------------------------- + +* [forward-secrecy](https://github.com/alax/forward-secrecy) — Axolotl ratchet implementation +* [nacl-stream](https://github.com/dchest/nacl-stream-js) - streaming encryption +* [tweetnacl-auth-js](https://github.com/dchest/tweetnacl-auth-js) — implementation of [`crypto_auth`](http://nacl.cr.yp.to/auth.html) + + +Who uses it +----------- + +Some notable users of TweetNaCl.js: + +* [miniLock](http://minilock.io/) +* [Stellar](https://www.stellar.org/) diff --git a/node_modules/tweetnacl/nacl-fast.js b/node_modules/tweetnacl/nacl-fast.js new file mode 100644 index 0000000..5e4562f --- /dev/null +++ b/node_modules/tweetnacl/nacl-fast.js @@ -0,0 +1,2388 @@ +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. 
+var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function ts64(x, i, h, l) { + x[i] = (h >> 24) & 0xff; + x[i+1] = (h >> 16) & 0xff; + x[i+2] = (h >> 8) & 0xff; + x[i+3] = h & 0xff; + x[i+4] = (l >> 24) & 0xff; + x[i+5] = (l >> 16) & 0xff; + x[i+6] = (l >> 8) & 0xff; + x[i+7] = l & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core_salsa20(o, p, k, c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | 
u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + x0 = x0 + j0 | 0; + x1 = x1 + j1 | 0; + x2 = x2 + j2 | 0; + x3 = x3 + j3 | 0; + x4 = x4 + j4 | 0; + x5 = x5 + j5 | 0; + x6 = x6 + j6 | 0; + x7 = x7 + j7 | 0; + x8 = x8 + j8 | 0; + x9 = x9 + j9 | 0; + x10 = x10 + j10 | 0; + x11 = x11 + j11 | 0; + x12 = x12 + j12 | 0; + x13 = x13 + j13 | 0; + x14 = x14 + j14 | 0; + x15 = x15 + j15 | 0; + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x1 >>> 0 & 0xff; + o[ 5] = x1 >>> 8 & 0xff; + o[ 6] = x1 >>> 16 & 0xff; + o[ 7] = x1 >>> 24 & 0xff; + + o[ 8] = x2 >>> 0 & 0xff; + o[ 9] = x2 >>> 8 & 0xff; + o[10] = x2 >>> 16 & 0xff; + o[11] = x2 >>> 24 & 0xff; + + o[12] = x3 >>> 0 & 0xff; + o[13] = x3 >>> 8 & 0xff; + o[14] = x3 >>> 16 & 0xff; + o[15] = x3 >>> 24 & 0xff; + + o[16] = x4 >>> 0 & 0xff; + o[17] = x4 >>> 8 & 0xff; + o[18] = x4 >>> 16 & 0xff; + o[19] = x4 >>> 24 & 0xff; + + o[20] = x5 >>> 0 & 0xff; + o[21] = x5 >>> 8 & 0xff; + o[22] = x5 >>> 16 & 0xff; + o[23] = x5 >>> 24 & 0xff; + + o[24] = x6 >>> 0 & 0xff; + o[25] = x6 >>> 8 & 0xff; + o[26] = x6 >>> 16 & 0xff; + o[27] = x6 >>> 24 & 0xff; + + o[28] = x7 >>> 0 & 0xff; + o[29] = x7 >>> 8 & 0xff; + o[30] = x7 >>> 16 & 0xff; + o[31] = x7 >>> 24 & 0xff; + + o[32] = x8 >>> 0 & 0xff; + o[33] = x8 >>> 8 & 0xff; + o[34] = x8 >>> 16 & 0xff; + o[35] = x8 >>> 24 & 0xff; + + o[36] = x9 >>> 0 & 0xff; + o[37] = x9 >>> 8 & 0xff; + o[38] = x9 >>> 16 & 0xff; + o[39] = x9 >>> 24 & 0xff; + + o[40] = x10 >>> 0 & 0xff; + o[41] = x10 >>> 8 & 0xff; + o[42] = x10 >>> 16 & 0xff; + o[43] = x10 >>> 24 & 0xff; + + o[44] = x11 >>> 0 & 0xff; + o[45] = x11 >>> 8 & 0xff; + o[46] = x11 >>> 16 & 0xff; + o[47] = x11 >>> 24 & 0xff; + + o[48] = x12 >>> 0 & 0xff; + o[49] = x12 >>> 8 & 0xff; + o[50] = x12 >>> 16 & 0xff; + o[51] = x12 >>> 24 & 0xff; + + o[52] = x13 >>> 0 & 0xff; + o[53] = x13 >>> 8 & 0xff; + o[54] = x13 >>> 16 & 0xff; + o[55] = x13 >>> 24 & 0xff; + + o[56] = x14 >>> 0 & 0xff; + o[57] = x14 >>> 8 & 0xff; + o[58] = x14 >>> 16 & 0xff; + o[59] = x14 >>> 24 & 0xff; + + o[60] = x15 >>> 0 & 0xff; + o[61] = x15 >>> 8 & 0xff; + o[62] = x15 >>> 16 & 0xff; + o[63] = x15 >>> 24 & 0xff; +} + +function core_hsalsa20(o,p,k,c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | 
(k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x5 >>> 0 & 0xff; + o[ 5] = x5 >>> 8 & 0xff; + o[ 6] = x5 >>> 16 & 0xff; + o[ 7] = x5 >>> 24 & 0xff; + + o[ 8] = x10 >>> 0 & 0xff; + o[ 9] = x10 >>> 8 & 0xff; + o[10] = x10 >>> 16 & 0xff; + o[11] = x10 >>> 24 & 0xff; + + o[12] = x15 >>> 0 & 0xff; + o[13] = x15 >>> 8 & 0xff; + o[14] = x15 >>> 16 & 0xff; + o[15] = x15 >>> 24 & 0xff; + + o[16] = x6 >>> 0 & 0xff; + o[17] = x6 >>> 8 & 0xff; + o[18] = x6 >>> 16 & 0xff; + o[19] = x6 >>> 24 & 0xff; + + o[20] = x7 >>> 0 & 0xff; + o[21] = x7 >>> 8 & 0xff; + o[22] = x7 >>> 16 & 0xff; + o[23] = x7 >>> 24 & 0xff; + + o[24] = x8 >>> 0 & 0xff; + o[25] = x8 >>> 8 & 0xff; + o[26] = x8 >>> 16 & 0xff; + 
o[27] = x8 >>> 24 & 0xff; + + o[28] = x9 >>> 0 & 0xff; + o[29] = x9 >>> 8 & 0xff; + o[30] = x9 >>> 16 & 0xff; + o[31] = x9 >>> 24 & 0xff; +} + +function crypto_core_salsa20(out,inp,k,c) { + core_salsa20(out,inp,k,c); +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core_hsalsa20(out,inp,k,c); +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = x[i]; + } + return 0; +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20(c,cpos,d,sn,s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s); +} + +/* +* Port of Andrew Moon's Poly1305-donna-16. Public domain. 
+* https://github.com/floodyberry/poly1305-donna +*/ + +var poly1305 = function(key) { + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.leftover = 0; + this.fin = 0; + + var t0, t1, t2, t3, t4, t5, t6, t7; + + t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff; + t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + this.r[5] = ((t4 >>> 1)) & 0x1ffe; + t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = ((t7 >>> 5)) & 0x007f; + + this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8; + this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 8; + this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8; + this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8; + this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8; + this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8; + this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8; + this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8; +}; + +poly1305.prototype.blocks = function(m, mpos, bytes) { + var hibit = this.fin ? 0 : (1 << 11); + var t0, t1, t2, t3, t4, t5, t6, t7, c; + var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9; + + var h0 = this.h[0], + h1 = this.h[1], + h2 = this.h[2], + h3 = this.h[3], + h4 = this.h[4], + h5 = this.h[5], + h6 = this.h[6], + h7 = this.h[7], + h8 = this.h[8], + h9 = this.h[9]; + + var r0 = this.r[0], + r1 = this.r[1], + r2 = this.r[2], + r3 = this.r[3], + r4 = this.r[4], + r5 = this.r[5], + r6 = this.r[6], + r7 = this.r[7], + r8 = this.r[8], + r9 = this.r[9]; + + while (bytes >= 16) { + t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff; + t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff; + t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff; + h5 += ((t4 >>> 1)) & 0x1fff; + t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff; + t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + h9 += ((t7 >>> 5)) | hibit; + + c = 0; + + d0 = c; + d0 += h0 * r0; + d0 += h1 * (5 * r9); + d0 += h2 * (5 * r8); + d0 += h3 * (5 * r7); + d0 += h4 * (5 * r6); + c = (d0 >>> 13); d0 &= 0x1fff; + d0 += h5 * (5 * r5); + d0 += h6 * (5 * r4); + d0 += h7 * (5 * r3); + d0 += h8 * (5 * r2); + d0 += h9 * (5 * r1); + c += (d0 >>> 13); d0 &= 0x1fff; + + d1 = c; + d1 += h0 * r1; + d1 += h1 * r0; + d1 += h2 * (5 * r9); + d1 += h3 * (5 * r8); + d1 += h4 * (5 * r7); + c = (d1 >>> 13); d1 &= 0x1fff; + d1 += h5 * (5 * r6); + d1 += h6 * (5 * r5); + d1 += h7 * (5 * r4); + d1 += h8 * (5 * r3); + d1 += h9 * (5 * r2); + c += (d1 >>> 13); d1 &= 
0x1fff; + + d2 = c; + d2 += h0 * r2; + d2 += h1 * r1; + d2 += h2 * r0; + d2 += h3 * (5 * r9); + d2 += h4 * (5 * r8); + c = (d2 >>> 13); d2 &= 0x1fff; + d2 += h5 * (5 * r7); + d2 += h6 * (5 * r6); + d2 += h7 * (5 * r5); + d2 += h8 * (5 * r4); + d2 += h9 * (5 * r3); + c += (d2 >>> 13); d2 &= 0x1fff; + + d3 = c; + d3 += h0 * r3; + d3 += h1 * r2; + d3 += h2 * r1; + d3 += h3 * r0; + d3 += h4 * (5 * r9); + c = (d3 >>> 13); d3 &= 0x1fff; + d3 += h5 * (5 * r8); + d3 += h6 * (5 * r7); + d3 += h7 * (5 * r6); + d3 += h8 * (5 * r5); + d3 += h9 * (5 * r4); + c += (d3 >>> 13); d3 &= 0x1fff; + + d4 = c; + d4 += h0 * r4; + d4 += h1 * r3; + d4 += h2 * r2; + d4 += h3 * r1; + d4 += h4 * r0; + c = (d4 >>> 13); d4 &= 0x1fff; + d4 += h5 * (5 * r9); + d4 += h6 * (5 * r8); + d4 += h7 * (5 * r7); + d4 += h8 * (5 * r6); + d4 += h9 * (5 * r5); + c += (d4 >>> 13); d4 &= 0x1fff; + + d5 = c; + d5 += h0 * r5; + d5 += h1 * r4; + d5 += h2 * r3; + d5 += h3 * r2; + d5 += h4 * r1; + c = (d5 >>> 13); d5 &= 0x1fff; + d5 += h5 * r0; + d5 += h6 * (5 * r9); + d5 += h7 * (5 * r8); + d5 += h8 * (5 * r7); + d5 += h9 * (5 * r6); + c += (d5 >>> 13); d5 &= 0x1fff; + + d6 = c; + d6 += h0 * r6; + d6 += h1 * r5; + d6 += h2 * r4; + d6 += h3 * r3; + d6 += h4 * r2; + c = (d6 >>> 13); d6 &= 0x1fff; + d6 += h5 * r1; + d6 += h6 * r0; + d6 += h7 * (5 * r9); + d6 += h8 * (5 * r8); + d6 += h9 * (5 * r7); + c += (d6 >>> 13); d6 &= 0x1fff; + + d7 = c; + d7 += h0 * r7; + d7 += h1 * r6; + d7 += h2 * r5; + d7 += h3 * r4; + d7 += h4 * r3; + c = (d7 >>> 13); d7 &= 0x1fff; + d7 += h5 * r2; + d7 += h6 * r1; + d7 += h7 * r0; + d7 += h8 * (5 * r9); + d7 += h9 * (5 * r8); + c += (d7 >>> 13); d7 &= 0x1fff; + + d8 = c; + d8 += h0 * r8; + d8 += h1 * r7; + d8 += h2 * r6; + d8 += h3 * r5; + d8 += h4 * r4; + c = (d8 >>> 13); d8 &= 0x1fff; + d8 += h5 * r3; + d8 += h6 * r2; + d8 += h7 * r1; + d8 += h8 * r0; + d8 += h9 * (5 * r9); + c += (d8 >>> 13); d8 &= 0x1fff; + + d9 = c; + d9 += h0 * r9; + d9 += h1 * r8; + d9 += h2 * r7; + d9 += h3 * r6; + d9 += h4 * r5; + c = (d9 >>> 13); d9 &= 0x1fff; + d9 += h5 * r4; + d9 += h6 * r3; + d9 += h7 * r2; + d9 += h8 * r1; + d9 += h9 * r0; + c += (d9 >>> 13); d9 &= 0x1fff; + + c = (((c << 2) + c)) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = (c >>> 13); + d1 += c; + + h0 = d0; + h1 = d1; + h2 = d2; + h3 = d3; + h4 = d4; + h5 = d5; + h6 = d6; + h7 = d7; + h8 = d8; + h9 = d9; + + mpos += 16; + bytes -= 16; + } + this.h[0] = h0; + this.h[1] = h1; + this.h[2] = h2; + this.h[3] = h3; + this.h[4] = h4; + this.h[5] = h5; + this.h[6] = h6; + this.h[7] = h7; + this.h[8] = h8; + this.h[9] = h9; +}; + +poly1305.prototype.finish = function(mac, macpos) { + var g = new Uint16Array(10); + var c, mask, f, i; + + if (this.leftover) { + i = this.leftover; + this.buffer[i++] = 1; + for (; i < 16; i++) this.buffer[i] = 0; + this.fin = 1; + this.blocks(this.buffer, 0, 16); + } + + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + for (i = 2; i < 10; i++) { + this.h[i] += c; + c = this.h[i] >>> 13; + this.h[i] &= 0x1fff; + } + this.h[0] += (c * 5); + c = this.h[0] >>> 13; + this.h[0] &= 0x1fff; + this.h[1] += c; + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + this.h[2] += c; + + g[0] = this.h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (i = 1; i < 10; i++) { + g[i] = this.h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= (1 << 13); + + mask = (c ^ 1) - 1; + for (i = 0; i < 10; i++) g[i] &= mask; + mask = ~mask; + for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i]; + + this.h[0] = ((this.h[0] ) | (this.h[1] << 13) ) & 
0xffff; + this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff; + this.h[2] = ((this.h[2] >>> 6) | (this.h[3] << 7) ) & 0xffff; + this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff; + this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff; + this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff; + this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff; + this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff; + + f = this.h[0] + this.pad[0]; + this.h[0] = f & 0xffff; + for (i = 1; i < 8; i++) { + f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0; + this.h[i] = f & 0xffff; + } + + mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff; + mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff; + mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff; + mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff; + mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff; + mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff; + mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff; + mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff; + mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff; + mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff; + mac[macpos+10] = (this.h[5] >>> 0) & 0xff; + mac[macpos+11] = (this.h[5] >>> 8) & 0xff; + mac[macpos+12] = (this.h[6] >>> 0) & 0xff; + mac[macpos+13] = (this.h[6] >>> 8) & 0xff; + mac[macpos+14] = (this.h[7] >>> 0) & 0xff; + mac[macpos+15] = (this.h[7] >>> 8) & 0xff; +}; + +poly1305.prototype.update = function(m, mpos, bytes) { + var i, want; + + if (this.leftover) { + want = (16 - this.leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + bytes -= want; + mpos += want; + this.leftover += want; + if (this.leftover < 16) + return; + this.blocks(this.buffer, 0, 16); + this.leftover = 0; + } + + if (bytes >= 16) { + want = bytes - (bytes % 16); + this.blocks(m, mpos, want); + mpos += want; + bytes -= want; + } + + if (bytes) { + for (i = 0; i < bytes; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + this.leftover += bytes; + } +}; + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s = new poly1305(k); + s.update(m, mpos, n); + s.finish(out, outpos); + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var i, v, c = 1; + for (i = 0; i < 16; i++) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c-1 + 37 * (c-1); +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + 
m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] + b[i]; +} + +function Z(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] - b[i]; +} + +function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 += v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; + t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + t21 += v * b14; + t22 += 
v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 += v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 
65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + var k = new 
Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +var K = [ + 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, + 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 +]; + +function crypto_hashblocks_hl(hh, hl, m, n) { + var wh = new Int32Array(16), wl = new Int32Array(16), + bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7, + bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7, + th, tl, i, j, h, l, a, b, c, d; + + var ah0 = hh[0], + ah1 = hh[1], + ah2 = hh[2], + ah3 = hh[3], + ah4 = hh[4], + ah5 = hh[5], + ah6 = hh[6], + ah7 = hh[7], + + al0 = hl[0], + al1 = hl[1], + al2 = hl[2], + al3 = hl[3], + al4 = hl[4], + al5 = hl[5], + al6 = hl[6], + al7 = hl[7]; + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) { + j = 8 * i + pos; + wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3]; + wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7]; + } + for (i = 0; i < 80; i++) { + bh0 = ah0; + bh1 = ah1; + bh2 = ah2; + bh3 = ah3; + bh4 = ah4; + bh5 = ah5; + bh6 = ah6; + bh7 = ah7; + + bl0 = al0; + bl1 = al1; + bl2 = al2; + bl3 = al3; + bl4 = al4; + bl5 = al5; + bl6 = al6; + bl7 = al7; + + // add + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma1 + h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32)))); + l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) | (al4 << 
(32-(41-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Ch + h = (ah4 & ah5) ^ (~ah4 & ah6); + l = (al4 & al5) ^ (~al4 & al6); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // K + h = K[i*2]; + l = K[i*2+1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // w + h = wh[i%16]; + l = wl[i%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + th = c & 0xffff | d << 16; + tl = a & 0xffff | b << 16; + + // add + h = th; + l = tl; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma0 + h = ((ah0 >>> 28) | (al0 << (32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32)))); + l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Maj + h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2); + l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh7 = (c & 0xffff) | (d << 16); + bl7 = (a & 0xffff) | (b << 16); + + // add + h = bh3; + l = bl3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = th; + l = tl; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh3 = (c & 0xffff) | (d << 16); + bl3 = (a & 0xffff) | (b << 16); + + ah1 = bh0; + ah2 = bh1; + ah3 = bh2; + ah4 = bh3; + ah5 = bh4; + ah6 = bh5; + ah7 = bh6; + ah0 = bh7; + + al1 = bl0; + al2 = bl1; + al3 = bl2; + al4 = bl3; + al5 = bl4; + al6 = bl5; + al7 = bl6; + al0 = bl7; + + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + // add + h = wh[j]; + l = wl[j]; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = wh[(j+9)%16]; + l = wl[(j+9)%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma0 + th = wh[(j+1)%16]; + tl = wl[(j+1)%16]; + h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7); + l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl >>> 7) | (th << (32-7))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma1 + th = wh[(j+14)%16]; + tl = wl[(j+14)%16]; + h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6); + l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + wh[j] = (c & 0xffff) | (d << 16); + wl[j] = (a & 0xffff) | (b << 16); + } + } + } + + // add + h = ah0; + l = al0; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[0]; + l = hl[0]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[0] = ah0 = (c & 0xffff) | (d << 16); + hl[0] = al0 = (a & 0xffff) | (b << 16); + + h = ah1; + l = al1; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[1]; + l = hl[1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[1] = ah1 = (c & 0xffff) | (d << 16); + hl[1] = al1 = (a & 0xffff) | (b << 16); + 
+ h = ah2; + l = al2; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[2]; + l = hl[2]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[2] = ah2 = (c & 0xffff) | (d << 16); + hl[2] = al2 = (a & 0xffff) | (b << 16); + + h = ah3; + l = al3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[3]; + l = hl[3]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[3] = ah3 = (c & 0xffff) | (d << 16); + hl[3] = al3 = (a & 0xffff) | (b << 16); + + h = ah4; + l = al4; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[4]; + l = hl[4]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[4] = ah4 = (c & 0xffff) | (d << 16); + hl[4] = al4 = (a & 0xffff) | (b << 16); + + h = ah5; + l = al5; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[5]; + l = hl[5]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[5] = ah5 = (c & 0xffff) | (d << 16); + hl[5] = al5 = (a & 0xffff) | (b << 16); + + h = ah6; + l = al6; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[6]; + l = hl[6]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[6] = ah6 = (c & 0xffff) | (d << 16); + hl[6] = al6 = (a & 0xffff) | (b << 16); + + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[7]; + l = hl[7]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[7] = ah7 = (c & 0xffff) | (d << 16); + hl[7] = al7 = (a & 0xffff) | (b << 16); + + pos += 128; + n -= 128; + } + + return n; +} + +function crypto_hash(out, m, n) { + var hh = new Int32Array(8), + hl = new Int32Array(8), + x = new Uint8Array(256), + i, b = n; + + hh[0] = 0x6a09e667; + hh[1] = 0xbb67ae85; + hh[2] = 0x3c6ef372; + hh[3] = 0xa54ff53a; + hh[4] = 0x510e527f; + hh[5] = 0x9b05688c; + hh[6] = 0x1f83d9ab; + hh[7] = 0x5be0cd19; + + hl[0] = 0xf3bcc908; + hl[1] = 0x84caa73b; + hl[2] = 0xfe94f82b; + hl[3] = 0x5f1d36f1; + hl[4] = 0xade682d1; + hl[5] = 0x2b3e6c1f; + hl[6] = 0xfb41bd6b; + hl[7] = 0x137e2179; + + crypto_hashblocks_hl(hh, hl, m, n); + n %= 128; + + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, (b / 0x20000000) | 0, b << 3); + crypto_hashblocks_hl(hh, hl, x, n); + + for (i = 0; i < 8; i++) ts64(out, 8*i, hh[i], hl[i]); + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, q, s) { + 
var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = (x[j] + 128) >> 8; + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i, mlen; + var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + mlen = -1; + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + 
scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + mlen = n; + return mlen; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + var t, i; + for (i = 0; i < arguments.length; i++) { + if ((t = Object.prototype.toString.call(arguments[i])) !== '[object Uint8Array]') + throw new TypeError('unexpected type ' + t + ', use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +// TODO: Completely remove this in v0.15. 
+if (!nacl.util) { + nacl.util = {}; + nacl.util.decodeUTF8 = nacl.util.encodeUTF8 = nacl.util.encodeBase64 = nacl.util.decodeBase64 = function() { + throw new Error('nacl.util moved into separate package: https://github.com/dchest/tweetnacl-util-js'); + }; +} + +nacl.randomBytes = function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return false; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return false; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; +nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = 
nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + if (arguments.length !== 2) + throw new Error('nacl.sign.open accepts 2 arguments; did you mean to use nacl.sign.detached.verify?'); + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; +nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. 
+ // If not, methods calling randombytes will throw. + var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null; + if (crypto && crypto.getRandomValues) { + // Browsers. + var QUOTA = 65536; + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + for (i = 0; i < n; i += QUOTA) { + crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); + } + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } else if (typeof require !== 'undefined') { + // Node.js. + crypto = require('crypto'); + if (crypto && crypto.randomBytes) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})(typeof module !== 'undefined' && module.exports ? module.exports : (self.nacl = self.nacl || {})); diff --git a/node_modules/tweetnacl/nacl-fast.min.js b/node_modules/tweetnacl/nacl-fast.min.js new file mode 100644 index 0000000..624fbbe --- /dev/null +++ b/node_modules/tweetnacl/nacl-fast.min.js @@ -0,0 +1,2 @@ +!function(r){"use strict";function t(r,t,n,e){r[t]=n>>24&255,r[t+1]=n>>16&255,r[t+2]=n>>8&255,r[t+3]=255&n,r[t+4]=e>>24&255,r[t+5]=e>>16&255,r[t+6]=e>>8&255,r[t+7]=255&e}function n(r,t,n,e,o){var i,h=0;for(i=0;o>i;i++)h|=r[t+i]^n[e+i];return(1&h-1>>>8)-1}function e(r,t,e,o){return n(r,t,e,o,16)}function o(r,t,e,o){return n(r,t,e,o,32)}function i(r,t,n,e){for(var o,i=255&e[0]|(255&e[1])<<8|(255&e[2])<<16|(255&e[3])<<24,h=255&n[0]|(255&n[1])<<8|(255&n[2])<<16|(255&n[3])<<24,a=255&n[4]|(255&n[5])<<8|(255&n[6])<<16|(255&n[7])<<24,f=255&n[8]|(255&n[9])<<8|(255&n[10])<<16|(255&n[11])<<24,s=255&n[12]|(255&n[13])<<8|(255&n[14])<<16|(255&n[15])<<24,u=255&e[4]|(255&e[5])<<8|(255&e[6])<<16|(255&e[7])<<24,c=255&t[0]|(255&t[1])<<8|(255&t[2])<<16|(255&t[3])<<24,y=255&t[4]|(255&t[5])<<8|(255&t[6])<<16|(255&t[7])<<24,l=255&t[8]|(255&t[9])<<8|(255&t[10])<<16|(255&t[11])<<24,w=255&t[12]|(255&t[13])<<8|(255&t[14])<<16|(255&t[15])<<24,p=255&e[8]|(255&e[9])<<8|(255&e[10])<<16|(255&e[11])<<24,v=255&n[16]|(255&n[17])<<8|(255&n[18])<<16|(255&n[19])<<24,b=255&n[20]|(255&n[21])<<8|(255&n[22])<<16|(255&n[23])<<24,g=255&n[24]|(255&n[25])<<8|(255&n[26])<<16|(255&n[27])<<24,_=255&n[28]|(255&n[29])<<8|(255&n[30])<<16|(255&n[31])<<24,A=255&e[12]|(255&e[13])<<8|(255&e[14])<<16|(255&e[15])<<24,d=i,U=h,E=a,x=f,M=s,m=u,B=c,S=y,K=l,T=w,Y=p,k=v,L=b,z=g,R=_,P=A,O=0;20>O;O+=2)o=d+L|0,M^=o<<7|o>>>25,o=M+d|0,K^=o<<9|o>>>23,o=K+M|0,L^=o<<13|o>>>19,o=L+K|0,d^=o<<18|o>>>14,o=m+U|0,T^=o<<7|o>>>25,o=T+m|0,z^=o<<9|o>>>23,o=z+T|0,U^=o<<13|o>>>19,o=U+z|0,m^=o<<18|o>>>14,o=Y+B|0,R^=o<<7|o>>>25,o=R+Y|0,E^=o<<9|o>>>23,o=E+R|0,B^=o<<13|o>>>19,o=B+E|0,Y^=o<<18|o>>>14,o=P+k|0,x^=o<<7|o>>>25,o=x+P|0,S^=o<<9|o>>>23,o=S+x|0,k^=o<<13|o>>>19,o=k+S|0,P^=o<<18|o>>>14,o=d+x|0,U^=o<<7|o>>>25,o=U+d|0,E^=o<<9|o>>>23,o=E+U|0,x^=o<<13|o>>>19,o=x+E|0,d^=o<<18|o>>>14,o=m+M|0,B^=o<<7|o>>>25,o=B+m|0,S^=o<<9|o>>>23,o=S+B|0,M^=o<<13|o>>>19,o=M+S|0,m^=o<<18|o>>>14,o=Y+T|0,k^=o<<7|o>>>25,o=k+Y|0,K^=o<<9|o>>>23,o=K+k|0,T^=o<<13|o>>>19,o=T+K|0,Y^=o<<18|o>>>14,o=P+R|0,L^=o<<7|o>>>25,o=L+P|0,z^=o<<9|o>>>23,o=z+L|0,R^=o<<13|o>>>19,o=R+z|0,P^=o<<18|o>>>14;d=d+i|0,U=U+h|0,E=E+a|0,x=x+f|0,M=M+s|0,m=m+u|0,B=B+c|0,S=S+y|0,K=K+l|0,T=T+w|0,Y=Y+p|0,k=k+v|0,L=L+b|0,z=z+g|0,R=R+_|0,P=P+A|0,r[0]=d>>>0&255,r[1]=d>>>8&255,r[2]=d>>>16&255,r[3]=d>>>24&255,r[4]=U>>>0&255,r[5]=U>>>8&255,r[6]=U>>>16&255,r[7]=U>>>24&255,r[8]=E>>>0&255,r[9]=E>>>8&255,r[10]=E>>>16&255,r[11]=E>>>24&255,r[12]=x>>>0&255,r[13]=x>>>8&255,r[14]=x>>>16&255,r[15]=x>>>24&255,r[16]=M>>>0&255,r[17]=M>
>>8&255,r[18]=M>>>16&255,r[19]=M>>>24&255,r[20]=m>>>0&255,r[21]=m>>>8&255,r[22]=m>>>16&255,r[23]=m>>>24&255,r[24]=B>>>0&255,r[25]=B>>>8&255,r[26]=B>>>16&255,r[27]=B>>>24&255,r[28]=S>>>0&255,r[29]=S>>>8&255,r[30]=S>>>16&255,r[31]=S>>>24&255,r[32]=K>>>0&255,r[33]=K>>>8&255,r[34]=K>>>16&255,r[35]=K>>>24&255,r[36]=T>>>0&255,r[37]=T>>>8&255,r[38]=T>>>16&255,r[39]=T>>>24&255,r[40]=Y>>>0&255,r[41]=Y>>>8&255,r[42]=Y>>>16&255,r[43]=Y>>>24&255,r[44]=k>>>0&255,r[45]=k>>>8&255,r[46]=k>>>16&255,r[47]=k>>>24&255,r[48]=L>>>0&255,r[49]=L>>>8&255,r[50]=L>>>16&255,r[51]=L>>>24&255,r[52]=z>>>0&255,r[53]=z>>>8&255,r[54]=z>>>16&255,r[55]=z>>>24&255,r[56]=R>>>0&255,r[57]=R>>>8&255,r[58]=R>>>16&255,r[59]=R>>>24&255,r[60]=P>>>0&255,r[61]=P>>>8&255,r[62]=P>>>16&255,r[63]=P>>>24&255}function h(r,t,n,e){for(var o,i=255&e[0]|(255&e[1])<<8|(255&e[2])<<16|(255&e[3])<<24,h=255&n[0]|(255&n[1])<<8|(255&n[2])<<16|(255&n[3])<<24,a=255&n[4]|(255&n[5])<<8|(255&n[6])<<16|(255&n[7])<<24,f=255&n[8]|(255&n[9])<<8|(255&n[10])<<16|(255&n[11])<<24,s=255&n[12]|(255&n[13])<<8|(255&n[14])<<16|(255&n[15])<<24,u=255&e[4]|(255&e[5])<<8|(255&e[6])<<16|(255&e[7])<<24,c=255&t[0]|(255&t[1])<<8|(255&t[2])<<16|(255&t[3])<<24,y=255&t[4]|(255&t[5])<<8|(255&t[6])<<16|(255&t[7])<<24,l=255&t[8]|(255&t[9])<<8|(255&t[10])<<16|(255&t[11])<<24,w=255&t[12]|(255&t[13])<<8|(255&t[14])<<16|(255&t[15])<<24,p=255&e[8]|(255&e[9])<<8|(255&e[10])<<16|(255&e[11])<<24,v=255&n[16]|(255&n[17])<<8|(255&n[18])<<16|(255&n[19])<<24,b=255&n[20]|(255&n[21])<<8|(255&n[22])<<16|(255&n[23])<<24,g=255&n[24]|(255&n[25])<<8|(255&n[26])<<16|(255&n[27])<<24,_=255&n[28]|(255&n[29])<<8|(255&n[30])<<16|(255&n[31])<<24,A=255&e[12]|(255&e[13])<<8|(255&e[14])<<16|(255&e[15])<<24,d=i,U=h,E=a,x=f,M=s,m=u,B=c,S=y,K=l,T=w,Y=p,k=v,L=b,z=g,R=_,P=A,O=0;20>O;O+=2)o=d+L|0,M^=o<<7|o>>>25,o=M+d|0,K^=o<<9|o>>>23,o=K+M|0,L^=o<<13|o>>>19,o=L+K|0,d^=o<<18|o>>>14,o=m+U|0,T^=o<<7|o>>>25,o=T+m|0,z^=o<<9|o>>>23,o=z+T|0,U^=o<<13|o>>>19,o=U+z|0,m^=o<<18|o>>>14,o=Y+B|0,R^=o<<7|o>>>25,o=R+Y|0,E^=o<<9|o>>>23,o=E+R|0,B^=o<<13|o>>>19,o=B+E|0,Y^=o<<18|o>>>14,o=P+k|0,x^=o<<7|o>>>25,o=x+P|0,S^=o<<9|o>>>23,o=S+x|0,k^=o<<13|o>>>19,o=k+S|0,P^=o<<18|o>>>14,o=d+x|0,U^=o<<7|o>>>25,o=U+d|0,E^=o<<9|o>>>23,o=E+U|0,x^=o<<13|o>>>19,o=x+E|0,d^=o<<18|o>>>14,o=m+M|0,B^=o<<7|o>>>25,o=B+m|0,S^=o<<9|o>>>23,o=S+B|0,M^=o<<13|o>>>19,o=M+S|0,m^=o<<18|o>>>14,o=Y+T|0,k^=o<<7|o>>>25,o=k+Y|0,K^=o<<9|o>>>23,o=K+k|0,T^=o<<13|o>>>19,o=T+K|0,Y^=o<<18|o>>>14,o=P+R|0,L^=o<<7|o>>>25,o=L+P|0,z^=o<<9|o>>>23,o=z+L|0,R^=o<<13|o>>>19,o=R+z|0,P^=o<<18|o>>>14;r[0]=d>>>0&255,r[1]=d>>>8&255,r[2]=d>>>16&255,r[3]=d>>>24&255,r[4]=m>>>0&255,r[5]=m>>>8&255,r[6]=m>>>16&255,r[7]=m>>>24&255,r[8]=Y>>>0&255,r[9]=Y>>>8&255,r[10]=Y>>>16&255,r[11]=Y>>>24&255,r[12]=P>>>0&255,r[13]=P>>>8&255,r[14]=P>>>16&255,r[15]=P>>>24&255,r[16]=B>>>0&255,r[17]=B>>>8&255,r[18]=B>>>16&255,r[19]=B>>>24&255,r[20]=S>>>0&255,r[21]=S>>>8&255,r[22]=S>>>16&255,r[23]=S>>>24&255,r[24]=K>>>0&255,r[25]=K>>>8&255,r[26]=K>>>16&255,r[27]=K>>>24&255,r[28]=T>>>0&255,r[29]=T>>>8&255,r[30]=T>>>16&255,r[31]=T>>>24&255}function a(r,t,n,e){i(r,t,n,e)}function f(r,t,n,e){h(r,t,n,e)}function s(r,t,n,e,o,i,h){var f,s,u=new Uint8Array(16),c=new Uint8Array(64);for(s=0;16>s;s++)u[s]=0;for(s=0;8>s;s++)u[s]=i[s];for(;o>=64;){for(a(c,u,h,cr),s=0;64>s;s++)r[t+s]=n[e+s]^c[s];for(f=1,s=8;16>s;s++)f=f+(255&u[s])|0,u[s]=255&f,f>>>=8;o-=64,t+=64,e+=64}if(o>0)for(a(c,u,h,cr),s=0;o>s;s++)r[t+s]=n[e+s]^c[s];return 0}function u(r,t,n,e,o){var i,h,f=new Uint8Array(16),s=new 
Uint8Array(64);for(h=0;16>h;h++)f[h]=0;for(h=0;8>h;h++)f[h]=e[h];for(;n>=64;){for(a(s,f,o,cr),h=0;64>h;h++)r[t+h]=s[h];for(i=1,h=8;16>h;h++)i=i+(255&f[h])|0,f[h]=255&i,i>>>=8;n-=64,t+=64}if(n>0)for(a(s,f,o,cr),h=0;n>h;h++)r[t+h]=s[h];return 0}function c(r,t,n,e,o){var i=new Uint8Array(32);f(i,e,o,cr);for(var h=new Uint8Array(8),a=0;8>a;a++)h[a]=e[a+16];return u(r,t,n,h,i)}function y(r,t,n,e,o,i,h){var a=new Uint8Array(32);f(a,i,h,cr);for(var u=new Uint8Array(8),c=0;8>c;c++)u[c]=i[c+16];return s(r,t,n,e,o,u,a)}function l(r,t,n,e,o,i){var h=new yr(i);return h.update(n,e,o),h.finish(r,t),0}function w(r,t,n,o,i,h){var a=new Uint8Array(16);return l(a,0,n,o,i,h),e(r,t,a,0)}function p(r,t,n,e,o){var i;if(32>n)return-1;for(y(r,0,t,0,n,e,o),l(r,16,r,32,n-32,r),i=0;16>i;i++)r[i]=0;return 0}function v(r,t,n,e,o){var i,h=new Uint8Array(32);if(32>n)return-1;if(c(h,0,32,e,o),0!==w(t,16,t,32,n-32,h))return-1;for(y(r,0,t,0,n,e,o),i=0;32>i;i++)r[i]=0;return 0}function b(r,t){var n;for(n=0;16>n;n++)r[n]=0|t[n]}function g(r){var t,n,e=1;for(t=0;16>t;t++)n=r[t]+e+65535,e=Math.floor(n/65536),r[t]=n-65536*e;r[0]+=e-1+37*(e-1)}function _(r,t,n){for(var e,o=~(n-1),i=0;16>i;i++)e=o&(r[i]^t[i]),r[i]^=e,t[i]^=e}function A(r,t){var n,e,o,i=$(),h=$();for(n=0;16>n;n++)h[n]=t[n];for(g(h),g(h),g(h),e=0;2>e;e++){for(i[0]=h[0]-65517,n=1;15>n;n++)i[n]=h[n]-65535-(i[n-1]>>16&1),i[n-1]&=65535;i[15]=h[15]-32767-(i[14]>>16&1),o=i[15]>>16&1,i[14]&=65535,_(h,i,1-o)}for(n=0;16>n;n++)r[2*n]=255&h[n],r[2*n+1]=h[n]>>8}function d(r,t){var n=new Uint8Array(32),e=new Uint8Array(32);return A(n,r),A(e,t),o(n,0,e,0)}function U(r){var t=new Uint8Array(32);return A(t,r),1&t[0]}function E(r,t){var n;for(n=0;16>n;n++)r[n]=t[2*n]+(t[2*n+1]<<8);r[15]&=32767}function x(r,t,n){for(var e=0;16>e;e++)r[e]=t[e]+n[e]}function M(r,t,n){for(var e=0;16>e;e++)r[e]=t[e]-n[e]}function m(r,t,n){var 
e,o,i=0,h=0,a=0,f=0,s=0,u=0,c=0,y=0,l=0,w=0,p=0,v=0,b=0,g=0,_=0,A=0,d=0,U=0,E=0,x=0,M=0,m=0,B=0,S=0,K=0,T=0,Y=0,k=0,L=0,z=0,R=0,P=n[0],O=n[1],N=n[2],C=n[3],F=n[4],I=n[5],G=n[6],Z=n[7],j=n[8],q=n[9],V=n[10],X=n[11],D=n[12],H=n[13],J=n[14],Q=n[15];e=t[0],i+=e*P,h+=e*O,a+=e*N,f+=e*C,s+=e*F,u+=e*I,c+=e*G,y+=e*Z,l+=e*j,w+=e*q,p+=e*V,v+=e*X,b+=e*D,g+=e*H,_+=e*J,A+=e*Q,e=t[1],h+=e*P,a+=e*O,f+=e*N,s+=e*C,u+=e*F,c+=e*I,y+=e*G,l+=e*Z,w+=e*j,p+=e*q,v+=e*V,b+=e*X,g+=e*D,_+=e*H,A+=e*J,d+=e*Q,e=t[2],a+=e*P,f+=e*O,s+=e*N,u+=e*C,c+=e*F,y+=e*I,l+=e*G,w+=e*Z,p+=e*j,v+=e*q,b+=e*V,g+=e*X,_+=e*D,A+=e*H,d+=e*J,U+=e*Q,e=t[3],f+=e*P,s+=e*O,u+=e*N,c+=e*C,y+=e*F,l+=e*I,w+=e*G,p+=e*Z,v+=e*j,b+=e*q,g+=e*V,_+=e*X,A+=e*D,d+=e*H,U+=e*J,E+=e*Q,e=t[4],s+=e*P,u+=e*O,c+=e*N,y+=e*C,l+=e*F,w+=e*I,p+=e*G,v+=e*Z,b+=e*j,g+=e*q,_+=e*V,A+=e*X,d+=e*D,U+=e*H,E+=e*J,x+=e*Q,e=t[5],u+=e*P,c+=e*O,y+=e*N,l+=e*C,w+=e*F,p+=e*I,v+=e*G,b+=e*Z,g+=e*j,_+=e*q,A+=e*V,d+=e*X,U+=e*D,E+=e*H,x+=e*J,M+=e*Q,e=t[6],c+=e*P,y+=e*O,l+=e*N,w+=e*C,p+=e*F,v+=e*I,b+=e*G,g+=e*Z,_+=e*j,A+=e*q,d+=e*V,U+=e*X,E+=e*D,x+=e*H,M+=e*J,m+=e*Q,e=t[7],y+=e*P,l+=e*O,w+=e*N,p+=e*C,v+=e*F,b+=e*I,g+=e*G,_+=e*Z,A+=e*j,d+=e*q,U+=e*V,E+=e*X,x+=e*D,M+=e*H,m+=e*J,B+=e*Q,e=t[8],l+=e*P,w+=e*O,p+=e*N,v+=e*C,b+=e*F,g+=e*I,_+=e*G,A+=e*Z,d+=e*j,U+=e*q,E+=e*V,x+=e*X,M+=e*D,m+=e*H,B+=e*J,S+=e*Q,e=t[9],w+=e*P,p+=e*O,v+=e*N,b+=e*C,g+=e*F,_+=e*I,A+=e*G,d+=e*Z,U+=e*j,E+=e*q,x+=e*V,M+=e*X,m+=e*D,B+=e*H,S+=e*J,K+=e*Q,e=t[10],p+=e*P,v+=e*O,b+=e*N,g+=e*C,_+=e*F,A+=e*I,d+=e*G,U+=e*Z,E+=e*j,x+=e*q,M+=e*V,m+=e*X,B+=e*D,S+=e*H,K+=e*J,T+=e*Q,e=t[11],v+=e*P,b+=e*O,g+=e*N,_+=e*C,A+=e*F,d+=e*I,U+=e*G,E+=e*Z,x+=e*j,M+=e*q,m+=e*V,B+=e*X,S+=e*D,K+=e*H,T+=e*J,Y+=e*Q,e=t[12],b+=e*P,g+=e*O,_+=e*N,A+=e*C,d+=e*F,U+=e*I,E+=e*G,x+=e*Z,M+=e*j,m+=e*q,B+=e*V,S+=e*X,K+=e*D,T+=e*H,Y+=e*J,k+=e*Q,e=t[13],g+=e*P,_+=e*O,A+=e*N,d+=e*C,U+=e*F,E+=e*I,x+=e*G,M+=e*Z,m+=e*j,B+=e*q,S+=e*V,K+=e*X,T+=e*D,Y+=e*H,k+=e*J,L+=e*Q,e=t[14],_+=e*P,A+=e*O,d+=e*N,U+=e*C,E+=e*F,x+=e*I,M+=e*G,m+=e*Z,B+=e*j,S+=e*q,K+=e*V,T+=e*X,Y+=e*D,k+=e*H,L+=e*J,z+=e*Q,e=t[15],A+=e*P,d+=e*O,U+=e*N,E+=e*C,x+=e*F,M+=e*I,m+=e*G,B+=e*Z,S+=e*j,K+=e*q,T+=e*V,Y+=e*X,k+=e*D,L+=e*H,z+=e*J,R+=e*Q,i+=38*d,h+=38*U,a+=38*E,f+=38*x,s+=38*M,u+=38*m,c+=38*B,y+=38*S,l+=38*K,w+=38*T,p+=38*Y,v+=38*k,b+=38*L,g+=38*z,_+=38*R,o=1,e=i+o+65535,o=Math.floor(e/65536),i=e-65536*o,e=h+o+65535,o=Math.floor(e/65536),h=e-65536*o,e=a+o+65535,o=Math.floor(e/65536),a=e-65536*o,e=f+o+65535,o=Math.floor(e/65536),f=e-65536*o,e=s+o+65535,o=Math.floor(e/65536),s=e-65536*o,e=u+o+65535,o=Math.floor(e/65536),u=e-65536*o,e=c+o+65535,o=Math.floor(e/65536),c=e-65536*o,e=y+o+65535,o=Math.floor(e/65536),y=e-65536*o,e=l+o+65535,o=Math.floor(e/65536),l=e-65536*o,e=w+o+65535,o=Math.floor(e/65536),w=e-65536*o,e=p+o+65535,o=Math.floor(e/65536),p=e-65536*o,e=v+o+65535,o=Math.floor(e/65536),v=e-65536*o,e=b+o+65535,o=Math.floor(e/65536),b=e-65536*o,e=g+o+65535,o=Math.floor(e/65536),g=e-65536*o,e=_+o+65535,o=Math.floor(e/65536),_=e-65536*o,e=A+o+65535,o=Math.floor(e/65536),A=e-65536*o,i+=o-1+37*(o-1),o=1,e=i+o+65535,o=Math.floor(e/65536),i=e-65536*o,e=h+o+65535,o=Math.floor(e/65536),h=e-65536*o,e=a+o+65535,o=Math.floor(e/65536),a=e-65536*o,e=f+o+65535,o=Math.floor(e/65536),f=e-65536*o,e=s+o+65535,o=Math.floor(e/65536),s=e-65536*o,e=u+o+65535,o=Math.floor(e/65536),u=e-65536*o,e=c+o+65535,o=Math.floor(e/65536),c=e-65536*o,e=y+o+65535,o=Math.floor(e/65536),y=e-65536*o,e=l+o+65535,o=Math.floor(e/65536),l=e-65536*o,e=w+o+65535,o=Math.floor(e/65536),w=e-65536*o,e=p+o+65535,o=Math.floor(e/65536),p=e-65536*o,e=v+o+65535,o
=Math.floor(e/65536),v=e-65536*o,e=b+o+65535,o=Math.floor(e/65536),b=e-65536*o,e=g+o+65535,o=Math.floor(e/65536),g=e-65536*o,e=_+o+65535,o=Math.floor(e/65536),_=e-65536*o,e=A+o+65535,o=Math.floor(e/65536),A=e-65536*o,i+=o-1+37*(o-1),r[0]=i,r[1]=h,r[2]=a,r[3]=f,r[4]=s,r[5]=u,r[6]=c,r[7]=y,r[8]=l,r[9]=w,r[10]=p,r[11]=v,r[12]=b,r[13]=g,r[14]=_,r[15]=A}function B(r,t){m(r,t,t)}function S(r,t){var n,e=$();for(n=0;16>n;n++)e[n]=t[n];for(n=253;n>=0;n--)B(e,e),2!==n&&4!==n&&m(e,e,t);for(n=0;16>n;n++)r[n]=e[n]}function K(r,t){var n,e=$();for(n=0;16>n;n++)e[n]=t[n];for(n=250;n>=0;n--)B(e,e),1!==n&&m(e,e,t);for(n=0;16>n;n++)r[n]=e[n]}function T(r,t,n){var e,o,i=new Uint8Array(32),h=new Float64Array(80),a=$(),f=$(),s=$(),u=$(),c=$(),y=$();for(o=0;31>o;o++)i[o]=t[o];for(i[31]=127&t[31]|64,i[0]&=248,E(h,n),o=0;16>o;o++)f[o]=h[o],u[o]=a[o]=s[o]=0;for(a[0]=u[0]=1,o=254;o>=0;--o)e=i[o>>>3]>>>(7&o)&1,_(a,f,e),_(s,u,e),x(c,a,s),M(a,a,s),x(s,f,u),M(f,f,u),B(u,c),B(y,a),m(a,s,a),m(s,f,c),x(c,a,s),M(a,a,s),B(f,a),M(s,u,y),m(a,s,ir),x(a,a,u),m(s,s,a),m(a,u,y),m(u,f,h),B(f,c),_(a,f,e),_(s,u,e);for(o=0;16>o;o++)h[o+16]=a[o],h[o+32]=s[o],h[o+48]=f[o],h[o+64]=u[o];var l=h.subarray(32),w=h.subarray(16);return S(l,l),m(w,w,l),A(r,w),0}function Y(r,t){return T(r,t,nr)}function k(r,t){return rr(t,32),Y(r,t)}function L(r,t,n){var e=new Uint8Array(32);return T(e,n,t),f(r,tr,e,cr)}function z(r,t,n,e,o,i){var h=new Uint8Array(32);return L(h,o,i),lr(r,t,n,e,h)}function R(r,t,n,e,o,i){var h=new Uint8Array(32);return L(h,o,i),wr(r,t,n,e,h)}function P(r,t,n,e){for(var o,i,h,a,f,s,u,c,y,l,w,p,v,b,g,_,A,d,U,E,x,M,m,B,S,K,T=new Int32Array(16),Y=new Int32Array(16),k=r[0],L=r[1],z=r[2],R=r[3],P=r[4],O=r[5],N=r[6],C=r[7],F=t[0],I=t[1],G=t[2],Z=t[3],j=t[4],q=t[5],V=t[6],X=t[7],D=0;e>=128;){for(U=0;16>U;U++)E=8*U+D,T[U]=n[E+0]<<24|n[E+1]<<16|n[E+2]<<8|n[E+3],Y[U]=n[E+4]<<24|n[E+5]<<16|n[E+6]<<8|n[E+7];for(U=0;80>U;U++)if(o=k,i=L,h=z,a=R,f=P,s=O,u=N,c=C,y=F,l=I,w=G,p=Z,v=j,b=q,g=V,_=X,x=C,M=X,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=(P>>>14|j<<18)^(P>>>18|j<<14)^(j>>>9|P<<23),M=(j>>>14|P<<18)^(j>>>18|P<<14)^(P>>>9|j<<23),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=P&O^~P&N,M=j&q^~j&V,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=pr[2*U],M=pr[2*U+1],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=T[U%16],M=Y[U%16],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,A=65535&S|K<<16,d=65535&m|B<<16,x=A,M=d,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=(k>>>28|F<<4)^(F>>>2|k<<30)^(F>>>7|k<<25),M=(F>>>28|k<<4)^(k>>>2|F<<30)^(k>>>7|F<<25),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,x=k&L^k&z^L&z,M=F&I^F&G^I&G,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,c=65535&S|K<<16,_=65535&m|B<<16,x=a,M=p,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=A,M=d,m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,a=65535&S|K<<16,p=65535&m|B<<16,L=o,z=i,R=h,P=a,O=f,N=s,C=u,k=c,I=y,G=l,Z=w,j=p,q=v,V=b,X=g,F=_,U%16===15)for(E=0;16>E;E++)x=T[E],M=Y[E],m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=T[(E+9)%16],M=Y[(E+9)%16],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,A=T[(E+1)%16],d=Y[(E+1)%16],x=(A>>>1|d<<31)^(A>>>8|d<<24)^A>>>7,M=(d>>>1|A<<31)^(d>>>8|A<<24)^(d>>>7|A<<25),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,A=T[(E+14)%16],d=Y[(E+14)%16],x=(A>>>19|d<<13)^(d>>>29|A<<3)^A>>>6,M=(d>>>19|A<<13)^(A>>>29|d<<3)^(d>>>6|A<<26),m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,T[E]=65535&S|K<<16,Y[E]=65535&m|B<<16;x=k,M=F,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[0],M=t[0],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>1
6,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[0]=k=65535&S|K<<16,t[0]=F=65535&m|B<<16,x=L,M=I,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[1],M=t[1],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[1]=L=65535&S|K<<16,t[1]=I=65535&m|B<<16,x=z,M=G,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[2],M=t[2],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[2]=z=65535&S|K<<16,t[2]=G=65535&m|B<<16,x=R,M=Z,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[3],M=t[3],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[3]=R=65535&S|K<<16,t[3]=Z=65535&m|B<<16,x=P,M=j,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[4],M=t[4],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[4]=P=65535&S|K<<16,t[4]=j=65535&m|B<<16,x=O,M=q,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[5],M=t[5],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[5]=O=65535&S|K<<16,t[5]=q=65535&m|B<<16,x=N,M=V,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[6],M=t[6],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[6]=N=65535&S|K<<16,t[6]=V=65535&m|B<<16,x=C,M=X,m=65535&M,B=M>>>16,S=65535&x,K=x>>>16,x=r[7],M=t[7],m+=65535&M,B+=M>>>16,S+=65535&x,K+=x>>>16,B+=m>>>16,S+=B>>>16,K+=S>>>16,r[7]=C=65535&S|K<<16,t[7]=X=65535&m|B<<16,D+=128,e-=128}return e}function O(r,n,e){var o,i=new Int32Array(8),h=new Int32Array(8),a=new Uint8Array(256),f=e;for(i[0]=1779033703,i[1]=3144134277,i[2]=1013904242,i[3]=2773480762,i[4]=1359893119,i[5]=2600822924,i[6]=528734635,i[7]=1541459225,h[0]=4089235720,h[1]=2227873595,h[2]=4271175723,h[3]=1595750129,h[4]=2917565137,h[5]=725511199,h[6]=4215389547,h[7]=327033209,P(i,h,n,e),e%=128,o=0;e>o;o++)a[o]=n[f-e+o];for(a[e]=128,e=256-128*(112>e?1:0),a[e-9]=0,t(a,e-8,f/536870912|0,f<<3),P(i,h,a,e),o=0;8>o;o++)t(r,8*o,i[o],h[o]);return 0}function N(r,t){var n=$(),e=$(),o=$(),i=$(),h=$(),a=$(),f=$(),s=$(),u=$();M(n,r[1],r[0]),M(u,t[1],t[0]),m(n,n,u),x(e,r[0],r[1]),x(u,t[0],t[1]),m(e,e,u),m(o,r[3],t[3]),m(o,o,ar),m(i,r[2],t[2]),x(i,i,i),M(h,e,n),M(a,i,o),x(f,i,o),x(s,e,n),m(r[0],h,a),m(r[1],s,f),m(r[2],f,a),m(r[3],h,s)}function C(r,t,n){var e;for(e=0;4>e;e++)_(r[e],t[e],n)}function F(r,t){var n=$(),e=$(),o=$();S(o,t[2]),m(n,t[0],o),m(e,t[1],o),A(r,e),r[31]^=U(n)<<7}function I(r,t,n){var e,o;for(b(r[0],er),b(r[1],or),b(r[2],or),b(r[3],er),o=255;o>=0;--o)e=n[o/8|0]>>(7&o)&1,C(r,t,e),N(t,r),N(r,r),C(r,t,e)}function G(r,t){var n=[$(),$(),$(),$()];b(n[0],fr),b(n[1],sr),b(n[2],or),m(n[3],fr,sr),I(r,n,t)}function Z(r,t,n){var e,o=new Uint8Array(64),i=[$(),$(),$(),$()];for(n||rr(t,32),O(o,t,32),o[0]&=248,o[31]&=127,o[31]|=64,G(i,o),F(r,i),e=0;32>e;e++)t[e+32]=r[e];return 0}function j(r,t){var n,e,o,i;for(e=63;e>=32;--e){for(n=0,o=e-32,i=e-12;i>o;++o)t[o]+=n-16*t[e]*vr[o-(e-32)],n=t[o]+128>>8,t[o]-=256*n;t[o]+=n,t[e]=0}for(n=0,o=0;32>o;o++)t[o]+=n-(t[31]>>4)*vr[o],n=t[o]>>8,t[o]&=255;for(o=0;32>o;o++)t[o]-=n*vr[o];for(e=0;32>e;e++)t[e+1]+=t[e]>>8,r[e]=255&t[e]}function q(r){var t,n=new Float64Array(64);for(t=0;64>t;t++)n[t]=r[t];for(t=0;64>t;t++)r[t]=0;j(r,n)}function V(r,t,n,e){var o,i,h=new Uint8Array(64),a=new Uint8Array(64),f=new Uint8Array(64),s=new Float64Array(64),u=[$(),$(),$(),$()];O(h,e,32),h[0]&=248,h[31]&=127,h[31]|=64;var c=n+64;for(o=0;n>o;o++)r[64+o]=t[o];for(o=0;32>o;o++)r[32+o]=h[32+o];for(O(f,r.subarray(32),n+32),q(f),G(u,f),F(r,u),o=32;64>o;o++)r[o]=e[o];for(O(a,r,n+64),q(a),o=0;64>o;o++)s[o]=0;for(o=0;32>o;o++)s[o]=f[o];for(o=0;32>o;o++)for(i=0;32>i;i++)s[o+i]+=a[o]*h[i];return j(r.subarray(32),s),c}function X(r,t){var 
n=$(),e=$(),o=$(),i=$(),h=$(),a=$(),f=$();return b(r[2],or),E(r[1],t),B(o,r[1]),m(i,o,hr),M(o,o,r[2]),x(i,r[2],i),B(h,i),B(a,h),m(f,a,h),m(n,f,o),m(n,n,i),K(n,n),m(n,n,o),m(n,n,i),m(n,n,i),m(r[0],n,i),B(e,r[0]),m(e,e,i),d(e,o)&&m(r[0],r[0],ur),B(e,r[0]),m(e,e,i),d(e,o)?-1:(U(r[0])===t[31]>>7&&M(r[0],er,r[0]),m(r[3],r[0],r[1]),0)}function D(r,t,n,e){var i,h,a=new Uint8Array(32),f=new Uint8Array(64),s=[$(),$(),$(),$()],u=[$(),$(),$(),$()];if(h=-1,64>n)return-1;if(X(u,e))return-1;for(i=0;n>i;i++)r[i]=t[i];for(i=0;32>i;i++)r[i+32]=e[i];if(O(f,r,n),q(f),I(s,u,f),G(u,t.subarray(32)),N(s,u),F(a,s),n-=64,o(t,0,a,0)){for(i=0;n>i;i++)r[i]=0;return-1}for(i=0;n>i;i++)r[i]=t[i+64];return h=n}function H(r,t){if(r.length!==br)throw new Error("bad key size");if(t.length!==gr)throw new Error("bad nonce size")}function J(r,t){if(r.length!==Er)throw new Error("bad public key size");if(t.length!==xr)throw new Error("bad secret key size")}function Q(){var r,t;for(t=0;t>>13|n<<3),e=255&r[4]|(255&r[5])<<8,this.r[2]=7939&(n>>>10|e<<6),o=255&r[6]|(255&r[7])<<8,this.r[3]=8191&(e>>>7|o<<9),i=255&r[8]|(255&r[9])<<8,this.r[4]=255&(o>>>4|i<<12),this.r[5]=i>>>1&8190,h=255&r[10]|(255&r[11])<<8,this.r[6]=8191&(i>>>14|h<<2),a=255&r[12]|(255&r[13])<<8,this.r[7]=8065&(h>>>11|a<<5),f=255&r[14]|(255&r[15])<<8,this.r[8]=8191&(a>>>8|f<<8),this.r[9]=f>>>5&127,this.pad[0]=255&r[16]|(255&r[17])<<8,this.pad[1]=255&r[18]|(255&r[19])<<8,this.pad[2]=255&r[20]|(255&r[21])<<8,this.pad[3]=255&r[22]|(255&r[23])<<8,this.pad[4]=255&r[24]|(255&r[25])<<8,this.pad[5]=255&r[26]|(255&r[27])<<8,this.pad[6]=255&r[28]|(255&r[29])<<8,this.pad[7]=255&r[30]|(255&r[31])<<8};yr.prototype.blocks=function(r,t,n){for(var e,o,i,h,a,f,s,u,c,y,l,w,p,v,b,g,_,A,d,U=this.fin?0:2048,E=this.h[0],x=this.h[1],M=this.h[2],m=this.h[3],B=this.h[4],S=this.h[5],K=this.h[6],T=this.h[7],Y=this.h[8],k=this.h[9],L=this.r[0],z=this.r[1],R=this.r[2],P=this.r[3],O=this.r[4],N=this.r[5],C=this.r[6],F=this.r[7],I=this.r[8],G=this.r[9];n>=16;)e=255&r[t+0]|(255&r[t+1])<<8,E+=8191&e,o=255&r[t+2]|(255&r[t+3])<<8,x+=8191&(e>>>13|o<<3),i=255&r[t+4]|(255&r[t+5])<<8,M+=8191&(o>>>10|i<<6),h=255&r[t+6]|(255&r[t+7])<<8,m+=8191&(i>>>7|h<<9),a=255&r[t+8]|(255&r[t+9])<<8,B+=8191&(h>>>4|a<<12),S+=a>>>1&8191,f=255&r[t+10]|(255&r[t+11])<<8,K+=8191&(a>>>14|f<<2),s=255&r[t+12]|(255&r[t+13])<<8,T+=8191&(f>>>11|s<<5),u=255&r[t+14]|(255&r[t+15])<<8,Y+=8191&(s>>>8|u<<8),k+=u>>>5|U,c=0,y=c,y+=E*L,y+=x*(5*G),y+=M*(5*I),y+=m*(5*F),y+=B*(5*C),c=y>>>13,y&=8191,y+=S*(5*N),y+=K*(5*O),y+=T*(5*P),y+=Y*(5*R),y+=k*(5*z),c+=y>>>13,y&=8191,l=c,l+=E*z,l+=x*L,l+=M*(5*G),l+=m*(5*I),l+=B*(5*F),c=l>>>13,l&=8191,l+=S*(5*C),l+=K*(5*N),l+=T*(5*O),l+=Y*(5*P),l+=k*(5*R),c+=l>>>13,l&=8191,w=c,w+=E*R,w+=x*z,w+=M*L,w+=m*(5*G),w+=B*(5*I),c=w>>>13,w&=8191,w+=S*(5*F),w+=K*(5*C),w+=T*(5*N),w+=Y*(5*O),w+=k*(5*P),c+=w>>>13,w&=8191,p=c,p+=E*P,p+=x*R,p+=M*z,p+=m*L,p+=B*(5*G),c=p>>>13,p&=8191,p+=S*(5*I),p+=K*(5*F),p+=T*(5*C),p+=Y*(5*N),p+=k*(5*O),c+=p>>>13,p&=8191,v=c,v+=E*O,v+=x*P,v+=M*R,v+=m*z,v+=B*L,c=v>>>13,v&=8191,v+=S*(5*G),v+=K*(5*I),v+=T*(5*F),v+=Y*(5*C),v+=k*(5*N),c+=v>>>13,v&=8191,b=c,b+=E*N,b+=x*O,b+=M*P,b+=m*R,b+=B*z,c=b>>>13,b&=8191,b+=S*L,b+=K*(5*G),b+=T*(5*I),b+=Y*(5*F),b+=k*(5*C),c+=b>>>13,b&=8191,g=c,g+=E*C,g+=x*N,g+=M*O,g+=m*P,g+=B*R,c=g>>>13,g&=8191,g+=S*z,g+=K*L,g+=T*(5*G),g+=Y*(5*I),g+=k*(5*F),c+=g>>>13,g&=8191,_=c,_+=E*F,_+=x*C,_+=M*N,_+=m*O,_+=B*P,c=_>>>13,_&=8191,_+=S*R,_+=K*z,_+=T*L,_+=Y*(5*G),_+=k*(5*I),c+=_>>>13,_&=8191,A=c,A+=E*I,A+=x*F,A+=M*C,A+=m*N,A+=B*O,c=A>>>13,A&=8191,A+=S*P,A+=K*R,A+=T*z,A+=Y*L,A+=k*(5*G
),c+=A>>>13,A&=8191,d=c,d+=E*G,d+=x*I,d+=M*F,d+=m*C,d+=B*N,c=d>>>13,d&=8191,d+=S*O,d+=K*P,d+=T*R,d+=Y*z,d+=k*L,c+=d>>>13,d&=8191,c=(c<<2)+c|0,c=c+y|0,y=8191&c,c>>>=13,l+=c,E=y,x=l,M=w,m=p,B=v,S=b,K=g,T=_,Y=A,k=d,t+=16,n-=16;this.h[0]=E,this.h[1]=x,this.h[2]=M,this.h[3]=m,this.h[4]=B,this.h[5]=S,this.h[6]=K,this.h[7]=T,this.h[8]=Y,this.h[9]=k},yr.prototype.finish=function(r,t){var n,e,o,i,h=new Uint16Array(10);if(this.leftover){for(i=this.leftover,this.buffer[i++]=1;16>i;i++)this.buffer[i]=0;this.fin=1,this.blocks(this.buffer,0,16)}for(n=this.h[1]>>>13,this.h[1]&=8191,i=2;10>i;i++)this.h[i]+=n,n=this.h[i]>>>13,this.h[i]&=8191;for(this.h[0]+=5*n,n=this.h[0]>>>13,this.h[0]&=8191,this.h[1]+=n,n=this.h[1]>>>13,this.h[1]&=8191,this.h[2]+=n,h[0]=this.h[0]+5,n=h[0]>>>13,h[0]&=8191,i=1;10>i;i++)h[i]=this.h[i]+n,n=h[i]>>>13,h[i]&=8191;for(h[9]-=8192,e=(1^n)-1,i=0;10>i;i++)h[i]&=e;for(e=~e,i=0;10>i;i++)this.h[i]=this.h[i]&e|h[i];for(this.h[0]=65535&(this.h[0]|this.h[1]<<13),this.h[1]=65535&(this.h[1]>>>3|this.h[2]<<10),this.h[2]=65535&(this.h[2]>>>6|this.h[3]<<7),this.h[3]=65535&(this.h[3]>>>9|this.h[4]<<4),this.h[4]=65535&(this.h[4]>>>12|this.h[5]<<1|this.h[6]<<14),this.h[5]=65535&(this.h[6]>>>2|this.h[7]<<11),this.h[6]=65535&(this.h[7]>>>5|this.h[8]<<8),this.h[7]=65535&(this.h[8]>>>8|this.h[9]<<5),o=this.h[0]+this.pad[0],this.h[0]=65535&o,i=1;8>i;i++)o=(this.h[i]+this.pad[i]|0)+(o>>>16)|0,this.h[i]=65535&o;r[t+0]=this.h[0]>>>0&255,r[t+1]=this.h[0]>>>8&255,r[t+2]=this.h[1]>>>0&255,r[t+3]=this.h[1]>>>8&255,r[t+4]=this.h[2]>>>0&255,r[t+5]=this.h[2]>>>8&255,r[t+6]=this.h[3]>>>0&255,r[t+7]=this.h[3]>>>8&255,r[t+8]=this.h[4]>>>0&255,r[t+9]=this.h[4]>>>8&255,r[t+10]=this.h[5]>>>0&255,r[t+11]=this.h[5]>>>8&255,r[t+12]=this.h[6]>>>0&255,r[t+13]=this.h[6]>>>8&255,r[t+14]=this.h[7]>>>0&255,r[t+15]=this.h[7]>>>8&255},yr.prototype.update=function(r,t,n){var e,o;if(this.leftover){for(o=16-this.leftover,o>n&&(o=n),e=0;o>e;e++)this.buffer[this.leftover+e]=r[t+e];if(n-=o,t+=o,this.leftover+=o,this.leftover<16)return;this.blocks(this.buffer,0,16),this.leftover=0}if(n>=16&&(o=n-n%16,this.blocks(r,t,o),t+=o,n-=o),n){for(e=0;n>e;e++)this.buffer[this.leftover+e]=r[t+e];this.leftover+=n}};var 
lr=p,wr=v,pr=[1116352408,3609767458,1899447441,602891725,3049323471,3964484399,3921009573,2173295548,961987163,4081628472,1508970993,3053834265,2453635748,2937671579,2870763221,3664609560,3624381080,2734883394,310598401,1164996542,607225278,1323610764,1426881987,3590304994,1925078388,4068182383,2162078206,991336113,2614888103,633803317,3248222580,3479774868,3835390401,2666613458,4022224774,944711139,264347078,2341262773,604807628,2007800933,770255983,1495990901,1249150122,1856431235,1555081692,3175218132,1996064986,2198950837,2554220882,3999719339,2821834349,766784016,2952996808,2566594879,3210313671,3203337956,3336571891,1034457026,3584528711,2466948901,113926993,3758326383,338241895,168717936,666307205,1188179964,773529912,1546045734,1294757372,1522805485,1396182291,2643833823,1695183700,2343527390,1986661051,1014477480,2177026350,1206759142,2456956037,344077627,2730485921,1290863460,2820302411,3158454273,3259730800,3505952657,3345764771,106217008,3516065817,3606008344,3600352804,1432725776,4094571909,1467031594,275423344,851169720,430227734,3100823752,506948616,1363258195,659060556,3750685593,883997877,3785050280,958139571,3318307427,1322822218,3812723403,1537002063,2003034995,1747873779,3602036899,1955562222,1575990012,2024104815,1125592928,2227730452,2716904306,2361852424,442776044,2428436474,593698344,2756734187,3733110249,3204031479,2999351573,3329325298,3815920427,3391569614,3928383900,3515267271,566280711,3940187606,3454069534,4118630271,4000239992,116418474,1914138554,174292421,2731055270,289380356,3203993006,460393269,320620315,685471733,587496836,852142971,1086792851,1017036298,365543100,1126000580,2618297676,1288033470,3409855158,1501505948,4234509866,1607167915,987167468,1816402316,1246189591],vr=new Float64Array([237,211,245,92,26,99,18,88,214,156,247,162,222,249,222,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16]),br=32,gr=24,_r=32,Ar=16,dr=32,Ur=32,Er=32,xr=32,Mr=32,mr=gr,Br=_r,Sr=Ar,Kr=64,Tr=32,Yr=64,kr=32,Lr=64;r.lowlevel={crypto_core_hsalsa20:f,crypto_stream_xor:y,crypto_stream:c,crypto_stream_salsa20_xor:s,crypto_stream_salsa20:u,crypto_onetimeauth:l,crypto_onetimeauth_verify:w,crypto_verify_16:e,crypto_verify_32:o,crypto_secretbox:p,crypto_secretbox_open:v,crypto_scalarmult:T,crypto_scalarmult_base:Y,crypto_box_beforenm:L,crypto_box_afternm:lr,crypto_box:z,crypto_box_open:R,crypto_box_keypair:k,crypto_hash:O,crypto_sign:V,crypto_sign_keypair:Z,crypto_sign_open:D,crypto_secretbox_KEYBYTES:br,crypto_secretbox_NONCEBYTES:gr,crypto_secretbox_ZEROBYTES:_r,crypto_secretbox_BOXZEROBYTES:Ar,crypto_scalarmult_BYTES:dr,crypto_scalarmult_SCALARBYTES:Ur,crypto_box_PUBLICKEYBYTES:Er,crypto_box_SECRETKEYBYTES:xr,crypto_box_BEFORENMBYTES:Mr,crypto_box_NONCEBYTES:mr,crypto_box_ZEROBYTES:Br,crypto_box_BOXZEROBYTES:Sr,crypto_sign_BYTES:Kr,crypto_sign_PUBLICKEYBYTES:Tr,crypto_sign_SECRETKEYBYTES:Yr,crypto_sign_SEEDBYTES:kr,crypto_hash_BYTES:Lr},r.util||(r.util={},r.util.decodeUTF8=r.util.encodeUTF8=r.util.encodeBase64=r.util.decodeBase64=function(){throw new Error("nacl.util moved into separate package: https://github.com/dchest/tweetnacl-util-js")}),r.randomBytes=function(r){var t=new Uint8Array(r);return rr(t,r),t},r.secretbox=function(r,t,n){Q(r,t,n),H(n,t);for(var e=new Uint8Array(_r+r.length),o=new Uint8Array(e.length),i=0;ie)return null;for(var o=new Uint8Array(e),i=0;ie;e++)o[e]=t[e];for(e=0;e=0},r.sign.keyPair=function(){var r=new Uint8Array(Tr),t=new Uint8Array(Yr);return Z(r,t),{publicKey:r,secretKey:t}},r.sign.keyPair.fromSecretKey=function(r){if(Q(r),r.length!==Yr)throw new Error("bad 
secret key size");for(var t=new Uint8Array(Tr),n=0;ne;e++)n[e]=r[e];return Z(t,n,!0),{publicKey:t,secretKey:n}},r.sign.publicKeyLength=Tr,r.sign.secretKeyLength=Yr,r.sign.seedLength=kr,r.sign.signatureLength=Kr,r.hash=function(r){Q(r);var t=new Uint8Array(Lr);return O(t,r,r.length),t},r.hash.hashLength=Lr,r.verify=function(r,t){return Q(r,t), +0===r.length||0===t.length?!1:r.length!==t.length?!1:0===n(r,0,t,0,r.length)?!0:!1},r.setPRNG=function(r){rr=r},function(){var t="undefined"!=typeof self?self.crypto||self.msCrypto:null;if(t&&t.getRandomValues){var n=65536;r.setPRNG(function(r,e){var o,i=new Uint8Array(e);for(o=0;e>o;o+=n)t.getRandomValues(i.subarray(o,o+Math.min(e-o,n)));for(o=0;e>o;o++)r[o]=i[o];W(i)})}else"undefined"!=typeof require&&(t=require("crypto"),t&&t.randomBytes&&r.setPRNG(function(r,n){var e,o=t.randomBytes(n);for(e=0;n>e;e++)r[e]=o[e];W(o)}))}()}("undefined"!=typeof module&&module.exports?module.exports:self.nacl=self.nacl||{}); \ No newline at end of file diff --git a/node_modules/tweetnacl/nacl.js b/node_modules/tweetnacl/nacl.js new file mode 100644 index 0000000..f72dd78 --- /dev/null +++ b/node_modules/tweetnacl/nacl.js @@ -0,0 +1,1175 @@ +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var u64 = function(h, l) { this.hi = h|0 >>> 0; this.lo = l|0 >>> 0; }; +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. +var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function L32(x, c) { return (x << c) | (x >>> (32 - c)); } + +function ld32(x, i) { + var u = x[i+3] & 0xff; + u = (u<<8)|(x[i+2] & 0xff); + u = (u<<8)|(x[i+1] & 0xff); + return (u<<8)|(x[i+0] & 0xff); +} + +function dl64(x, i) { + var h = (x[i] << 24) | (x[i+1] << 16) | (x[i+2] << 8) | x[i+3]; + var l = (x[i+4] << 24) | (x[i+5] << 16) | (x[i+6] << 8) | x[i+7]; + return new u64(h, l); +} + +function st32(x, j, u) { + var i; + for (i = 0; i < 4; i++) { x[j+i] = u & 255; u >>>= 8; } +} + +function ts64(x, i, u) { + x[i] = (u.hi >> 24) & 0xff; + x[i+1] = (u.hi >> 16) & 0xff; + x[i+2] = (u.hi >> 8) & 0xff; + x[i+3] = u.hi & 0xff; + x[i+4] = (u.lo >> 24) & 0xff; + x[i+5] = (u.lo >> 16) & 0xff; + x[i+6] = (u.lo >> 8) & 0xff; + x[i+7] = u.lo & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); 
+} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core(out,inp,k,c,h) { + var w = new Uint32Array(16), x = new Uint32Array(16), + y = new Uint32Array(16), t = new Uint32Array(4); + var i, j, m; + + for (i = 0; i < 4; i++) { + x[5*i] = ld32(c, 4*i); + x[1+i] = ld32(k, 4*i); + x[6+i] = ld32(inp, 4*i); + x[11+i] = ld32(k, 16+4*i); + } + + for (i = 0; i < 16; i++) y[i] = x[i]; + + for (i = 0; i < 20; i++) { + for (j = 0; j < 4; j++) { + for (m = 0; m < 4; m++) t[m] = x[(5*j+4*m)%16]; + t[1] ^= L32((t[0]+t[3])|0, 7); + t[2] ^= L32((t[1]+t[0])|0, 9); + t[3] ^= L32((t[2]+t[1])|0,13); + t[0] ^= L32((t[3]+t[2])|0,18); + for (m = 0; m < 4; m++) w[4*j+(j+m)%4] = t[m]; + } + for (m = 0; m < 16; m++) x[m] = w[m]; + } + + if (h) { + for (i = 0; i < 16; i++) x[i] = (x[i] + y[i]) | 0; + for (i = 0; i < 4; i++) { + x[5*i] = (x[5*i] - ld32(c, 4*i)) | 0; + x[6+i] = (x[6+i] - ld32(inp, 4*i)) | 0; + } + for (i = 0; i < 4; i++) { + st32(out,4*i,x[5*i]); + st32(out,16+4*i,x[6+i]); + } + } else { + for (i = 0; i < 16; i++) st32(out, 4 * i, (x[i] + y[i]) | 0); + } +} + +function crypto_core_salsa20(out,inp,k,c) { + core(out,inp,k,c,false); + return 0; +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core(out,inp,k,c,true); + return 0; +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + if (!b) return 0; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = (m?m[mpos+i]:0) ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + if (m) mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = (m?m[mpos+i]:0) ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,d,n,k) { + return crypto_stream_salsa20_xor(c,cpos,null,0,d,n,k); +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + return crypto_stream_salsa20(c,cpos,d,n.subarray(16),s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,n.subarray(16),s); +} + +function add1305(h, c) { + var j, u = 0; + for (j = 0; j < 17; j++) { + u = (u + ((h[j] + c[j]) | 0)) | 0; + h[j] = u & 255; + u >>>= 8; + } +} + +var minusp = new Uint32Array([ + 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252 +]); + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s, i, j, u; + var x = new Uint32Array(17), r = new Uint32Array(17), + h = new Uint32Array(17), c = new Uint32Array(17), + g = new Uint32Array(17); + for (j = 0; j < 17; j++) r[j]=h[j]=0; + for (j = 0; j < 16; j++) r[j]=k[j]; + r[3]&=15; + r[4]&=252; + r[7]&=15; + r[8]&=252; + r[11]&=15; + r[12]&=252; + r[15]&=15; + + while (n > 0) { + for (j = 0; j < 17; j++) c[j] = 0; + for (j = 0; (j < 16) && (j < n); ++j) c[j] = m[mpos+j]; + c[j] = 1; + mpos += j; n -= j; + add1305(h,c); + for (i = 0; i < 17; i++) { + x[i] = 0; + for (j = 0; j < 17; j++) x[i] = (x[i] + (h[j] * ((j <= i) ? 
r[i - j] : ((320 * r[i + 17 - j])|0))) | 0) | 0; + } + for (i = 0; i < 17; i++) h[i] = x[i]; + u = 0; + for (j = 0; j < 16; j++) { + u = (u + h[j]) | 0; + h[j] = u & 255; + u >>>= 8; + } + u = (u + h[16]) | 0; h[16] = u & 3; + u = (5 * (u >>> 2)) | 0; + for (j = 0; j < 16; j++) { + u = (u + h[j]) | 0; + h[j] = u & 255; + u >>>= 8; + } + u = (u + h[16]) | 0; h[16] = u; + } + + for (j = 0; j < 17; j++) g[j] = h[j]; + add1305(h,minusp); + s = (-(h[16] >>> 7) | 0); + for (j = 0; j < 17; j++) h[j] ^= s & (g[j] ^ h[j]); + + for (j = 0; j < 16; j++) c[j] = k[j + 16]; + c[16] = 0; + add1305(h,c); + for (j = 0; j < 16; j++) out[outpos+j] = h[j]; + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var c; + var i; + for (i = 0; i < 16; i++) { + o[i] += 65536; + c = Math.floor(o[i] / 65536); + o[(i+1)*(i<15?1:0)] += c - 1 + 37 * (c-1) * (i===15?1:0); + o[i] -= (c * 65536); + } +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + var i; + for (i = 0; i < 16; i++) o[i] = (a[i] + b[i])|0; +} + +function Z(o, a, b) { + var i; + for (i = 0; i < 16; i++) o[i] = (a[i] - b[i])|0; +} + +function M(o, a, b) { + var i, j, t = new Float64Array(31); + for (i = 0; i < 31; i++) t[i] = 0; + for (i = 0; i < 16; i++) { + for (j = 0; j < 16; j++) { + t[i+j] += a[i] * b[j]; + } + } + for (i = 0; i < 15; i++) { + t[i] += 38 * t[i+16]; + } + for (i = 0; i < 16; i++) o[i] = t[i]; + car25519(o); + car25519(o); +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + 
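// Square-and-multiply: each round squares c and, except when a === 1, multiplies
+    // by i, so c ends up as i^(2^252 - 3), i.e. i^((p-5)/8) for p = 2^255 - 19.
+    // unpackneg uses this power to compute a candidate square root. +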
S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +function add64() { + var a = 0, b = 0, c = 0, d = 0, m16 = 65535, l, h, i; + for (i = 0; i < arguments.length; i++) { + l = arguments[i].lo; + h = arguments[i].hi; + a += (l & m16); b += (l >>> 16); + c += (h & m16); d += (h >>> 16); + } + + b += (a >>> 16); + c += (b >>> 16); + d += (c >>> 16); + + return new u64((c & m16) | (d << 16), (a & m16) | (b << 16)); +} + +function shr64(x, c) { + return new u64((x.hi >>> c), (x.lo >>> c) | (x.hi << (32 - c))); +} + +function xor64() { + var l = 0, h = 0, i; + for (i = 0; i < arguments.length; i++) { + l ^= arguments[i].lo; + h ^= arguments[i].hi; + } + return new u64(h, l); +} + +function R(x, c) { + var h, l, c1 = 32 - c; + if (c < 32) { + h = (x.hi >>> c) | (x.lo << c1); + l = (x.lo >>> c) | (x.hi << c1); + } else if (c < 64) { + h = (x.lo >>> c) | (x.hi << c1); + l = (x.hi >>> c) | (x.lo << c1); + } + return new u64(h, l); +} + +function Ch(x, y, z) { + var h = (x.hi & y.hi) ^ (~x.hi & z.hi), + l = (x.lo & y.lo) ^ (~x.lo & z.lo); + return new u64(h, l); +} + +function Maj(x, y, z) { + var h = (x.hi & y.hi) ^ (x.hi & z.hi) ^ (y.hi & z.hi), + l = (x.lo & y.lo) ^ (x.lo & z.lo) ^ (y.lo & z.lo); + return new u64(h, l); +} + +function Sigma0(x) { return xor64(R(x,28), R(x,34), R(x,39)); } +function Sigma1(x) { return xor64(R(x,14), R(x,18), R(x,41)); } +function sigma0(x) { return xor64(R(x, 1), R(x, 8), shr64(x,7)); } +function sigma1(x) { return xor64(R(x,19), R(x,61), shr64(x,6)); } + +var K = [ + new u64(0x428a2f98, 0xd728ae22), new u64(0x71374491, 0x23ef65cd), + new u64(0xb5c0fbcf, 0xec4d3b2f), new u64(0xe9b5dba5, 0x8189dbbc), + new u64(0x3956c25b, 0xf348b538), new u64(0x59f111f1, 0xb605d019), + new u64(0x923f82a4, 0xaf194f9b), new u64(0xab1c5ed5, 0xda6d8118), + new u64(0xd807aa98, 0xa3030242), new 
u64(0x12835b01, 0x45706fbe), + new u64(0x243185be, 0x4ee4b28c), new u64(0x550c7dc3, 0xd5ffb4e2), + new u64(0x72be5d74, 0xf27b896f), new u64(0x80deb1fe, 0x3b1696b1), + new u64(0x9bdc06a7, 0x25c71235), new u64(0xc19bf174, 0xcf692694), + new u64(0xe49b69c1, 0x9ef14ad2), new u64(0xefbe4786, 0x384f25e3), + new u64(0x0fc19dc6, 0x8b8cd5b5), new u64(0x240ca1cc, 0x77ac9c65), + new u64(0x2de92c6f, 0x592b0275), new u64(0x4a7484aa, 0x6ea6e483), + new u64(0x5cb0a9dc, 0xbd41fbd4), new u64(0x76f988da, 0x831153b5), + new u64(0x983e5152, 0xee66dfab), new u64(0xa831c66d, 0x2db43210), + new u64(0xb00327c8, 0x98fb213f), new u64(0xbf597fc7, 0xbeef0ee4), + new u64(0xc6e00bf3, 0x3da88fc2), new u64(0xd5a79147, 0x930aa725), + new u64(0x06ca6351, 0xe003826f), new u64(0x14292967, 0x0a0e6e70), + new u64(0x27b70a85, 0x46d22ffc), new u64(0x2e1b2138, 0x5c26c926), + new u64(0x4d2c6dfc, 0x5ac42aed), new u64(0x53380d13, 0x9d95b3df), + new u64(0x650a7354, 0x8baf63de), new u64(0x766a0abb, 0x3c77b2a8), + new u64(0x81c2c92e, 0x47edaee6), new u64(0x92722c85, 0x1482353b), + new u64(0xa2bfe8a1, 0x4cf10364), new u64(0xa81a664b, 0xbc423001), + new u64(0xc24b8b70, 0xd0f89791), new u64(0xc76c51a3, 0x0654be30), + new u64(0xd192e819, 0xd6ef5218), new u64(0xd6990624, 0x5565a910), + new u64(0xf40e3585, 0x5771202a), new u64(0x106aa070, 0x32bbd1b8), + new u64(0x19a4c116, 0xb8d2d0c8), new u64(0x1e376c08, 0x5141ab53), + new u64(0x2748774c, 0xdf8eeb99), new u64(0x34b0bcb5, 0xe19b48a8), + new u64(0x391c0cb3, 0xc5c95a63), new u64(0x4ed8aa4a, 0xe3418acb), + new u64(0x5b9cca4f, 0x7763e373), new u64(0x682e6ff3, 0xd6b2b8a3), + new u64(0x748f82ee, 0x5defb2fc), new u64(0x78a5636f, 0x43172f60), + new u64(0x84c87814, 0xa1f0ab72), new u64(0x8cc70208, 0x1a6439ec), + new u64(0x90befffa, 0x23631e28), new u64(0xa4506ceb, 0xde82bde9), + new u64(0xbef9a3f7, 0xb2c67915), new u64(0xc67178f2, 0xe372532b), + new u64(0xca273ece, 0xea26619c), new u64(0xd186b8c7, 0x21c0c207), + new u64(0xeada7dd6, 0xcde0eb1e), new u64(0xf57d4f7f, 0xee6ed178), + new u64(0x06f067aa, 0x72176fba), new u64(0x0a637dc5, 0xa2c898a6), + new u64(0x113f9804, 0xbef90dae), new u64(0x1b710b35, 0x131c471b), + new u64(0x28db77f5, 0x23047d84), new u64(0x32caab7b, 0x40c72493), + new u64(0x3c9ebe0a, 0x15c9bebc), new u64(0x431d67c4, 0x9c100d4c), + new u64(0x4cc5d4be, 0xcb3e42b6), new u64(0x597f299c, 0xfc657e2a), + new u64(0x5fcb6fab, 0x3ad6faec), new u64(0x6c44198c, 0x4a475817) +]; + +function crypto_hashblocks(x, m, n) { + var z = [], b = [], a = [], w = [], t, i, j; + + for (i = 0; i < 8; i++) z[i] = a[i] = dl64(x, 8*i); + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) w[i] = dl64(m, 8*i+pos); + for (i = 0; i < 80; i++) { + for (j = 0; j < 8; j++) b[j] = a[j]; + t = add64(a[7], Sigma1(a[4]), Ch(a[4], a[5], a[6]), K[i], w[i%16]); + b[7] = add64(t, Sigma0(a[0]), Maj(a[0], a[1], a[2])); + b[3] = add64(b[3], t); + for (j = 0; j < 8; j++) a[(j+1)%8] = b[j]; + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + w[j] = add64(w[j], w[(j+9)%16], sigma0(w[(j+1)%16]), sigma1(w[(j+14)%16])); + } + } + } + + for (i = 0; i < 8; i++) { + a[i] = add64(a[i], z[i]); + z[i] = a[i]; + } + + pos += 128; + n -= 128; + } + + for (i = 0; i < 8; i++) ts64(x, 8*i, z[i]); + return n; +} + +var iv = new Uint8Array([ + 0x6a,0x09,0xe6,0x67,0xf3,0xbc,0xc9,0x08, + 0xbb,0x67,0xae,0x85,0x84,0xca,0xa7,0x3b, + 0x3c,0x6e,0xf3,0x72,0xfe,0x94,0xf8,0x2b, + 0xa5,0x4f,0xf5,0x3a,0x5f,0x1d,0x36,0xf1, + 0x51,0x0e,0x52,0x7f,0xad,0xe6,0x82,0xd1, + 0x9b,0x05,0x68,0x8c,0x2b,0x3e,0x6c,0x1f, + 0x1f,0x83,0xd9,0xab,0xfb,0x41,0xbd,0x6b, + 
0x5b,0xe0,0xcd,0x19,0x13,0x7e,0x21,0x79 +]); + +function crypto_hash(out, m, n) { + var h = new Uint8Array(64), x = new Uint8Array(256); + var i, b = n; + + for (i = 0; i < 64; i++) h[i] = iv[i]; + + crypto_hashblocks(h, m, n); + n %= 128; + + for (i = 0; i < 256; i++) x[i] = 0; + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, new u64((b / 0x20000000) | 0, b << 3)); + crypto_hashblocks(h, x, n); + + for (i = 0; i < 64; i++) out[i] = h[i]; + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = (x[j] + 128) >> 8; + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. 
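+// The signed message sm is laid out as: 32-byte R, 32-byte S, then the n-byte
+// message, so callers must allocate n + 64 bytes; the returned smlen is that total.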
+function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i, mlen; + var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + mlen = -1; + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + mlen = n; + return mlen; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: 
crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + var t, i; + for (i = 0; i < arguments.length; i++) { + if ((t = Object.prototype.toString.call(arguments[i])) !== '[object Uint8Array]') + throw new TypeError('unexpected type ' + t + ', use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +// TODO: Completely remove this in v0.15. 
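+// nacl.util (UTF-8 / Base64 helpers) was split out into the separate tweetnacl-util
+// package; this stub only throws a descriptive error if the old methods are called.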
+if (!nacl.util) { + nacl.util = {}; + nacl.util.decodeUTF8 = nacl.util.encodeUTF8 = nacl.util.encodeBase64 = nacl.util.decodeBase64 = function() { + throw new Error('nacl.util moved into separate package: https://github.com/dchest/tweetnacl-util-js'); + }; +} + +nacl.randomBytes = function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return false; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return false; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; +nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = 
nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + if (arguments.length !== 2) + throw new Error('nacl.sign.open accepts 2 arguments; did you mean to use nacl.sign.detached.verify?'); + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; +nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. 
+ // If not, methods calling randombytes will throw. + var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null; + if (crypto && crypto.getRandomValues) { + // Browsers. + var QUOTA = 65536; + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + for (i = 0; i < n; i += QUOTA) { + crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); + } + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } else if (typeof require !== 'undefined') { + // Node.js. + crypto = require('crypto'); + if (crypto && crypto.randomBytes) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})(typeof module !== 'undefined' && module.exports ? module.exports : (self.nacl = self.nacl || {})); diff --git a/node_modules/tweetnacl/nacl.min.js b/node_modules/tweetnacl/nacl.min.js new file mode 100644 index 0000000..eed3854 --- /dev/null +++ b/node_modules/tweetnacl/nacl.min.js @@ -0,0 +1 @@ +!function(r){"use strict";function n(r,n){return r<>>32-n}function e(r,n){var e=255&r[n+3];return e=e<<8|255&r[n+2],e=e<<8|255&r[n+1],e<<8|255&r[n+0]}function t(r,n){var e=r[n]<<24|r[n+1]<<16|r[n+2]<<8|r[n+3],t=r[n+4]<<24|r[n+5]<<16|r[n+6]<<8|r[n+7];return new lr(e,t)}function o(r,n,e){var t;for(t=0;4>t;t++)r[n+t]=255&e,e>>>=8}function i(r,n,e){r[n]=e.hi>>24&255,r[n+1]=e.hi>>16&255,r[n+2]=e.hi>>8&255,r[n+3]=255&e.hi,r[n+4]=e.lo>>24&255,r[n+5]=e.lo>>16&255,r[n+6]=e.lo>>8&255,r[n+7]=255&e.lo}function a(r,n,e,t,o){var i,a=0;for(i=0;o>i;i++)a|=r[n+i]^e[t+i];return(1&a-1>>>8)-1}function f(r,n,e,t){return a(r,n,e,t,16)}function u(r,n,e,t){return a(r,n,e,t,32)}function c(r,t,i,a,f){var u,c,w,y=new Uint32Array(16),s=new Uint32Array(16),l=new Uint32Array(16),h=new Uint32Array(4);for(u=0;4>u;u++)s[5*u]=e(a,4*u),s[1+u]=e(i,4*u),s[6+u]=e(t,4*u),s[11+u]=e(i,16+4*u);for(u=0;16>u;u++)l[u]=s[u];for(u=0;20>u;u++){for(c=0;4>c;c++){for(w=0;4>w;w++)h[w]=s[(5*c+4*w)%16];for(h[1]^=n(h[0]+h[3]|0,7),h[2]^=n(h[1]+h[0]|0,9),h[3]^=n(h[2]+h[1]|0,13),h[0]^=n(h[3]+h[2]|0,18),w=0;4>w;w++)y[4*c+(c+w)%4]=h[w]}for(w=0;16>w;w++)s[w]=y[w]}if(f){for(u=0;16>u;u++)s[u]=s[u]+l[u]|0;for(u=0;4>u;u++)s[5*u]=s[5*u]-e(a,4*u)|0,s[6+u]=s[6+u]-e(t,4*u)|0;for(u=0;4>u;u++)o(r,4*u,s[5*u]),o(r,16+4*u,s[6+u])}else for(u=0;16>u;u++)o(r,4*u,s[u]+l[u]|0)}function w(r,n,e,t){return c(r,n,e,t,!1),0}function y(r,n,e,t){return c(r,n,e,t,!0),0}function s(r,n,e,t,o,i,a){var f,u,c=new Uint8Array(16),y=new Uint8Array(64);if(!o)return 0;for(u=0;16>u;u++)c[u]=0;for(u=0;8>u;u++)c[u]=i[u];for(;o>=64;){for(w(y,c,a,Br),u=0;64>u;u++)r[n+u]=(e?e[t+u]:0)^y[u];for(f=1,u=8;16>u;u++)f=f+(255&c[u])|0,c[u]=255&f,f>>>=8;o-=64,n+=64,e&&(t+=64)}if(o>0)for(w(y,c,a,Br),u=0;o>u;u++)r[n+u]=(e?e[t+u]:0)^y[u];return 0}function l(r,n,e,t,o){return s(r,n,null,0,e,t,o)}function h(r,n,e,t,o){var i=new Uint8Array(32);return y(i,t,o,Br),l(r,n,e,t.subarray(16),i)}function g(r,n,e,t,o,i,a){var f=new Uint8Array(32);return y(f,i,a,Br),s(r,n,e,t,o,i.subarray(16),f)}function v(r,n){var e,t=0;for(e=0;17>e;e++)t=t+(r[e]+n[e]|0)|0,r[e]=255&t,t>>>=8}function b(r,n,e,t,o,i){var a,f,u,c,w=new Uint32Array(17),y=new Uint32Array(17),s=new Uint32Array(17),l=new Uint32Array(17),h=new 
Uint32Array(17);for(u=0;17>u;u++)y[u]=s[u]=0;for(u=0;16>u;u++)y[u]=i[u];for(y[3]&=15,y[4]&=252,y[7]&=15,y[8]&=252,y[11]&=15,y[12]&=252,y[15]&=15;o>0;){for(u=0;17>u;u++)l[u]=0;for(u=0;16>u&&o>u;++u)l[u]=e[t+u];for(l[u]=1,t+=u,o-=u,v(s,l),f=0;17>f;f++)for(w[f]=0,u=0;17>u;u++)w[f]=w[f]+s[u]*(f>=u?y[f-u]:320*y[f+17-u]|0)|0|0;for(f=0;17>f;f++)s[f]=w[f];for(c=0,u=0;16>u;u++)c=c+s[u]|0,s[u]=255&c,c>>>=8;for(c=c+s[16]|0,s[16]=3&c,c=5*(c>>>2)|0,u=0;16>u;u++)c=c+s[u]|0,s[u]=255&c,c>>>=8;c=c+s[16]|0,s[16]=c}for(u=0;17>u;u++)h[u]=s[u];for(v(s,Sr),a=0|-(s[16]>>>7),u=0;17>u;u++)s[u]^=a&(h[u]^s[u]);for(u=0;16>u;u++)l[u]=i[u+16];for(l[16]=0,v(s,l),u=0;16>u;u++)r[n+u]=s[u];return 0}function p(r,n,e,t,o,i){var a=new Uint8Array(16);return b(a,0,e,t,o,i),f(r,n,a,0)}function _(r,n,e,t,o){var i;if(32>e)return-1;for(g(r,0,n,0,e,t,o),b(r,16,r,32,e-32,r),i=0;16>i;i++)r[i]=0;return 0}function A(r,n,e,t,o){var i,a=new Uint8Array(32);if(32>e)return-1;if(h(a,0,32,t,o),0!==p(n,16,n,32,e-32,a))return-1;for(g(r,0,n,0,e,t,o),i=0;32>i;i++)r[i]=0;return 0}function U(r,n){var e;for(e=0;16>e;e++)r[e]=0|n[e]}function E(r){var n,e;for(e=0;16>e;e++)r[e]+=65536,n=Math.floor(r[e]/65536),r[(e+1)*(15>e?1:0)]+=n-1+37*(n-1)*(15===e?1:0),r[e]-=65536*n}function d(r,n,e){for(var t,o=~(e-1),i=0;16>i;i++)t=o&(r[i]^n[i]),r[i]^=t,n[i]^=t}function x(r,n){var e,t,o,i=hr(),a=hr();for(e=0;16>e;e++)a[e]=n[e];for(E(a),E(a),E(a),t=0;2>t;t++){for(i[0]=a[0]-65517,e=1;15>e;e++)i[e]=a[e]-65535-(i[e-1]>>16&1),i[e-1]&=65535;i[15]=a[15]-32767-(i[14]>>16&1),o=i[15]>>16&1,i[14]&=65535,d(a,i,1-o)}for(e=0;16>e;e++)r[2*e]=255&a[e],r[2*e+1]=a[e]>>8}function m(r,n){var e=new Uint8Array(32),t=new Uint8Array(32);return x(e,r),x(t,n),u(e,0,t,0)}function B(r){var n=new Uint8Array(32);return x(n,r),1&n[0]}function S(r,n){var e;for(e=0;16>e;e++)r[e]=n[2*e]+(n[2*e+1]<<8);r[15]&=32767}function K(r,n,e){var t;for(t=0;16>t;t++)r[t]=n[t]+e[t]|0}function T(r,n,e){var t;for(t=0;16>t;t++)r[t]=n[t]-e[t]|0}function Y(r,n,e){var t,o,i=new Float64Array(31);for(t=0;31>t;t++)i[t]=0;for(t=0;16>t;t++)for(o=0;16>o;o++)i[t+o]+=n[t]*e[o];for(t=0;15>t;t++)i[t]+=38*i[t+16];for(t=0;16>t;t++)r[t]=i[t];E(r),E(r)}function L(r,n){Y(r,n,n)}function k(r,n){var e,t=hr();for(e=0;16>e;e++)t[e]=n[e];for(e=253;e>=0;e--)L(t,t),2!==e&&4!==e&&Y(t,t,n);for(e=0;16>e;e++)r[e]=t[e]}function z(r,n){var e,t=hr();for(e=0;16>e;e++)t[e]=n[e];for(e=250;e>=0;e--)L(t,t),1!==e&&Y(t,t,n);for(e=0;16>e;e++)r[e]=t[e]}function R(r,n,e){var t,o,i=new Uint8Array(32),a=new Float64Array(80),f=hr(),u=hr(),c=hr(),w=hr(),y=hr(),s=hr();for(o=0;31>o;o++)i[o]=n[o];for(i[31]=127&n[31]|64,i[0]&=248,S(a,e),o=0;16>o;o++)u[o]=a[o],w[o]=f[o]=c[o]=0;for(f[0]=w[0]=1,o=254;o>=0;--o)t=i[o>>>3]>>>(7&o)&1,d(f,u,t),d(c,w,t),K(y,f,c),T(f,f,c),K(c,u,w),T(u,u,w),L(w,y),L(s,f),Y(f,c,f),Y(c,u,y),K(y,f,c),T(f,f,c),L(u,f),T(c,w,s),Y(f,c,Ar),K(f,f,w),Y(c,c,f),Y(f,w,s),Y(w,u,a),L(u,y),d(f,u,t),d(c,w,t);for(o=0;16>o;o++)a[o+16]=f[o],a[o+32]=c[o],a[o+48]=u[o],a[o+64]=w[o];var l=a.subarray(32),h=a.subarray(16);return k(l,l),Y(h,h,l),x(r,h),0}function P(r,n){return R(r,n,br)}function O(r,n){return gr(n,32),P(r,n)}function F(r,n,e){var t=new Uint8Array(32);return R(t,e,n),y(r,vr,t,Br)}function N(r,n,e,t,o,i){var a=new Uint8Array(32);return F(a,o,i),Kr(r,n,e,t,a)}function C(r,n,e,t,o,i){var a=new Uint8Array(32);return F(a,o,i),Tr(r,n,e,t,a)}function M(){var r,n,e,t=0,o=0,i=0,a=0,f=65535;for(e=0;e>>16,i+=n&f,a+=n>>>16;return o+=t>>>16,i+=o>>>16,a+=i>>>16,new lr(i&f|a<<16,t&f|o<<16)}function G(r,n){return new lr(r.hi>>>n,r.lo>>>n|r.hi<<32-n)}function Z(){var 
r,n=0,e=0;for(r=0;rn?(e=r.hi>>>n|r.lo<>>n|r.hi<n&&(e=r.lo>>>n|r.hi<>>n|r.lo<a;a++)u[a]=w[a]=t(r,8*a);for(var s=0;e>=128;){for(a=0;16>a;a++)y[a]=t(n,8*a+s);for(a=0;80>a;a++){for(f=0;8>f;f++)c[f]=w[f];for(o=M(w[7],X(w[4]),q(w[4],w[5],w[6]),Yr[a],y[a%16]),c[7]=M(o,V(w[0]),I(w[0],w[1],w[2])),c[3]=M(c[3],o),f=0;8>f;f++)w[(f+1)%8]=c[f];if(a%16===15)for(f=0;16>f;f++)y[f]=M(y[f],y[(f+9)%16],D(y[(f+1)%16]),H(y[(f+14)%16]))}for(a=0;8>a;a++)w[a]=M(w[a],u[a]),u[a]=w[a];s+=128,e-=128}for(a=0;8>a;a++)i(r,8*a,u[a]);return e}function Q(r,n,e){var t,o=new Uint8Array(64),a=new Uint8Array(256),f=e;for(t=0;64>t;t++)o[t]=Lr[t];for(J(o,n,e),e%=128,t=0;256>t;t++)a[t]=0;for(t=0;e>t;t++)a[t]=n[f-e+t];for(a[e]=128,e=256-128*(112>e?1:0),a[e-9]=0,i(a,e-8,new lr(f/536870912|0,f<<3)),J(o,a,e),t=0;64>t;t++)r[t]=o[t];return 0}function W(r,n){var e=hr(),t=hr(),o=hr(),i=hr(),a=hr(),f=hr(),u=hr(),c=hr(),w=hr();T(e,r[1],r[0]),T(w,n[1],n[0]),Y(e,e,w),K(t,r[0],r[1]),K(w,n[0],n[1]),Y(t,t,w),Y(o,r[3],n[3]),Y(o,o,Er),Y(i,r[2],n[2]),K(i,i,i),T(a,t,e),T(f,i,o),K(u,i,o),K(c,t,e),Y(r[0],a,f),Y(r[1],c,u),Y(r[2],u,f),Y(r[3],a,c)}function $(r,n,e){var t;for(t=0;4>t;t++)d(r[t],n[t],e)}function rr(r,n){var e=hr(),t=hr(),o=hr();k(o,n[2]),Y(e,n[0],o),Y(t,n[1],o),x(r,t),r[31]^=B(e)<<7}function nr(r,n,e){var t,o;for(U(r[0],pr),U(r[1],_r),U(r[2],_r),U(r[3],pr),o=255;o>=0;--o)t=e[o/8|0]>>(7&o)&1,$(r,n,t),W(n,r),W(r,r),$(r,n,t)}function er(r,n){var e=[hr(),hr(),hr(),hr()];U(e[0],dr),U(e[1],xr),U(e[2],_r),Y(e[3],dr,xr),nr(r,e,n)}function tr(r,n,e){var t,o=new Uint8Array(64),i=[hr(),hr(),hr(),hr()];for(e||gr(n,32),Q(o,n,32),o[0]&=248,o[31]&=127,o[31]|=64,er(i,o),rr(r,i),t=0;32>t;t++)n[t+32]=r[t];return 0}function or(r,n){var e,t,o,i;for(t=63;t>=32;--t){for(e=0,o=t-32,i=t-12;i>o;++o)n[o]+=e-16*n[t]*kr[o-(t-32)],e=n[o]+128>>8,n[o]-=256*e;n[o]+=e,n[t]=0}for(e=0,o=0;32>o;o++)n[o]+=e-(n[31]>>4)*kr[o],e=n[o]>>8,n[o]&=255;for(o=0;32>o;o++)n[o]-=e*kr[o];for(t=0;32>t;t++)n[t+1]+=n[t]>>8,r[t]=255&n[t]}function ir(r){var n,e=new Float64Array(64);for(n=0;64>n;n++)e[n]=r[n];for(n=0;64>n;n++)r[n]=0;or(r,e)}function ar(r,n,e,t){var o,i,a=new Uint8Array(64),f=new Uint8Array(64),u=new Uint8Array(64),c=new Float64Array(64),w=[hr(),hr(),hr(),hr()];Q(a,t,32),a[0]&=248,a[31]&=127,a[31]|=64;var y=e+64;for(o=0;e>o;o++)r[64+o]=n[o];for(o=0;32>o;o++)r[32+o]=a[32+o];for(Q(u,r.subarray(32),e+32),ir(u),er(w,u),rr(r,w),o=32;64>o;o++)r[o]=t[o];for(Q(f,r,e+64),ir(f),o=0;64>o;o++)c[o]=0;for(o=0;32>o;o++)c[o]=u[o];for(o=0;32>o;o++)for(i=0;32>i;i++)c[o+i]+=f[o]*a[i];return or(r.subarray(32),c),y}function fr(r,n){var e=hr(),t=hr(),o=hr(),i=hr(),a=hr(),f=hr(),u=hr();return U(r[2],_r),S(r[1],n),L(o,r[1]),Y(i,o,Ur),T(o,o,r[2]),K(i,r[2],i),L(a,i),L(f,a),Y(u,f,a),Y(e,u,o),Y(e,e,i),z(e,e),Y(e,e,o),Y(e,e,i),Y(e,e,i),Y(r[0],e,i),L(t,r[0]),Y(t,t,i),m(t,o)&&Y(r[0],r[0],mr),L(t,r[0]),Y(t,t,i),m(t,o)?-1:(B(r[0])===n[31]>>7&&T(r[0],pr,r[0]),Y(r[3],r[0],r[1]),0)}function ur(r,n,e,t){var o,i,a=new Uint8Array(32),f=new Uint8Array(64),c=[hr(),hr(),hr(),hr()],w=[hr(),hr(),hr(),hr()];if(i=-1,64>e)return-1;if(fr(w,t))return-1;for(o=0;e>o;o++)r[o]=n[o];for(o=0;32>o;o++)r[o+32]=t[o];if(Q(f,r,e),ir(f),nr(c,w,f),er(w,n.subarray(32)),W(c,w),rr(a,c),e-=64,u(n,0,a,0)){for(o=0;e>o;o++)r[o]=0;return-1}for(o=0;e>o;o++)r[o]=n[o+64];return i=e}function cr(r,n){if(r.length!==zr)throw new Error("bad key size");if(n.length!==Rr)throw new Error("bad nonce size")}function wr(r,n){if(r.length!==Cr)throw new Error("bad public key size");if(n.length!==Mr)throw new Error("bad secret key size")}function yr(){var 
r,n;for(n=0;nt)return null;for(var o=new Uint8Array(t),i=0;it;t++)o[t]=n[t];for(t=0;t=0},r.sign.keyPair=function(){var r=new Uint8Array(Vr),n=new Uint8Array(Xr);return tr(r,n),{publicKey:r,secretKey:n}},r.sign.keyPair.fromSecretKey=function(r){if(yr(r),r.length!==Xr)throw new Error("bad secret key size");for(var n=new Uint8Array(Vr),e=0;et;t++)e[t]=r[t];return tr(n,e,!0),{publicKey:n,secretKey:e}},r.sign.publicKeyLength=Vr,r.sign.secretKeyLength=Xr,r.sign.seedLength=Dr,r.sign.signatureLength=Ir,r.hash=function(r){yr(r);var n=new Uint8Array(Hr);return Q(n,r,r.length),n},r.hash.hashLength=Hr,r.verify=function(r,n){return yr(r,n),0===r.length||0===n.length?!1:r.length!==n.length?!1:0===a(r,0,n,0,r.length)?!0:!1},r.setPRNG=function(r){gr=r},function(){var n="undefined"!=typeof self?self.crypto||self.msCrypto:null;if(n&&n.getRandomValues){var e=65536;r.setPRNG(function(r,t){var o,i=new Uint8Array(t);for(o=0;t>o;o+=e)n.getRandomValues(i.subarray(o,o+Math.min(t-o,e)));for(o=0;t>o;o++)r[o]=i[o];sr(i)})}else"undefined"!=typeof require&&(n=require("crypto"),n&&n.randomBytes&&r.setPRNG(function(r,e){var t,o=n.randomBytes(e);for(t=0;e>t;t++)r[t]=o[t];sr(o)}))}()}("undefined"!=typeof module&&module.exports?module.exports:self.nacl=self.nacl||{}); \ No newline at end of file diff --git a/node_modules/tweetnacl/package.json b/node_modules/tweetnacl/package.json new file mode 100644 index 0000000..5fa81e5 --- /dev/null +++ b/node_modules/tweetnacl/package.json @@ -0,0 +1,57 @@ +{ + "name": "tweetnacl", + "version": "0.14.3", + "description": "Port of TweetNaCl cryptographic library to JavaScript", + "main": "nacl-fast.js", + "directories": { + "test": "test" + }, + "scripts": { + "build": "uglifyjs nacl.js -c -m -o nacl.min.js && uglifyjs nacl-fast.js -c -m -o nacl-fast.min.js", + "test-node": "tape test/*.js | faucet", + "test-node-all": "make -C test/c && tape test/*.js test/c/*.js | faucet", + "test-browser": "NACL_SRC=${NACL_SRC:='nacl.min.js'} && npm run build-test-browser && cat $NACL_SRC test/browser/_bundle.js | tape-run | faucet", + "build-test-browser": "browserify test/browser/init.js test/*.js | uglifyjs -c -m -o test/browser/_bundle.js 2>/dev/null && browserify test/browser/init.js test/*.quick.js | uglifyjs -c -m -o test/browser/_bundle-quick.js 2>/dev/null", + "test": "npm run test-node-all && npm run test-browser", + "bench": "node test/benchmark/bench.js", + "lint": "eslint nacl.js nacl-fast.js test/*.js test/benchmark/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/dchest/tweetnacl-js.git" + }, + "keywords": [ + "crypto", + "cryptography", + "curve25519", + "ed25519", + "encrypt", + "hash", + "key", + "nacl", + "poly1305", + "public", + "salsa20", + "signatures" + ], + "author": "TweetNaCl-js contributors", + "license": "SEE LICENSE IN COPYING.txt", + "bugs": { + "url": "https://github.com/dchest/tweetnacl-js/issues" + }, + "homepage": "https://tweetnacl.js.org", + "devDependencies": { + "browserify": "^13.0.0", + "eslint": "^2.2.0", + "faucet": "^0.0.1", + "tap-browser-color": "^0.1.2", + "tape": "^4.4.0", + "tape-run": "^2.1.3", + "tweetnacl-util": "^0.13.3", + "uglify-js": "^2.6.1" + }, + "browser": { + "buffer": false, + "crypto": false + } +} diff --git a/node_modules/type-is/HISTORY.md b/node_modules/type-is/HISTORY.md new file mode 100644 index 0000000..5ec118c --- /dev/null +++ b/node_modules/type-is/HISTORY.md @@ -0,0 +1,206 @@ +1.6.13 / 2016-05-18 +=================== + + * deps: mime-types@~2.1.11 + - Add new mime types + +1.6.12 / 2016-02-28 
+=================== + + * deps: mime-types@~2.1.10 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +1.6.11 / 2016-01-29 +=================== + + * deps: mime-types@~2.1.9 + - Add new mime types + +1.6.10 / 2015-12-01 +=================== + + * deps: mime-types@~2.1.8 + - Add new mime types + +1.6.9 / 2015-09-27 +================== + + * deps: mime-types@~2.1.7 + - Add new mime types + +1.6.8 / 2015-09-04 +================== + + * deps: mime-types@~2.1.6 + - Add new mime types + +1.6.7 / 2015-08-20 +================== + + * Fix type error when given invalid type to match against + * deps: mime-types@~2.1.5 + - Add new mime types + +1.6.6 / 2015-07-31 +================== + + * deps: mime-types@~2.1.4 + - Add new mime types + +1.6.5 / 2015-07-16 +================== + + * deps: mime-types@~2.1.3 + - Add new mime types + +1.6.4 / 2015-07-01 +================== + + * deps: mime-types@~2.1.2 + - Add new mime types + * perf: enable strict mode + * perf: remove argument reassignment + +1.6.3 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - Add new mime types + * perf: reduce try block size + * perf: remove bitwise operations + +1.6.2 / 2015-05-10 +================== + + * deps: mime-types@~2.0.11 + - Add new mime types + +1.6.1 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - Add new mime types + +1.6.0 / 2015-02-12 +================== + + * fix false-positives in `hasBody` `Transfer-Encoding` check + * support wildcard for both type and subtype (`*/*`) + +1.5.7 / 2015-02-09 +================== + + * fix argument reassignment + * deps: mime-types@~2.0.9 + - Add new mime types + +1.5.6 / 2015-01-29 +================== + + * deps: mime-types@~2.0.8 + - Add new mime types + +1.5.5 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - Add new mime types + - Fix missing extensions + - Fix various invalid MIME type entries + - Remove example template MIME types + - deps: mime-db@~1.5.0 + +1.5.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - Add new mime types + - deps: mime-db@~1.3.0 + +1.5.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - Add new mime types + - deps: mime-db@~1.2.0 + +1.5.2 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - Add new mime types + - deps: mime-db@~1.1.0 + +1.5.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + * deps: media-typer@0.3.0 + * deps: mime-types@~2.0.1 + - Support Node.js 0.6 + +1.5.0 / 2014-09-05 +================== + + * fix `hasbody` to be true for `content-length: 0` + +1.4.0 / 2014-09-02 +================== + + * update mime-types + +1.3.2 / 2014-06-24 +================== + + * use `~` range on mime-types + +1.3.1 / 2014-06-19 +================== + + * fix global variable leak + +1.3.0 / 2014-06-19 +================== + + * improve type parsing + + - invalid media type never matches + - media type not case-sensitive + - extra LWS does not affect results + +1.2.2 / 2014-06-19 +================== + + * fix behavior on unknown type argument + +1.2.1 / 2014-06-03 +================== + + * switch dependency from `mime` to `mime-types@1.0.0` + +1.2.0 / 2014-05-11 +================== + + * support suffix matching: + + - `+json` matches `application/vnd+json` + - `*/vnd+json` matches `application/vnd+json` + - `application/*+json` matches `application/vnd+json` + +1.1.0 / 2014-04-12 +================== + + * add non-array values support + * expose internal utilities: + + - `.is()` + - 
`.hasBody()` + - `.normalize()` + - `.match()` + +1.0.1 / 2014-03-30 +================== + + * add `multipart` as a shorthand diff --git a/node_modules/type-is/LICENSE b/node_modules/type-is/LICENSE new file mode 100644 index 0000000..386b7b6 --- /dev/null +++ b/node_modules/type-is/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/type-is/README.md b/node_modules/type-is/README.md new file mode 100644 index 0000000..008a7af --- /dev/null +++ b/node_modules/type-is/README.md @@ -0,0 +1,136 @@ +# type-is + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Infer the content-type of a request. + +### Install + +```sh +$ npm install type-is +``` + +## API + +```js +var http = require('http') +var is = require('type-is') + +http.createServer(function (req, res) { + var istext = is(req, ['text/*']) + res.end('you ' + (istext ? 'sent' : 'did not send') + ' me text') +}) +``` + +### type = is(request, types) + +`request` is the node HTTP request. `types` is an array of types. + +```js +// req.headers.content-type = 'application/json' + +is(req, ['json']) // 'json' +is(req, ['html', 'json']) // 'json' +is(req, ['application/*']) // 'application/json' +is(req, ['application/json']) // 'application/json' + +is(req, ['html']) // false +``` + +### is.hasBody(request) + +Returns a Boolean if the given `request` has a body, regardless of the +`Content-Type` header. + +Having a body has no relation to how large the body is (it may be 0 bytes). +This is similar to how file existence works. If a body does exist, then this +indicates that there is data to read from the Node.js request stream. + +```js +if (is.hasBody(req)) { + // read the body, since there is one + + req.on('data', function (chunk) { + // ... + }) +} +``` + +### type = is.is(mediaType, types) + +`mediaType` is the [media type](https://tools.ietf.org/html/rfc6838) string. `types` is an array of types. 
+ +```js +var mediaType = 'application/json' + +is.is(mediaType, ['json']) // 'json' +is.is(mediaType, ['html', 'json']) // 'json' +is.is(mediaType, ['application/*']) // 'application/json' +is.is(mediaType, ['application/json']) // 'application/json' + +is.is(mediaType, ['html']) // false +``` + +### Each type can be: + +- An extension name such as `json`. This name will be returned if matched. +- A mime type such as `application/json`. +- A mime type with a wildcard such as `*/*` or `*/json` or `application/*`. The full mime type will be returned if matched. +- A suffix such as `+json`. This can be combined with a wildcard such as `*/vnd+json` or `application/*+json`. The full mime type will be returned if matched. + +`false` will be returned if no type matches or the content type is invalid. + +`null` will be returned if the request does not have a body. + +## Examples + +#### Example body parser + +```js +var is = require('type-is'); + +function bodyParser(req, res, next) { + if (!is.hasBody(req)) { + return next() + } + + switch (is(req, ['urlencoded', 'json', 'multipart'])) { + case 'urlencoded': + // parse urlencoded body + throw new Error('implement urlencoded body parsing') + break + case 'json': + // parse json body + throw new Error('implement json body parsing') + break + case 'multipart': + // parse multipart body + throw new Error('implement multipart body parsing') + break + default: + // 415 error code + res.statusCode = 415 + res.end() + return + } +} +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/type-is.svg +[npm-url]: https://npmjs.org/package/type-is +[node-version-image]: https://img.shields.io/node/v/type-is.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/jshttp/type-is/master.svg +[travis-url]: https://travis-ci.org/jshttp/type-is +[coveralls-image]: https://img.shields.io/coveralls/jshttp/type-is/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/type-is?branch=master +[downloads-image]: https://img.shields.io/npm/dm/type-is.svg +[downloads-url]: https://npmjs.org/package/type-is diff --git a/node_modules/type-is/index.js b/node_modules/type-is/index.js new file mode 100644 index 0000000..4da7301 --- /dev/null +++ b/node_modules/type-is/index.js @@ -0,0 +1,262 @@ +/*! + * type-is + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var typer = require('media-typer') +var mime = require('mime-types') + +/** + * Module exports. + * @public + */ + +module.exports = typeofrequest +module.exports.is = typeis +module.exports.hasBody = hasbody +module.exports.normalize = normalize +module.exports.match = mimeMatch + +/** + * Compare a `value` content-type with `types`. + * Each `type` can be an extension like `html`, + * a special shortcut like `multipart` or `urlencoded`, + * or a mime type. + * + * If no types match, `false` is returned. + * Otherwise, the first `type` that matches is returned. 
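+ * For example, `typeis('application/json; charset=utf-8', ['json'])` returns `'json'`.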
+ * + * @param {String} value + * @param {Array} types + * @public + */ + +function typeis (value, types_) { + var i + var types = types_ + + // remove parameters and normalize + var val = tryNormalizeType(value) + + // no type or invalid + if (!val) { + return false + } + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length - 1) + for (i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // no types, return the content type + if (!types || !types.length) { + return val + } + + var type + for (i = 0; i < types.length; i++) { + if (mimeMatch(normalize(type = types[i]), val)) { + return type[0] === '+' || type.indexOf('*') !== -1 + ? val + : type + } + } + + // no matches + return false +} + +/** + * Check if a request has a request body. + * A request with a body __must__ either have `transfer-encoding` + * or `content-length` headers set. + * http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3 + * + * @param {Object} request + * @return {Boolean} + * @public + */ + +function hasbody (req) { + return req.headers['transfer-encoding'] !== undefined || + !isNaN(req.headers['content-length']) +} + +/** + * Check if the incoming request contains the "Content-Type" + * header field, and it contains any of the give mime `type`s. + * If there is no request body, `null` is returned. + * If there is no content type, `false` is returned. + * Otherwise, it returns the first `type` that matches. + * + * Examples: + * + * // With Content-Type: text/html; charset=utf-8 + * this.is('html'); // => 'html' + * this.is('text/html'); // => 'text/html' + * this.is('text/*', 'application/json'); // => 'text/html' + * + * // When Content-Type is application/json + * this.is('json', 'urlencoded'); // => 'json' + * this.is('application/json'); // => 'application/json' + * this.is('html', 'application/*'); // => 'application/json' + * + * this.is('html'); // => false + * + * @param {String|Array} types... + * @return {String|false|null} + * @public + */ + +function typeofrequest (req, types_) { + var types = types_ + + // no body + if (!hasbody(req)) { + return null + } + + // support flattened arguments + if (arguments.length > 2) { + types = new Array(arguments.length - 1) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i + 1] + } + } + + // request content type + var value = req.headers['content-type'] + + return typeis(value, types) +} + +/** + * Normalize a mime type. + * If it's a shorthand, expand it to a valid mime type. + * + * In general, you probably want: + * + * var type = is(req, ['urlencoded', 'json', 'multipart']); + * + * Then use the appropriate body parsers. + * These three are the most common request body types + * and are thus ensured to work. + * + * @param {String} type + * @private + */ + +function normalize (type) { + if (typeof type !== 'string') { + // invalid type + return false + } + + switch (type) { + case 'urlencoded': + return 'application/x-www-form-urlencoded' + case 'multipart': + return 'multipart/*' + } + + if (type[0] === '+') { + // "+json" -> "*/*+json" expando + return '*/*' + type + } + + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if `expected` mime type + * matches `actual` mime type with + * wildcard and +suffix support. 
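+ * For example, `application/*+json` matches `application/vnd.api+json`.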
+ * + * @param {String} expected + * @param {String} actual + * @return {Boolean} + * @private + */ + +function mimeMatch (expected, actual) { + // invalid type + if (expected === false) { + return false + } + + // split types + var actualParts = actual.split('/') + var expectedParts = expected.split('/') + + // invalid format + if (actualParts.length !== 2 || expectedParts.length !== 2) { + return false + } + + // validate type + if (expectedParts[0] !== '*' && expectedParts[0] !== actualParts[0]) { + return false + } + + // validate suffix wildcard + if (expectedParts[1].substr(0, 2) === '*+') { + return expectedParts[1].length <= actualParts[1].length + 1 && + expectedParts[1].substr(1) === actualParts[1].substr(1 - expectedParts[1].length) + } + + // validate subtype + if (expectedParts[1] !== '*' && expectedParts[1] !== actualParts[1]) { + return false + } + + return true +} + +/** + * Normalize a type and remove parameters. + * + * @param {string} value + * @return {string} + * @private + */ + +function normalizeType (value) { + // parse the type + var type = typer.parse(value) + + // remove the parameters + type.parameters = undefined + + // reformat it + return typer.format(type) +} + +/** + * Try to normalize a type and remove parameters. + * + * @param {string} value + * @return {string} + * @private + */ + +function tryNormalizeType (value) { + try { + return normalizeType(value) + } catch (err) { + return null + } +} diff --git a/node_modules/type-is/package.json b/node_modules/type-is/package.json new file mode 100644 index 0000000..877045d --- /dev/null +++ b/node_modules/type-is/package.json @@ -0,0 +1,42 @@ +{ + "name": "type-is", + "description": "Infer the content-type of a request.", + "version": "1.6.13", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/type-is", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.11" + }, + "devDependencies": { + "eslint": "2.10.2", + "eslint-config-standard": "5.3.1", + "eslint-plugin-promise": "1.1.0", + "eslint-plugin-standard": "1.3.2", + "istanbul": "0.4.3", + "mocha": "1.21.5" + }, + "engines": { + "node": ">= 0.6" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "scripts": { + "lint": "eslint **/*.js", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + }, + "keywords": [ + "content", + "type", + "checking" + ] +} diff --git a/node_modules/uglify-js/LICENSE b/node_modules/uglify-js/LICENSE new file mode 100644 index 0000000..dd7706f --- /dev/null +++ b/node_modules/uglify-js/LICENSE @@ -0,0 +1,29 @@ +UglifyJS is released under the BSD license: + +Copyright 2012-2013 (c) Mihai Bazon + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF +THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/node_modules/uglify-js/README.md b/node_modules/uglify-js/README.md new file mode 100644 index 0000000..2ab9de6 --- /dev/null +++ b/node_modules/uglify-js/README.md @@ -0,0 +1,848 @@ +UglifyJS 2 +========== +[![Build Status](https://travis-ci.org/mishoo/UglifyJS2.svg)](https://travis-ci.org/mishoo/UglifyJS2) + +UglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit. + +This page documents the command line utility. For +[API and internals documentation see my website](http://lisperator.net/uglifyjs/). +There's also an +[in-browser online demo](http://lisperator.net/uglifyjs/#demo) (for Firefox, +Chrome and probably Safari). + +Install +------- + +First make sure you have installed the latest version of [node.js](http://nodejs.org/) +(You may need to restart your computer after this step). + +From NPM for use as a command line app: + + npm install uglify-js -g + +From NPM for programmatic use: + + npm install uglify-js + +From Git: + + git clone git://github.com/mishoo/UglifyJS2.git + cd UglifyJS2 + npm link . + +Usage +----- + + uglifyjs [input files] [options] + +UglifyJS2 can take multiple input files. It's recommended that you pass the +input files first, then pass the options. UglifyJS will parse input files +in sequence and apply any compression options. The files are parsed in the +same global scope, that is, a reference from a file to some +variable/function declared in another file will be matched properly. + +If you want to read from STDIN instead, pass a single dash instead of input +files. + +If you wish to pass your options before the input files, separate the two with +a double dash to prevent input files being used as option arguments: + + uglifyjs --compress --mangle -- input.js + +The available options are: + +``` + --source-map Specify an output file where to generate source + map. + --source-map-root The path to the original source to be included + in the source map. + --source-map-url The path to the source map to be added in //# + sourceMappingURL. Defaults to the value passed + with --source-map. + --source-map-include-sources Pass this flag if you want to include the + content of source files in the source map as + sourcesContent property. + --in-source-map Input source map, useful if you're compressing + JS that was generated from some other original + code. + --screw-ie8 Pass this flag if you don't care about full + compliance with Internet Explorer 6-8 quirks + (by default UglifyJS will try to be IE-proof). + --expr Parse a single expression, rather than a + program (for parsing JSON) + -p, --prefix Skip prefix for original filenames that appear + in source maps. For example -p 3 will drop 3 + directories from file names and ensure they are + relative paths. 
You can also specify -p + relative, which will make UglifyJS figure out + itself the relative paths between original + sources, the source map and the output file. + -o, --output Output file (default STDOUT). + -b, --beautify Beautify output/specify output options. + -m, --mangle Mangle names/pass mangler options. + -r, --reserved Reserved names to exclude from mangling. + -c, --compress Enable compressor/pass compressor options. Pass + options like -c + hoist_vars=false,if_return=false. Use -c with + no argument to use the default compression + options. + -d, --define Global definitions + -e, --enclose Embed everything in a big function, with a + configurable parameter/argument list. + --comments Preserve copyright comments in the output. By + default this works like Google Closure, keeping + JSDoc-style comments that contain "@license" or + "@preserve". You can optionally pass one of the + following arguments to this flag: + - "all" to keep all comments + - a valid JS regexp (needs to start with a + slash) to keep only comments that match. + Note that currently not *all* comments can be + kept when compression is on, because of dead + code removal or cascading statements into + sequences. + --preamble Preamble to prepend to the output. You can use + this to insert a comment, for example for + licensing information. This will not be + parsed, but the source map will adjust for its + presence. + --stats Display operations run time on STDERR. + --acorn Use Acorn for parsing. + --spidermonkey Assume input files are SpiderMonkey AST format + (as JSON). + --self Build itself (UglifyJS2) as a library (implies + --wrap=UglifyJS --export-all) + --wrap Embed everything in a big function, making the + “exports” and “global” variables available. You + need to pass an argument to this option to + specify the name that your module will take + when included in, say, a browser. + --export-all Only used when --wrap, this tells UglifyJS to + add code to automatically export all globals. + --lint Display some scope warnings + -v, --verbose Verbose + -V, --version Print version number and exit. + --noerr Don't throw an error for unknown options in -c, + -b or -m. + --bare-returns Allow return outside of functions. Useful when + minifying CommonJS modules and Userscripts that + may be anonymous function wrapped (IIFE) by the + .user.js engine `caller`. + --keep-fnames Do not mangle/drop function names. Useful for + code relying on Function.prototype.name. + --reserved-file File containing reserved names + --reserve-domprops Make (most?) DOM properties reserved for + --mangle-props + --mangle-props Mangle property names (default `0`). Set to + `true` or `1` to mangle all property names. Set + to `unquoted` or `2` to only mangle unquoted + property names. Mode `2` also enables the + `keep_quoted_props` beautifier option to + preserve the quotes around property names and + disables the `properties` compressor option to + prevent rewriting quoted properties with dot + notation. You can override these by setting + them explicitly on the command line. + --mangle-regex Only mangle property names matching the regex + --name-cache File to hold mangled names mappings + --pure-funcs List of functions that can be safely removed if + their return value is not used [array] +``` + +Specify `--output` (`-o`) to declare the output file. Otherwise the output +goes to STDOUT. + +## Source map options + +UglifyJS2 can generate a source map file, which is highly useful for +debugging your compressed JavaScript. 
To get a source map, pass +`--source-map output.js.map` (full path to the file where you want the +source map dumped). + +Additionally you might need `--source-map-root` to pass the URL where the +original files can be found. In case you are passing full paths to input +files to UglifyJS, you can use `--prefix` (`-p`) to specify the number of +directories to drop from the path prefix when declaring files in the source +map. + +For example: + + uglifyjs /home/doe/work/foo/src/js/file1.js \ + /home/doe/work/foo/src/js/file2.js \ + -o foo.min.js \ + --source-map foo.min.js.map \ + --source-map-root http://foo.com/src \ + -p 5 -c -m + +The above will compress and mangle `file1.js` and `file2.js`, will drop the +output in `foo.min.js` and the source map in `foo.min.js.map`. The source +mapping will refer to `http://foo.com/src/js/file1.js` and +`http://foo.com/src/js/file2.js` (in fact it will list `http://foo.com/src` +as the source map root, and the original files as `js/file1.js` and +`js/file2.js`). + +### Composed source map + +When you're compressing JS code that was output by a compiler such as +CoffeeScript, mapping to the JS code won't be too helpful. Instead, you'd +like to map back to the original code (i.e. CoffeeScript). UglifyJS has an +option to take an input source map. Assuming you have a mapping from +CoffeeScript → compiled JS, UglifyJS can generate a map from CoffeeScript → +compressed JS by mapping every token in the compiled JS to its original +location. + +To use this feature you need to pass `--in-source-map +/path/to/input/source.map`. Normally the input source map should also point +to the file containing the generated JS, so if that's correct you can omit +input files from the command line. + +## Mangler options + +To enable the mangler you need to pass `--mangle` (`-m`). The following +(comma-separated) options are supported: + +- `toplevel` — mangle names declared in the toplevel scope (disabled by + default). + +- `eval` — mangle names visible in scopes where `eval` or `with` are used + (disabled by default). + +When mangling is enabled but you want to prevent certain names from being +mangled, you can declare those names with `--reserved` (`-r`) — pass a +comma-separated list of names. For example: + + uglifyjs ... -m -r '$,require,exports' + +to prevent the `require`, `exports` and `$` names from being changed. + +### Mangling property names (`--mangle-props`) + +**Note:** this will probably break your code. Mangling property names is a +separate step, different from variable name mangling. Pass +`--mangle-props`. It will mangle all properties that are seen in some +object literal, or that are assigned to. For example: + +```js +var x = { + foo: 1 +}; + +x.bar = 2; +x["baz"] = 3; +x[condition ? "moo" : "boo"] = 4; +console.log(x.something()); +``` + +In the above code, `foo`, `bar`, `baz`, `moo` and `boo` will be replaced +with single characters, while `something()` will be left as is. + +In order for this to be of any use, we should avoid mangling standard JS +names. For instance, if your code would contain `x.length = 10`, then +`length` becomes a candidate for mangling and it will be mangled throughout +the code, regardless if it's being used as part of your own objects or +accessing an array's length. To avoid that, you can use `--reserved-file` +to pass a filename that should contain the names to be excluded from +mangling. This file can be used both for excluding variable names and +property names. 
It could look like this, for example: + +```js +{ + "vars": [ "define", "require", ... ], + "props": [ "length", "prototype", ... ] +} +``` + +`--reserved-file` can be an array of file names (either a single +comma-separated argument, or you can pass multiple `--reserved-file` +arguments) — in this case it will exclude names from all those files. + +A default exclusion file is provided in `tools/domprops.json` which should +cover most standard JS and DOM properties defined in various browsers. Pass +`--reserve-domprops` to read that in. + +You can also use a regular expression to define which property names should be +mangled. For example, `--mangle-regex="/^_/"` will only mangle property names +that start with an underscore. + +When you compress multiple files using this option, in order for them to +work together in the end we need to ensure somehow that one property gets +mangled to the same name in all of them. For this, pass `--name-cache +filename.json` and UglifyJS will maintain these mappings in a file which can +then be reused. It should be initially empty. Example: + +``` +rm -f /tmp/cache.json # start fresh +uglifyjs file1.js file2.js --mangle-props --name-cache /tmp/cache.json -o part1.js +uglifyjs file3.js file4.js --mangle-props --name-cache /tmp/cache.json -o part2.js +``` + +Now, `part1.js` and `part2.js` will be consistent with each other in terms +of mangled property names. + +Using the name cache is not necessary if you compress all your files in a +single call to UglifyJS. + +## Compressor options + +You need to pass `--compress` (`-c`) to enable the compressor. Optionally +you can pass a comma-separated list of options. Options are in the form +`foo=bar`, or just `foo` (the latter implies a boolean option that you want +to set `true`; it's effectively a shortcut for `foo=true`). + +- `sequences` -- join consecutive simple statements using the comma operator + +- `properties` -- rewrite property access using the dot notation, for + example `foo["bar"] → foo.bar` + +- `dead_code` -- remove unreachable code + +- `drop_debugger` -- remove `debugger;` statements + +- `unsafe` (default: false) -- apply "unsafe" transformations (discussion below) + +- `unsafe_comps` (default: false) -- Reverse `<` and `<=` to `>` and `>=` to + allow improved compression. This might be unsafe when an at least one of two + operands is an object with computed values due the use of methods like `get`, + or `valueOf`. This could cause change in execution order after operands in the + comparison are switching. Compression only works if both `comparisons` and + `unsafe_comps` are both set to true. + +- `conditionals` -- apply optimizations for `if`-s and conditional + expressions + +- `comparisons` -- apply certain optimizations to binary nodes, for example: + `!(a <= b) → a > b` (only when `unsafe_comps`), attempts to negate binary + nodes, e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc. + +- `evaluate` -- attempt to evaluate constant expressions + +- `booleans` -- various optimizations for boolean context, for example `!!a + ? b : c → a ? 
+## Compressor options
+
+You need to pass `--compress` (`-c`) to enable the compressor. Optionally
+you can pass a comma-separated list of options. Options are in the form
+`foo=bar`, or just `foo` (the latter implies a boolean option that you want
+to set `true`; it's effectively a shortcut for `foo=true`).
+
+- `sequences` -- join consecutive simple statements using the comma operator
+
+- `properties` -- rewrite property access using the dot notation, for
+  example `foo["bar"] → foo.bar`
+
+- `dead_code` -- remove unreachable code
+
+- `drop_debugger` -- remove `debugger;` statements
+
+- `unsafe` (default: false) -- apply "unsafe" transformations (discussion below)
+
+- `unsafe_comps` (default: false) -- reverse `<` and `<=` to `>` and `>=` to
+  allow improved compression. This might be unsafe when at least one of the
+  two operands is an object with computed values, due to the use of methods
+  like `get` or `valueOf`; swapping the operands can then change the order in
+  which they are evaluated. This transformation is only applied when both
+  `comparisons` and `unsafe_comps` are set to `true`.
+
+- `conditionals` -- apply optimizations for `if`-s and conditional
+  expressions
+
+- `comparisons` -- apply certain optimizations to binary nodes, for example:
+  `!(a <= b) → a > b` (only when `unsafe_comps`), attempts to negate binary
+  nodes, e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc.
+
+- `evaluate` -- attempt to evaluate constant expressions
+
+- `booleans` -- various optimizations for boolean context, for example
+  `!!a ? b : c → a ? b : c`
+
+- `loops` -- optimizations for `do`, `while` and `for` loops when we can
+  statically determine the condition
+
+- `unused` -- drop unreferenced functions and variables
+
+- `hoist_funs` -- hoist function declarations
+
+- `hoist_vars` (default: false) -- hoist `var` declarations (this is `false`
+  by default because it seems to increase the size of the output in general)
+
+- `if_return` -- optimizations for if/return and if/continue
+
+- `join_vars` -- join consecutive `var` statements
+
+- `cascade` -- small optimization for sequences, transform `x, x` into `x`
+  and `x = something(), x` into `x = something()`
+
+- `collapse_vars` -- default `false`. Collapse single-use `var` and `const`
+  definitions when possible.
+
+- `warnings` -- display warnings when dropping unreachable code or unused
+  declarations etc.
+
+- `negate_iife` -- negate "Immediately-Called Function Expressions"
+  where the return value is discarded, to avoid the parens that the
+  code generator would insert.
+
+- `pure_getters` -- the default is `false`. If you pass `true` for
+  this, UglifyJS will assume that object property access
+  (e.g. `foo.bar` or `foo["bar"]`) doesn't have any side effects.
+
+- `pure_funcs` -- default `null`. You can pass an array of names and
+  UglifyJS will assume that those functions do not produce side
+  effects. DANGER: will not check if the name is redefined in scope.
+  For example, take `var q = Math.floor(a/b)`: if the variable `q` is
+  not used elsewhere, UglifyJS will drop it, but will still keep the
+  `Math.floor(a/b)` call, not knowing what it does. You can pass
+  `pure_funcs: [ 'Math.floor' ]` to let it know that this function
+  won't produce any side effects, in which case the whole statement
+  would get discarded. The current implementation adds some overhead
+  (compression will be slower).
+
+- `drop_console` -- default `false`. Pass `true` to discard calls to
+  `console.*` functions.
+
+- `keep_fargs` -- default `true`. Prevents the
+  compressor from discarding unused function arguments. You need this
+  for code which relies on `Function.length`.
+
+- `keep_fnames` -- default `false`. Pass `true` to prevent the
+  compressor from mangling/discarding function names. Useful for code relying on
+  `Function.prototype.name`.
+
+- `passes` -- default `1`. Number of times to run compress. Use an
+  integer argument larger than 1 to further reduce code size in some cases.
+  Note: raising the number of passes will also increase compression time.
+
+### The `unsafe` option
+
+It enables some transformations that *might* break code logic in certain
+contrived cases, but should be fine for most code. You might want to try it
+on your own code; it should reduce the minified size. Here's what happens
+when this flag is on:
+
+- `new Array(1, 2, 3)` or `Array(1, 2, 3)` → `[ 1, 2, 3 ]`
+- `new Object()` → `{}`
+- `String(exp)` or `exp.toString()` → `"" + exp`
+- `new Object/RegExp/Function/Error/Array (...)` → we discard the `new`
+- `typeof foo == "undefined"` → `foo === void 0`
+- `void 0` → `undefined` (if there is a variable named "undefined" in
+  scope; we do it because the variable name will be mangled, typically
+  reduced to a single character)
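+As a minimal programmatic sketch of the same idea (the input string and the
+use of `fromString` here are illustrative, assuming the bundled uglify-js 2.x
+API), you could run the compressor with `unsafe` enabled like this:
+
+```js
+var UglifyJS = require("uglify-js");
+
+// `fromString: true` lets minify() take code directly instead of file paths.
+var result = UglifyJS.minify(
+    "var a = new Array(1, 2, 3); console.log(String(a));",
+    {
+        fromString: true,
+        compress: { unsafe: true } // enables the transformations listed above
+    }
+);
+
+console.log(result.code); // the Array()/String() calls are rewritten to shorter forms
+```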
+### Conditional compilation
+
+You can use the `--define` (`-d`) switch in order to declare global
+variables that UglifyJS will assume to be constants (unless defined in
+scope). For example, if you pass `--define DEBUG=false` then, coupled with
+dead code removal, UglifyJS will discard the following from the output:
+```javascript
+if (DEBUG) {
+    console.log("debug stuff");
+}
+```
+
+UglifyJS will warn about the condition being always false and about dropping
+unreachable code; for now there is no option to turn off only this specific
+warning, but you can pass `warnings=false` to turn off *all* warnings.
+
+Another way of doing that is to declare your globals as constants in a
+separate file and include it in the build. For example, you can have a
+`build/defines.js` file with the following:
+```javascript
+const DEBUG = false;
+const PRODUCTION = true;
+// Alternative for environments that don't support `const`
+/** @const */ var STAGING = false;
+// etc.
+```
+
+and build your code like this:
+
+    uglifyjs build/defines.js js/foo.js js/bar.js... -c
+
+UglifyJS will notice the constants and, since they cannot be altered, it
+will evaluate references to them to the value itself and drop unreachable
+code as usual. The build will contain the `const` declarations if you use
+them. If you are targeting < ES6 environments, use `/** @const */ var`.
+
+#### Conditional compilation, API
+
+You can also use conditional compilation via the programmatic API. The
+difference is that the property name is `global_defs` and it is a compressor
+property:
+
+```js
+uglifyJS.minify([ "input.js"], {
+    compress: {
+        dead_code: true,
+        global_defs: {
+            DEBUG: false
+        }
+    }
+});
+```
+
+## Beautifier options
+
+The code generator tries to output the shortest code possible by default. In
+case you want beautified output, pass `--beautify` (`-b`). Optionally you
+can pass additional arguments that control the code output:
+
+- `beautify` (default `true`) -- whether to actually beautify the output.
+  Passing `-b` will set this to true, but you might need to pass `-b` even
+  when you want to generate minified code, in order to specify additional
+  arguments, so you can use `-b beautify=false` to override it.
+- `indent-level` (default 4) +- `indent-start` (default 0) -- prefix all lines by that many spaces +- `quote-keys` (default `false`) -- pass `true` to quote all keys in literal + objects +- `space-colon` (default `true`) -- insert a space after the colon signs +- `ascii-only` (default `false`) -- escape Unicode characters in strings and + regexps (affects directives with non-ascii characters becoming invalid) +- `inline-script` (default `false`) -- escape the slash in occurrences of + ` 0) { + print_error("WARN: Ignoring input files since --self was passed"); + } + files = UglifyJS.FILES; + if (!ARGS.wrap) ARGS.wrap = "UglifyJS"; +} + +var ORIG_MAP = ARGS.in_source_map; + +if (ORIG_MAP) { + ORIG_MAP = JSON.parse(fs.readFileSync(ORIG_MAP)); + if (files.length == 0) { + print_error("INFO: Using file from the input source map: " + ORIG_MAP.file); + files = [ ORIG_MAP.file ]; + } + if (ARGS.source_map_root == null) { + ARGS.source_map_root = ORIG_MAP.sourceRoot; + } +} + +if (files.length == 0) { + files = [ "-" ]; +} + +if (files.indexOf("-") >= 0 && ARGS.source_map) { + print_error("ERROR: Source map doesn't work with input from STDIN"); + process.exit(1); +} + +if (files.filter(function(el){ return el == "-" }).length > 1) { + print_error("ERROR: Can read a single file from STDIN (two or more dashes specified)"); + process.exit(1); +} + +var STATS = {}; +var OUTPUT_FILE = ARGS.o; +var TOPLEVEL = null; +var P_RELATIVE = ARGS.p && ARGS.p == "relative"; +var SOURCES_CONTENT = {}; + +var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({ + file: P_RELATIVE ? path.relative(path.dirname(ARGS.source_map), OUTPUT_FILE) : OUTPUT_FILE, + root: ARGS.source_map_root, + orig: ORIG_MAP, +}) : null; + +OUTPUT_OPTIONS.source_map = SOURCE_MAP; + +try { + var output = UglifyJS.OutputStream(OUTPUT_OPTIONS); + var compressor = COMPRESS && UglifyJS.Compressor(COMPRESS); +} catch(ex) { + if (ex instanceof UglifyJS.DefaultsError) { + print_error(ex.msg); + print_error("Supported options:"); + print_error(sys.inspect(ex.defs)); + process.exit(1); + } +} + +async.eachLimit(files, 1, function (file, cb) { + read_whole_file(file, function (err, code) { + if (err) { + print_error("ERROR: can't read file: " + file); + process.exit(1); + } + if (ARGS.p != null) { + if (P_RELATIVE) { + file = path.relative(path.dirname(ARGS.source_map), file).replace(/\\/g, '/'); + } else { + var p = parseInt(ARGS.p, 10); + if (!isNaN(p)) { + file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/"); + } + } + } + SOURCES_CONTENT[file] = code; + time_it("parse", function(){ + if (ARGS.spidermonkey) { + var program = JSON.parse(code); + if (!TOPLEVEL) TOPLEVEL = program; + else TOPLEVEL.body = TOPLEVEL.body.concat(program.body); + } + else if (ARGS.acorn) { + TOPLEVEL = acorn.parse(code, { + locations : true, + sourceFile : file, + program : TOPLEVEL + }); + } + else { + try { + TOPLEVEL = UglifyJS.parse(code, { + filename : file, + toplevel : TOPLEVEL, + expression : ARGS.expr, + bare_returns : ARGS.bare_returns, + }); + } catch(ex) { + if (ex instanceof UglifyJS.JS_Parse_Error) { + print_error("Parse error at " + file + ":" + ex.line + "," + ex.col); + print_error(ex.message); + print_error(ex.stack); + process.exit(1); + } + throw ex; + } + }; + }); + cb(); + }); +}, function () { + if (ARGS.acorn || ARGS.spidermonkey) time_it("convert_ast", function(){ + TOPLEVEL = UglifyJS.AST_Node.from_mozilla_ast(TOPLEVEL); + }); + + if (ARGS.wrap != null) { + TOPLEVEL = TOPLEVEL.wrap_commonjs(ARGS.wrap, ARGS.export_all); + } + + if 
(ARGS.enclose != null) { + var arg_parameter_list = ARGS.enclose; + if (arg_parameter_list === true) { + arg_parameter_list = []; + } + else if (!(arg_parameter_list instanceof Array)) { + arg_parameter_list = [arg_parameter_list]; + } + TOPLEVEL = TOPLEVEL.wrap_enclose(arg_parameter_list); + } + + if (ARGS.mangle_props || ARGS.name_cache) (function(){ + var reserved = RESERVED ? RESERVED.props : null; + var cache = readNameCache("props"); + var regex; + + try { + regex = ARGS.mangle_regex ? extractRegex(ARGS.mangle_regex) : null; + } catch (e) { + print_error("ERROR: Invalid --mangle-regex: " + e.message); + process.exit(1); + } + + TOPLEVEL = UglifyJS.mangle_properties(TOPLEVEL, { + reserved : reserved, + cache : cache, + only_cache : !ARGS.mangle_props, + regex : regex, + ignore_quoted : ARGS.mangle_props == 2 + }); + writeNameCache("props", cache); + })(); + + var SCOPE_IS_NEEDED = COMPRESS || MANGLE || ARGS.lint + var TL_CACHE = readNameCache("vars"); + + if (SCOPE_IS_NEEDED) { + time_it("scope", function(){ + TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: TL_CACHE }); + if (ARGS.lint) { + TOPLEVEL.scope_warnings(); + } + }); + } + + if (COMPRESS) { + time_it("squeeze", function(){ + TOPLEVEL = compressor.compress(TOPLEVEL); + }); + } + + if (SCOPE_IS_NEEDED) { + time_it("scope", function(){ + TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: TL_CACHE }); + if (MANGLE && !TL_CACHE) { + TOPLEVEL.compute_char_frequency(MANGLE); + } + }); + } + + if (MANGLE) time_it("mangle", function(){ + MANGLE.cache = TL_CACHE; + TOPLEVEL.mangle_names(MANGLE); + }); + + writeNameCache("vars", TL_CACHE); + + if (ARGS.source_map_include_sources) { + for (var file in SOURCES_CONTENT) { + if (SOURCES_CONTENT.hasOwnProperty(file)) { + SOURCE_MAP.get().setSourceContent(file, SOURCES_CONTENT[file]); + } + } + } + + if (ARGS.dump_spidermonkey_ast) { + print(JSON.stringify(TOPLEVEL.to_mozilla_ast(), null, 2)); + } else { + time_it("generate", function(){ + TOPLEVEL.print(output); + }); + + output = output.get(); + + if (SOURCE_MAP) { + fs.writeFileSync(ARGS.source_map, SOURCE_MAP, "utf8"); + var source_map_url = ARGS.source_map_url || ( + P_RELATIVE + ? 
path.relative(path.dirname(OUTPUT_FILE), ARGS.source_map) + : ARGS.source_map + ); + output += "\n//# sourceMappingURL=" + source_map_url; + } + + if (OUTPUT_FILE) { + fs.writeFileSync(OUTPUT_FILE, output, "utf8"); + } else { + print(output); + } + } + + if (ARGS.stats) { + print_error(UglifyJS.string_template("Timing information (compressed {count} files):", { + count: files.length + })); + for (var i in STATS) if (STATS.hasOwnProperty(i)) { + print_error(UglifyJS.string_template("- {name}: {time}s", { + name: i, + time: (STATS[i] / 1000).toFixed(3) + })); + } + } +}); + +/* -----[ functions ]----- */ + +function normalize(o) { + for (var i in o) if (o.hasOwnProperty(i) && /-/.test(i)) { + o[i.replace(/-/g, "_")] = o[i]; + delete o[i]; + } +} + +function getOptions(flag, constants) { + var x = ARGS[flag]; + if (x == null || x === false) return null; + var ret = {}; + if (x !== "") { + if (Array.isArray(x)) x = x.map(function (v) { return "(" + v + ")"; }).join(", "); + + var ast; + try { + ast = UglifyJS.parse(x, { expression: true }); + } catch(ex) { + if (ex instanceof UglifyJS.JS_Parse_Error) { + print_error("Error parsing arguments for flag `" + flag + "': " + x); + process.exit(1); + } + } + ast.walk(new UglifyJS.TreeWalker(function(node){ + if (node instanceof UglifyJS.AST_Seq) return; // descend + if (node instanceof UglifyJS.AST_Assign) { + var name = node.left.print_to_string({ beautify: false }).replace(/-/g, "_"); + var value = node.right; + if (constants) + value = new Function("return (" + value.print_to_string() + ")")(); + ret[name] = value; + return true; // no descend + } + if (node instanceof UglifyJS.AST_Symbol || node instanceof UglifyJS.AST_Binary) { + var name = node.print_to_string({ beautify: false }).replace(/-/g, "_"); + ret[name] = true; + return true; // no descend + } + print_error(node.TYPE) + print_error("Error parsing arguments for flag `" + flag + "': " + x); + process.exit(1); + })); + } + return ret; +} + +function read_whole_file(filename, cb) { + if (filename == "-") { + var chunks = []; + process.stdin.setEncoding('utf-8'); + process.stdin.on('data', function (chunk) { + chunks.push(chunk); + }).on('end', function () { + cb(null, chunks.join("")); + }); + process.openStdin(); + } else { + fs.readFile(filename, "utf-8", cb); + } +} + +function time_it(name, cont) { + var t1 = new Date().getTime(); + var ret = cont(); + if (ARGS.stats) { + var spent = new Date().getTime() - t1; + if (STATS[name]) STATS[name] += spent; + else STATS[name] = spent; + } + return ret; +} + +function print_error(msg) { + console.error("%s", msg); +} + +function print(txt) { + console.log("%s", txt); +} diff --git a/node_modules/uglify-js/lib/ast.js b/node_modules/uglify-js/lib/ast.js new file mode 100644 index 0000000..42506cb --- /dev/null +++ b/node_modules/uglify-js/lib/ast.js @@ -0,0 +1,1021 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +function DEFNODE(type, props, methods, base) { + if (arguments.length < 4) base = AST_Node; + if (!props) props = []; + else props = props.split(/\s+/); + var self_props = props; + if (base && base.PROPS) + props = props.concat(base.PROPS); + var code = "return function AST_" + type + "(props){ if (props) { "; + for (var i = props.length; --i >= 0;) { + code += "this." + props[i] + " = props." + props[i] + ";"; + } + var proto = base && new base; + if (proto && proto.initialize || (methods && methods.initialize)) + code += "this.initialize();"; + code += "}}"; + var ctor = new Function(code)(); + if (proto) { + ctor.prototype = proto; + ctor.BASE = base; + } + if (base) base.SUBCLASSES.push(ctor); + ctor.prototype.CTOR = ctor; + ctor.PROPS = props || null; + ctor.SELF_PROPS = self_props; + ctor.SUBCLASSES = []; + if (type) { + ctor.prototype.TYPE = ctor.TYPE = type; + } + if (methods) for (i in methods) if (HOP(methods, i)) { + if (/^\$/.test(i)) { + ctor[i.substr(1)] = methods[i]; + } else { + ctor.prototype[i] = methods[i]; + } + } + ctor.DEFMETHOD = function(name, method) { + this.prototype[name] = method; + }; + exports["AST_" + type] = ctor; + return ctor; +}; + +var AST_Token = DEFNODE("Token", "type value line col pos endline endcol endpos nlb comments_before file raw", { +}, null); + +var AST_Node = DEFNODE("Node", "start end", { + clone: function() { + return new this.CTOR(this); + }, + $documentation: "Base class of all AST nodes", + $propdoc: { + start: "[AST_Token] The first token of this node", + end: "[AST_Token] The last token of this node" + }, + _walk: function(visitor) { + return visitor._visit(this); + }, + walk: function(visitor) { + return this._walk(visitor); // not sure the indirection will be any help + } +}, null); + +AST_Node.warn_function = null; +AST_Node.warn = function(txt, props) { + if (AST_Node.warn_function) + AST_Node.warn_function(string_template(txt, props)); +}; + +/* -----[ statements ]----- */ + +var AST_Statement = DEFNODE("Statement", null, { + $documentation: "Base class of all statements", +}); + +var AST_Debugger = DEFNODE("Debugger", null, { + $documentation: "Represents a debugger statement", +}, AST_Statement); + +var AST_Directive = DEFNODE("Directive", "value scope quote", { + $documentation: "Represents a directive, like \"use strict\";", + $propdoc: { + value: "[string] The value of this directive as a plain string (it's not an AST_String!)", + scope: "[AST_Scope/S] The scope that this directive 
affects", + quote: "[string] the original quote character" + }, +}, AST_Statement); + +var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", { + $documentation: "A statement consisting of an expression, i.e. a = 1 + 2", + $propdoc: { + body: "[AST_Node] an expression node (should not be instanceof AST_Statement)" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.body._walk(visitor); + }); + } +}, AST_Statement); + +function walk_body(node, visitor) { + if (node.body instanceof AST_Statement) { + node.body._walk(visitor); + } + else node.body.forEach(function(stat){ + stat._walk(visitor); + }); +}; + +var AST_Block = DEFNODE("Block", "body", { + $documentation: "A body of statements (usually bracketed)", + $propdoc: { + body: "[AST_Statement*] an array of statements" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + walk_body(this, visitor); + }); + } +}, AST_Statement); + +var AST_BlockStatement = DEFNODE("BlockStatement", null, { + $documentation: "A block statement", +}, AST_Block); + +var AST_EmptyStatement = DEFNODE("EmptyStatement", null, { + $documentation: "The empty statement (empty block or simply a semicolon)", + _walk: function(visitor) { + return visitor._visit(this); + } +}, AST_Statement); + +var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", { + $documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`", + $propdoc: { + body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.body._walk(visitor); + }); + } +}, AST_Statement); + +var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", { + $documentation: "Statement with a label", + $propdoc: { + label: "[AST_Label] a label definition" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.label._walk(visitor); + this.body._walk(visitor); + }); + } +}, AST_StatementWithBody); + +var AST_IterationStatement = DEFNODE("IterationStatement", null, { + $documentation: "Internal class. All loops inherit from it." +}, AST_StatementWithBody); + +var AST_DWLoop = DEFNODE("DWLoop", "condition", { + $documentation: "Base class for do/while statements", + $propdoc: { + condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement" + } +}, AST_IterationStatement); + +var AST_Do = DEFNODE("Do", null, { + $documentation: "A `do` statement", + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.body._walk(visitor); + this.condition._walk(visitor); + }); + } +}, AST_DWLoop); + +var AST_While = DEFNODE("While", null, { + $documentation: "A `while` statement", + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.condition._walk(visitor); + this.body._walk(visitor); + }); + } +}, AST_DWLoop); + +var AST_For = DEFNODE("For", "init condition step", { + $documentation: "A `for` statement", + $propdoc: { + init: "[AST_Node?] the `for` initialization code, or null if empty", + condition: "[AST_Node?] the `for` termination clause, or null if empty", + step: "[AST_Node?] 
the `for` update clause, or null if empty" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + if (this.init) this.init._walk(visitor); + if (this.condition) this.condition._walk(visitor); + if (this.step) this.step._walk(visitor); + this.body._walk(visitor); + }); + } +}, AST_IterationStatement); + +var AST_ForIn = DEFNODE("ForIn", "init name object", { + $documentation: "A `for ... in` statement", + $propdoc: { + init: "[AST_Node] the `for/in` initialization code", + name: "[AST_SymbolRef?] the loop variable, only if `init` is AST_Var", + object: "[AST_Node] the object that we're looping through" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.init._walk(visitor); + this.object._walk(visitor); + this.body._walk(visitor); + }); + } +}, AST_IterationStatement); + +var AST_With = DEFNODE("With", "expression", { + $documentation: "A `with` statement", + $propdoc: { + expression: "[AST_Node] the `with` expression" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + this.body._walk(visitor); + }); + } +}, AST_StatementWithBody); + +/* -----[ scope and functions ]----- */ + +var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", { + $documentation: "Base class for all statements introducing a lexical scope", + $propdoc: { + directives: "[string*/S] an array of directives declared in this scope", + variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope", + functions: "[Object/S] like `variables`, but only lists function declarations", + uses_with: "[boolean/S] tells whether this scope uses the `with` statement", + uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`", + parent_scope: "[AST_Scope?/S] link to the parent scope", + enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes", + cname: "[integer/S] current index for mangling variables (used internally by the mangler)", + }, +}, AST_Block); + +var AST_Toplevel = DEFNODE("Toplevel", "globals", { + $documentation: "The toplevel scope", + $propdoc: { + globals: "[Object/S] a map of name -> SymbolDef for all undeclared names", + }, + wrap_enclose: function(arg_parameter_pairs) { + var self = this; + var args = []; + var parameters = []; + + arg_parameter_pairs.forEach(function(pair) { + var splitAt = pair.lastIndexOf(":"); + + args.push(pair.substr(0, splitAt)); + parameters.push(pair.substr(splitAt + 1)); + }); + + var wrapped_tl = "(function(" + parameters.join(",") + "){ '$ORIG'; })(" + args.join(",") + ")"; + wrapped_tl = parse(wrapped_tl); + wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){ + if (node instanceof AST_Directive && node.value == "$ORIG") { + return MAP.splice(self.body); + } + })); + return wrapped_tl; + }, + wrap_commonjs: function(name, export_all) { + var self = this; + var to_export = []; + if (export_all) { + self.figure_out_scope(); + self.walk(new TreeWalker(function(node){ + if (node instanceof AST_SymbolDeclaration && node.definition().global) { + if (!find_if(function(n){ return n.name == node.name }, to_export)) + to_export.push(node); + } + })); + } + var wrapped_tl = "(function(exports, global){ '$ORIG'; '$EXPORTS'; global['" + name + "'] = exports; }({}, (function(){return this}())))"; + wrapped_tl = parse(wrapped_tl); + wrapped_tl = wrapped_tl.transform(new 
TreeTransformer(function before(node){ + if (node instanceof AST_Directive) { + switch (node.value) { + case "$ORIG": + return MAP.splice(self.body); + case "$EXPORTS": + var body = []; + to_export.forEach(function(sym){ + body.push(new AST_SimpleStatement({ + body: new AST_Assign({ + left: new AST_Sub({ + expression: new AST_SymbolRef({ name: "exports" }), + property: new AST_String({ value: sym.name }), + }), + operator: "=", + right: new AST_SymbolRef(sym), + }), + })); + }); + return MAP.splice(body); + } + } + })); + return wrapped_tl; + } +}, AST_Scope); + +var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", { + $documentation: "Base class for functions", + $propdoc: { + name: "[AST_SymbolDeclaration?] the name of this function", + argnames: "[AST_SymbolFunarg*] array of function arguments", + uses_arguments: "[boolean/S] tells whether this function accesses the arguments array" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + if (this.name) this.name._walk(visitor); + this.argnames.forEach(function(arg){ + arg._walk(visitor); + }); + walk_body(this, visitor); + }); + } +}, AST_Scope); + +var AST_Accessor = DEFNODE("Accessor", null, { + $documentation: "A setter/getter function. The `name` property is always null." +}, AST_Lambda); + +var AST_Function = DEFNODE("Function", null, { + $documentation: "A function expression" +}, AST_Lambda); + +var AST_Defun = DEFNODE("Defun", null, { + $documentation: "A function definition" +}, AST_Lambda); + +/* -----[ JUMPS ]----- */ + +var AST_Jump = DEFNODE("Jump", null, { + $documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)" +}, AST_Statement); + +var AST_Exit = DEFNODE("Exit", "value", { + $documentation: "Base class for “exits” (`return` and `throw`)", + $propdoc: { + value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return" + }, + _walk: function(visitor) { + return visitor._visit(this, this.value && function(){ + this.value._walk(visitor); + }); + } +}, AST_Jump); + +var AST_Return = DEFNODE("Return", null, { + $documentation: "A `return` statement" +}, AST_Exit); + +var AST_Throw = DEFNODE("Throw", null, { + $documentation: "A `throw` statement" +}, AST_Exit); + +var AST_LoopControl = DEFNODE("LoopControl", "label", { + $documentation: "Base class for loop control statements (`break` and `continue`)", + $propdoc: { + label: "[AST_LabelRef?] the label, or null if none", + }, + _walk: function(visitor) { + return visitor._visit(this, this.label && function(){ + this.label._walk(visitor); + }); + } +}, AST_Jump); + +var AST_Break = DEFNODE("Break", null, { + $documentation: "A `break` statement" +}, AST_LoopControl); + +var AST_Continue = DEFNODE("Continue", null, { + $documentation: "A `continue` statement" +}, AST_LoopControl); + +/* -----[ IF ]----- */ + +var AST_If = DEFNODE("If", "condition alternative", { + $documentation: "A `if` statement", + $propdoc: { + condition: "[AST_Node] the `if` condition", + alternative: "[AST_Statement?] 
the `else` part, or null if not present" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.condition._walk(visitor); + this.body._walk(visitor); + if (this.alternative) this.alternative._walk(visitor); + }); + } +}, AST_StatementWithBody); + +/* -----[ SWITCH ]----- */ + +var AST_Switch = DEFNODE("Switch", "expression", { + $documentation: "A `switch` statement", + $propdoc: { + expression: "[AST_Node] the `switch` “discriminant”" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + walk_body(this, visitor); + }); + } +}, AST_Block); + +var AST_SwitchBranch = DEFNODE("SwitchBranch", null, { + $documentation: "Base class for `switch` branches", +}, AST_Block); + +var AST_Default = DEFNODE("Default", null, { + $documentation: "A `default` switch branch", +}, AST_SwitchBranch); + +var AST_Case = DEFNODE("Case", "expression", { + $documentation: "A `case` switch branch", + $propdoc: { + expression: "[AST_Node] the `case` expression" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + walk_body(this, visitor); + }); + } +}, AST_SwitchBranch); + +/* -----[ EXCEPTIONS ]----- */ + +var AST_Try = DEFNODE("Try", "bcatch bfinally", { + $documentation: "A `try` statement", + $propdoc: { + bcatch: "[AST_Catch?] the catch block, or null if not present", + bfinally: "[AST_Finally?] the finally block, or null if not present" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + walk_body(this, visitor); + if (this.bcatch) this.bcatch._walk(visitor); + if (this.bfinally) this.bfinally._walk(visitor); + }); + } +}, AST_Block); + +var AST_Catch = DEFNODE("Catch", "argname", { + $documentation: "A `catch` node; only makes sense as part of a `try` statement", + $propdoc: { + argname: "[AST_SymbolCatch] symbol for the exception" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.argname._walk(visitor); + walk_body(this, visitor); + }); + } +}, AST_Block); + +var AST_Finally = DEFNODE("Finally", null, { + $documentation: "A `finally` node; only makes sense as part of a `try` statement" +}, AST_Block); + +/* -----[ VAR/CONST ]----- */ + +var AST_Definitions = DEFNODE("Definitions", "definitions", { + $documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)", + $propdoc: { + definitions: "[AST_VarDef*] array of variable definitions" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.definitions.forEach(function(def){ + def._walk(visitor); + }); + }); + } +}, AST_Statement); + +var AST_Var = DEFNODE("Var", null, { + $documentation: "A `var` statement" +}, AST_Definitions); + +var AST_Const = DEFNODE("Const", null, { + $documentation: "A `const` statement" +}, AST_Definitions); + +var AST_VarDef = DEFNODE("VarDef", "name value", { + $documentation: "A variable declaration; only appears in a AST_Definitions node", + $propdoc: { + name: "[AST_SymbolVar|AST_SymbolConst] name of the variable", + value: "[AST_Node?] 
initializer, or null of there's no initializer" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.name._walk(visitor); + if (this.value) this.value._walk(visitor); + }); + } +}); + +/* -----[ OTHER ]----- */ + +var AST_Call = DEFNODE("Call", "expression args", { + $documentation: "A function call expression", + $propdoc: { + expression: "[AST_Node] expression to invoke as function", + args: "[AST_Node*] array of arguments" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + this.args.forEach(function(arg){ + arg._walk(visitor); + }); + }); + } +}); + +var AST_New = DEFNODE("New", null, { + $documentation: "An object instantiation. Derives from a function call since it has exactly the same properties" +}, AST_Call); + +var AST_Seq = DEFNODE("Seq", "car cdr", { + $documentation: "A sequence expression (two comma-separated expressions)", + $propdoc: { + car: "[AST_Node] first element in sequence", + cdr: "[AST_Node] second element in sequence" + }, + $cons: function(x, y) { + var seq = new AST_Seq(x); + seq.car = x; + seq.cdr = y; + return seq; + }, + $from_array: function(array) { + if (array.length == 0) return null; + if (array.length == 1) return array[0].clone(); + var list = null; + for (var i = array.length; --i >= 0;) { + list = AST_Seq.cons(array[i], list); + } + var p = list; + while (p) { + if (p.cdr && !p.cdr.cdr) { + p.cdr = p.cdr.car; + break; + } + p = p.cdr; + } + return list; + }, + to_array: function() { + var p = this, a = []; + while (p) { + a.push(p.car); + if (p.cdr && !(p.cdr instanceof AST_Seq)) { + a.push(p.cdr); + break; + } + p = p.cdr; + } + return a; + }, + add: function(node) { + var p = this; + while (p) { + if (!(p.cdr instanceof AST_Seq)) { + var cell = AST_Seq.cons(p.cdr, node); + return p.cdr = cell; + } + p = p.cdr; + } + }, + len: function() { + if (this.cdr instanceof AST_Seq) { + return this.cdr.len() + 1; + } else { + return 2; + } + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.car._walk(visitor); + if (this.cdr) this.cdr._walk(visitor); + }); + } +}); + +var AST_PropAccess = DEFNODE("PropAccess", "expression property", { + $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`", + $propdoc: { + expression: "[AST_Node] the “container” expression", + property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node" + } +}); + +var AST_Dot = DEFNODE("Dot", null, { + $documentation: "A dotted property access expression", + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + }); + } +}, AST_PropAccess); + +var AST_Sub = DEFNODE("Sub", null, { + $documentation: "Index-style property access, i.e. `a[\"foo\"]`", + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + this.property._walk(visitor); + }); + } +}, AST_PropAccess); + +var AST_Unary = DEFNODE("Unary", "operator expression", { + $documentation: "Base class for unary expressions", + $propdoc: { + operator: "[string] the operator", + expression: "[AST_Node] expression that this unary operator applies to" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.expression._walk(visitor); + }); + } +}); + +var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, { + $documentation: "Unary prefix expression, i.e. 
`typeof i` or `++i`" +}, AST_Unary); + +var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, { + $documentation: "Unary postfix expression, i.e. `i++`" +}, AST_Unary); + +var AST_Binary = DEFNODE("Binary", "left operator right", { + $documentation: "Binary expression, i.e. `a + b`", + $propdoc: { + left: "[AST_Node] left-hand side expression", + operator: "[string] the operator", + right: "[AST_Node] right-hand side expression" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.left._walk(visitor); + this.right._walk(visitor); + }); + } +}); + +var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", { + $documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`", + $propdoc: { + condition: "[AST_Node]", + consequent: "[AST_Node]", + alternative: "[AST_Node]" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.condition._walk(visitor); + this.consequent._walk(visitor); + this.alternative._walk(visitor); + }); + } +}); + +var AST_Assign = DEFNODE("Assign", null, { + $documentation: "An assignment expression — `a = b + 5`", +}, AST_Binary); + +/* -----[ LITERALS ]----- */ + +var AST_Array = DEFNODE("Array", "elements", { + $documentation: "An array literal", + $propdoc: { + elements: "[AST_Node*] array of elements" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.elements.forEach(function(el){ + el._walk(visitor); + }); + }); + } +}); + +var AST_Object = DEFNODE("Object", "properties", { + $documentation: "An object literal", + $propdoc: { + properties: "[AST_ObjectProperty*] array of properties" + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.properties.forEach(function(prop){ + prop._walk(visitor); + }); + }); + } +}); + +var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", { + $documentation: "Base class for literal object properties", + $propdoc: { + key: "[string] the property name converted to a string for ObjectKeyVal. For setters and getters this is an arbitrary AST_Node.", + value: "[AST_Node] property value. For setters and getters this is an AST_Function." + }, + _walk: function(visitor) { + return visitor._visit(this, function(){ + this.value._walk(visitor); + }); + } +}); + +var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", "quote", { + $documentation: "A key: value object property", + $propdoc: { + quote: "[string] the original quote character" + } +}, AST_ObjectProperty); + +var AST_ObjectSetter = DEFNODE("ObjectSetter", null, { + $documentation: "An object setter property", +}, AST_ObjectProperty); + +var AST_ObjectGetter = DEFNODE("ObjectGetter", null, { + $documentation: "An object getter property", +}, AST_ObjectProperty); + +var AST_Symbol = DEFNODE("Symbol", "scope name thedef", { + $propdoc: { + name: "[string] name of this symbol", + scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)", + thedef: "[SymbolDef/S] the definition of this symbol" + }, + $documentation: "Base class for all symbols", +}); + +var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, { + $documentation: "The name of a property accessor (setter/getter function)" +}, AST_Symbol); + +var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", { + $documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)", + $propdoc: { + init: "[AST_Node*/S] array of initializers for this declaration." 
+ } +}, AST_Symbol); + +var AST_SymbolVar = DEFNODE("SymbolVar", null, { + $documentation: "Symbol defining a variable", +}, AST_SymbolDeclaration); + +var AST_SymbolConst = DEFNODE("SymbolConst", null, { + $documentation: "A constant declaration" +}, AST_SymbolDeclaration); + +var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, { + $documentation: "Symbol naming a function argument", +}, AST_SymbolVar); + +var AST_SymbolDefun = DEFNODE("SymbolDefun", null, { + $documentation: "Symbol defining a function", +}, AST_SymbolDeclaration); + +var AST_SymbolLambda = DEFNODE("SymbolLambda", null, { + $documentation: "Symbol naming a function expression", +}, AST_SymbolDeclaration); + +var AST_SymbolCatch = DEFNODE("SymbolCatch", null, { + $documentation: "Symbol naming the exception in catch", +}, AST_SymbolDeclaration); + +var AST_Label = DEFNODE("Label", "references", { + $documentation: "Symbol naming a label (declaration)", + $propdoc: { + references: "[AST_LoopControl*] a list of nodes referring to this label" + }, + initialize: function() { + this.references = []; + this.thedef = this; + } +}, AST_Symbol); + +var AST_SymbolRef = DEFNODE("SymbolRef", null, { + $documentation: "Reference to some symbol (not definition/declaration)", +}, AST_Symbol); + +var AST_LabelRef = DEFNODE("LabelRef", null, { + $documentation: "Reference to a label symbol", +}, AST_Symbol); + +var AST_This = DEFNODE("This", null, { + $documentation: "The `this` symbol", +}, AST_Symbol); + +var AST_Constant = DEFNODE("Constant", null, { + $documentation: "Base class for all constants", + getValue: function() { + return this.value; + } +}); + +var AST_String = DEFNODE("String", "value quote", { + $documentation: "A string literal", + $propdoc: { + value: "[string] the contents of this string", + quote: "[string] the original quote character" + } +}, AST_Constant); + +var AST_Number = DEFNODE("Number", "value literal", { + $documentation: "A number literal", + $propdoc: { + value: "[number] the numeric value", + literal: "[string] numeric value as string (optional)" + } +}, AST_Constant); + +var AST_RegExp = DEFNODE("RegExp", "value", { + $documentation: "A regexp literal", + $propdoc: { + value: "[RegExp] the actual regexp" + } +}, AST_Constant); + +var AST_Atom = DEFNODE("Atom", null, { + $documentation: "Base class for atoms", +}, AST_Constant); + +var AST_Null = DEFNODE("Null", null, { + $documentation: "The `null` atom", + value: null +}, AST_Atom); + +var AST_NaN = DEFNODE("NaN", null, { + $documentation: "The impossible value", + value: 0/0 +}, AST_Atom); + +var AST_Undefined = DEFNODE("Undefined", null, { + $documentation: "The `undefined` value", + value: (function(){}()) +}, AST_Atom); + +var AST_Hole = DEFNODE("Hole", null, { + $documentation: "A hole in an array", + value: (function(){}()) +}, AST_Atom); + +var AST_Infinity = DEFNODE("Infinity", null, { + $documentation: "The `Infinity` value", + value: 1/0 +}, AST_Atom); + +var AST_Boolean = DEFNODE("Boolean", null, { + $documentation: "Base class for booleans", +}, AST_Atom); + +var AST_False = DEFNODE("False", null, { + $documentation: "The `false` atom", + value: false +}, AST_Boolean); + +var AST_True = DEFNODE("True", null, { + $documentation: "The `true` atom", + value: true +}, AST_Boolean); + +/* -----[ TreeWalker ]----- */ + +function TreeWalker(callback) { + this.visit = callback; + this.stack = []; + this.directives = Object.create(null); +}; +TreeWalker.prototype = { + _visit: function(node, descend) { + this.push(node); + var ret = 
this.visit(node, descend ? function(){ + descend.call(node); + } : noop); + if (!ret && descend) { + descend.call(node); + } + this.pop(node); + return ret; + }, + parent: function(n) { + return this.stack[this.stack.length - 2 - (n || 0)]; + }, + push: function (node) { + if (node instanceof AST_Lambda) { + this.directives = Object.create(this.directives); + } else if (node instanceof AST_Directive) { + this.directives[node.value] = this.directives[node.value] ? "up" : true; + } + this.stack.push(node); + }, + pop: function(node) { + this.stack.pop(); + if (node instanceof AST_Lambda) { + this.directives = Object.getPrototypeOf(this.directives); + } + }, + self: function() { + return this.stack[this.stack.length - 1]; + }, + find_parent: function(type) { + var stack = this.stack; + for (var i = stack.length; --i >= 0;) { + var x = stack[i]; + if (x instanceof type) return x; + } + }, + has_directive: function(type) { + var dir = this.directives[type]; + if (dir) return dir; + var node = this.stack[this.stack.length - 1]; + if (node instanceof AST_Scope) { + for (var i = 0; i < node.body.length; ++i) { + var st = node.body[i]; + if (!(st instanceof AST_Directive)) break; + if (st.value == type) return true; + } + } + }, + in_boolean_context: function() { + var stack = this.stack; + var i = stack.length, self = stack[--i]; + while (i > 0) { + var p = stack[--i]; + if ((p instanceof AST_If && p.condition === self) || + (p instanceof AST_Conditional && p.condition === self) || + (p instanceof AST_DWLoop && p.condition === self) || + (p instanceof AST_For && p.condition === self) || + (p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self)) + { + return true; + } + if (!(p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||"))) + return false; + self = p; + } + }, + loopcontrol_target: function(label) { + var stack = this.stack; + if (label) for (var i = stack.length; --i >= 0;) { + var x = stack[i]; + if (x instanceof AST_LabeledStatement && x.label.name == label.name) { + return x.body; + } + } else for (var i = stack.length; --i >= 0;) { + var x = stack[i]; + if (x instanceof AST_Switch || x instanceof AST_IterationStatement) + return x; + } + } +}; diff --git a/node_modules/uglify-js/lib/compress.js b/node_modules/uglify-js/lib/compress.js new file mode 100644 index 0000000..4152bd0 --- /dev/null +++ b/node_modules/uglify-js/lib/compress.js @@ -0,0 +1,2903 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +function Compressor(options, false_by_default) { + if (!(this instanceof Compressor)) + return new Compressor(options, false_by_default); + TreeTransformer.call(this, this.before, this.after); + this.options = defaults(options, { + sequences : !false_by_default, + properties : !false_by_default, + dead_code : !false_by_default, + drop_debugger : !false_by_default, + unsafe : false, + unsafe_comps : false, + conditionals : !false_by_default, + comparisons : !false_by_default, + evaluate : !false_by_default, + booleans : !false_by_default, + loops : !false_by_default, + unused : !false_by_default, + hoist_funs : !false_by_default, + keep_fargs : true, + keep_fnames : false, + hoist_vars : false, + if_return : !false_by_default, + join_vars : !false_by_default, + collapse_vars : false, + cascade : !false_by_default, + side_effects : !false_by_default, + pure_getters : false, + pure_funcs : null, + negate_iife : !false_by_default, + screw_ie8 : false, + drop_console : false, + angular : false, + warnings : true, + global_defs : {}, + passes : 1, + }, true); + this.warnings_produced = {}; +}; + +Compressor.prototype = new TreeTransformer; +merge(Compressor.prototype, { + option: function(key) { return this.options[key] }, + compress: function(node) { + var passes = +this.options.passes || 1; + for (var pass = 0; pass < passes && pass < 3; ++pass) { + if (pass > 0) node.clear_opt_flags(); + node = node.transform(this); + } + return node; + }, + warn: function(text, props) { + if (this.options.warnings) { + // only emit unique warnings + var message = string_template(text, props); + if (!(message in this.warnings_produced)) { + this.warnings_produced[message] = true; + AST_Node.warn.apply(AST_Node, arguments); + } + } + }, + clear_warnings: function() { + this.warnings_produced = {}; + }, + before: function(node, descend, in_list) { + if (node._squeezed) return node; + var was_scope = false; + if (node instanceof AST_Scope) { + node = node.hoist_declarations(this); + was_scope = true; + } + descend(node, this); + node = node.optimize(this); + if (was_scope && node instanceof AST_Scope) { + node.drop_unused(this); + descend(node, this); + } + node._squeezed = true; + return node; + } +}); + +(function(){ + + function OPT(node, optimizer) { + node.DEFMETHOD("optimize", function(compressor){ + var self = this; + if (self._optimized) return self; + if (compressor.has_directive("use asm")) return self; + var opt = optimizer(self, compressor); + opt._optimized = true; + if (opt === self) return opt; + return opt.transform(compressor); + }); + }; + + OPT(AST_Node, function(self, compressor){ + return self; + }); + + AST_Node.DEFMETHOD("equivalent_to", function(node){ + // XXX: this is a rather expensive way to test two node's equivalence: + return this.print_to_string() == node.print_to_string(); + }); + + AST_Node.DEFMETHOD("clear_opt_flags", function(){ + this.walk(new TreeWalker(function(node){ + if 
(!(node instanceof AST_Directive || node instanceof AST_Constant)) { + node._squeezed = false; + node._optimized = false; + } + })); + }); + + function make_node(ctor, orig, props) { + if (!props) props = {}; + if (orig) { + if (!props.start) props.start = orig.start; + if (!props.end) props.end = orig.end; + } + return new ctor(props); + }; + + function make_node_from_constant(compressor, val, orig) { + // XXX: WIP. + // if (val instanceof AST_Node) return val.transform(new TreeTransformer(null, function(node){ + // if (node instanceof AST_SymbolRef) { + // var scope = compressor.find_parent(AST_Scope); + // var def = scope.find_variable(node); + // node.thedef = def; + // return node; + // } + // })).transform(compressor); + + if (val instanceof AST_Node) return val.transform(compressor); + switch (typeof val) { + case "string": + return make_node(AST_String, orig, { + value: val + }).optimize(compressor); + case "number": + if (isNaN(val)) { + return make_node(AST_NaN, orig); + } + + if ((1 / val) < 0) { + return make_node(AST_UnaryPrefix, orig, { + operator: "-", + expression: make_node(AST_Number, null, { value: -val }) + }); + } + + return make_node(AST_Number, orig, { value: val }).optimize(compressor); + case "boolean": + return make_node(val ? AST_True : AST_False, orig).optimize(compressor); + case "undefined": + return make_node(AST_Undefined, orig).optimize(compressor); + default: + if (val === null) { + return make_node(AST_Null, orig, { value: null }).optimize(compressor); + } + if (val instanceof RegExp) { + return make_node(AST_RegExp, orig, { value: val }).optimize(compressor); + } + throw new Error(string_template("Can't handle constant of type: {type}", { + type: typeof val + })); + } + }; + + // we shouldn't compress (1,func)(something) to + // func(something) because that changes the meaning of + // the func (becomes lexical instead of global). + function maintain_this_binding(parent, orig, val) { + if (parent instanceof AST_Call && parent.expression === orig) { + if (val instanceof AST_PropAccess || val instanceof AST_SymbolRef && val.name === "eval") { + return make_node(AST_Seq, orig, { + car: make_node(AST_Number, orig, { + value: 0 + }), + cdr: val + }); + } + } + return val; + } + + function as_statement_array(thing) { + if (thing === null) return []; + if (thing instanceof AST_BlockStatement) return thing.body; + if (thing instanceof AST_EmptyStatement) return []; + if (thing instanceof AST_Statement) return [ thing ]; + throw new Error("Can't convert thing to statement array"); + }; + + function is_empty(thing) { + if (thing === null) return true; + if (thing instanceof AST_EmptyStatement) return true; + if (thing instanceof AST_BlockStatement) return thing.body.length == 0; + return false; + }; + + function loop_body(x) { + if (x instanceof AST_Switch) return x; + if (x instanceof AST_For || x instanceof AST_ForIn || x instanceof AST_DWLoop) { + return (x.body instanceof AST_BlockStatement ? 
x.body : x); + } + return x; + }; + + function tighten_body(statements, compressor) { + var CHANGED, max_iter = 10; + do { + CHANGED = false; + if (compressor.option("angular")) { + statements = process_for_angular(statements); + } + statements = eliminate_spurious_blocks(statements); + if (compressor.option("dead_code")) { + statements = eliminate_dead_code(statements, compressor); + } + if (compressor.option("if_return")) { + statements = handle_if_return(statements, compressor); + } + if (compressor.option("sequences")) { + statements = sequencesize(statements, compressor); + } + if (compressor.option("join_vars")) { + statements = join_consecutive_vars(statements, compressor); + } + if (compressor.option("collapse_vars")) { + statements = collapse_single_use_vars(statements, compressor); + } + } while (CHANGED && max_iter-- > 0); + + if (compressor.option("negate_iife")) { + negate_iifes(statements, compressor); + } + + return statements; + + function collapse_single_use_vars(statements, compressor) { + // Iterate statements backwards looking for a statement with a var/const + // declaration immediately preceding it. Grab the rightmost var definition + // and if it has exactly one reference then attempt to replace its reference + // in the statement with the var value and then erase the var definition. + + var self = compressor.self(); + var var_defs_removed = false; + for (var stat_index = statements.length; --stat_index >= 0;) { + var stat = statements[stat_index]; + if (stat instanceof AST_Definitions) continue; + + // Process child blocks of statement if present. + [stat, stat.body, stat.alternative, stat.bcatch, stat.bfinally].forEach(function(node) { + node && node.body && collapse_single_use_vars(node.body, compressor); + }); + + // The variable definition must precede a statement. + if (stat_index <= 0) break; + var prev_stat_index = stat_index - 1; + var prev_stat = statements[prev_stat_index]; + if (!(prev_stat instanceof AST_Definitions)) continue; + var var_defs = prev_stat.definitions; + if (var_defs == null) continue; + + var var_names_seen = {}; + var side_effects_encountered = false; + var lvalues_encountered = false; + var lvalues = {}; + + // Scan variable definitions from right to left. + for (var var_defs_index = var_defs.length; --var_defs_index >= 0;) { + + // Obtain var declaration and var name with basic sanity check. + var var_decl = var_defs[var_defs_index]; + if (var_decl.value == null) break; + var var_name = var_decl.name.name; + if (!var_name || !var_name.length) break; + + // Bail if we've seen a var definition of same name before. + if (var_name in var_names_seen) break; + var_names_seen[var_name] = true; + + // Only interested in cases with just one reference to the variable. + var def = self.find_variable && self.find_variable(var_name); + if (!def || !def.references || def.references.length !== 1 || var_name == "arguments") { + side_effects_encountered = true; + continue; + } + var ref = def.references[0]; + + // Don't replace ref if eval() or with statement in scope. + if (ref.scope.uses_eval || ref.scope.uses_with) break; + + // Constant single use vars can be replaced in any scope. + if (!(var_decl.value instanceof AST_RegExp) && var_decl.value.is_constant(compressor)) { + var ctt = new TreeTransformer(function(node) { + if (node === ref) + return replace_var(node, ctt.parent(), true); + }); + stat.transform(ctt); + continue; + } + + // Restrict var replacement to constants if side effects encountered. 
+ if (side_effects_encountered |= lvalues_encountered) continue; + + // Non-constant single use vars can only be replaced in same scope. + if (ref.scope !== self) { + side_effects_encountered |= var_decl.value.has_side_effects(compressor); + continue; + } + + // Detect lvalues in var value. + var tw = new TreeWalker(function(node){ + if (node instanceof AST_SymbolRef && is_lvalue(node, tw.parent())) { + lvalues[node.name] = lvalues_encountered = true; + } + }); + var_decl.value.walk(tw); + + // Replace the non-constant single use var in statement if side effect free. + var unwind = false; + var tt = new TreeTransformer( + function preorder(node) { + if (unwind) return node; + var parent = tt.parent(); + if (node instanceof AST_Lambda + || node instanceof AST_Try + || node instanceof AST_With + || node instanceof AST_Case + || node instanceof AST_IterationStatement + || (parent instanceof AST_If && node !== parent.condition) + || (parent instanceof AST_Conditional && node !== parent.condition) + || (parent instanceof AST_Binary + && (parent.operator == "&&" || parent.operator == "||") + && node === parent.right) + || (parent instanceof AST_Switch && node !== parent.expression)) { + return side_effects_encountered = unwind = true, node; + } + }, + function postorder(node) { + if (unwind) return node; + if (node === ref) + return unwind = true, replace_var(node, tt.parent(), false); + if (side_effects_encountered |= node.has_side_effects(compressor)) + return unwind = true, node; + if (lvalues_encountered && node instanceof AST_SymbolRef && node.name in lvalues) { + side_effects_encountered = true; + return unwind = true, node; + } + } + ); + stat.transform(tt); + } + } + + // Remove extraneous empty statments in block after removing var definitions. + // Leave at least one statement in `statements`. + if (var_defs_removed) for (var i = statements.length; --i >= 0;) { + if (statements.length > 1 && statements[i] instanceof AST_EmptyStatement) + statements.splice(i, 1); + } + + return statements; + + function is_lvalue(node, parent) { + return node instanceof AST_SymbolRef && ( + (parent instanceof AST_Assign && node === parent.left) + || (parent instanceof AST_Unary && parent.expression === node + && (parent.operator == "++" || parent.operator == "--"))); + } + function replace_var(node, parent, is_constant) { + if (is_lvalue(node, parent)) return node; + + // Remove var definition and return its value to the TreeTransformer to replace. + var value = maintain_this_binding(parent, node, var_decl.value); + var_decl.value = null; + + var_defs.splice(var_defs_index, 1); + if (var_defs.length === 0) { + statements[prev_stat_index] = make_node(AST_EmptyStatement, self); + var_defs_removed = true; + } + // Further optimize statement after substitution. + stat.clear_opt_flags(); + + compressor.warn("Replacing " + (is_constant ? 
"constant" : "variable") + + " " + var_name + " [{file}:{line},{col}]", node.start); + CHANGED = true; + return value; + } + } + + function process_for_angular(statements) { + function has_inject(comment) { + return /@ngInject/.test(comment.value); + } + function make_arguments_names_list(func) { + return func.argnames.map(function(sym){ + return make_node(AST_String, sym, { value: sym.name }); + }); + } + function make_array(orig, elements) { + return make_node(AST_Array, orig, { elements: elements }); + } + function make_injector(func, name) { + return make_node(AST_SimpleStatement, func, { + body: make_node(AST_Assign, func, { + operator: "=", + left: make_node(AST_Dot, name, { + expression: make_node(AST_SymbolRef, name, name), + property: "$inject" + }), + right: make_array(func, make_arguments_names_list(func)) + }) + }); + } + function check_expression(body) { + if (body && body.args) { + // if this is a function call check all of arguments passed + body.args.forEach(function(argument, index, array) { + var comments = argument.start.comments_before; + // if the argument is function preceded by @ngInject + if (argument instanceof AST_Lambda && comments.length && has_inject(comments[0])) { + // replace the function with an array of names of its parameters and function at the end + array[index] = make_array(argument, make_arguments_names_list(argument).concat(argument)); + } + }); + // if this is chained call check previous one recursively + if (body.expression && body.expression.expression) { + check_expression(body.expression.expression); + } + } + } + return statements.reduce(function(a, stat){ + a.push(stat); + + if (stat.body && stat.body.args) { + check_expression(stat.body); + } else { + var token = stat.start; + var comments = token.comments_before; + if (comments && comments.length > 0) { + var last = comments.pop(); + if (has_inject(last)) { + // case 1: defun + if (stat instanceof AST_Defun) { + a.push(make_injector(stat, stat.name)); + } + else if (stat instanceof AST_Definitions) { + stat.definitions.forEach(function(def) { + if (def.value && def.value instanceof AST_Lambda) { + a.push(make_injector(def.value, def.name)); + } + }); + } + else { + compressor.warn("Unknown statement marked with @ngInject [{file}:{line},{col}]", token); + } + } + } + } + + return a; + }, []); + } + + function eliminate_spurious_blocks(statements) { + var seen_dirs = []; + return statements.reduce(function(a, stat){ + if (stat instanceof AST_BlockStatement) { + CHANGED = true; + a.push.apply(a, eliminate_spurious_blocks(stat.body)); + } else if (stat instanceof AST_EmptyStatement) { + CHANGED = true; + } else if (stat instanceof AST_Directive) { + if (seen_dirs.indexOf(stat.value) < 0) { + a.push(stat); + seen_dirs.push(stat.value); + } else { + CHANGED = true; + } + } else { + a.push(stat); + } + return a; + }, []); + }; + + function handle_if_return(statements, compressor) { + var self = compressor.self(); + var multiple_if_returns = has_multiple_if_returns(statements); + var in_lambda = self instanceof AST_Lambda; + var ret = []; + loop: for (var i = statements.length; --i >= 0;) { + var stat = statements[i]; + switch (true) { + case (in_lambda && stat instanceof AST_Return && !stat.value && ret.length == 0): + CHANGED = true; + // note, ret.length is probably always zero + // because we drop unreachable code before this + // step. nevertheless, it's good to check. 
+ continue loop; + case stat instanceof AST_If: + if (stat.body instanceof AST_Return) { + //--- + // pretty silly case, but: + // if (foo()) return; return; ==> foo(); return; + if (((in_lambda && ret.length == 0) + || (ret[0] instanceof AST_Return && !ret[0].value)) + && !stat.body.value && !stat.alternative) { + CHANGED = true; + var cond = make_node(AST_SimpleStatement, stat.condition, { + body: stat.condition + }); + ret.unshift(cond); + continue loop; + } + //--- + // if (foo()) return x; return y; ==> return foo() ? x : y; + if (ret[0] instanceof AST_Return && stat.body.value && ret[0].value && !stat.alternative) { + CHANGED = true; + stat = stat.clone(); + stat.alternative = ret[0]; + ret[0] = stat.transform(compressor); + continue loop; + } + //--- + // if (foo()) return x; [ return ; ] ==> return foo() ? x : undefined; + if (multiple_if_returns && (ret.length == 0 || ret[0] instanceof AST_Return) + && stat.body.value && !stat.alternative && in_lambda) { + CHANGED = true; + stat = stat.clone(); + stat.alternative = ret[0] || make_node(AST_Return, stat, { + value: make_node(AST_Undefined, stat) + }); + ret[0] = stat.transform(compressor); + continue loop; + } + //--- + // if (foo()) return; [ else x... ]; y... ==> if (!foo()) { x...; y... } + if (!stat.body.value && in_lambda) { + CHANGED = true; + stat = stat.clone(); + stat.condition = stat.condition.negate(compressor); + var body = as_statement_array(stat.alternative).concat(ret); + var funs = extract_functions_from_statement_array(body); + stat.body = make_node(AST_BlockStatement, stat, { + body: body + }); + stat.alternative = null; + ret = funs.concat([ stat.transform(compressor) ]); + continue loop; + } + //--- + // XXX: what was the intention of this case? + // if sequences is not enabled, this can lead to an endless loop (issue #866). + // however, with sequences on this helps producing slightly better output for + // the example code. + if (compressor.option("sequences") + && ret.length == 1 && in_lambda && ret[0] instanceof AST_SimpleStatement + && (!stat.alternative || stat.alternative instanceof AST_SimpleStatement)) { + CHANGED = true; + ret.push(make_node(AST_Return, ret[0], { + value: make_node(AST_Undefined, ret[0]) + }).transform(compressor)); + ret = as_statement_array(stat.alternative).concat(ret); + ret.unshift(stat); + continue loop; + } + } + + var ab = aborts(stat.body); + var lct = ab instanceof AST_LoopControl ? compressor.loopcontrol_target(ab.label) : null; + if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda) + || (ab instanceof AST_Continue && self === loop_body(lct)) + || (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) { + if (ab.label) { + remove(ab.label.thedef.references, ab); + } + CHANGED = true; + var body = as_statement_array(stat.body).slice(0, -1); + stat = stat.clone(); + stat.condition = stat.condition.negate(compressor); + stat.body = make_node(AST_BlockStatement, stat, { + body: as_statement_array(stat.alternative).concat(ret) + }); + stat.alternative = make_node(AST_BlockStatement, stat, { + body: body + }); + ret = [ stat.transform(compressor) ]; + continue loop; + } + + var ab = aborts(stat.alternative); + var lct = ab instanceof AST_LoopControl ? 
compressor.loopcontrol_target(ab.label) : null; + if (ab && ((ab instanceof AST_Return && !ab.value && in_lambda) + || (ab instanceof AST_Continue && self === loop_body(lct)) + || (ab instanceof AST_Break && lct instanceof AST_BlockStatement && self === lct))) { + if (ab.label) { + remove(ab.label.thedef.references, ab); + } + CHANGED = true; + stat = stat.clone(); + stat.body = make_node(AST_BlockStatement, stat.body, { + body: as_statement_array(stat.body).concat(ret) + }); + stat.alternative = make_node(AST_BlockStatement, stat.alternative, { + body: as_statement_array(stat.alternative).slice(0, -1) + }); + ret = [ stat.transform(compressor) ]; + continue loop; + } + + ret.unshift(stat); + break; + default: + ret.unshift(stat); + break; + } + } + return ret; + + function has_multiple_if_returns(statements) { + var n = 0; + for (var i = statements.length; --i >= 0;) { + var stat = statements[i]; + if (stat instanceof AST_If && stat.body instanceof AST_Return) { + if (++n > 1) return true; + } + } + return false; + } + }; + + function eliminate_dead_code(statements, compressor) { + var has_quit = false; + var orig = statements.length; + var self = compressor.self(); + statements = statements.reduce(function(a, stat){ + if (has_quit) { + extract_declarations_from_unreachable_code(compressor, stat, a); + } else { + if (stat instanceof AST_LoopControl) { + var lct = compressor.loopcontrol_target(stat.label); + if ((stat instanceof AST_Break + && lct instanceof AST_BlockStatement + && loop_body(lct) === self) || (stat instanceof AST_Continue + && loop_body(lct) === self)) { + if (stat.label) { + remove(stat.label.thedef.references, stat); + } + } else { + a.push(stat); + } + } else { + a.push(stat); + } + if (aborts(stat)) has_quit = true; + } + return a; + }, []); + CHANGED = statements.length != orig; + return statements; + }; + + function sequencesize(statements, compressor) { + if (statements.length < 2) return statements; + var seq = [], ret = []; + function push_seq() { + seq = AST_Seq.from_array(seq); + if (seq) ret.push(make_node(AST_SimpleStatement, seq, { + body: seq + })); + seq = []; + }; + statements.forEach(function(stat){ + if (stat instanceof AST_SimpleStatement && seqLength(seq) < 2000) { + seq.push(stat.body); + } else { + push_seq(); + ret.push(stat); + } + }); + push_seq(); + ret = sequencesize_2(ret, compressor); + CHANGED = ret.length != statements.length; + return ret; + }; + + function seqLength(a) { + for (var len = 0, i = 0; i < a.length; ++i) { + var stat = a[i]; + if (stat instanceof AST_Seq) { + len += stat.len(); + } else { + len++; + } + } + return len; + }; + + function sequencesize_2(statements, compressor) { + function cons_seq(right) { + ret.pop(); + var left = prev.body; + if (left instanceof AST_Seq) { + left.add(right); + } else { + left = AST_Seq.cons(left, right); + } + return left.transform(compressor); + }; + var ret = [], prev = null; + statements.forEach(function(stat){ + if (prev) { + if (stat instanceof AST_For) { + var opera = {}; + try { + prev.body.walk(new TreeWalker(function(node){ + if (node instanceof AST_Binary && node.operator == "in") + throw opera; + })); + if (stat.init && !(stat.init instanceof AST_Definitions)) { + stat.init = cons_seq(stat.init); + } + else if (!stat.init) { + stat.init = prev.body; + ret.pop(); + } + } catch(ex) { + if (ex !== opera) throw ex; + } + } + else if (stat instanceof AST_If) { + stat.condition = cons_seq(stat.condition); + } + else if (stat instanceof AST_With) { + stat.expression = 
cons_seq(stat.expression); + } + else if (stat instanceof AST_Exit && stat.value) { + stat.value = cons_seq(stat.value); + } + else if (stat instanceof AST_Exit) { + stat.value = cons_seq(make_node(AST_Undefined, stat)); + } + else if (stat instanceof AST_Switch) { + stat.expression = cons_seq(stat.expression); + } + } + ret.push(stat); + prev = stat instanceof AST_SimpleStatement ? stat : null; + }); + return ret; + }; + + function join_consecutive_vars(statements, compressor) { + var prev = null; + return statements.reduce(function(a, stat){ + if (stat instanceof AST_Definitions && prev && prev.TYPE == stat.TYPE) { + prev.definitions = prev.definitions.concat(stat.definitions); + CHANGED = true; + } + else if (stat instanceof AST_For + && prev instanceof AST_Definitions + && (!stat.init || stat.init.TYPE == prev.TYPE)) { + CHANGED = true; + a.pop(); + if (stat.init) { + stat.init.definitions = prev.definitions.concat(stat.init.definitions); + } else { + stat.init = prev; + } + a.push(stat); + prev = stat; + } + else { + prev = stat; + a.push(stat); + } + return a; + }, []); + }; + + function negate_iifes(statements, compressor) { + statements.forEach(function(stat){ + if (stat instanceof AST_SimpleStatement) { + stat.body = (function transform(thing) { + return thing.transform(new TreeTransformer(function(node){ + if (node instanceof AST_New) { + return node; + } + if (node instanceof AST_Call && node.expression instanceof AST_Function) { + return make_node(AST_UnaryPrefix, node, { + operator: "!", + expression: node + }); + } + else if (node instanceof AST_Call) { + node.expression = transform(node.expression); + } + else if (node instanceof AST_Seq) { + node.car = transform(node.car); + } + else if (node instanceof AST_Conditional) { + var expr = transform(node.condition); + if (expr !== node.condition) { + // it has been negated, reverse + node.condition = expr; + var tmp = node.consequent; + node.consequent = node.alternative; + node.alternative = tmp; + } + } + return node; + })); + })(stat.body); + } + }); + }; + + }; + + function extract_functions_from_statement_array(statements) { + var funs = []; + for (var i = statements.length - 1; i >= 0; --i) { + var stat = statements[i]; + if (stat instanceof AST_Defun) { + statements.splice(i, 1); + funs.unshift(stat); + } + } + return funs; + } + + function extract_declarations_from_unreachable_code(compressor, stat, target) { + if (!(stat instanceof AST_Defun)) { + compressor.warn("Dropping unreachable code [{file}:{line},{col}]", stat.start); + } + stat.walk(new TreeWalker(function(node){ + if (node instanceof AST_Definitions) { + compressor.warn("Declarations in unreachable code! 
[{file}:{line},{col}]", node.start); + node.remove_initializers(); + target.push(node); + return true; + } + if (node instanceof AST_Defun) { + target.push(node); + return true; + } + if (node instanceof AST_Scope) { + return true; + } + })); + }; + + /* -----[ boolean/negation helpers ]----- */ + + // methods to determine whether an expression has a boolean result type + (function (def){ + var unary_bool = [ "!", "delete" ]; + var binary_bool = [ "in", "instanceof", "==", "!=", "===", "!==", "<", "<=", ">=", ">" ]; + def(AST_Node, function(){ return false }); + def(AST_UnaryPrefix, function(){ + return member(this.operator, unary_bool); + }); + def(AST_Binary, function(){ + return member(this.operator, binary_bool) || + ( (this.operator == "&&" || this.operator == "||") && + this.left.is_boolean() && this.right.is_boolean() ); + }); + def(AST_Conditional, function(){ + return this.consequent.is_boolean() && this.alternative.is_boolean(); + }); + def(AST_Assign, function(){ + return this.operator == "=" && this.right.is_boolean(); + }); + def(AST_Seq, function(){ + return this.cdr.is_boolean(); + }); + def(AST_True, function(){ return true }); + def(AST_False, function(){ return true }); + })(function(node, func){ + node.DEFMETHOD("is_boolean", func); + }); + + // methods to determine if an expression has a string result type + (function (def){ + def(AST_Node, function(){ return false }); + def(AST_String, function(){ return true }); + def(AST_UnaryPrefix, function(){ + return this.operator == "typeof"; + }); + def(AST_Binary, function(compressor){ + return this.operator == "+" && + (this.left.is_string(compressor) || this.right.is_string(compressor)); + }); + def(AST_Assign, function(compressor){ + return (this.operator == "=" || this.operator == "+=") && this.right.is_string(compressor); + }); + def(AST_Seq, function(compressor){ + return this.cdr.is_string(compressor); + }); + def(AST_Conditional, function(compressor){ + return this.consequent.is_string(compressor) && this.alternative.is_string(compressor); + }); + def(AST_Call, function(compressor){ + return compressor.option("unsafe") + && this.expression instanceof AST_SymbolRef + && this.expression.name == "String" + && this.expression.undeclared(); + }); + })(function(node, func){ + node.DEFMETHOD("is_string", func); + }); + + function best_of(ast1, ast2) { + return ast1.print_to_string().length > + ast2.print_to_string().length + ? ast2 : ast1; + }; + + // methods to evaluate a constant expression + (function (def){ + // The evaluate method returns an array with one or two + // elements. If the node has been successfully reduced to a + // constant, then the second element tells us the value; + // otherwise the second element is missing. The first element + // of the array is always an AST_Node descendant; if + // evaluation was successful it's a node that represents the + // constant; otherwise it's the original or a replacement node. + AST_Node.DEFMETHOD("evaluate", function(compressor){ + if (!compressor.option("evaluate")) return [ this ]; + try { + var val = this._eval(compressor); + return [ best_of(make_node_from_constant(compressor, val, this), this), val ]; + } catch(ex) { + if (ex !== def) throw ex; + return [ this ]; + } + }); + AST_Node.DEFMETHOD("is_constant", function(compressor){ + // Accomodate when compress option evaluate=false + // as well as the common constant expressions !0 and !1 + return this instanceof AST_Constant + || (this instanceof AST_UnaryPrefix && this.operator == "!" 
+ && this.expression instanceof AST_Constant) + || this.evaluate(compressor).length > 1; + }); + // Obtain the constant value of an expression already known to be constant. + // Result only valid iff this.is_constant(compressor) is true. + AST_Node.DEFMETHOD("constant_value", function(compressor){ + // Accomodate when option evaluate=false. + if (this instanceof AST_Constant) return this.value; + // Accomodate the common constant expressions !0 and !1 when option evaluate=false. + if (this instanceof AST_UnaryPrefix + && this.operator == "!" + && this.expression instanceof AST_Constant) { + return !this.expression.value; + } + var result = this.evaluate(compressor) + if (result.length > 1) { + return result[1]; + } + // should never be reached + return undefined; + }); + def(AST_Statement, function(){ + throw new Error(string_template("Cannot evaluate a statement [{file}:{line},{col}]", this.start)); + }); + def(AST_Function, function(){ + // XXX: AST_Function inherits from AST_Scope, which itself + // inherits from AST_Statement; however, an AST_Function + // isn't really a statement. This could byte in other + // places too. :-( Wish JS had multiple inheritance. + throw def; + }); + function ev(node, compressor) { + if (!compressor) throw new Error("Compressor must be passed"); + + return node._eval(compressor); + }; + def(AST_Node, function(){ + throw def; // not constant + }); + def(AST_Constant, function(){ + return this.getValue(); + }); + def(AST_UnaryPrefix, function(compressor){ + var e = this.expression; + switch (this.operator) { + case "!": return !ev(e, compressor); + case "typeof": + // Function would be evaluated to an array and so typeof would + // incorrectly return 'object'. Hence making is a special case. + if (e instanceof AST_Function) return typeof function(){}; + + e = ev(e, compressor); + + // typeof returns "object" or "function" on different platforms + // so cannot evaluate reliably + if (e instanceof RegExp) throw def; + + return typeof e; + case "void": return void ev(e, compressor); + case "~": return ~ev(e, compressor); + case "-": return -ev(e, compressor); + case "+": return +ev(e, compressor); + } + throw def; + }); + def(AST_Binary, function(c){ + var left = this.left, right = this.right, result; + switch (this.operator) { + case "&&" : result = ev(left, c) && ev(right, c); break; + case "||" : result = ev(left, c) || ev(right, c); break; + case "|" : result = ev(left, c) | ev(right, c); break; + case "&" : result = ev(left, c) & ev(right, c); break; + case "^" : result = ev(left, c) ^ ev(right, c); break; + case "+" : result = ev(left, c) + ev(right, c); break; + case "*" : result = ev(left, c) * ev(right, c); break; + case "/" : result = ev(left, c) / ev(right, c); break; + case "%" : result = ev(left, c) % ev(right, c); break; + case "-" : result = ev(left, c) - ev(right, c); break; + case "<<" : result = ev(left, c) << ev(right, c); break; + case ">>" : result = ev(left, c) >> ev(right, c); break; + case ">>>" : result = ev(left, c) >>> ev(right, c); break; + case "==" : result = ev(left, c) == ev(right, c); break; + case "===" : result = ev(left, c) === ev(right, c); break; + case "!=" : result = ev(left, c) != ev(right, c); break; + case "!==" : result = ev(left, c) !== ev(right, c); break; + case "<" : result = ev(left, c) < ev(right, c); break; + case "<=" : result = ev(left, c) <= ev(right, c); break; + case ">" : result = ev(left, c) > ev(right, c); break; + case ">=" : result = ev(left, c) >= ev(right, c); break; + default: + throw def; + } + 
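// Why the guard below exists (illustrative): inside
//     with (obj) { y = (0/0) === x; }
// folding 0/0 to NaN and re-printing it as the identifier `NaN` could be
// unsafe, since `NaN` may resolve to a property of `obj`; so a NaN result is
// not folded whenever a `with` appears in the parent chain.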
if (isNaN(result) && c.find_parent(AST_With)) { + // leave original expression as is + throw def; + } + return result; + }); + def(AST_Conditional, function(compressor){ + return ev(this.condition, compressor) + ? ev(this.consequent, compressor) + : ev(this.alternative, compressor); + }); + def(AST_SymbolRef, function(compressor){ + if (this._evaluating) throw def; + this._evaluating = true; + try { + var d = this.definition(); + if (d && d.constant && d.init) { + return ev(d.init, compressor); + } + } finally { + this._evaluating = false; + } + throw def; + }); + def(AST_Dot, function(compressor){ + if (compressor.option("unsafe") && this.property == "length") { + var str = ev(this.expression, compressor); + if (typeof str == "string") + return str.length; + } + throw def; + }); + })(function(node, func){ + node.DEFMETHOD("_eval", func); + }); + + // method to negate an expression + (function(def){ + function basic_negation(exp) { + return make_node(AST_UnaryPrefix, exp, { + operator: "!", + expression: exp + }); + }; + def(AST_Node, function(){ + return basic_negation(this); + }); + def(AST_Statement, function(){ + throw new Error("Cannot negate a statement"); + }); + def(AST_Function, function(){ + return basic_negation(this); + }); + def(AST_UnaryPrefix, function(){ + if (this.operator == "!") + return this.expression; + return basic_negation(this); + }); + def(AST_Seq, function(compressor){ + var self = this.clone(); + self.cdr = self.cdr.negate(compressor); + return self; + }); + def(AST_Conditional, function(compressor){ + var self = this.clone(); + self.consequent = self.consequent.negate(compressor); + self.alternative = self.alternative.negate(compressor); + return best_of(basic_negation(this), self); + }); + def(AST_Binary, function(compressor){ + var self = this.clone(), op = this.operator; + if (compressor.option("unsafe_comps")) { + switch (op) { + case "<=" : self.operator = ">" ; return self; + case "<" : self.operator = ">=" ; return self; + case ">=" : self.operator = "<" ; return self; + case ">" : self.operator = "<=" ; return self; + } + } + switch (op) { + case "==" : self.operator = "!="; return self; + case "!=" : self.operator = "=="; return self; + case "===": self.operator = "!=="; return self; + case "!==": self.operator = "==="; return self; + case "&&": + self.operator = "||"; + self.left = self.left.negate(compressor); + self.right = self.right.negate(compressor); + return best_of(basic_negation(this), self); + case "||": + self.operator = "&&"; + self.left = self.left.negate(compressor); + self.right = self.right.negate(compressor); + return best_of(basic_negation(this), self); + } + return basic_negation(this); + }); + })(function(node, func){ + node.DEFMETHOD("negate", function(compressor){ + return func.call(this, compressor); + }); + }); + + // determine if expression has side effects + (function(def){ + def(AST_Node, function(compressor){ return true }); + + def(AST_EmptyStatement, function(compressor){ return false }); + def(AST_Constant, function(compressor){ return false }); + def(AST_This, function(compressor){ return false }); + + def(AST_Call, function(compressor){ + var pure = compressor.option("pure_funcs"); + if (!pure) return true; + if (typeof pure == "function") return pure(this); + return pure.indexOf(this.expression.print_to_string()) < 0; + }); + + def(AST_Block, function(compressor){ + for (var i = this.body.length; --i >= 0;) { + if (this.body[i].has_side_effects(compressor)) + return true; + } + return false; + }); + + 
def(AST_SimpleStatement, function(compressor){ + return this.body.has_side_effects(compressor); + }); + def(AST_Defun, function(compressor){ return true }); + def(AST_Function, function(compressor){ return false }); + def(AST_Binary, function(compressor){ + return this.left.has_side_effects(compressor) + || this.right.has_side_effects(compressor); + }); + def(AST_Assign, function(compressor){ return true }); + def(AST_Conditional, function(compressor){ + return this.condition.has_side_effects(compressor) + || this.consequent.has_side_effects(compressor) + || this.alternative.has_side_effects(compressor); + }); + def(AST_Unary, function(compressor){ + return this.operator == "delete" + || this.operator == "++" + || this.operator == "--" + || this.expression.has_side_effects(compressor); + }); + def(AST_SymbolRef, function(compressor){ + return this.global() && this.undeclared(); + }); + def(AST_Object, function(compressor){ + for (var i = this.properties.length; --i >= 0;) + if (this.properties[i].has_side_effects(compressor)) + return true; + return false; + }); + def(AST_ObjectProperty, function(compressor){ + return this.value.has_side_effects(compressor); + }); + def(AST_Array, function(compressor){ + for (var i = this.elements.length; --i >= 0;) + if (this.elements[i].has_side_effects(compressor)) + return true; + return false; + }); + def(AST_Dot, function(compressor){ + if (!compressor.option("pure_getters")) return true; + return this.expression.has_side_effects(compressor); + }); + def(AST_Sub, function(compressor){ + if (!compressor.option("pure_getters")) return true; + return this.expression.has_side_effects(compressor) + || this.property.has_side_effects(compressor); + }); + def(AST_PropAccess, function(compressor){ + return !compressor.option("pure_getters"); + }); + def(AST_Seq, function(compressor){ + return this.car.has_side_effects(compressor) + || this.cdr.has_side_effects(compressor); + }); + })(function(node, func){ + node.DEFMETHOD("has_side_effects", func); + }); + + // tell me if a statement aborts + function aborts(thing) { + return thing && thing.aborts(); + }; + (function(def){ + def(AST_Statement, function(){ return null }); + def(AST_Jump, function(){ return this }); + function block_aborts(){ + var n = this.body.length; + return n > 0 && aborts(this.body[n - 1]); + }; + def(AST_BlockStatement, block_aborts); + def(AST_SwitchBranch, block_aborts); + def(AST_If, function(){ + return this.alternative && aborts(this.body) && aborts(this.alternative) && this; + }); + })(function(node, func){ + node.DEFMETHOD("aborts", func); + }); + + /* -----[ optimizers ]----- */ + + OPT(AST_Directive, function(self, compressor){ + if (compressor.has_directive(self.value) === "up") { + return make_node(AST_EmptyStatement, self); + } + return self; + }); + + OPT(AST_Debugger, function(self, compressor){ + if (compressor.option("drop_debugger")) + return make_node(AST_EmptyStatement, self); + return self; + }); + + OPT(AST_LabeledStatement, function(self, compressor){ + if (self.body instanceof AST_Break + && compressor.loopcontrol_target(self.body.label) === self.body) { + return make_node(AST_EmptyStatement, self); + } + return self.label.references.length == 0 ? 
self.body : self; + }); + + OPT(AST_Block, function(self, compressor){ + self.body = tighten_body(self.body, compressor); + return self; + }); + + OPT(AST_BlockStatement, function(self, compressor){ + self.body = tighten_body(self.body, compressor); + switch (self.body.length) { + case 1: return self.body[0]; + case 0: return make_node(AST_EmptyStatement, self); + } + return self; + }); + + AST_Scope.DEFMETHOD("drop_unused", function(compressor){ + var self = this; + if (compressor.has_directive("use asm")) return self; + if (compressor.option("unused") + && !(self instanceof AST_Toplevel) + && !self.uses_eval + && !self.uses_with + ) { + var in_use = []; + var in_use_ids = {}; // avoid expensive linear scans of in_use + var initializations = new Dictionary(); + // pass 1: find out which symbols are directly used in + // this scope (not in nested scopes). + var scope = this; + var tw = new TreeWalker(function(node, descend){ + if (node !== self) { + if (node instanceof AST_Defun) { + initializations.add(node.name.name, node); + return true; // don't go in nested scopes + } + if (node instanceof AST_Definitions && scope === self) { + node.definitions.forEach(function(def){ + if (def.value) { + initializations.add(def.name.name, def.value); + if (def.value.has_side_effects(compressor)) { + def.value.walk(tw); + } + } + }); + return true; + } + if (node instanceof AST_SymbolRef) { + var node_def = node.definition(); + if (!(node_def.id in in_use_ids)) { + in_use_ids[node_def.id] = true; + in_use.push(node_def); + } + return true; + } + if (node instanceof AST_Scope) { + var save_scope = scope; + scope = node; + descend(); + scope = save_scope; + return true; + } + } + }); + self.walk(tw); + // pass 2: for every used symbol we need to walk its + // initialization code to figure out if it uses other + // symbols (that may not be in_use). 
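// Example of the transitive case handled by this pass: in
//     function f() { var b = 1, a = b; return a; }
// only `a` is referenced directly, but walking a's initializer marks `b`
// as in_use too, so neither declaration gets dropped in pass 3.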
+ for (var i = 0; i < in_use.length; ++i) { + in_use[i].orig.forEach(function(decl){ + // undeclared globals will be instanceof AST_SymbolRef + var init = initializations.get(decl.name); + if (init) init.forEach(function(init){ + var tw = new TreeWalker(function(node){ + if (node instanceof AST_SymbolRef) { + var node_def = node.definition(); + if (!(node_def.id in in_use_ids)) { + in_use_ids[node_def.id] = true; + in_use.push(node_def); + } + } + }); + init.walk(tw); + }); + }); + } + // pass 3: we should drop declarations not in_use + var tt = new TreeTransformer( + function before(node, descend, in_list) { + if (node instanceof AST_Lambda && !(node instanceof AST_Accessor)) { + if (!compressor.option("keep_fargs")) { + for (var a = node.argnames, i = a.length; --i >= 0;) { + var sym = a[i]; + if (sym.unreferenced()) { + a.pop(); + compressor.warn("Dropping unused function argument {name} [{file}:{line},{col}]", { + name : sym.name, + file : sym.start.file, + line : sym.start.line, + col : sym.start.col + }); + } + else break; + } + } + } + if (node instanceof AST_Defun && node !== self) { + if (!(node.name.definition().id in in_use_ids)) { + compressor.warn("Dropping unused function {name} [{file}:{line},{col}]", { + name : node.name.name, + file : node.name.start.file, + line : node.name.start.line, + col : node.name.start.col + }); + return make_node(AST_EmptyStatement, node); + } + return node; + } + if (node instanceof AST_Definitions && !(tt.parent() instanceof AST_ForIn)) { + var def = node.definitions.filter(function(def){ + if (def.name.definition().id in in_use_ids) return true; + var w = { + name : def.name.name, + file : def.name.start.file, + line : def.name.start.line, + col : def.name.start.col + }; + if (def.value && def.value.has_side_effects(compressor)) { + def._unused_side_effects = true; + compressor.warn("Side effects in initialization of unused variable {name} [{file}:{line},{col}]", w); + return true; + } + compressor.warn("Dropping unused variable {name} [{file}:{line},{col}]", w); + return false; + }); + // place uninitialized names at the start + def = mergeSort(def, function(a, b){ + if (!a.value && b.value) return -1; + if (!b.value && a.value) return 1; + return 0; + }); + // for unused names whose initialization has + // side effects, we can cascade the init. code + // into the next one, or next statement. + var side_effects = []; + for (var i = 0; i < def.length;) { + var x = def[i]; + if (x._unused_side_effects) { + side_effects.push(x.value); + def.splice(i, 1); + } else { + if (side_effects.length > 0) { + side_effects.push(x.value); + x.value = AST_Seq.from_array(side_effects); + side_effects = []; + } + ++i; + } + } + if (side_effects.length > 0) { + side_effects = make_node(AST_BlockStatement, node, { + body: [ make_node(AST_SimpleStatement, node, { + body: AST_Seq.from_array(side_effects) + }) ] + }); + } else { + side_effects = null; + } + if (def.length == 0 && !side_effects) { + return make_node(AST_EmptyStatement, node); + } + if (def.length == 0) { + return in_list ? MAP.splice(side_effects.body) : side_effects; + } + node.definitions = def; + if (side_effects) { + side_effects.body.unshift(node); + return in_list ? MAP.splice(side_effects.body) : side_effects; + } + return node; + } + if (node instanceof AST_For) { + descend(node, this); + + if (node.init instanceof AST_BlockStatement) { + // certain combination of unused name + side effect leads to: + // https://github.com/mishoo/UglifyJS2/issues/44 + // that's an invalid AST. 
+ // We fix it at this stage by moving the `var` outside the `for`. + + var body = node.init.body.slice(0, -1); + node.init = node.init.body.slice(-1)[0].body; + body.push(node); + + return in_list ? MAP.splice(body) : make_node(AST_BlockStatement, node, { + body: body + }); + } + } + if (node instanceof AST_Scope && node !== self) + return node; + } + ); + self.transform(tt); + } + }); + + AST_Scope.DEFMETHOD("hoist_declarations", function(compressor){ + var self = this; + if (compressor.has_directive("use asm")) return self; + var hoist_funs = compressor.option("hoist_funs"); + var hoist_vars = compressor.option("hoist_vars"); + if (hoist_funs || hoist_vars) { + var dirs = []; + var hoisted = []; + var vars = new Dictionary(), vars_found = 0, var_decl = 0; + // let's count var_decl first, we seem to waste a lot of + // space if we hoist `var` when there's only one. + self.walk(new TreeWalker(function(node){ + if (node instanceof AST_Scope && node !== self) + return true; + if (node instanceof AST_Var) { + ++var_decl; + return true; + } + })); + hoist_vars = hoist_vars && var_decl > 1; + var tt = new TreeTransformer( + function before(node) { + if (node !== self) { + if (node instanceof AST_Directive) { + dirs.push(node); + return make_node(AST_EmptyStatement, node); + } + if (node instanceof AST_Defun && hoist_funs) { + hoisted.push(node); + return make_node(AST_EmptyStatement, node); + } + if (node instanceof AST_Var && hoist_vars) { + node.definitions.forEach(function(def){ + vars.set(def.name.name, def); + ++vars_found; + }); + var seq = node.to_assignments(); + var p = tt.parent(); + if (p instanceof AST_ForIn && p.init === node) { + if (seq == null) { + var def = node.definitions[0].name; + return make_node(AST_SymbolRef, def, def); + } + return seq; + } + if (p instanceof AST_For && p.init === node) { + return seq; + } + if (!seq) return make_node(AST_EmptyStatement, node); + return make_node(AST_SimpleStatement, node, { + body: seq + }); + } + if (node instanceof AST_Scope) + return node; // to avoid descending in nested scopes + } + } + ); + self = self.transform(tt); + if (vars_found > 0) { + // collect only vars which don't show up in self's arguments list + var defs = []; + vars.each(function(def, name){ + if (self instanceof AST_Lambda + && find_if(function(x){ return x.name == def.name.name }, + self.argnames)) { + vars.del(name); + } else { + def = def.clone(); + def.value = null; + defs.push(def); + vars.set(name, def); + } + }); + if (defs.length > 0) { + // try to merge in assignments + for (var i = 0; i < self.body.length;) { + if (self.body[i] instanceof AST_SimpleStatement) { + var expr = self.body[i].body, sym, assign; + if (expr instanceof AST_Assign + && expr.operator == "=" + && (sym = expr.left) instanceof AST_Symbol + && vars.has(sym.name)) + { + var def = vars.get(sym.name); + if (def.value) break; + def.value = expr.right; + remove(defs, def); + defs.push(def); + self.body.splice(i, 1); + continue; + } + if (expr instanceof AST_Seq + && (assign = expr.car) instanceof AST_Assign + && assign.operator == "=" + && (sym = assign.left) instanceof AST_Symbol + && vars.has(sym.name)) + { + var def = vars.get(sym.name); + if (def.value) break; + def.value = assign.right; + remove(defs, def); + defs.push(def); + self.body[i].body = expr.cdr; + continue; + } + } + if (self.body[i] instanceof AST_EmptyStatement) { + self.body.splice(i, 1); + continue; + } + if (self.body[i] instanceof AST_BlockStatement) { + var tmp = [ i, 1 ].concat(self.body[i].body); + 
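// The apply/concat below flattens the nested block's statements into the
// enclosing body in place (the ES5 spelling of
// self.body.splice(i, 1, ...self.body[i].body)), so the scan can keep
// looking for mergeable assignments inside what used to be the block.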
self.body.splice.apply(self.body, tmp); + continue; + } + break; + } + defs = make_node(AST_Var, self, { + definitions: defs + }); + hoisted.push(defs); + }; + } + self.body = dirs.concat(hoisted, self.body); + } + return self; + }); + + OPT(AST_SimpleStatement, function(self, compressor){ + if (compressor.option("side_effects")) { + if (!self.body.has_side_effects(compressor)) { + compressor.warn("Dropping side-effect-free statement [{file}:{line},{col}]", self.start); + return make_node(AST_EmptyStatement, self); + } + } + return self; + }); + + OPT(AST_DWLoop, function(self, compressor){ + var cond = self.condition.evaluate(compressor); + self.condition = cond[0]; + if (!compressor.option("loops")) return self; + if (cond.length > 1) { + if (cond[1]) { + return make_node(AST_For, self, { + body: self.body + }); + } else if (self instanceof AST_While) { + if (compressor.option("dead_code")) { + var a = []; + extract_declarations_from_unreachable_code(compressor, self.body, a); + return make_node(AST_BlockStatement, self, { body: a }); + } + } + } + return self; + }); + + function if_break_in_loop(self, compressor) { + function drop_it(rest) { + rest = as_statement_array(rest); + if (self.body instanceof AST_BlockStatement) { + self.body = self.body.clone(); + self.body.body = rest.concat(self.body.body.slice(1)); + self.body = self.body.transform(compressor); + } else { + self.body = make_node(AST_BlockStatement, self.body, { + body: rest + }).transform(compressor); + } + if_break_in_loop(self, compressor); + } + var first = self.body instanceof AST_BlockStatement ? self.body.body[0] : self.body; + if (first instanceof AST_If) { + if (first.body instanceof AST_Break + && compressor.loopcontrol_target(first.body.label) === self) { + if (self.condition) { + self.condition = make_node(AST_Binary, self.condition, { + left: self.condition, + operator: "&&", + right: first.condition.negate(compressor), + }); + } else { + self.condition = first.condition.negate(compressor); + } + drop_it(first.alternative); + } + else if (first.alternative instanceof AST_Break + && compressor.loopcontrol_target(first.alternative.label) === self) { + if (self.condition) { + self.condition = make_node(AST_Binary, self.condition, { + left: self.condition, + operator: "&&", + right: first.condition, + }); + } else { + self.condition = first.condition; + } + drop_it(first.body); + } + } + }; + + OPT(AST_While, function(self, compressor) { + if (!compressor.option("loops")) return self; + self = AST_DWLoop.prototype.optimize.call(self, compressor); + if (self instanceof AST_While) { + if_break_in_loop(self, compressor); + self = make_node(AST_For, self, self).transform(compressor); + } + return self; + }); + + OPT(AST_For, function(self, compressor){ + var cond = self.condition; + if (cond) { + cond = cond.evaluate(compressor); + self.condition = cond[0]; + } + if (!compressor.option("loops")) return self; + if (cond) { + if (cond.length > 1 && !cond[1]) { + if (compressor.option("dead_code")) { + var a = []; + if (self.init instanceof AST_Statement) { + a.push(self.init); + } + else if (self.init) { + a.push(make_node(AST_SimpleStatement, self.init, { + body: self.init + })); + } + extract_declarations_from_unreachable_code(compressor, self.body, a); + return make_node(AST_BlockStatement, self, { body: a }); + } + } + } + if_break_in_loop(self, compressor); + return self; + }); + + OPT(AST_If, function(self, compressor){ + if (!compressor.option("conditionals")) return self; + // if condition can be statically 
determined, warn and drop + // one of the blocks. note, statically determined implies + // “has no side effects”; also it doesn't work for cases like + // `x && true`, though it probably should. + var cond = self.condition.evaluate(compressor); + self.condition = cond[0]; + if (cond.length > 1) { + if (cond[1]) { + compressor.warn("Condition always true [{file}:{line},{col}]", self.condition.start); + if (compressor.option("dead_code")) { + var a = []; + if (self.alternative) { + extract_declarations_from_unreachable_code(compressor, self.alternative, a); + } + a.push(self.body); + return make_node(AST_BlockStatement, self, { body: a }).transform(compressor); + } + } else { + compressor.warn("Condition always false [{file}:{line},{col}]", self.condition.start); + if (compressor.option("dead_code")) { + var a = []; + extract_declarations_from_unreachable_code(compressor, self.body, a); + if (self.alternative) a.push(self.alternative); + return make_node(AST_BlockStatement, self, { body: a }).transform(compressor); + } + } + } + if (is_empty(self.alternative)) self.alternative = null; + var negated = self.condition.negate(compressor); + var self_condition_length = self.condition.print_to_string().length; + var negated_length = negated.print_to_string().length; + var negated_is_best = negated_length < self_condition_length; + if (self.alternative && negated_is_best) { + negated_is_best = false; // because we already do the switch here. + // no need to swap values of self_condition_length and negated_length + // here because they are only used in an equality comparison later on. + self.condition = negated; + var tmp = self.body; + self.body = self.alternative || make_node(AST_EmptyStatement); + self.alternative = tmp; + } + if (is_empty(self.body) && is_empty(self.alternative)) { + return make_node(AST_SimpleStatement, self.condition, { + body: self.condition + }).transform(compressor); + } + if (self.body instanceof AST_SimpleStatement + && self.alternative instanceof AST_SimpleStatement) { + return make_node(AST_SimpleStatement, self, { + body: make_node(AST_Conditional, self, { + condition : self.condition, + consequent : self.body.body, + alternative : self.alternative.body + }) + }).transform(compressor); + } + if (is_empty(self.alternative) && self.body instanceof AST_SimpleStatement) { + if (self_condition_length === negated_length && !negated_is_best + && self.condition instanceof AST_Binary && self.condition.operator == "||") { + // although the code length of self.condition and negated are the same, + // negated does not require additional surrounding parentheses. 
+ // see https://github.com/mishoo/UglifyJS2/issues/979 + negated_is_best = true; + } + if (negated_is_best) return make_node(AST_SimpleStatement, self, { + body: make_node(AST_Binary, self, { + operator : "||", + left : negated, + right : self.body.body + }) + }).transform(compressor); + return make_node(AST_SimpleStatement, self, { + body: make_node(AST_Binary, self, { + operator : "&&", + left : self.condition, + right : self.body.body + }) + }).transform(compressor); + } + if (self.body instanceof AST_EmptyStatement + && self.alternative + && self.alternative instanceof AST_SimpleStatement) { + return make_node(AST_SimpleStatement, self, { + body: make_node(AST_Binary, self, { + operator : "||", + left : self.condition, + right : self.alternative.body + }) + }).transform(compressor); + } + if (self.body instanceof AST_Exit + && self.alternative instanceof AST_Exit + && self.body.TYPE == self.alternative.TYPE) { + return make_node(self.body.CTOR, self, { + value: make_node(AST_Conditional, self, { + condition : self.condition, + consequent : self.body.value || make_node(AST_Undefined, self.body).optimize(compressor), + alternative : self.alternative.value || make_node(AST_Undefined, self.alternative).optimize(compressor) + }) + }).transform(compressor); + } + if (self.body instanceof AST_If + && !self.body.alternative + && !self.alternative) { + self.condition = make_node(AST_Binary, self.condition, { + operator: "&&", + left: self.condition, + right: self.body.condition + }).transform(compressor); + self.body = self.body.body; + } + if (aborts(self.body)) { + if (self.alternative) { + var alt = self.alternative; + self.alternative = null; + return make_node(AST_BlockStatement, self, { + body: [ self, alt ] + }).transform(compressor); + } + } + if (aborts(self.alternative)) { + var body = self.body; + self.body = self.alternative; + self.condition = negated_is_best ? negated : self.condition.negate(compressor); + self.alternative = null; + return make_node(AST_BlockStatement, self, { + body: [ self, body ] + }).transform(compressor); + } + return self; + }); + + OPT(AST_Switch, function(self, compressor){ + if (self.body.length == 0 && compressor.option("conditionals")) { + return make_node(AST_SimpleStatement, self, { + body: self.expression + }).transform(compressor); + } + for(;;) { + var last_branch = self.body[self.body.length - 1]; + if (last_branch) { + var stat = last_branch.body[last_branch.body.length - 1]; // last statement + if (stat instanceof AST_Break && loop_body(compressor.loopcontrol_target(stat.label)) === self) + last_branch.body.pop(); + if (last_branch instanceof AST_Default && last_branch.body.length == 0) { + self.body.pop(); + continue; + } + } + break; + } + var exp = self.expression.evaluate(compressor); + out: if (exp.length == 2) try { + // constant expression + self.expression = exp[0]; + if (!compressor.option("dead_code")) break out; + var value = exp[1]; + var in_if = false; + var in_block = false; + var started = false; + var stopped = false; + var ruined = false; + var tt = new TreeTransformer(function(node, descend, in_list){ + if (node instanceof AST_Lambda || node instanceof AST_SimpleStatement) { + // no need to descend these node types + return node; + } + else if (node instanceof AST_Switch && node === self) { + node = node.clone(); + descend(node, this); + return ruined ? 
node : make_node(AST_BlockStatement, node, { + body: node.body.reduce(function(a, branch){ + return a.concat(branch.body); + }, []) + }).transform(compressor); + } + else if (node instanceof AST_If || node instanceof AST_Try) { + var save = in_if; + in_if = !in_block; + descend(node, this); + in_if = save; + return node; + } + else if (node instanceof AST_StatementWithBody || node instanceof AST_Switch) { + var save = in_block; + in_block = true; + descend(node, this); + in_block = save; + return node; + } + else if (node instanceof AST_Break && this.loopcontrol_target(node.label) === self) { + if (in_if) { + ruined = true; + return node; + } + if (in_block) return node; + stopped = true; + return in_list ? MAP.skip : make_node(AST_EmptyStatement, node); + } + else if (node instanceof AST_SwitchBranch && this.parent() === self) { + if (stopped) return MAP.skip; + if (node instanceof AST_Case) { + var exp = node.expression.evaluate(compressor); + if (exp.length < 2) { + // got a case with non-constant expression, baling out + throw self; + } + if (exp[1] === value || started) { + started = true; + if (aborts(node)) stopped = true; + descend(node, this); + return node; + } + return MAP.skip; + } + descend(node, this); + return node; + } + }); + tt.stack = compressor.stack.slice(); // so that's able to see parent nodes + self = self.transform(tt); + } catch(ex) { + if (ex !== self) throw ex; + } + return self; + }); + + OPT(AST_Case, function(self, compressor){ + self.body = tighten_body(self.body, compressor); + return self; + }); + + OPT(AST_Try, function(self, compressor){ + self.body = tighten_body(self.body, compressor); + return self; + }); + + AST_Definitions.DEFMETHOD("remove_initializers", function(){ + this.definitions.forEach(function(def){ def.value = null }); + }); + + AST_Definitions.DEFMETHOD("to_assignments", function(){ + var assignments = this.definitions.reduce(function(a, def){ + if (def.value) { + var name = make_node(AST_SymbolRef, def.name, def.name); + a.push(make_node(AST_Assign, def, { + operator : "=", + left : name, + right : def.value + })); + } + return a; + }, []); + if (assignments.length == 0) return null; + return AST_Seq.from_array(assignments); + }); + + OPT(AST_Definitions, function(self, compressor){ + if (self.definitions.length == 0) + return make_node(AST_EmptyStatement, self); + return self; + }); + + OPT(AST_Function, function(self, compressor){ + self = AST_Lambda.prototype.optimize.call(self, compressor); + if (compressor.option("unused") && !compressor.option("keep_fnames")) { + if (self.name && self.name.unreferenced()) { + self.name = null; + } + } + return self; + }); + + OPT(AST_Call, function(self, compressor){ + if (compressor.option("unsafe")) { + var exp = self.expression; + if (exp instanceof AST_SymbolRef && exp.undeclared()) { + switch (exp.name) { + case "Array": + if (self.args.length != 1) { + return make_node(AST_Array, self, { + elements: self.args + }).transform(compressor); + } + break; + case "Object": + if (self.args.length == 0) { + return make_node(AST_Object, self, { + properties: [] + }); + } + break; + case "String": + if (self.args.length == 0) return make_node(AST_String, self, { + value: "" + }); + if (self.args.length <= 1) return make_node(AST_Binary, self, { + left: self.args[0], + operator: "+", + right: make_node(AST_String, self, { value: "" }) + }).transform(compressor); + break; + case "Number": + if (self.args.length == 0) return make_node(AST_Number, self, { + value: 0 + }); + if (self.args.length == 1) return 
make_node(AST_UnaryPrefix, self, { + expression: self.args[0], + operator: "+" + }).transform(compressor); + case "Boolean": + if (self.args.length == 0) return make_node(AST_False, self); + if (self.args.length == 1) return make_node(AST_UnaryPrefix, self, { + expression: make_node(AST_UnaryPrefix, null, { + expression: self.args[0], + operator: "!" + }), + operator: "!" + }).transform(compressor); + break; + case "Function": + // new Function() => function(){} + if (self.args.length == 0) return make_node(AST_Function, self, { + argnames: [], + body: [] + }); + if (all(self.args, function(x){ return x instanceof AST_String })) { + // quite a corner-case, but we can handle it: + // https://github.com/mishoo/UglifyJS2/issues/203 + // if the code argument is a constant, then we can minify it. + try { + var code = "(function(" + self.args.slice(0, -1).map(function(arg){ + return arg.value; + }).join(",") + "){" + self.args[self.args.length - 1].value + "})()"; + var ast = parse(code); + ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") }); + var comp = new Compressor(compressor.options); + ast = ast.transform(comp); + ast.figure_out_scope({ screw_ie8: compressor.option("screw_ie8") }); + ast.mangle_names(); + var fun; + try { + ast.walk(new TreeWalker(function(node){ + if (node instanceof AST_Lambda) { + fun = node; + throw ast; + } + })); + } catch(ex) { + if (ex !== ast) throw ex; + }; + if (!fun) return self; + var args = fun.argnames.map(function(arg, i){ + return make_node(AST_String, self.args[i], { + value: arg.print_to_string() + }); + }); + var code = OutputStream(); + AST_BlockStatement.prototype._codegen.call(fun, fun, code); + code = code.toString().replace(/^\{|\}$/g, ""); + args.push(make_node(AST_String, self.args[self.args.length - 1], { + value: code + })); + self.args = args; + return self; + } catch(ex) { + if (ex instanceof JS_Parse_Error) { + compressor.warn("Error parsing code passed to new Function [{file}:{line},{col}]", self.args[self.args.length - 1].start); + compressor.warn(ex.toString()); + } else { + console.log(ex); + throw ex; + } + } + } + break; + } + } + else if (exp instanceof AST_Dot && exp.property == "toString" && self.args.length == 0) { + return make_node(AST_Binary, self, { + left: make_node(AST_String, self, { value: "" }), + operator: "+", + right: exp.expression + }).transform(compressor); + } + else if (exp instanceof AST_Dot && exp.expression instanceof AST_Array && exp.property == "join") EXIT: { + var separator = self.args.length == 0 ? 
"," : self.args[0].evaluate(compressor)[1]; + if (separator == null) break EXIT; // not a constant + var elements = exp.expression.elements.reduce(function(a, el){ + el = el.evaluate(compressor); + if (a.length == 0 || el.length == 1) { + a.push(el); + } else { + var last = a[a.length - 1]; + if (last.length == 2) { + // it's a constant + var val = "" + last[1] + separator + el[1]; + a[a.length - 1] = [ make_node_from_constant(compressor, val, last[0]), val ]; + } else { + a.push(el); + } + } + return a; + }, []); + if (elements.length == 0) return make_node(AST_String, self, { value: "" }); + if (elements.length == 1) return elements[0][0]; + if (separator == "") { + var first; + if (elements[0][0] instanceof AST_String + || elements[1][0] instanceof AST_String) { + first = elements.shift()[0]; + } else { + first = make_node(AST_String, self, { value: "" }); + } + return elements.reduce(function(prev, el){ + return make_node(AST_Binary, el[0], { + operator : "+", + left : prev, + right : el[0], + }); + }, first).transform(compressor); + } + // need this awkward cloning to not affect original element + // best_of will decide which one to get through. + var node = self.clone(); + node.expression = node.expression.clone(); + node.expression.expression = node.expression.expression.clone(); + node.expression.expression.elements = elements.map(function(el){ + return el[0]; + }); + return best_of(self, node); + } + } + if (compressor.option("side_effects")) { + if (self.expression instanceof AST_Function + && self.args.length == 0 + && !AST_Block.prototype.has_side_effects.call(self.expression, compressor)) { + return make_node(AST_Undefined, self).transform(compressor); + } + } + if (compressor.option("drop_console")) { + if (self.expression instanceof AST_PropAccess) { + var name = self.expression.expression; + while (name.expression) { + name = name.expression; + } + if (name instanceof AST_SymbolRef + && name.name == "console" + && name.undeclared()) { + return make_node(AST_Undefined, self).transform(compressor); + } + } + } + return self.evaluate(compressor)[0]; + }); + + OPT(AST_New, function(self, compressor){ + if (compressor.option("unsafe")) { + var exp = self.expression; + if (exp instanceof AST_SymbolRef && exp.undeclared()) { + switch (exp.name) { + case "Object": + case "RegExp": + case "Function": + case "Error": + case "Array": + return make_node(AST_Call, self, self).transform(compressor); + } + } + } + return self; + }); + + OPT(AST_Seq, function(self, compressor){ + if (!compressor.option("side_effects")) + return self; + if (!self.car.has_side_effects(compressor)) { + return maintain_this_binding(compressor.parent(), self, self.cdr); + } + if (compressor.option("cascade")) { + if (self.car instanceof AST_Assign + && !self.car.left.has_side_effects(compressor)) { + if (self.car.left.equivalent_to(self.cdr)) { + return self.car; + } + if (self.cdr instanceof AST_Call + && self.cdr.expression.equivalent_to(self.car.left)) { + self.cdr.expression = self.car; + return self.cdr; + } + } + if (!self.car.has_side_effects(compressor) + && !self.cdr.has_side_effects(compressor) + && self.car.equivalent_to(self.cdr)) { + return self.car; + } + } + if (self.cdr instanceof AST_UnaryPrefix + && self.cdr.operator == "void" + && !self.cdr.expression.has_side_effects(compressor)) { + self.cdr.expression = self.car; + return self.cdr; + } + if (self.cdr instanceof AST_Undefined) { + return make_node(AST_UnaryPrefix, self, { + operator : "void", + expression : self.car + }); + } + return self; 
+ }); + + AST_Unary.DEFMETHOD("lift_sequences", function(compressor){ + if (compressor.option("sequences")) { + if (this.expression instanceof AST_Seq) { + var seq = this.expression; + var x = seq.to_array(); + this.expression = x.pop(); + x.push(this); + seq = AST_Seq.from_array(x).transform(compressor); + return seq; + } + } + return this; + }); + + OPT(AST_UnaryPostfix, function(self, compressor){ + return self.lift_sequences(compressor); + }); + + OPT(AST_UnaryPrefix, function(self, compressor){ + self = self.lift_sequences(compressor); + var e = self.expression; + if (compressor.option("booleans") && compressor.in_boolean_context()) { + switch (self.operator) { + case "!": + if (e instanceof AST_UnaryPrefix && e.operator == "!") { + // !!foo ==> foo, if we're in boolean context + return e.expression; + } + break; + case "typeof": + // typeof always returns a non-empty string, thus it's + // always true in booleans + compressor.warn("Boolean expression always true [{file}:{line},{col}]", self.start); + return make_node(AST_True, self); + } + if (e instanceof AST_Binary && self.operator == "!") { + self = best_of(self, e.negate(compressor)); + } + } + return self.evaluate(compressor)[0]; + }); + + function has_side_effects_or_prop_access(node, compressor) { + var save_pure_getters = compressor.option("pure_getters"); + compressor.options.pure_getters = false; + var ret = node.has_side_effects(compressor); + compressor.options.pure_getters = save_pure_getters; + return ret; + } + + AST_Binary.DEFMETHOD("lift_sequences", function(compressor){ + if (compressor.option("sequences")) { + if (this.left instanceof AST_Seq) { + var seq = this.left; + var x = seq.to_array(); + this.left = x.pop(); + x.push(this); + seq = AST_Seq.from_array(x).transform(compressor); + return seq; + } + if (this.right instanceof AST_Seq + && this instanceof AST_Assign + && !has_side_effects_or_prop_access(this.left, compressor)) { + var seq = this.right; + var x = seq.to_array(); + this.right = x.pop(); + x.push(this); + seq = AST_Seq.from_array(x).transform(compressor); + return seq; + } + } + return this; + }); + + var commutativeOperators = makePredicate("== === != !== * & | ^"); + + OPT(AST_Binary, function(self, compressor){ + function reverse(op, force) { + if (force || !(self.left.has_side_effects(compressor) || self.right.has_side_effects(compressor))) { + if (op) self.operator = op; + var tmp = self.left; + self.left = self.right; + self.right = tmp; + } + } + if (commutativeOperators(self.operator)) { + if (self.right instanceof AST_Constant + && !(self.left instanceof AST_Constant)) { + // if right is a constant, whatever side effects the + // left side might have could not influence the + // result. hence, force switch. 
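// For example: foo() == 1  ==>  1 == foo().  The check below skips the swap
// when the left side is itself a binary operator of equal or higher
// precedence, presumably so the swap never forces extra parentheses
// (e.g. a == b == 1 would otherwise become 1 == (a == b)).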
+ + if (!(self.left instanceof AST_Binary + && PRECEDENCE[self.left.operator] >= PRECEDENCE[self.operator])) { + reverse(null, true); + } + } + if (/^[!=]==?$/.test(self.operator)) { + if (self.left instanceof AST_SymbolRef && self.right instanceof AST_Conditional) { + if (self.right.consequent instanceof AST_SymbolRef + && self.right.consequent.definition() === self.left.definition()) { + if (/^==/.test(self.operator)) return self.right.condition; + if (/^!=/.test(self.operator)) return self.right.condition.negate(compressor); + } + if (self.right.alternative instanceof AST_SymbolRef + && self.right.alternative.definition() === self.left.definition()) { + if (/^==/.test(self.operator)) return self.right.condition.negate(compressor); + if (/^!=/.test(self.operator)) return self.right.condition; + } + } + if (self.right instanceof AST_SymbolRef && self.left instanceof AST_Conditional) { + if (self.left.consequent instanceof AST_SymbolRef + && self.left.consequent.definition() === self.right.definition()) { + if (/^==/.test(self.operator)) return self.left.condition; + if (/^!=/.test(self.operator)) return self.left.condition.negate(compressor); + } + if (self.left.alternative instanceof AST_SymbolRef + && self.left.alternative.definition() === self.right.definition()) { + if (/^==/.test(self.operator)) return self.left.condition.negate(compressor); + if (/^!=/.test(self.operator)) return self.left.condition; + } + } + } + } + self = self.lift_sequences(compressor); + if (compressor.option("comparisons")) switch (self.operator) { + case "===": + case "!==": + if ((self.left.is_string(compressor) && self.right.is_string(compressor)) || + (self.left.is_boolean() && self.right.is_boolean())) { + self.operator = self.operator.substr(0, 2); + } + // XXX: intentionally falling down to the next case + case "==": + case "!=": + if (self.left instanceof AST_String + && self.left.value == "undefined" + && self.right instanceof AST_UnaryPrefix + && self.right.operator == "typeof" + && compressor.option("unsafe")) { + if (!(self.right.expression instanceof AST_SymbolRef) + || !self.right.expression.undeclared()) { + self.right = self.right.expression; + self.left = make_node(AST_Undefined, self.left).optimize(compressor); + if (self.operator.length == 2) self.operator += "="; + } + } + break; + } + if (compressor.option("conditionals")) { + if (self.operator == "&&") { + var ll = self.left.evaluate(compressor); + if (ll.length > 1) { + if (ll[1]) { + compressor.warn("Condition left of && always true [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, self.right.evaluate(compressor)[0]); + } else { + compressor.warn("Condition left of && always false [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, ll[0]); + } + } + } + else if (self.operator == "||") { + var ll = self.left.evaluate(compressor); + if (ll.length > 1) { + if (ll[1]) { + compressor.warn("Condition left of || always true [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, ll[0]); + } else { + compressor.warn("Condition left of || always false [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, self.right.evaluate(compressor)[0]); + } + } + } + } + if (compressor.option("booleans") && compressor.in_boolean_context()) switch (self.operator) { + case "&&": + var ll = self.left.evaluate(compressor); + var rr = self.right.evaluate(compressor); + if ((ll.length > 1 && !ll[1]) 
|| (rr.length > 1 && !rr[1])) { + compressor.warn("Boolean && always false [{file}:{line},{col}]", self.start); + if (self.left.has_side_effects(compressor)) { + return make_node(AST_Seq, self, { + car: self.left, + cdr: make_node(AST_False) + }).optimize(compressor); + } + return make_node(AST_False, self); + } + if (ll.length > 1 && ll[1]) { + return rr[0]; + } + if (rr.length > 1 && rr[1]) { + return ll[0]; + } + break; + case "||": + var ll = self.left.evaluate(compressor); + var rr = self.right.evaluate(compressor); + if ((ll.length > 1 && ll[1]) || (rr.length > 1 && rr[1])) { + compressor.warn("Boolean || always true [{file}:{line},{col}]", self.start); + if (self.left.has_side_effects(compressor)) { + return make_node(AST_Seq, self, { + car: self.left, + cdr: make_node(AST_True) + }).optimize(compressor); + } + return make_node(AST_True, self); + } + if (ll.length > 1 && !ll[1]) { + return rr[0]; + } + if (rr.length > 1 && !rr[1]) { + return ll[0]; + } + break; + case "+": + var ll = self.left.evaluate(compressor); + var rr = self.right.evaluate(compressor); + if ((ll.length > 1 && ll[0] instanceof AST_String && ll[1]) || + (rr.length > 1 && rr[0] instanceof AST_String && rr[1])) { + compressor.warn("+ in boolean context always true [{file}:{line},{col}]", self.start); + return make_node(AST_True, self); + } + break; + } + if (compressor.option("comparisons") && self.is_boolean()) { + if (!(compressor.parent() instanceof AST_Binary) + || compressor.parent() instanceof AST_Assign) { + var negated = make_node(AST_UnaryPrefix, self, { + operator: "!", + expression: self.negate(compressor) + }); + self = best_of(self, negated); + } + if (compressor.option("unsafe_comps")) { + switch (self.operator) { + case "<": reverse(">"); break; + case "<=": reverse(">="); break; + } + } + } + if (self.operator == "+" && self.right instanceof AST_String + && self.right.getValue() === "" && self.left instanceof AST_Binary + && self.left.operator == "+" && self.left.is_string(compressor)) { + return self.left; + } + if (compressor.option("evaluate")) { + if (self.operator == "+") { + if (self.left instanceof AST_Constant + && self.right instanceof AST_Binary + && self.right.operator == "+" + && self.right.left instanceof AST_Constant + && self.right.is_string(compressor)) { + self = make_node(AST_Binary, self, { + operator: "+", + left: make_node(AST_String, null, { + value: "" + self.left.getValue() + self.right.left.getValue(), + start: self.left.start, + end: self.right.left.end + }), + right: self.right.right + }); + } + if (self.right instanceof AST_Constant + && self.left instanceof AST_Binary + && self.left.operator == "+" + && self.left.right instanceof AST_Constant + && self.left.is_string(compressor)) { + self = make_node(AST_Binary, self, { + operator: "+", + left: self.left.left, + right: make_node(AST_String, null, { + value: "" + self.left.right.getValue() + self.right.getValue(), + start: self.left.right.start, + end: self.right.end + }) + }); + } + if (self.left instanceof AST_Binary + && self.left.operator == "+" + && self.left.is_string(compressor) + && self.left.right instanceof AST_Constant + && self.right instanceof AST_Binary + && self.right.operator == "+" + && self.right.left instanceof AST_Constant + && self.right.is_string(compressor)) { + self = make_node(AST_Binary, self, { + operator: "+", + left: make_node(AST_Binary, self.left, { + operator: "+", + left: self.left.left, + right: make_node(AST_String, null, { + value: "" + self.left.right.getValue() + 
self.right.left.getValue(), + start: self.left.right.start, + end: self.right.left.end + }) + }), + right: self.right.right + }); + } + } + } + // x && (y && z) ==> x && y && z + // x || (y || z) ==> x || y || z + if (self.right instanceof AST_Binary + && self.right.operator == self.operator + && (self.operator == "&&" || self.operator == "||")) + { + self.left = make_node(AST_Binary, self.left, { + operator : self.operator, + left : self.left, + right : self.right.left + }); + self.right = self.right.right; + return self.transform(compressor); + } + return self.evaluate(compressor)[0]; + }); + + OPT(AST_SymbolRef, function(self, compressor){ + function isLHS(symbol, parent) { + return ( + parent instanceof AST_Binary && + parent.operator === '=' && + parent.left === symbol + ); + } + + if (self.undeclared() && !isLHS(self, compressor.parent())) { + var defines = compressor.option("global_defs"); + if (defines && HOP(defines, self.name)) { + return make_node_from_constant(compressor, defines[self.name], self); + } + // testing against !self.scope.uses_with first is an optimization + if (!self.scope.uses_with || !compressor.find_parent(AST_With)) { + switch (self.name) { + case "undefined": + return make_node(AST_Undefined, self); + case "NaN": + return make_node(AST_NaN, self).transform(compressor); + case "Infinity": + return make_node(AST_Infinity, self).transform(compressor); + } + } + } + return self; + }); + + OPT(AST_Infinity, function (self, compressor) { + return make_node(AST_Binary, self, { + operator : '/', + left : make_node(AST_Number, self, {value: 1}), + right : make_node(AST_Number, self, {value: 0}) + }); + }); + + OPT(AST_Undefined, function(self, compressor){ + if (compressor.option("unsafe")) { + var scope = compressor.find_parent(AST_Scope); + var undef = scope.find_variable("undefined"); + if (undef) { + var ref = make_node(AST_SymbolRef, self, { + name : "undefined", + scope : scope, + thedef : undef + }); + ref.reference(); + return ref; + } + } + return self; + }); + + var ASSIGN_OPS = [ '+', '-', '/', '*', '%', '>>', '<<', '>>>', '|', '^', '&' ]; + OPT(AST_Assign, function(self, compressor){ + self = self.lift_sequences(compressor); + if (self.operator == "=" + && self.left instanceof AST_SymbolRef + && self.right instanceof AST_Binary + && self.right.left instanceof AST_SymbolRef + && self.right.left.name == self.left.name + && member(self.right.operator, ASSIGN_OPS)) { + self.operator = self.right.operator + "="; + self.right = self.right.right; + } + return self; + }); + + OPT(AST_Conditional, function(self, compressor){ + if (!compressor.option("conditionals")) return self; + if (self.condition instanceof AST_Seq) { + var car = self.condition.car; + self.condition = self.condition.cdr; + return AST_Seq.cons(car, self); + } + var cond = self.condition.evaluate(compressor); + if (cond.length > 1) { + if (cond[1]) { + compressor.warn("Condition always true [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, self.consequent); + } else { + compressor.warn("Condition always false [{file}:{line},{col}]", self.start); + return maintain_this_binding(compressor.parent(), self, self.alternative); + } + } + var negated = cond[0].negate(compressor); + if (best_of(cond[0], negated) === negated) { + self = make_node(AST_Conditional, self, { + condition: negated, + consequent: self.alternative, + alternative: self.consequent + }); + } + var consequent = self.consequent; + var alternative = self.alternative; + if (consequent 
instanceof AST_Assign + && alternative instanceof AST_Assign + && consequent.operator == alternative.operator + && consequent.left.equivalent_to(alternative.left) + && !consequent.left.has_side_effects(compressor) + ) { + /* + * Stuff like this: + * if (foo) exp = something; else exp = something_else; + * ==> + * exp = foo ? something : something_else; + */ + return make_node(AST_Assign, self, { + operator: consequent.operator, + left: consequent.left, + right: make_node(AST_Conditional, self, { + condition: self.condition, + consequent: consequent.right, + alternative: alternative.right + }) + }); + } + if (consequent instanceof AST_Call + && alternative.TYPE === consequent.TYPE + && consequent.args.length == alternative.args.length + && !consequent.expression.has_side_effects(compressor) + && consequent.expression.equivalent_to(alternative.expression)) { + if (consequent.args.length == 0) { + return make_node(AST_Seq, self, { + car: self.condition, + cdr: consequent + }); + } + if (consequent.args.length == 1) { + consequent.args[0] = make_node(AST_Conditional, self, { + condition: self.condition, + consequent: consequent.args[0], + alternative: alternative.args[0] + }); + return consequent; + } + } + // x?y?z:a:a --> x&&y?z:a + if (consequent instanceof AST_Conditional + && consequent.alternative.equivalent_to(alternative)) { + return make_node(AST_Conditional, self, { + condition: make_node(AST_Binary, self, { + left: self.condition, + operator: "&&", + right: consequent.condition + }), + consequent: consequent.consequent, + alternative: alternative + }); + } + // y?1:1 --> 1 + if (consequent.is_constant(compressor) + && alternative.is_constant(compressor) + && consequent.equivalent_to(alternative)) { + var consequent_value = consequent.constant_value(compressor); + if (self.condition.has_side_effects(compressor)) { + return AST_Seq.from_array([self.condition, make_node_from_constant(compressor, consequent_value, self)]); + } else { + return make_node_from_constant(compressor, consequent_value, self); + } + } + + if (is_true(self.consequent)) { + if (is_false(self.alternative)) { + // c ? true : false ---> !!c + return booleanize(self.condition); + } + // c ? true : x ---> !!c || x + return make_node(AST_Binary, self, { + operator: "||", + left: booleanize(self.condition), + right: self.alternative + }); + } + if (is_false(self.consequent)) { + if (is_true(self.alternative)) { + // c ? false : true ---> !c + return booleanize(self.condition.negate(compressor)); + } + // c ? false : x ---> !c && x + return make_node(AST_Binary, self, { + operator: "&&", + left: booleanize(self.condition.negate(compressor)), + right: self.alternative + }); + } + if (is_true(self.alternative)) { + // c ? x : true ---> !c || x + return make_node(AST_Binary, self, { + operator: "||", + left: booleanize(self.condition.negate(compressor)), + right: self.consequent + }); + } + if (is_false(self.alternative)) { + // c ? x : false ---> !!c && x + return make_node(AST_Binary, self, { + operator: "&&", + left: booleanize(self.condition), + right: self.consequent + }); + } + + return self; + + function booleanize(node) { + if (node.is_boolean()) return node; + // !!expression + return make_node(AST_UnaryPrefix, node, { + operator: "!", + expression: node.negate(compressor) + }); + } + + // AST_True or !0 + function is_true(node) { + return node instanceof AST_True + || (node instanceof AST_UnaryPrefix + && node.operator == "!" 
+ && node.expression instanceof AST_Constant + && !node.expression.value); + } + // AST_False or !1 + function is_false(node) { + return node instanceof AST_False + || (node instanceof AST_UnaryPrefix + && node.operator == "!" + && node.expression instanceof AST_Constant + && !!node.expression.value); + } + }); + + OPT(AST_Boolean, function(self, compressor){ + if (compressor.option("booleans")) { + var p = compressor.parent(); + if (p instanceof AST_Binary && (p.operator == "==" + || p.operator == "!=")) { + compressor.warn("Non-strict equality against boolean: {operator} {value} [{file}:{line},{col}]", { + operator : p.operator, + value : self.value, + file : p.start.file, + line : p.start.line, + col : p.start.col, + }); + return make_node(AST_Number, self, { + value: +self.value + }); + } + return make_node(AST_UnaryPrefix, self, { + operator: "!", + expression: make_node(AST_Number, self, { + value: 1 - self.value + }) + }); + } + return self; + }); + + OPT(AST_Sub, function(self, compressor){ + var prop = self.property; + if (prop instanceof AST_String && compressor.option("properties")) { + prop = prop.getValue(); + if (RESERVED_WORDS(prop) ? compressor.option("screw_ie8") : is_identifier_string(prop)) { + return make_node(AST_Dot, self, { + expression : self.expression, + property : prop + }).optimize(compressor); + } + var v = parseFloat(prop); + if (!isNaN(v) && v.toString() == prop) { + self.property = make_node(AST_Number, self.property, { + value: v + }); + } + } + return self; + }); + + OPT(AST_Dot, function(self, compressor){ + var prop = self.property; + if (RESERVED_WORDS(prop) && !compressor.option("screw_ie8")) { + return make_node(AST_Sub, self, { + expression : self.expression, + property : make_node(AST_String, self, { + value: prop + }) + }).optimize(compressor); + } + return self.evaluate(compressor)[0]; + }); + + function literals_in_boolean_context(self, compressor) { + if (compressor.option("booleans") && compressor.in_boolean_context() && !self.has_side_effects(compressor)) { + return make_node(AST_True, self); + } + return self; + }; + OPT(AST_Array, literals_in_boolean_context); + OPT(AST_Object, literals_in_boolean_context); + OPT(AST_RegExp, literals_in_boolean_context); + + OPT(AST_Return, function(self, compressor){ + if (self.value instanceof AST_Undefined) { + self.value = null; + } + return self; + }); + +})(); diff --git a/node_modules/uglify-js/lib/mozilla-ast.js b/node_modules/uglify-js/lib/mozilla-ast.js new file mode 100644 index 0000000..c1b2b68 --- /dev/null +++ b/node_modules/uglify-js/lib/mozilla-ast.js @@ -0,0 +1,537 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +(function(){ + + var MOZ_TO_ME = { + ExpressionStatement: function(M) { + var expr = M.expression; + if (expr.type === "Literal" && typeof expr.value === "string") { + return new AST_Directive({ + start: my_start_token(M), + end: my_end_token(M), + value: expr.value + }); + } + return new AST_SimpleStatement({ + start: my_start_token(M), + end: my_end_token(M), + body: from_moz(expr) + }); + }, + TryStatement: function(M) { + var handlers = M.handlers || [M.handler]; + if (handlers.length > 1 || M.guardedHandlers && M.guardedHandlers.length) { + throw new Error("Multiple catch clauses are not supported."); + } + return new AST_Try({ + start : my_start_token(M), + end : my_end_token(M), + body : from_moz(M.block).body, + bcatch : from_moz(handlers[0]), + bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null + }); + }, + Property: function(M) { + var key = M.key; + var name = key.type == "Identifier" ? key.name : key.value; + var args = { + start : my_start_token(key), + end : my_end_token(M.value), + key : name, + value : from_moz(M.value) + }; + switch (M.kind) { + case "init": + return new AST_ObjectKeyVal(args); + case "set": + args.value.name = from_moz(key); + return new AST_ObjectSetter(args); + case "get": + args.value.name = from_moz(key); + return new AST_ObjectGetter(args); + } + }, + ObjectExpression: function(M) { + return new AST_Object({ + start : my_start_token(M), + end : my_end_token(M), + properties : M.properties.map(function(prop){ + prop.type = "Property"; + return from_moz(prop) + }) + }); + }, + SequenceExpression: function(M) { + return AST_Seq.from_array(M.expressions.map(from_moz)); + }, + MemberExpression: function(M) { + return new (M.computed ? AST_Sub : AST_Dot)({ + start : my_start_token(M), + end : my_end_token(M), + property : M.computed ? from_moz(M.property) : M.property.name, + expression : from_moz(M.object) + }); + }, + SwitchCase: function(M) { + return new (M.test ? AST_Case : AST_Default)({ + start : my_start_token(M), + end : my_end_token(M), + expression : from_moz(M.test), + body : M.consequent.map(from_moz) + }); + }, + VariableDeclaration: function(M) { + return new (M.kind === "const" ? AST_Const : AST_Var)({ + start : my_start_token(M), + end : my_end_token(M), + definitions : M.declarations.map(from_moz) + }); + }, + Literal: function(M) { + var val = M.value, args = { + start : my_start_token(M), + end : my_end_token(M) + }; + if (val === null) return new AST_Null(args); + switch (typeof val) { + case "string": + args.value = val; + return new AST_String(args); + case "number": + args.value = val; + return new AST_Number(args); + case "boolean": + return new (val ? 
AST_True : AST_False)(args); + default: + var rx = M.regex; + if (rx && rx.pattern) { + // RegExpLiteral as per ESTree AST spec + args.value = new RegExp(rx.pattern, rx.flags).toString(); + } else { + // support legacy RegExp + args.value = M.regex && M.raw ? M.raw : val; + } + return new AST_RegExp(args); + } + }, + Identifier: function(M) { + var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2]; + return new ( p.type == "LabeledStatement" ? AST_Label + : p.type == "VariableDeclarator" && p.id === M ? (p.kind == "const" ? AST_SymbolConst : AST_SymbolVar) + : p.type == "FunctionExpression" ? (p.id === M ? AST_SymbolLambda : AST_SymbolFunarg) + : p.type == "FunctionDeclaration" ? (p.id === M ? AST_SymbolDefun : AST_SymbolFunarg) + : p.type == "CatchClause" ? AST_SymbolCatch + : p.type == "BreakStatement" || p.type == "ContinueStatement" ? AST_LabelRef + : AST_SymbolRef)({ + start : my_start_token(M), + end : my_end_token(M), + name : M.name + }); + } + }; + + MOZ_TO_ME.UpdateExpression = + MOZ_TO_ME.UnaryExpression = function To_Moz_Unary(M) { + var prefix = "prefix" in M ? M.prefix + : M.type == "UnaryExpression" ? true : false; + return new (prefix ? AST_UnaryPrefix : AST_UnaryPostfix)({ + start : my_start_token(M), + end : my_end_token(M), + operator : M.operator, + expression : from_moz(M.argument) + }); + }; + + map("Program", AST_Toplevel, "body@body"); + map("EmptyStatement", AST_EmptyStatement); + map("BlockStatement", AST_BlockStatement, "body@body"); + map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative"); + map("LabeledStatement", AST_LabeledStatement, "label>label, body>body"); + map("BreakStatement", AST_Break, "label>label"); + map("ContinueStatement", AST_Continue, "label>label"); + map("WithStatement", AST_With, "object>expression, body>body"); + map("SwitchStatement", AST_Switch, "discriminant>expression, cases@body"); + map("ReturnStatement", AST_Return, "argument>value"); + map("ThrowStatement", AST_Throw, "argument>value"); + map("WhileStatement", AST_While, "test>condition, body>body"); + map("DoWhileStatement", AST_Do, "test>condition, body>body"); + map("ForStatement", AST_For, "init>init, test>condition, update>step, body>body"); + map("ForInStatement", AST_ForIn, "left>init, right>object, body>body"); + map("DebuggerStatement", AST_Debugger); + map("FunctionDeclaration", AST_Defun, "id>name, params@argnames, body%body"); + map("VariableDeclarator", AST_VarDef, "id>name, init>value"); + map("CatchClause", AST_Catch, "param>argname, body%body"); + + map("ThisExpression", AST_This); + map("ArrayExpression", AST_Array, "elements@elements"); + map("FunctionExpression", AST_Function, "id>name, params@argnames, body%body"); + map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right"); + map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right"); + map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right"); + map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative"); + map("NewExpression", AST_New, "callee>expression, arguments@args"); + map("CallExpression", AST_Call, "callee>expression, arguments@args"); + + def_to_moz(AST_Directive, function To_Moz_Directive(M) { + return { + type: "ExpressionStatement", + expression: { + type: "Literal", + value: M.value + } + }; + }); + + def_to_moz(AST_SimpleStatement, function To_Moz_ExpressionStatement(M) { + return { + type: "ExpressionStatement", + expression: to_moz(M.body) + }; + 
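+ // Each def_to_moz handler in this file is wrapped by set_moz_loc (defined near
+ // the end of the file), so the ESTree node it returns also carries loc/range
+ // information when token positions are available.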
}); + + def_to_moz(AST_SwitchBranch, function To_Moz_SwitchCase(M) { + return { + type: "SwitchCase", + test: to_moz(M.expression), + consequent: M.body.map(to_moz) + }; + }); + + def_to_moz(AST_Try, function To_Moz_TryStatement(M) { + return { + type: "TryStatement", + block: to_moz_block(M), + handler: to_moz(M.bcatch), + guardedHandlers: [], + finalizer: to_moz(M.bfinally) + }; + }); + + def_to_moz(AST_Catch, function To_Moz_CatchClause(M) { + return { + type: "CatchClause", + param: to_moz(M.argname), + guard: null, + body: to_moz_block(M) + }; + }); + + def_to_moz(AST_Definitions, function To_Moz_VariableDeclaration(M) { + return { + type: "VariableDeclaration", + kind: M instanceof AST_Const ? "const" : "var", + declarations: M.definitions.map(to_moz) + }; + }); + + def_to_moz(AST_Seq, function To_Moz_SequenceExpression(M) { + return { + type: "SequenceExpression", + expressions: M.to_array().map(to_moz) + }; + }); + + def_to_moz(AST_PropAccess, function To_Moz_MemberExpression(M) { + var isComputed = M instanceof AST_Sub; + return { + type: "MemberExpression", + object: to_moz(M.expression), + computed: isComputed, + property: isComputed ? to_moz(M.property) : {type: "Identifier", name: M.property} + }; + }); + + def_to_moz(AST_Unary, function To_Moz_Unary(M) { + return { + type: M.operator == "++" || M.operator == "--" ? "UpdateExpression" : "UnaryExpression", + operator: M.operator, + prefix: M instanceof AST_UnaryPrefix, + argument: to_moz(M.expression) + }; + }); + + def_to_moz(AST_Binary, function To_Moz_BinaryExpression(M) { + return { + type: M.operator == "&&" || M.operator == "||" ? "LogicalExpression" : "BinaryExpression", + left: to_moz(M.left), + operator: M.operator, + right: to_moz(M.right) + }; + }); + + def_to_moz(AST_Object, function To_Moz_ObjectExpression(M) { + return { + type: "ObjectExpression", + properties: M.properties.map(to_moz) + }; + }); + + def_to_moz(AST_ObjectProperty, function To_Moz_Property(M) { + var key = ( + is_identifier(M.key) + ? {type: "Identifier", name: M.key} + : {type: "Literal", value: M.key} + ); + var kind; + if (M instanceof AST_ObjectKeyVal) { + kind = "init"; + } else + if (M instanceof AST_ObjectGetter) { + kind = "get"; + } else + if (M instanceof AST_ObjectSetter) { + kind = "set"; + } + return { + type: "Property", + kind: kind, + key: key, + value: to_moz(M.value) + }; + }); + + def_to_moz(AST_Symbol, function To_Moz_Identifier(M) { + var def = M.definition(); + return { + type: "Identifier", + name: def ? 
def.mangled_name || def.name : M.name + }; + }); + + def_to_moz(AST_RegExp, function To_Moz_RegExpLiteral(M) { + var value = M.value; + return { + type: "Literal", + value: value, + raw: value.toString(), + regex: { + pattern: value.source, + flags: value.toString().match(/[gimuy]*$/)[0] + } + }; + }); + + def_to_moz(AST_Constant, function To_Moz_Literal(M) { + var value = M.value; + if (typeof value === 'number' && (value < 0 || (value === 0 && 1 / value < 0))) { + return { + type: "UnaryExpression", + operator: "-", + prefix: true, + argument: { + type: "Literal", + value: -value, + raw: M.start.raw + } + }; + } + return { + type: "Literal", + value: value, + raw: M.start.raw + }; + }); + + def_to_moz(AST_Atom, function To_Moz_Atom(M) { + return { + type: "Identifier", + name: String(M.value) + }; + }); + + AST_Boolean.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast); + AST_Null.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast); + AST_Hole.DEFMETHOD("to_mozilla_ast", function To_Moz_ArrayHole() { return null }); + + AST_Block.DEFMETHOD("to_mozilla_ast", AST_BlockStatement.prototype.to_mozilla_ast); + AST_Lambda.DEFMETHOD("to_mozilla_ast", AST_Function.prototype.to_mozilla_ast); + + /* -----[ tools ]----- */ + + function raw_token(moznode) { + if (moznode.type == "Literal") { + return moznode.raw != null ? moznode.raw : moznode.value + ""; + } + } + + function my_start_token(moznode) { + var loc = moznode.loc, start = loc && loc.start; + var range = moznode.range; + return new AST_Token({ + file : loc && loc.source, + line : start && start.line, + col : start && start.column, + pos : range ? range[0] : moznode.start, + endline : start && start.line, + endcol : start && start.column, + endpos : range ? range[0] : moznode.start, + raw : raw_token(moznode), + }); + }; + + function my_end_token(moznode) { + var loc = moznode.loc, end = loc && loc.end; + var range = moznode.range; + return new AST_Token({ + file : loc && loc.source, + line : end && end.line, + col : end && end.column, + pos : range ? range[1] : moznode.end, + endline : end && end.line, + endcol : end && end.column, + endpos : range ? range[1] : moznode.end, + raw : raw_token(moznode), + }); + }; + + function map(moztype, mytype, propmap) { + var moz_to_me = "function From_Moz_" + moztype + "(M){\n"; + moz_to_me += "return new U2." + mytype.name + "({\n" + + "start: my_start_token(M),\n" + + "end: my_end_token(M)"; + + var me_to_moz = "function To_Moz_" + moztype + "(M){\n"; + me_to_moz += "return {\n" + + "type: " + JSON.stringify(moztype); + + if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop){ + var m = /([a-z0-9$_]+)(=|@|>|%)([a-z0-9$_]+)/i.exec(prop); + if (!m) throw new Error("Can't understand property map: " + prop); + var moz = m[1], how = m[2], my = m[3]; + moz_to_me += ",\n" + my + ": "; + me_to_moz += ",\n" + moz + ": "; + switch (how) { + case "@": + moz_to_me += "M." + moz + ".map(from_moz)"; + me_to_moz += "M." + my + ".map(to_moz)"; + break; + case ">": + moz_to_me += "from_moz(M." + moz + ")"; + me_to_moz += "to_moz(M." + my + ")"; + break; + case "=": + moz_to_me += "M." + moz; + me_to_moz += "M." + my; + break; + case "%": + moz_to_me += "from_moz(M." 
+ moz + ").body"; + me_to_moz += "to_moz_block(M)"; + break; + default: + throw new Error("Can't understand operator in propmap: " + prop); + } + }); + + moz_to_me += "\n})\n}"; + me_to_moz += "\n}\n}"; + + //moz_to_me = parse(moz_to_me).print_to_string({ beautify: true }); + //me_to_moz = parse(me_to_moz).print_to_string({ beautify: true }); + //console.log(moz_to_me); + + moz_to_me = new Function("U2", "my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")( + exports, my_start_token, my_end_token, from_moz + ); + me_to_moz = new Function("to_moz", "to_moz_block", "return(" + me_to_moz + ")")( + to_moz, to_moz_block + ); + MOZ_TO_ME[moztype] = moz_to_me; + def_to_moz(mytype, me_to_moz); + }; + + var FROM_MOZ_STACK = null; + + function from_moz(node) { + FROM_MOZ_STACK.push(node); + var ret = node != null ? MOZ_TO_ME[node.type](node) : null; + FROM_MOZ_STACK.pop(); + return ret; + }; + + AST_Node.from_mozilla_ast = function(node){ + var save_stack = FROM_MOZ_STACK; + FROM_MOZ_STACK = []; + var ast = from_moz(node); + FROM_MOZ_STACK = save_stack; + return ast; + }; + + function set_moz_loc(mynode, moznode, myparent) { + var start = mynode.start; + var end = mynode.end; + if (start.pos != null && end.endpos != null) { + moznode.range = [start.pos, end.endpos]; + } + if (start.line) { + moznode.loc = { + start: {line: start.line, column: start.col}, + end: end.endline ? {line: end.endline, column: end.endcol} : null + }; + if (start.file) { + moznode.loc.source = start.file; + } + } + return moznode; + }; + + function def_to_moz(mytype, handler) { + mytype.DEFMETHOD("to_mozilla_ast", function() { + return set_moz_loc(this, handler(this)); + }); + }; + + function to_moz(node) { + return node != null ? node.to_mozilla_ast() : null; + }; + + function to_moz_block(node) { + return { + type: "BlockStatement", + body: node.body.map(to_moz) + }; + }; + +})(); diff --git a/node_modules/uglify-js/lib/output.js b/node_modules/uglify-js/lib/output.js new file mode 100644 index 0000000..5db1356 --- /dev/null +++ b/node_modules/uglify-js/lib/output.js @@ -0,0 +1,1417 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +var EXPECT_DIRECTIVE = /^$|[;{][\s\n]*$/; + +function OutputStream(options) { + + options = defaults(options, { + indent_start : 0, + indent_level : 4, + quote_keys : false, + space_colon : true, + ascii_only : false, + unescape_regexps : false, + inline_script : false, + width : 80, + max_line_len : 32000, + beautify : false, + source_map : null, + bracketize : false, + semicolons : true, + comments : false, + shebang : true, + preserve_line : false, + screw_ie8 : false, + preamble : null, + quote_style : 0, + keep_quoted_props: false + }, true); + + var indentation = 0; + var current_col = 0; + var current_line = 1; + var current_pos = 0; + var OUTPUT = ""; + + function to_ascii(str, identifier) { + return str.replace(/[\u0000-\u001f\u007f-\uffff]/g, function(ch) { + var code = ch.charCodeAt(0).toString(16); + if (code.length <= 2 && !identifier) { + while (code.length < 2) code = "0" + code; + return "\\x" + code; + } else { + while (code.length < 4) code = "0" + code; + return "\\u" + code; + } + }); + }; + + function make_string(str, quote) { + var dq = 0, sq = 0; + str = str.replace(/[\\\b\f\n\r\v\t\x22\x27\u2028\u2029\0\ufeff]/g, + function(s, i){ + switch (s) { + case '"': ++dq; return '"'; + case "'": ++sq; return "'"; + case "\\": return "\\\\"; + case "\n": return "\\n"; + case "\r": return "\\r"; + case "\t": return "\\t"; + case "\b": return "\\b"; + case "\f": return "\\f"; + case "\x0B": return options.screw_ie8 ? "\\v" : "\\x0B"; + case "\u2028": return "\\u2028"; + case "\u2029": return "\\u2029"; + case "\ufeff": return "\\ufeff"; + case "\0": + return /[0-7]/.test(str.charAt(i+1)) ? "\\x00" : "\\0"; + } + return s; + }); + function quote_single() { + return "'" + str.replace(/\x27/g, "\\'") + "'"; + } + function quote_double() { + return '"' + str.replace(/\x22/g, '\\"') + '"'; + } + if (options.ascii_only) str = to_ascii(str); + switch (options.quote_style) { + case 1: + return quote_single(); + case 2: + return quote_double(); + case 3: + return quote == "'" ? quote_single() : quote_double(); + default: + return dq > sq ? 
quote_single() : quote_double(); + } + }; + + function encode_string(str, quote) { + var ret = make_string(str, quote); + if (options.inline_script) { + ret = ret.replace(/<\x2fscript([>\/\t\n\f\r ])/gi, "<\\/script$1"); + ret = ret.replace(/\x3c!--/g, "\\x3c!--"); + ret = ret.replace(/--\x3e/g, "--\\x3e"); + } + return ret; + }; + + function make_name(name) { + name = name.toString(); + if (options.ascii_only) + name = to_ascii(name, true); + return name; + }; + + function make_indent(back) { + return repeat_string(" ", options.indent_start + indentation - back * options.indent_level); + }; + + /* -----[ beautification/minification ]----- */ + + var might_need_space = false; + var might_need_semicolon = false; + var last = null; + + function last_char() { + return last.charAt(last.length - 1); + }; + + function maybe_newline() { + if (options.max_line_len && current_col > options.max_line_len) + print("\n"); + }; + + var requireSemicolonChars = makePredicate("( [ + * / - , ."); + + function print(str) { + str = String(str); + var ch = str.charAt(0); + if (might_need_semicolon) { + might_need_semicolon = false; + + if ((!ch || ";}".indexOf(ch) < 0) && !/[;]$/.test(last)) { + if (options.semicolons || requireSemicolonChars(ch)) { + OUTPUT += ";"; + current_col++; + current_pos++; + } else { + OUTPUT += "\n"; + current_pos++; + current_line++; + current_col = 0; + + if (/^\s+$/.test(str)) { + // reset the semicolon flag, since we didn't print one + // now and might still have to later + might_need_semicolon = true; + } + } + + if (!options.beautify) + might_need_space = false; + } + } + + if (!options.beautify && options.preserve_line && stack[stack.length - 1]) { + var target_line = stack[stack.length - 1].start.line; + while (current_line < target_line) { + OUTPUT += "\n"; + current_pos++; + current_line++; + current_col = 0; + might_need_space = false; + } + } + + if (might_need_space) { + var prev = last_char(); + if ((is_identifier_char(prev) + && (is_identifier_char(ch) || ch == "\\")) + || (/^[\+\-\/]$/.test(ch) && ch == prev)) + { + OUTPUT += " "; + current_col++; + current_pos++; + } + might_need_space = false; + } + var a = str.split(/\r?\n/), n = a.length - 1; + current_line += n; + if (n == 0) { + current_col += a[n].length; + } else { + current_col = a[n].length; + } + current_pos += str.length; + last = str; + OUTPUT += str; + }; + + var space = options.beautify ? function() { + print(" "); + } : function() { + might_need_space = true; + }; + + var indent = options.beautify ? function(half) { + if (options.beautify) { + print(make_indent(half ? 0.5 : 0)); + } + } : noop; + + var with_indent = options.beautify ? function(col, cont) { + if (col === true) col = next_indent(); + var save_indentation = indentation; + indentation = col; + var ret = cont(); + indentation = save_indentation; + return ret; + } : function(col, cont) { return cont() }; + + var newline = options.beautify ? function() { + print("\n"); + } : maybe_newline; + + var semicolon = options.beautify ? 
function() { + print(";"); + } : function() { + might_need_semicolon = true; + }; + + function force_semicolon() { + might_need_semicolon = false; + print(";"); + }; + + function next_indent() { + return indentation + options.indent_level; + }; + + function with_block(cont) { + var ret; + print("{"); + newline(); + with_indent(next_indent(), function(){ + ret = cont(); + }); + indent(); + print("}"); + return ret; + }; + + function with_parens(cont) { + print("("); + //XXX: still nice to have that for argument lists + //var ret = with_indent(current_col, cont); + var ret = cont(); + print(")"); + return ret; + }; + + function with_square(cont) { + print("["); + //var ret = with_indent(current_col, cont); + var ret = cont(); + print("]"); + return ret; + }; + + function comma() { + print(","); + space(); + }; + + function colon() { + print(":"); + if (options.space_colon) space(); + }; + + var add_mapping = options.source_map ? function(token, name) { + try { + if (token) options.source_map.add( + token.file || "?", + current_line, current_col, + token.line, token.col, + (!name && token.type == "name") ? token.value : name + ); + } catch(ex) { + AST_Node.warn("Couldn't figure out mapping for {file}:{line},{col} → {cline},{ccol} [{name}]", { + file: token.file, + line: token.line, + col: token.col, + cline: current_line, + ccol: current_col, + name: name || "" + }) + } + } : noop; + + function get() { + return OUTPUT; + }; + + if (options.preamble) { + print(options.preamble.replace(/\r\n?|[\n\u2028\u2029]|\s*$/g, "\n")); + } + + var stack = []; + return { + get : get, + toString : get, + indent : indent, + indentation : function() { return indentation }, + current_width : function() { return current_col - indentation }, + should_break : function() { return options.width && this.current_width() >= options.width }, + newline : newline, + print : print, + space : space, + comma : comma, + colon : colon, + last : function() { return last }, + semicolon : semicolon, + force_semicolon : force_semicolon, + to_ascii : to_ascii, + print_name : function(name) { print(make_name(name)) }, + print_string : function(str, quote, escape_directive) { + var encoded = encode_string(str, quote); + if (escape_directive === true && encoded.indexOf("\\") === -1) { + // Insert semicolons to break directive prologue + if (!EXPECT_DIRECTIVE.test(OUTPUT)) { + force_semicolon(); + } + force_semicolon(); + } + print(encoded); + }, + encode_string : encode_string, + next_indent : next_indent, + with_indent : with_indent, + with_block : with_block, + with_parens : with_parens, + with_square : with_square, + add_mapping : add_mapping, + option : function(opt) { return options[opt] }, + line : function() { return current_line }, + col : function() { return current_col }, + pos : function() { return current_pos }, + push_node : function(node) { stack.push(node) }, + pop_node : function() { return stack.pop() }, + stack : function() { return stack }, + parent : function(n) { + return stack[stack.length - 2 - (n || 0)]; + } + }; + +}; + +/* -----[ code generators ]----- */ + +(function(){ + + /* -----[ utils ]----- */ + + function DEFPRINT(nodetype, generator) { + nodetype.DEFMETHOD("_codegen", generator); + }; + + var use_asm = false; + var in_directive = false; + + AST_Node.DEFMETHOD("print", function(stream, force_parens){ + var self = this, generator = self._codegen, prev_use_asm = use_asm; + if (self instanceof AST_Directive && self.value == "use asm" && stream.parent() instanceof AST_Scope) { + use_asm = true; + } + 
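+ // Flow: doit() emits any attached comments, records source-map info, then runs
+ // the node's _codegen; the output is wrapped in parens when force_parens is set
+ // or needs_parens() reports that the surrounding context requires them.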
function doit() { + self.add_comments(stream); + self.add_source_map(stream); + generator(self, stream); + } + stream.push_node(self); + if (force_parens || self.needs_parens(stream)) { + stream.with_parens(doit); + } else { + doit(); + } + stream.pop_node(); + if (self instanceof AST_Scope) { + use_asm = prev_use_asm; + } + }); + + AST_Node.DEFMETHOD("print_to_string", function(options){ + var s = OutputStream(options); + if (!options) s._readonly = true; + this.print(s); + return s.get(); + }); + + /* -----[ comments ]----- */ + + AST_Node.DEFMETHOD("add_comments", function(output){ + if (output._readonly) return; + var c = output.option("comments"), self = this; + var start = self.start; + if (start && !start._comments_dumped) { + start._comments_dumped = true; + var comments = start.comments_before || []; + + // XXX: ugly fix for https://github.com/mishoo/UglifyJS2/issues/112 + // and https://github.com/mishoo/UglifyJS2/issues/372 + if (self instanceof AST_Exit && self.value) { + self.value.walk(new TreeWalker(function(node){ + if (node.start && node.start.comments_before) { + comments = comments.concat(node.start.comments_before); + node.start.comments_before = []; + } + if (node instanceof AST_Function || + node instanceof AST_Array || + node instanceof AST_Object) + { + return true; // don't go inside. + } + })); + } + + if (!c) { + comments = comments.filter(function(comment) { + return comment.type == "comment5"; + }); + } else if (c.test) { + comments = comments.filter(function(comment){ + return comment.type == "comment5" || c.test(comment.value); + }); + } else if (typeof c == "function") { + comments = comments.filter(function(comment){ + return comment.type == "comment5" || c(self, comment); + }); + } + + // Keep single line comments after nlb, after nlb + if (!output.option("beautify") && comments.length > 0 && + /comment[134]/.test(comments[0].type) && + output.col() !== 0 && comments[0].nlb) + { + output.print("\n"); + } + + comments.forEach(function(c){ + if (/comment[134]/.test(c.type)) { + output.print("//" + c.value + "\n"); + output.indent(); + } + else if (c.type == "comment2") { + output.print("/*" + c.value + "*/"); + if (start.nlb) { + output.print("\n"); + output.indent(); + } else { + output.space(); + } + } + else if (output.pos() === 0 && c.type == "comment5" && output.option("shebang")) { + output.print("#!" + c.value + "\n"); + output.indent(); + } + }); + } + }); + + /* -----[ PARENTHESES ]----- */ + + function PARENS(nodetype, func) { + if (Array.isArray(nodetype)) { + nodetype.forEach(function(nodetype){ + PARENS(nodetype, func); + }); + } else { + nodetype.DEFMETHOD("needs_parens", func); + } + }; + + PARENS(AST_Node, function(){ + return false; + }); + + // a function expression needs parens around it when it's provably + // the first token to appear in a statement. + PARENS(AST_Function, function(output){ + return first_in_statement(output); + }); + + // same goes for an object literal, because otherwise it would be + // interpreted as a block of code. 
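+ // e.g. an expression statement starting with {a:1} would otherwise parse as a
+ // block, so it is emitted as ({a:1}).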
+ PARENS(AST_Object, function(output){ + return first_in_statement(output); + }); + + PARENS([ AST_Unary, AST_Undefined ], function(output){ + var p = output.parent(); + return p instanceof AST_PropAccess && p.expression === this + || p instanceof AST_New; + }); + + PARENS(AST_Seq, function(output){ + var p = output.parent(); + return p instanceof AST_Call // (foo, bar)() or foo(1, (2, 3), 4) + || p instanceof AST_Unary // !(foo, bar, baz) + || p instanceof AST_Binary // 1 + (2, 3) + 4 ==> 8 + || p instanceof AST_VarDef // var a = (1, 2), b = a + a; ==> b == 4 + || p instanceof AST_PropAccess // (1, {foo:2}).foo or (1, {foo:2})["foo"] ==> 2 + || p instanceof AST_Array // [ 1, (2, 3), 4 ] ==> [ 1, 3, 4 ] + || p instanceof AST_ObjectProperty // { foo: (1, 2) }.foo ==> 2 + || p instanceof AST_Conditional /* (false, true) ? (a = 10, b = 20) : (c = 30) + * ==> 20 (side effect, set a := 10 and b := 20) */ + ; + }); + + PARENS(AST_Binary, function(output){ + var p = output.parent(); + // (foo && bar)() + if (p instanceof AST_Call && p.expression === this) + return true; + // typeof (foo && bar) + if (p instanceof AST_Unary) + return true; + // (foo && bar)["prop"], (foo && bar).prop + if (p instanceof AST_PropAccess && p.expression === this) + return true; + // this deals with precedence: 3 * (2 + 1) + if (p instanceof AST_Binary) { + var po = p.operator, pp = PRECEDENCE[po]; + var so = this.operator, sp = PRECEDENCE[so]; + if (pp > sp + || (pp == sp + && this === p.right)) { + return true; + } + } + }); + + PARENS(AST_PropAccess, function(output){ + var p = output.parent(); + if (p instanceof AST_New && p.expression === this) { + // i.e. new (foo.bar().baz) + // + // if there's one call into this subtree, then we need + // parens around it too, otherwise the call will be + // interpreted as passing the arguments to the upper New + // expression. + try { + this.walk(new TreeWalker(function(node){ + if (node instanceof AST_Call) throw p; + })); + } catch(ex) { + if (ex !== p) throw ex; + return true; + } + } + }); + + PARENS(AST_Call, function(output){ + var p = output.parent(), p1; + if (p instanceof AST_New && p.expression === this) + return true; + + // workaround for Safari bug. + // https://bugs.webkit.org/show_bug.cgi?id=123506 + return this.expression instanceof AST_Function + && p instanceof AST_PropAccess + && p.expression === this + && (p1 = output.parent(1)) instanceof AST_Assign + && p1.left === p; + }); + + PARENS(AST_New, function(output){ + var p = output.parent(); + if (!need_constructor_parens(this, output) + && (p instanceof AST_PropAccess // (new Date).getTime(), (new Date)["getTime"]() + || p instanceof AST_Call && p.expression === this)) // (new foo)(bar) + return true; + }); + + PARENS(AST_Number, function(output){ + var p = output.parent(); + if (p instanceof AST_PropAccess && p.expression === this) { + var value = this.getValue(); + if (value < 0 || /^0/.test(make_num(value))) { + return true; + } + } + }); + + PARENS([ AST_Assign, AST_Conditional ], function (output){ + var p = output.parent(); + // !(a = false) → true + if (p instanceof AST_Unary) + return true; + // 1 + (a = 2) + 3 → 6, side effect setting a = 2 + if (p instanceof AST_Binary && !(p instanceof AST_Assign)) + return true; + // (a = func)() —or— new (a = Object)() + if (p instanceof AST_Call && p.expression === this) + return true; + // (a = foo) ? 
bar : baz + if (p instanceof AST_Conditional && p.condition === this) + return true; + // (a = foo)["prop"] —or— (a = foo).prop + if (p instanceof AST_PropAccess && p.expression === this) + return true; + }); + + /* -----[ PRINTERS ]----- */ + + DEFPRINT(AST_Directive, function(self, output){ + output.print_string(self.value, self.quote); + output.semicolon(); + }); + DEFPRINT(AST_Debugger, function(self, output){ + output.print("debugger"); + output.semicolon(); + }); + + /* -----[ statements ]----- */ + + function display_body(body, is_toplevel, output, allow_directives) { + var last = body.length - 1; + in_directive = allow_directives; + body.forEach(function(stmt, i){ + if (in_directive === true && !(stmt instanceof AST_Directive || + stmt instanceof AST_EmptyStatement || + (stmt instanceof AST_SimpleStatement && stmt.body instanceof AST_String) + )) { + in_directive = false; + } + if (!(stmt instanceof AST_EmptyStatement)) { + output.indent(); + stmt.print(output); + if (!(i == last && is_toplevel)) { + output.newline(); + if (is_toplevel) output.newline(); + } + } + if (in_directive === true && + stmt instanceof AST_SimpleStatement && + stmt.body instanceof AST_String + ) { + in_directive = false; + } + }); + in_directive = false; + }; + + AST_StatementWithBody.DEFMETHOD("_do_print_body", function(output){ + force_statement(this.body, output); + }); + + DEFPRINT(AST_Statement, function(self, output){ + self.body.print(output); + output.semicolon(); + }); + DEFPRINT(AST_Toplevel, function(self, output){ + display_body(self.body, true, output, true); + output.print(""); + }); + DEFPRINT(AST_LabeledStatement, function(self, output){ + self.label.print(output); + output.colon(); + self.body.print(output); + }); + DEFPRINT(AST_SimpleStatement, function(self, output){ + self.body.print(output); + output.semicolon(); + }); + function print_bracketed(body, output, allow_directives) { + if (body.length > 0) output.with_block(function(){ + display_body(body, false, output, allow_directives); + }); + else output.print("{}"); + }; + DEFPRINT(AST_BlockStatement, function(self, output){ + print_bracketed(self.body, output); + }); + DEFPRINT(AST_EmptyStatement, function(self, output){ + output.semicolon(); + }); + DEFPRINT(AST_Do, function(self, output){ + output.print("do"); + output.space(); + self._do_print_body(output); + output.space(); + output.print("while"); + output.space(); + output.with_parens(function(){ + self.condition.print(output); + }); + output.semicolon(); + }); + DEFPRINT(AST_While, function(self, output){ + output.print("while"); + output.space(); + output.with_parens(function(){ + self.condition.print(output); + }); + output.space(); + self._do_print_body(output); + }); + DEFPRINT(AST_For, function(self, output){ + output.print("for"); + output.space(); + output.with_parens(function(){ + if (self.init && !(self.init instanceof AST_EmptyStatement)) { + if (self.init instanceof AST_Definitions) { + self.init.print(output); + } else { + parenthesize_for_noin(self.init, output, true); + } + output.print(";"); + output.space(); + } else { + output.print(";"); + } + if (self.condition) { + self.condition.print(output); + output.print(";"); + output.space(); + } else { + output.print(";"); + } + if (self.step) { + self.step.print(output); + } + }); + output.space(); + self._do_print_body(output); + }); + DEFPRINT(AST_ForIn, function(self, output){ + output.print("for"); + output.space(); + output.with_parens(function(){ + self.init.print(output); + output.space(); + 
output.print("in"); + output.space(); + self.object.print(output); + }); + output.space(); + self._do_print_body(output); + }); + DEFPRINT(AST_With, function(self, output){ + output.print("with"); + output.space(); + output.with_parens(function(){ + self.expression.print(output); + }); + output.space(); + self._do_print_body(output); + }); + + /* -----[ functions ]----- */ + AST_Lambda.DEFMETHOD("_do_print", function(output, nokeyword){ + var self = this; + if (!nokeyword) { + output.print("function"); + } + if (self.name) { + output.space(); + self.name.print(output); + } + output.with_parens(function(){ + self.argnames.forEach(function(arg, i){ + if (i) output.comma(); + arg.print(output); + }); + }); + output.space(); + print_bracketed(self.body, output, true); + }); + DEFPRINT(AST_Lambda, function(self, output){ + self._do_print(output); + }); + + /* -----[ exits ]----- */ + AST_Exit.DEFMETHOD("_do_print", function(output, kind){ + output.print(kind); + if (this.value) { + output.space(); + this.value.print(output); + } + output.semicolon(); + }); + DEFPRINT(AST_Return, function(self, output){ + self._do_print(output, "return"); + }); + DEFPRINT(AST_Throw, function(self, output){ + self._do_print(output, "throw"); + }); + + /* -----[ loop control ]----- */ + AST_LoopControl.DEFMETHOD("_do_print", function(output, kind){ + output.print(kind); + if (this.label) { + output.space(); + this.label.print(output); + } + output.semicolon(); + }); + DEFPRINT(AST_Break, function(self, output){ + self._do_print(output, "break"); + }); + DEFPRINT(AST_Continue, function(self, output){ + self._do_print(output, "continue"); + }); + + /* -----[ if ]----- */ + function make_then(self, output) { + if (output.option("bracketize")) { + make_block(self.body, output); + return; + } + // The squeezer replaces "block"-s that contain only a single + // statement with the statement itself; technically, the AST + // is correct, but this can create problems when we output an + // IF having an ELSE clause where the THEN clause ends in an + // IF *without* an ELSE block (then the outer ELSE would refer + // to the inner IF). This function checks for this case and + // adds the block brackets if needed. + if (!self.body) + return output.force_semicolon(); + if (self.body instanceof AST_Do + && !output.option("screw_ie8")) { + // https://github.com/mishoo/UglifyJS/issues/#issue/57 IE + // croaks with "syntax error" on code like this: if (foo) + // do ... while(cond); else ... 
we need block brackets + // around do/while + make_block(self.body, output); + return; + } + var b = self.body; + while (true) { + if (b instanceof AST_If) { + if (!b.alternative) { + make_block(self.body, output); + return; + } + b = b.alternative; + } + else if (b instanceof AST_StatementWithBody) { + b = b.body; + } + else break; + } + force_statement(self.body, output); + }; + DEFPRINT(AST_If, function(self, output){ + output.print("if"); + output.space(); + output.with_parens(function(){ + self.condition.print(output); + }); + output.space(); + if (self.alternative) { + make_then(self, output); + output.space(); + output.print("else"); + output.space(); + force_statement(self.alternative, output); + } else { + self._do_print_body(output); + } + }); + + /* -----[ switch ]----- */ + DEFPRINT(AST_Switch, function(self, output){ + output.print("switch"); + output.space(); + output.with_parens(function(){ + self.expression.print(output); + }); + output.space(); + if (self.body.length > 0) output.with_block(function(){ + self.body.forEach(function(stmt, i){ + if (i) output.newline(); + output.indent(true); + stmt.print(output); + }); + }); + else output.print("{}"); + }); + AST_SwitchBranch.DEFMETHOD("_do_print_body", function(output){ + if (this.body.length > 0) { + output.newline(); + this.body.forEach(function(stmt){ + output.indent(); + stmt.print(output); + output.newline(); + }); + } + }); + DEFPRINT(AST_Default, function(self, output){ + output.print("default:"); + self._do_print_body(output); + }); + DEFPRINT(AST_Case, function(self, output){ + output.print("case"); + output.space(); + self.expression.print(output); + output.print(":"); + self._do_print_body(output); + }); + + /* -----[ exceptions ]----- */ + DEFPRINT(AST_Try, function(self, output){ + output.print("try"); + output.space(); + print_bracketed(self.body, output); + if (self.bcatch) { + output.space(); + self.bcatch.print(output); + } + if (self.bfinally) { + output.space(); + self.bfinally.print(output); + } + }); + DEFPRINT(AST_Catch, function(self, output){ + output.print("catch"); + output.space(); + output.with_parens(function(){ + self.argname.print(output); + }); + output.space(); + print_bracketed(self.body, output); + }); + DEFPRINT(AST_Finally, function(self, output){ + output.print("finally"); + output.space(); + print_bracketed(self.body, output); + }); + + /* -----[ var/const ]----- */ + AST_Definitions.DEFMETHOD("_do_print", function(output, kind){ + output.print(kind); + output.space(); + this.definitions.forEach(function(def, i){ + if (i) output.comma(); + def.print(output); + }); + var p = output.parent(); + var in_for = p instanceof AST_For || p instanceof AST_ForIn; + var avoid_semicolon = in_for && p.init === this; + if (!avoid_semicolon) + output.semicolon(); + }); + DEFPRINT(AST_Var, function(self, output){ + self._do_print(output, "var"); + }); + DEFPRINT(AST_Const, function(self, output){ + self._do_print(output, "const"); + }); + + function parenthesize_for_noin(node, output, noin) { + if (!noin) node.print(output); + else try { + // need to take some precautions here: + // https://github.com/mishoo/UglifyJS2/issues/60 + node.walk(new TreeWalker(function(node){ + if (node instanceof AST_Binary && node.operator == "in") + throw output; + })); + node.print(output); + } catch(ex) { + if (ex !== output) throw ex; + node.print(output, true); + } + }; + + DEFPRINT(AST_VarDef, function(self, output){ + self.name.print(output); + if (self.value) { + output.space(); + output.print("="); + 
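+ // The initializer printed below goes through parenthesize_for_noin when this
+ // declaration sits in a for/for-in head, so a literal `in` operator inside the
+ // value (e.g. var x = ("a" in b)) is not mistaken for the for-in keyword.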
output.space(); + var p = output.parent(1); + var noin = p instanceof AST_For || p instanceof AST_ForIn; + parenthesize_for_noin(self.value, output, noin); + } + }); + + /* -----[ other expressions ]----- */ + DEFPRINT(AST_Call, function(self, output){ + self.expression.print(output); + if (self instanceof AST_New && !need_constructor_parens(self, output)) + return; + output.with_parens(function(){ + self.args.forEach(function(expr, i){ + if (i) output.comma(); + expr.print(output); + }); + }); + }); + DEFPRINT(AST_New, function(self, output){ + output.print("new"); + output.space(); + AST_Call.prototype._codegen(self, output); + }); + + AST_Seq.DEFMETHOD("_do_print", function(output){ + this.car.print(output); + if (this.cdr) { + output.comma(); + if (output.should_break()) { + output.newline(); + output.indent(); + } + this.cdr.print(output); + } + }); + DEFPRINT(AST_Seq, function(self, output){ + self._do_print(output); + // var p = output.parent(); + // if (p instanceof AST_Statement) { + // output.with_indent(output.next_indent(), function(){ + // self._do_print(output); + // }); + // } else { + // self._do_print(output); + // } + }); + DEFPRINT(AST_Dot, function(self, output){ + var expr = self.expression; + expr.print(output); + if (expr instanceof AST_Number && expr.getValue() >= 0) { + if (!/[xa-f.)]/i.test(output.last())) { + output.print("."); + } + } + output.print("."); + // the name after dot would be mapped about here. + output.add_mapping(self.end); + output.print_name(self.property); + }); + DEFPRINT(AST_Sub, function(self, output){ + self.expression.print(output); + output.print("["); + self.property.print(output); + output.print("]"); + }); + DEFPRINT(AST_UnaryPrefix, function(self, output){ + var op = self.operator; + output.print(op); + if (/^[a-z]/i.test(op) + || (/[+-]$/.test(op) + && self.expression instanceof AST_UnaryPrefix + && /^[+-]/.test(self.expression.operator))) { + output.space(); + } + self.expression.print(output); + }); + DEFPRINT(AST_UnaryPostfix, function(self, output){ + self.expression.print(output); + output.print(self.operator); + }); + DEFPRINT(AST_Binary, function(self, output){ + var op = self.operator; + self.left.print(output); + if (op[0] == ">" /* ">>" ">>>" ">" ">=" */ + && self.left instanceof AST_UnaryPostfix + && self.left.operator == "--") { + // space is mandatory to avoid outputting --> + output.print(" "); + } else { + // the space is optional depending on "beautify" + output.space(); + } + output.print(op); + if ((op == "<" || op == "<<") + && self.right instanceof AST_UnaryPrefix + && self.right.operator == "!" 
+ && self.right.expression instanceof AST_UnaryPrefix + && self.right.expression.operator == "--") { + // space is mandatory to avoid outputting ") && S.newline_before) { + forward(3); + skip_line_comment("comment4"); + continue; + } + } + var ch = peek(); + if (!ch) return token("eof"); + var code = ch.charCodeAt(0); + switch (code) { + case 34: case 39: return read_string(ch); + case 46: return handle_dot(); + case 47: { + var tok = handle_slash(); + if (tok === next_token) continue; + return tok; + } + } + if (is_digit(code)) return read_num(); + if (PUNC_CHARS(ch)) return token("punc", next()); + if (OPERATOR_CHARS(ch)) return read_operator(); + if (code == 92 || is_identifier_start(code)) return read_word(); + if (shebang) { + if (S.pos == 0 && looking_at("#!")) { + forward(2); + skip_line_comment("comment5"); + continue; + } + } + break; + } + parse_error("Unexpected character '" + ch + "'"); + }; + + next_token.context = function(nc) { + if (nc) S = nc; + return S; + }; + + next_token.add_directive = function(directive) { + S.directive_stack[S.directive_stack.length - 1].push(directive); + + if (S.directives[directive] === undefined) { + S.directives[directive] = 1; + } else { + S.directives[directive]++; + } + } + + next_token.push_directives_stack = function() { + S.directive_stack.push([]); + } + + next_token.pop_directives_stack = function() { + var directives = S.directive_stack[S.directive_stack.length - 1]; + + for (var i = 0; i < directives.length; i++) { + S.directives[directives[i]]--; + } + + S.directive_stack.pop(); + } + + next_token.has_directive = function(directive) { + return S.directives[directive] !== undefined && + S.directives[directive] > 0; + } + + return next_token; + +}; + +/* -----[ Parser (constants) ]----- */ + +var UNARY_PREFIX = makePredicate([ + "typeof", + "void", + "delete", + "--", + "++", + "!", + "~", + "-", + "+" +]); + +var UNARY_POSTFIX = makePredicate([ "--", "++" ]); + +var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]); + +var PRECEDENCE = (function(a, ret){ + for (var i = 0; i < a.length; ++i) { + var b = a[i]; + for (var j = 0; j < b.length; ++j) { + ret[b[j]] = i + 1; + } + } + return ret; +})( + [ + ["||"], + ["&&"], + ["|"], + ["^"], + ["&"], + ["==", "===", "!=", "!=="], + ["<", ">", "<=", ">=", "in", "instanceof"], + [">>", "<<", ">>>"], + ["+", "-"], + ["*", "/", "%"] + ], + {} +); + +var STATEMENTS_WITH_LABELS = array_to_hash([ "for", "do", "while", "switch" ]); + +var ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "name" ]); + +/* -----[ Parser ]----- */ + +function parse($TEXT, options) { + + options = defaults(options, { + strict : false, + filename : null, + toplevel : null, + expression : false, + html5_comments : true, + bare_returns : false, + shebang : true, + }); + + var S = { + input : (typeof $TEXT == "string" + ? 
tokenizer($TEXT, options.filename, + options.html5_comments, options.shebang) + : $TEXT), + token : null, + prev : null, + peeked : null, + in_function : 0, + in_directives : true, + in_loop : 0, + labels : [] + }; + + S.token = next(); + + function is(type, value) { + return is_token(S.token, type, value); + }; + + function peek() { return S.peeked || (S.peeked = S.input()); }; + + function next() { + S.prev = S.token; + if (S.peeked) { + S.token = S.peeked; + S.peeked = null; + } else { + S.token = S.input(); + } + S.in_directives = S.in_directives && ( + S.token.type == "string" || is("punc", ";") + ); + return S.token; + }; + + function prev() { + return S.prev; + }; + + function croak(msg, line, col, pos) { + var ctx = S.input.context(); + js_error(msg, + ctx.filename, + line != null ? line : ctx.tokline, + col != null ? col : ctx.tokcol, + pos != null ? pos : ctx.tokpos); + }; + + function token_error(token, msg) { + croak(msg, token.line, token.col); + }; + + function unexpected(token) { + if (token == null) + token = S.token; + token_error(token, "SyntaxError: Unexpected token: " + token.type + " (" + token.value + ")"); + }; + + function expect_token(type, val) { + if (is(type, val)) { + return next(); + } + token_error(S.token, "SyntaxError: Unexpected token " + S.token.type + " «" + S.token.value + "»" + ", expected " + type + " «" + val + "»"); + }; + + function expect(punc) { return expect_token("punc", punc); }; + + function can_insert_semicolon() { + return !options.strict && ( + S.token.nlb || is("eof") || is("punc", "}") + ); + }; + + function semicolon(optional) { + if (is("punc", ";")) next(); + else if (!optional && !can_insert_semicolon()) unexpected(); + }; + + function parenthesised() { + expect("("); + var exp = expression(true); + expect(")"); + return exp; + }; + + function embed_tokens(parser) { + return function() { + var start = S.token; + var expr = parser(); + var end = prev(); + expr.start = start; + expr.end = end; + return expr; + }; + }; + + function handle_regexp() { + if (is("operator", "/") || is("operator", "/=")) { + S.peeked = null; + S.token = S.input(S.token.value.substr(1)); // force regexp + } + }; + + var statement = embed_tokens(function() { + var tmp; + handle_regexp(); + switch (S.token.type) { + case "string": + var dir = false; + if (S.in_directives === true) { + if ((is_token(peek(), "punc", ";") || peek().nlb) && S.token.raw.indexOf("\\") === -1) { + S.input.add_directive(S.token.value); + } else { + S.in_directives = false; + } + } + var dir = S.in_directives, stat = simple_statement(); + if (dir) { + return new AST_Directive({ + start : stat.body.start, + end : stat.body.end, + quote : stat.body.quote, + value : stat.body.value, + }); + } + return stat; + case "num": + case "regexp": + case "operator": + case "atom": + return simple_statement(); + + case "name": + return is_token(peek(), "punc", ":") + ? 
labeled_statement() + : simple_statement(); + + case "punc": + switch (S.token.value) { + case "{": + return new AST_BlockStatement({ + start : S.token, + body : block_(), + end : prev() + }); + case "[": + case "(": + return simple_statement(); + case ";": + S.in_directives = false; + next(); + return new AST_EmptyStatement(); + default: + unexpected(); + } + + case "keyword": + switch (tmp = S.token.value, next(), tmp) { + case "break": + return break_cont(AST_Break); + + case "continue": + return break_cont(AST_Continue); + + case "debugger": + semicolon(); + return new AST_Debugger(); + + case "do": + return new AST_Do({ + body : in_loop(statement), + condition : (expect_token("keyword", "while"), tmp = parenthesised(), semicolon(true), tmp) + }); + + case "while": + return new AST_While({ + condition : parenthesised(), + body : in_loop(statement) + }); + + case "for": + return for_(); + + case "function": + return function_(AST_Defun); + + case "if": + return if_(); + + case "return": + if (S.in_function == 0 && !options.bare_returns) + croak("SyntaxError: 'return' outside of function"); + return new AST_Return({ + value: ( is("punc", ";") + ? (next(), null) + : can_insert_semicolon() + ? null + : (tmp = expression(true), semicolon(), tmp) ) + }); + + case "switch": + return new AST_Switch({ + expression : parenthesised(), + body : in_loop(switch_body_) + }); + + case "throw": + if (S.token.nlb) + croak("SyntaxError: Illegal newline after 'throw'"); + return new AST_Throw({ + value: (tmp = expression(true), semicolon(), tmp) + }); + + case "try": + return try_(); + + case "var": + return tmp = var_(), semicolon(), tmp; + + case "const": + return tmp = const_(), semicolon(), tmp; + + case "with": + if (S.input.has_directive("use strict")) { + croak("SyntaxError: Strict mode may not include a with statement"); + } + return new AST_With({ + expression : parenthesised(), + body : statement() + }); + + default: + unexpected(); + } + } + }); + + function labeled_statement() { + var label = as_symbol(AST_Label); + if (find_if(function(l){ return l.name == label.name }, S.labels)) { + // ECMA-262, 12.12: An ECMAScript program is considered + // syntactically incorrect if it contains a + // LabelledStatement that is enclosed by a + // LabelledStatement with the same Identifier as label. + croak("SyntaxError: Label " + label.name + " defined twice"); + } + expect(":"); + S.labels.push(label); + var stat = statement(); + S.labels.pop(); + if (!(stat instanceof AST_IterationStatement)) { + // check for `continue` that refers to this label. + // those should be reported as syntax errors. 
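        // For example (illustrative code, not taken from this diff):
        //     out: { out: break out; }    // rejected above: "Label out defined twice"
        //     blk: { continue blk; }      // rejected just below: the continue
        //                                 // target is not an IterationStatement
        // while `blk: while (cond) { continue blk; }` is accepted, because the
        // labelled body is a loop.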
+ // https://github.com/mishoo/UglifyJS2/issues/287 + label.references.forEach(function(ref){ + if (ref instanceof AST_Continue) { + ref = ref.label.start; + croak("SyntaxError: Continue label `" + label.name + "` refers to non-IterationStatement.", + ref.line, ref.col, ref.pos); + } + }); + } + return new AST_LabeledStatement({ body: stat, label: label }); + }; + + function simple_statement(tmp) { + return new AST_SimpleStatement({ body: (tmp = expression(true), semicolon(), tmp) }); + }; + + function break_cont(type) { + var label = null, ldef; + if (!can_insert_semicolon()) { + label = as_symbol(AST_LabelRef, true); + } + if (label != null) { + ldef = find_if(function(l){ return l.name == label.name }, S.labels); + if (!ldef) + croak("SyntaxError: Undefined label " + label.name); + label.thedef = ldef; + } + else if (S.in_loop == 0) + croak("SyntaxError: " + type.TYPE + " not inside a loop or switch"); + semicolon(); + var stat = new type({ label: label }); + if (ldef) ldef.references.push(stat); + return stat; + }; + + function for_() { + expect("("); + var init = null; + if (!is("punc", ";")) { + init = is("keyword", "var") + ? (next(), var_(true)) + : expression(true, true); + if (is("operator", "in")) { + if (init instanceof AST_Var && init.definitions.length > 1) + croak("SyntaxError: Only one variable declaration allowed in for..in loop"); + next(); + return for_in(init); + } + } + return regular_for(init); + }; + + function regular_for(init) { + expect(";"); + var test = is("punc", ";") ? null : expression(true); + expect(";"); + var step = is("punc", ")") ? null : expression(true); + expect(")"); + return new AST_For({ + init : init, + condition : test, + step : step, + body : in_loop(statement) + }); + }; + + function for_in(init) { + var lhs = init instanceof AST_Var ? init.definitions[0].name : null; + var obj = expression(true); + expect(")"); + return new AST_ForIn({ + init : init, + name : lhs, + object : obj, + body : in_loop(statement) + }); + }; + + var function_ = function(ctor) { + var in_statement = ctor === AST_Defun; + var name = is("name") ? as_symbol(in_statement ? 
AST_SymbolDefun : AST_SymbolLambda) : null; + if (in_statement && !name) + unexpected(); + expect("("); + return new ctor({ + name: name, + argnames: (function(first, a){ + while (!is("punc", ")")) { + if (first) first = false; else expect(","); + a.push(as_symbol(AST_SymbolFunarg)); + } + next(); + return a; + })(true, []), + body: (function(loop, labels){ + ++S.in_function; + S.in_directives = true; + S.input.push_directives_stack(); + S.in_loop = 0; + S.labels = []; + var a = block_(); + S.input.pop_directives_stack(); + --S.in_function; + S.in_loop = loop; + S.labels = labels; + return a; + })(S.in_loop, S.labels) + }); + }; + + function if_() { + var cond = parenthesised(), body = statement(), belse = null; + if (is("keyword", "else")) { + next(); + belse = statement(); + } + return new AST_If({ + condition : cond, + body : body, + alternative : belse + }); + }; + + function block_() { + expect("{"); + var a = []; + while (!is("punc", "}")) { + if (is("eof")) unexpected(); + a.push(statement()); + } + next(); + return a; + }; + + function switch_body_() { + expect("{"); + var a = [], cur = null, branch = null, tmp; + while (!is("punc", "}")) { + if (is("eof")) unexpected(); + if (is("keyword", "case")) { + if (branch) branch.end = prev(); + cur = []; + branch = new AST_Case({ + start : (tmp = S.token, next(), tmp), + expression : expression(true), + body : cur + }); + a.push(branch); + expect(":"); + } + else if (is("keyword", "default")) { + if (branch) branch.end = prev(); + cur = []; + branch = new AST_Default({ + start : (tmp = S.token, next(), expect(":"), tmp), + body : cur + }); + a.push(branch); + } + else { + if (!cur) unexpected(); + cur.push(statement()); + } + } + if (branch) branch.end = prev(); + next(); + return a; + }; + + function try_() { + var body = block_(), bcatch = null, bfinally = null; + if (is("keyword", "catch")) { + var start = S.token; + next(); + expect("("); + var name = as_symbol(AST_SymbolCatch); + expect(")"); + bcatch = new AST_Catch({ + start : start, + argname : name, + body : block_(), + end : prev() + }); + } + if (is("keyword", "finally")) { + var start = S.token; + next(); + bfinally = new AST_Finally({ + start : start, + body : block_(), + end : prev() + }); + } + if (!bcatch && !bfinally) + croak("SyntaxError: Missing catch/finally blocks"); + return new AST_Try({ + body : body, + bcatch : bcatch, + bfinally : bfinally + }); + }; + + function vardefs(no_in, in_const) { + var a = []; + for (;;) { + a.push(new AST_VarDef({ + start : S.token, + name : as_symbol(in_const ? AST_SymbolConst : AST_SymbolVar), + value : is("operator", "=") ? 
(next(), expression(false, no_in)) : null, + end : prev() + })); + if (!is("punc", ",")) + break; + next(); + } + return a; + }; + + var var_ = function(no_in) { + return new AST_Var({ + start : prev(), + definitions : vardefs(no_in, false), + end : prev() + }); + }; + + var const_ = function() { + return new AST_Const({ + start : prev(), + definitions : vardefs(false, true), + end : prev() + }); + }; + + var new_ = function(allow_calls) { + var start = S.token; + expect_token("operator", "new"); + var newexp = expr_atom(false), args; + if (is("punc", "(")) { + next(); + args = expr_list(")"); + } else { + args = []; + } + return subscripts(new AST_New({ + start : start, + expression : newexp, + args : args, + end : prev() + }), allow_calls); + }; + + function as_atom_node() { + var tok = S.token, ret; + switch (tok.type) { + case "name": + case "keyword": + ret = _make_symbol(AST_SymbolRef); + break; + case "num": + ret = new AST_Number({ start: tok, end: tok, value: tok.value }); + break; + case "string": + ret = new AST_String({ + start : tok, + end : tok, + value : tok.value, + quote : tok.quote + }); + break; + case "regexp": + ret = new AST_RegExp({ start: tok, end: tok, value: tok.value }); + break; + case "atom": + switch (tok.value) { + case "false": + ret = new AST_False({ start: tok, end: tok }); + break; + case "true": + ret = new AST_True({ start: tok, end: tok }); + break; + case "null": + ret = new AST_Null({ start: tok, end: tok }); + break; + } + break; + case "operator": + if (!is_identifier_string(tok.value)) { + croak("SyntaxError: Invalid getter/setter name: " + tok.value, + tok.line, tok.col, tok.pos); + } + ret = _make_symbol(AST_SymbolRef); + break; + } + next(); + return ret; + }; + + var expr_atom = function(allow_calls) { + if (is("operator", "new")) { + return new_(allow_calls); + } + var start = S.token; + if (is("punc")) { + switch (start.value) { + case "(": + next(); + var ex = expression(true); + ex.start = start; + ex.end = S.token; + expect(")"); + return subscripts(ex, allow_calls); + case "[": + return subscripts(array_(), allow_calls); + case "{": + return subscripts(object_(), allow_calls); + } + unexpected(); + } + if (is("keyword", "function")) { + next(); + var func = function_(AST_Function); + func.start = start; + func.end = prev(); + return subscripts(func, allow_calls); + } + if (ATOMIC_START_TOKEN[S.token.type]) { + return subscripts(as_atom_node(), allow_calls); + } + unexpected(); + }; + + function expr_list(closing, allow_trailing_comma, allow_empty) { + var first = true, a = []; + while (!is("punc", closing)) { + if (first) first = false; else expect(","); + if (allow_trailing_comma && is("punc", closing)) break; + if (is("punc", ",") && allow_empty) { + a.push(new AST_Hole({ start: S.token, end: S.token })); + } else { + a.push(expression(false)); + } + } + next(); + return a; + }; + + var array_ = embed_tokens(function() { + expect("["); + return new AST_Array({ + elements: expr_list("]", !options.strict, true) + }); + }); + + var object_ = embed_tokens(function() { + expect("{"); + var first = true, a = []; + while (!is("punc", "}")) { + if (first) first = false; else expect(","); + if (!options.strict && is("punc", "}")) + // allow trailing comma + break; + var start = S.token; + var type = start.type; + var name = as_property_name(); + if (type == "name" && !is("punc", ":")) { + if (name == "get") { + a.push(new AST_ObjectGetter({ + start : start, + key : as_atom_node(), + value : function_(AST_Accessor), + end : prev() + })); + 
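                // Illustrative input for this branch (names are examples only):
                //     ({ get size() { return this._n }, set size(v) { this._n = v } })
                // a bare `get`/`set` name token that is not followed by ":" lands
                // here, producing AST_ObjectGetter / AST_ObjectSetter nodes whose
                // value is an AST_Accessor parsed by function_().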
continue; + } + if (name == "set") { + a.push(new AST_ObjectSetter({ + start : start, + key : as_atom_node(), + value : function_(AST_Accessor), + end : prev() + })); + continue; + } + } + expect(":"); + a.push(new AST_ObjectKeyVal({ + start : start, + quote : start.quote, + key : name, + value : expression(false), + end : prev() + })); + } + next(); + return new AST_Object({ properties: a }); + }); + + function as_property_name() { + var tmp = S.token; + next(); + switch (tmp.type) { + case "num": + case "string": + case "name": + case "operator": + case "keyword": + case "atom": + return tmp.value; + default: + unexpected(); + } + }; + + function as_name() { + var tmp = S.token; + next(); + switch (tmp.type) { + case "name": + case "operator": + case "keyword": + case "atom": + return tmp.value; + default: + unexpected(); + } + }; + + function _make_symbol(type) { + var name = S.token.value; + return new (name == "this" ? AST_This : type)({ + name : String(name), + start : S.token, + end : S.token + }); + }; + + function as_symbol(type, noerror) { + if (!is("name")) { + if (!noerror) croak("SyntaxError: Name expected"); + return null; + } + var sym = _make_symbol(type); + next(); + return sym; + }; + + var subscripts = function(expr, allow_calls) { + var start = expr.start; + if (is("punc", ".")) { + next(); + return subscripts(new AST_Dot({ + start : start, + expression : expr, + property : as_name(), + end : prev() + }), allow_calls); + } + if (is("punc", "[")) { + next(); + var prop = expression(true); + expect("]"); + return subscripts(new AST_Sub({ + start : start, + expression : expr, + property : prop, + end : prev() + }), allow_calls); + } + if (allow_calls && is("punc", "(")) { + next(); + return subscripts(new AST_Call({ + start : start, + expression : expr, + args : expr_list(")"), + end : prev() + }), true); + } + return expr; + }; + + var maybe_unary = function(allow_calls) { + var start = S.token; + if (is("operator") && UNARY_PREFIX(start.value)) { + next(); + handle_regexp(); + var ex = make_unary(AST_UnaryPrefix, start.value, maybe_unary(allow_calls)); + ex.start = start; + ex.end = prev(); + return ex; + } + var val = expr_atom(allow_calls); + while (is("operator") && UNARY_POSTFIX(S.token.value) && !S.token.nlb) { + val = make_unary(AST_UnaryPostfix, S.token.value, val); + val.start = start; + val.end = S.token; + next(); + } + return val; + }; + + function make_unary(ctor, op, expr) { + if ((op == "++" || op == "--") && !is_assignable(expr)) + croak("SyntaxError: Invalid use of " + op + " operator"); + return new ctor({ operator: op, expression: expr }); + }; + + var expr_op = function(left, min_prec, no_in) { + var op = is("operator") ? S.token.value : null; + if (op == "in" && no_in) op = null; + var prec = op != null ? 
PRECEDENCE[op] : null; + if (prec != null && prec > min_prec) { + next(); + var right = expr_op(maybe_unary(true), prec, no_in); + return expr_op(new AST_Binary({ + start : left.start, + left : left, + operator : op, + right : right, + end : right.end + }), min_prec, no_in); + } + return left; + }; + + function expr_ops(no_in) { + return expr_op(maybe_unary(true), 0, no_in); + }; + + var maybe_conditional = function(no_in) { + var start = S.token; + var expr = expr_ops(no_in); + if (is("operator", "?")) { + next(); + var yes = expression(false); + expect(":"); + return new AST_Conditional({ + start : start, + condition : expr, + consequent : yes, + alternative : expression(false, no_in), + end : prev() + }); + } + return expr; + }; + + function is_assignable(expr) { + if (!options.strict) return true; + if (expr instanceof AST_This) return false; + return (expr instanceof AST_PropAccess || expr instanceof AST_Symbol); + }; + + var maybe_assign = function(no_in) { + var start = S.token; + var left = maybe_conditional(no_in), val = S.token.value; + if (is("operator") && ASSIGNMENT(val)) { + if (is_assignable(left)) { + next(); + return new AST_Assign({ + start : start, + left : left, + operator : val, + right : maybe_assign(no_in), + end : prev() + }); + } + croak("SyntaxError: Invalid assignment"); + } + return left; + }; + + var expression = function(commas, no_in) { + var start = S.token; + var expr = maybe_assign(no_in); + if (commas && is("punc", ",")) { + next(); + return new AST_Seq({ + start : start, + car : expr, + cdr : expression(true, no_in), + end : peek() + }); + } + return expr; + }; + + function in_loop(cont) { + ++S.in_loop; + var ret = cont(); + --S.in_loop; + return ret; + }; + + if (options.expression) { + return expression(true); + } + + return (function(){ + var start = S.token; + var body = []; + S.input.push_directives_stack(); + while (!is("eof")) + body.push(statement()); + S.input.pop_directives_stack(); + var end = prev(); + var toplevel = options.toplevel; + if (toplevel) { + toplevel.body = toplevel.body.concat(body); + toplevel.end = end; + } else { + toplevel = new AST_Toplevel({ start: start, body: body, end: end }); + } + return toplevel; + })(); + +}; diff --git a/node_modules/uglify-js/lib/propmangle.js b/node_modules/uglify-js/lib/propmangle.js new file mode 100644 index 0000000..08043d7 --- /dev/null +++ b/node_modules/uglify-js/lib/propmangle.js @@ -0,0 +1,229 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. 
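   (Illustrative use of the parse() function defined just above; the source
   string and filename are examples only, and the AST_* constructors from
   ast.js are assumed to be in scope:

       var toplevel = parse("var answer = 6 * 7;", { filename: "demo.js" });
       // toplevel is an AST_Toplevel whose body holds a single AST_Var

   parse() also accepts an already-created tokenizer in place of the source
   string.)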
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +function find_builtins() { + var a = []; + [ Object, Array, Function, Number, + String, Boolean, Error, Math, + Date, RegExp + ].forEach(function(ctor){ + Object.getOwnPropertyNames(ctor).map(add); + if (ctor.prototype) { + Object.getOwnPropertyNames(ctor.prototype).map(add); + } + }); + function add(name) { + push_uniq(a, name); + } + return a; +} + +function mangle_properties(ast, options) { + options = defaults(options, { + reserved : null, + cache : null, + only_cache : false, + regex : null, + ignore_quoted : false + }); + + var reserved = options.reserved; + if (reserved == null) + reserved = find_builtins(); + + var cache = options.cache; + if (cache == null) { + cache = { + cname: -1, + props: new Dictionary() + }; + } + + var regex = options.regex; + var ignore_quoted = options.ignore_quoted; + + var names_to_mangle = []; + var unmangleable = []; + + // step 1: find candidates to mangle + ast.walk(new TreeWalker(function(node){ + if (node instanceof AST_ObjectKeyVal) { + if (!(ignore_quoted && node.quote)) + add(node.key); + } + else if (node instanceof AST_ObjectProperty) { + // setter or getter, since KeyVal is handled above + add(node.key.name); + } + else if (node instanceof AST_Dot) { + if (this.parent() instanceof AST_Assign) { + add(node.property); + } + } + else if (node instanceof AST_Sub) { + if (this.parent() instanceof AST_Assign) { + if (!ignore_quoted) + addStrings(node.property); + } + } + })); + + // step 2: transform the tree, renaming properties + return ast.transform(new TreeTransformer(function(node){ + if (node instanceof AST_ObjectKeyVal) { + if (!(ignore_quoted && node.quote)) + node.key = mangle(node.key); + } + else if (node instanceof AST_ObjectProperty) { + // setter or getter + node.key.name = mangle(node.key.name); + } + else if (node instanceof AST_Dot) { + node.property = mangle(node.property); + } + else if (node instanceof AST_Sub) { + if (!ignore_quoted) + node.property = mangleStrings(node.property); + } + // else if (node instanceof AST_String) { + // if (should_mangle(node.value)) { + // AST_Node.warn( + // "Found \"{prop}\" property candidate for mangling in an arbitrary string [{file}:{line},{col}]", { + // file : node.start.file, + // line : node.start.line, + // col : node.start.col, + // prop : node.value + // } + // ); + // } + // } + })); + + // only function declarations after this line + + function can_mangle(name) { + if (unmangleable.indexOf(name) >= 0) return false; + if (reserved.indexOf(name) >= 0) return false; + if (options.only_cache) { + return cache.props.has(name); + } + if (/^[0-9.]+$/.test(name)) return false; + return true; + } + + function should_mangle(name) { + if (regex && 
!regex.test(name)) return false; + if (reserved.indexOf(name) >= 0) return false; + return cache.props.has(name) + || names_to_mangle.indexOf(name) >= 0; + } + + function add(name) { + if (can_mangle(name)) + push_uniq(names_to_mangle, name); + + if (!should_mangle(name)) { + push_uniq(unmangleable, name); + } + } + + function mangle(name) { + if (!should_mangle(name)) { + return name; + } + + var mangled = cache.props.get(name); + if (!mangled) { + do { + mangled = base54(++cache.cname); + } while (!can_mangle(mangled)); + cache.props.set(name, mangled); + } + return mangled; + } + + function addStrings(node) { + var out = {}; + try { + (function walk(node){ + node.walk(new TreeWalker(function(node){ + if (node instanceof AST_Seq) { + walk(node.cdr); + return true; + } + if (node instanceof AST_String) { + add(node.value); + return true; + } + if (node instanceof AST_Conditional) { + walk(node.consequent); + walk(node.alternative); + return true; + } + throw out; + })); + })(node); + } catch(ex) { + if (ex !== out) throw ex; + } + } + + function mangleStrings(node) { + return node.transform(new TreeTransformer(function(node){ + if (node instanceof AST_Seq) { + node.cdr = mangleStrings(node.cdr); + } + else if (node instanceof AST_String) { + node.value = mangle(node.value); + } + else if (node instanceof AST_Conditional) { + node.consequent = mangleStrings(node.consequent); + node.alternative = mangleStrings(node.alternative); + } + return node; + })); + } + +} diff --git a/node_modules/uglify-js/lib/scope.js b/node_modules/uglify-js/lib/scope.js new file mode 100644 index 0000000..d07eca1 --- /dev/null +++ b/node_modules/uglify-js/lib/scope.js @@ -0,0 +1,636 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. 
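   (Illustrative call of mangle_properties() defined above; the regex and the
   reserved name are examples only:

       ast = mangle_properties(ast, {
           regex         : /^_/,      // only rename properties starting with "_"
           reserved      : ["_keep"], // but never this one
           ignore_quoted : true       // leave quoted keys and obj["..."] alone
       });

   When no cache is supplied, a fresh property-name cache is created per call.)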
+ + ***********************************************************************/ + +"use strict"; + +function SymbolDef(scope, index, orig) { + this.name = orig.name; + this.orig = [ orig ]; + this.scope = scope; + this.references = []; + this.global = false; + this.mangled_name = null; + this.undeclared = false; + this.constant = false; + this.index = index; + this.id = SymbolDef.next_id++; +}; + +SymbolDef.next_id = 1; + +SymbolDef.prototype = { + unmangleable: function(options) { + if (!options) options = {}; + + return (this.global && !options.toplevel) + || this.undeclared + || (!options.eval && (this.scope.uses_eval || this.scope.uses_with)) + || (options.keep_fnames + && (this.orig[0] instanceof AST_SymbolLambda + || this.orig[0] instanceof AST_SymbolDefun)); + }, + mangle: function(options) { + var cache = options.cache && options.cache.props; + if (this.global && cache && cache.has(this.name)) { + this.mangled_name = cache.get(this.name); + } + else if (!this.mangled_name && !this.unmangleable(options)) { + var s = this.scope; + if (!options.screw_ie8 && this.orig[0] instanceof AST_SymbolLambda) + s = s.parent_scope; + this.mangled_name = s.next_mangled(options, this); + if (this.global && cache) { + cache.set(this.name, this.mangled_name); + } + } + } +}; + +AST_Toplevel.DEFMETHOD("figure_out_scope", function(options){ + options = defaults(options, { + screw_ie8: false, + cache: null + }); + + // pass 1: setup scope chaining and handle definitions + var self = this; + var scope = self.parent_scope = null; + var labels = new Dictionary(); + var defun = null; + var last_var_had_const_pragma = false; + var nesting = 0; + var tw = new TreeWalker(function(node, descend){ + if (options.screw_ie8 && node instanceof AST_Catch) { + var save_scope = scope; + scope = new AST_Scope(node); + scope.init_scope_vars(nesting); + scope.parent_scope = save_scope; + descend(); + scope = save_scope; + return true; + } + if (node instanceof AST_Scope) { + node.init_scope_vars(nesting); + var save_scope = node.parent_scope = scope; + var save_defun = defun; + var save_labels = labels; + defun = scope = node; + labels = new Dictionary(); + ++nesting; descend(); --nesting; + scope = save_scope; + defun = save_defun; + labels = save_labels; + return true; // don't descend again in TreeWalker + } + if (node instanceof AST_LabeledStatement) { + var l = node.label; + if (labels.has(l.name)) { + throw new Error(string_template("Label {name} defined twice", l)); + } + labels.set(l.name, l); + descend(); + labels.del(l.name); + return true; // no descend again + } + if (node instanceof AST_With) { + for (var s = scope; s; s = s.parent_scope) + s.uses_with = true; + return; + } + if (node instanceof AST_Symbol) { + node.scope = scope; + } + if (node instanceof AST_Label) { + node.thedef = node; + node.references = []; + } + if (node instanceof AST_SymbolLambda) { + defun.def_function(node); + } + else if (node instanceof AST_SymbolDefun) { + // Careful here, the scope where this should be defined is + // the parent scope. The reason is that we enter a new + // scope when we encounter the AST_Defun node (which is + // instanceof AST_Scope) but we get to the symbol a bit + // later. 
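        // For instance (illustrative), in
        //     function outer() { function inner() {} }
        // the walker has already entered `inner`'s own scope by the time it
        // reaches the AST_SymbolDefun for `inner`, so the definition is
        // recorded on defun.parent_scope (`outer`), which is where the
        // declared name is actually bound.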
+ (node.scope = defun.parent_scope).def_function(node); + } + else if (node instanceof AST_Var) { + last_var_had_const_pragma = node.has_const_pragma(); + } + else if (node instanceof AST_SymbolVar + || node instanceof AST_SymbolConst) { + var def = defun.def_variable(node); + def.constant = node instanceof AST_SymbolConst || last_var_had_const_pragma; + def.init = tw.parent().value; + } + else if (node instanceof AST_SymbolCatch) { + (options.screw_ie8 ? scope : defun) + .def_variable(node); + } + else if (node instanceof AST_LabelRef) { + var sym = labels.get(node.name); + if (!sym) throw new Error(string_template("Undefined label {name} [{line},{col}]", { + name: node.name, + line: node.start.line, + col: node.start.col + })); + node.thedef = sym; + } + }); + self.walk(tw); + + // pass 2: find back references and eval + var func = null; + var globals = self.globals = new Dictionary(); + var tw = new TreeWalker(function(node, descend){ + if (node instanceof AST_Lambda) { + var prev_func = func; + func = node; + descend(); + func = prev_func; + return true; + } + if (node instanceof AST_LoopControl && node.label) { + node.label.thedef.references.push(node); + return true; + } + if (node instanceof AST_SymbolRef) { + var name = node.name; + if (name == "eval" && tw.parent() instanceof AST_Call) { + for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope) { + s.uses_eval = true; + } + } + var sym = node.scope.find_variable(name); + if (!sym) { + var g; + if (globals.has(name)) { + g = globals.get(name); + } else { + g = new SymbolDef(self, globals.size(), node); + g.undeclared = true; + g.global = true; + globals.set(name, g); + } + node.thedef = g; + if (func && name == "arguments") { + func.uses_arguments = true; + } + } else { + node.thedef = sym; + } + node.reference(); + return true; + } + }); + self.walk(tw); + + if (options.cache) { + this.cname = options.cache.cname; + } +}); + +AST_Scope.DEFMETHOD("init_scope_vars", function(nesting){ + this.variables = new Dictionary(); // map name to AST_SymbolVar (variables defined in this scope; includes functions) + this.functions = new Dictionary(); // map name to AST_SymbolDefun (functions defined in this scope) + this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement + this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval` + this.parent_scope = null; // the parent scope + this.enclosed = []; // a list of variables from this or outer scope(s) that are referenced from this or inner scopes + this.cname = -1; // the current index for mangling functions/variables + this.nesting = nesting; // the nesting level of this scope (0 means toplevel) +}); + +AST_Lambda.DEFMETHOD("init_scope_vars", function(){ + AST_Scope.prototype.init_scope_vars.apply(this, arguments); + this.uses_arguments = false; + + var symbol = new AST_VarDef({ name: "arguments", start: this.start, end: this.end }); + var def = new SymbolDef(this, this.variables.size(), symbol); + this.variables.set(symbol.name, def); +}); + +AST_SymbolRef.DEFMETHOD("reference", function() { + var def = this.definition(); + def.references.push(this); + var s = this.scope; + while (s) { + push_uniq(s.enclosed, def); + if (s === def.scope) break; + s = s.parent_scope; + } + this.frame = this.scope.nesting - def.scope.nesting; +}); + +AST_Scope.DEFMETHOD("find_variable", function(name){ + if (name instanceof AST_Symbol) name = name.name; + return this.variables.get(name) + || (this.parent_scope && 
this.parent_scope.find_variable(name)); +}); + +AST_Scope.DEFMETHOD("def_function", function(symbol){ + this.functions.set(symbol.name, this.def_variable(symbol)); +}); + +AST_Scope.DEFMETHOD("def_variable", function(symbol){ + var def; + if (!this.variables.has(symbol.name)) { + def = new SymbolDef(this, this.variables.size(), symbol); + this.variables.set(symbol.name, def); + def.global = !this.parent_scope; + } else { + def = this.variables.get(symbol.name); + def.orig.push(symbol); + } + return symbol.thedef = def; +}); + +AST_Scope.DEFMETHOD("next_mangled", function(options){ + var ext = this.enclosed; + out: while (true) { + var m = base54(++this.cname); + if (!is_identifier(m)) continue; // skip over "do" + + // https://github.com/mishoo/UglifyJS2/issues/242 -- do not + // shadow a name excepted from mangling. + if (options.except.indexOf(m) >= 0) continue; + + // we must ensure that the mangled name does not shadow a name + // from some parent scope that is referenced in this or in + // inner scopes. + for (var i = ext.length; --i >= 0;) { + var sym = ext[i]; + var name = sym.mangled_name || (sym.unmangleable(options) && sym.name); + if (m == name) continue out; + } + return m; + } +}); + +AST_Function.DEFMETHOD("next_mangled", function(options, def){ + // #179, #326 + // in Safari strict mode, something like (function x(x){...}) is a syntax error; + // a function expression's argument cannot shadow the function expression's name + + var tricky_def = def.orig[0] instanceof AST_SymbolFunarg && this.name && this.name.definition(); + while (true) { + var name = AST_Lambda.prototype.next_mangled.call(this, options, def); + if (!(tricky_def && tricky_def.mangled_name == name)) + return name; + } +}); + +AST_Scope.DEFMETHOD("references", function(sym){ + if (sym instanceof AST_Symbol) sym = sym.definition(); + return this.enclosed.indexOf(sym) < 0 ? null : sym; +}); + +AST_Symbol.DEFMETHOD("unmangleable", function(options){ + return this.definition().unmangleable(options); +}); + +// property accessors are not mangleable +AST_SymbolAccessor.DEFMETHOD("unmangleable", function(){ + return true; +}); + +// labels are always mangleable +AST_Label.DEFMETHOD("unmangleable", function(){ + return false; +}); + +AST_Symbol.DEFMETHOD("unreferenced", function(){ + return this.definition().references.length == 0 + && !(this.scope.uses_eval || this.scope.uses_with); +}); + +AST_Symbol.DEFMETHOD("undeclared", function(){ + return this.definition().undeclared; +}); + +AST_LabelRef.DEFMETHOD("undeclared", function(){ + return false; +}); + +AST_Label.DEFMETHOD("undeclared", function(){ + return false; +}); + +AST_Symbol.DEFMETHOD("definition", function(){ + return this.thedef; +}); + +AST_Symbol.DEFMETHOD("global", function(){ + return this.definition().global; +}); + +AST_Var.DEFMETHOD("has_const_pragma", function() { + var comments_before = this.start && this.start.comments_before; + var lastComment = comments_before && comments_before[comments_before.length - 1]; + return lastComment && /@const\b/.test(lastComment.value); +}); + +AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options){ + return defaults(options, { + except : [], + eval : false, + sort : false, // Ignored. Flag retained for backwards compatibility. 
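        // (Illustrative call, not part of these defaults:)
        //     toplevel.mangle_names({ except: ["$", "require"], screw_ie8: true });
        // names listed in `except` are skipped when symbols are collected for
        // mangling, and "arguments" is always appended to that list.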
+ toplevel : false, + screw_ie8 : false, + keep_fnames : false + }); +}); + +AST_Toplevel.DEFMETHOD("mangle_names", function(options){ + options = this._default_mangler_options(options); + + // Never mangle arguments + options.except.push('arguments'); + + // We only need to mangle declaration nodes. Special logic wired + // into the code generator will display the mangled name if it's + // present (and for AST_SymbolRef-s it'll use the mangled name of + // the AST_SymbolDeclaration that it points to). + var lname = -1; + var to_mangle = []; + + if (options.cache) { + this.globals.each(function(symbol){ + if (options.except.indexOf(symbol.name) < 0) { + to_mangle.push(symbol); + } + }); + } + + var tw = new TreeWalker(function(node, descend){ + if (node instanceof AST_LabeledStatement) { + // lname is incremented when we get to the AST_Label + var save_nesting = lname; + descend(); + lname = save_nesting; + return true; // don't descend again in TreeWalker + } + if (node instanceof AST_Scope) { + var p = tw.parent(), a = []; + node.variables.each(function(symbol){ + if (options.except.indexOf(symbol.name) < 0) { + a.push(symbol); + } + }); + to_mangle.push.apply(to_mangle, a); + return; + } + if (node instanceof AST_Label) { + var name; + do name = base54(++lname); while (!is_identifier(name)); + node.mangled_name = name; + return true; + } + if (options.screw_ie8 && node instanceof AST_SymbolCatch) { + to_mangle.push(node.definition()); + return; + } + }); + this.walk(tw); + to_mangle.forEach(function(def){ def.mangle(options) }); + + if (options.cache) { + options.cache.cname = this.cname; + } +}); + +AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options){ + options = this._default_mangler_options(options); + var tw = new TreeWalker(function(node){ + if (node instanceof AST_Constant) + base54.consider(node.print_to_string()); + else if (node instanceof AST_Return) + base54.consider("return"); + else if (node instanceof AST_Throw) + base54.consider("throw"); + else if (node instanceof AST_Continue) + base54.consider("continue"); + else if (node instanceof AST_Break) + base54.consider("break"); + else if (node instanceof AST_Debugger) + base54.consider("debugger"); + else if (node instanceof AST_Directive) + base54.consider(node.value); + else if (node instanceof AST_While) + base54.consider("while"); + else if (node instanceof AST_Do) + base54.consider("do while"); + else if (node instanceof AST_If) { + base54.consider("if"); + if (node.alternative) base54.consider("else"); + } + else if (node instanceof AST_Var) + base54.consider("var"); + else if (node instanceof AST_Const) + base54.consider("const"); + else if (node instanceof AST_Lambda) + base54.consider("function"); + else if (node instanceof AST_For) + base54.consider("for"); + else if (node instanceof AST_ForIn) + base54.consider("for in"); + else if (node instanceof AST_Switch) + base54.consider("switch"); + else if (node instanceof AST_Case) + base54.consider("case"); + else if (node instanceof AST_Default) + base54.consider("default"); + else if (node instanceof AST_With) + base54.consider("with"); + else if (node instanceof AST_ObjectSetter) + base54.consider("set" + node.key); + else if (node instanceof AST_ObjectGetter) + base54.consider("get" + node.key); + else if (node instanceof AST_ObjectKeyVal) + base54.consider(node.key); + else if (node instanceof AST_New) + base54.consider("new"); + else if (node instanceof AST_This) + base54.consider("this"); + else if (node instanceof AST_Try) + base54.consider("try"); 
+ else if (node instanceof AST_Catch) + base54.consider("catch"); + else if (node instanceof AST_Finally) + base54.consider("finally"); + else if (node instanceof AST_Symbol && node.unmangleable(options)) + base54.consider(node.name); + else if (node instanceof AST_Unary || node instanceof AST_Binary) + base54.consider(node.operator); + else if (node instanceof AST_Dot) + base54.consider(node.property); + }); + this.walk(tw); + base54.sort(); +}); + +var base54 = (function() { + var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789"; + var chars, frequency; + function reset() { + frequency = Object.create(null); + chars = string.split("").map(function(ch){ return ch.charCodeAt(0) }); + chars.forEach(function(ch){ frequency[ch] = 0 }); + } + base54.consider = function(str){ + for (var i = str.length; --i >= 0;) { + var code = str.charCodeAt(i); + if (code in frequency) ++frequency[code]; + } + }; + base54.sort = function() { + chars = mergeSort(chars, function(a, b){ + if (is_digit(a) && !is_digit(b)) return 1; + if (is_digit(b) && !is_digit(a)) return -1; + return frequency[b] - frequency[a]; + }); + }; + base54.reset = reset; + reset(); + base54.get = function(){ return chars }; + base54.freq = function(){ return frequency }; + function base54(num) { + var ret = "", base = 54; + num++; + do { + num--; + ret += String.fromCharCode(chars[num % base]); + num = Math.floor(num / base); + base = 64; + } while (num > 0); + return ret; + }; + return base54; +})(); + +AST_Toplevel.DEFMETHOD("scope_warnings", function(options){ + options = defaults(options, { + undeclared : false, // this makes a lot of noise + unreferenced : true, + assign_to_global : true, + func_arguments : true, + nested_defuns : true, + eval : true + }); + var tw = new TreeWalker(function(node){ + if (options.undeclared + && node instanceof AST_SymbolRef + && node.undeclared()) + { + // XXX: this also warns about JS standard names, + // i.e. Object, Array, parseInt etc. Should add a list of + // exceptions. + AST_Node.warn("Undeclared symbol: {name} [{file}:{line},{col}]", { + name: node.name, + file: node.start.file, + line: node.start.line, + col: node.start.col + }); + } + if (options.assign_to_global) + { + var sym = null; + if (node instanceof AST_Assign && node.left instanceof AST_SymbolRef) + sym = node.left; + else if (node instanceof AST_ForIn && node.init instanceof AST_SymbolRef) + sym = node.init; + if (sym + && (sym.undeclared() + || (sym.global() && sym.scope !== sym.definition().scope))) { + AST_Node.warn("{msg}: {name} [{file}:{line},{col}]", { + msg: sym.undeclared() ? "Accidental global?" : "Assignment to global", + name: sym.name, + file: sym.start.file, + line: sym.start.line, + col: sym.start.col + }); + } + } + if (options.eval + && node instanceof AST_SymbolRef + && node.undeclared() + && node.name == "eval") { + AST_Node.warn("Eval is used [{file}:{line},{col}]", node.start); + } + if (options.unreferenced + && (node instanceof AST_SymbolDeclaration || node instanceof AST_Label) + && !(node instanceof AST_SymbolCatch) + && node.unreferenced()) { + AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", { + type: node instanceof AST_Label ? "Label" : "Symbol", + name: node.name, + file: node.start.file, + line: node.start.line, + col: node.start.col + }); + } + if (options.func_arguments + && node instanceof AST_Lambda + && node.uses_arguments) { + AST_Node.warn("arguments used in function {name} [{file}:{line},{col}]", { + name: node.name ? 
node.name.name : "anonymous", + file: node.start.file, + line: node.start.line, + col: node.start.col + }); + } + if (options.nested_defuns + && node instanceof AST_Defun + && !(tw.parent() instanceof AST_Scope)) { + AST_Node.warn("Function {name} declared in nested statement \"{type}\" [{file}:{line},{col}]", { + name: node.name.name, + type: tw.parent().TYPE, + file: node.start.file, + line: node.start.line, + col: node.start.col + }); + } + }); + this.walk(tw); +}); diff --git a/node_modules/uglify-js/lib/sourcemap.js b/node_modules/uglify-js/lib/sourcemap.js new file mode 100644 index 0000000..e5d7df6 --- /dev/null +++ b/node_modules/uglify-js/lib/sourcemap.js @@ -0,0 +1,87 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. 
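   (An illustrative sketch of how the scope pass above is normally driven;
   this is not code contained in this diff:

       toplevel.figure_out_scope({ screw_ie8: true });
       toplevel.compute_char_frequency();
       toplevel.mangle_names({ except: ["define"] });
       toplevel.scope_warnings({ undeclared: true });

   figure_out_scope() must run first: the mangler, the frequency counter and
   scope_warnings() all rely on the SymbolDef records it attaches.)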
+ + ***********************************************************************/ + +"use strict"; + +// a small wrapper around fitzgen's source-map library +function SourceMap(options) { + options = defaults(options, { + file : null, + root : null, + orig : null, + + orig_line_diff : 0, + dest_line_diff : 0, + }); + var generator = new MOZ_SourceMap.SourceMapGenerator({ + file : options.file, + sourceRoot : options.root + }); + var orig_map = options.orig && new MOZ_SourceMap.SourceMapConsumer(options.orig); + function add(source, gen_line, gen_col, orig_line, orig_col, name) { + if (orig_map) { + var info = orig_map.originalPositionFor({ + line: orig_line, + column: orig_col + }); + if (info.source === null) { + return; + } + source = info.source; + orig_line = info.line; + orig_col = info.column; + name = info.name || name; + } + generator.addMapping({ + generated : { line: gen_line + options.dest_line_diff, column: gen_col }, + original : { line: orig_line + options.orig_line_diff, column: orig_col }, + source : source, + name : name + }); + }; + return { + add : add, + get : function() { return generator }, + toString : function() { return JSON.stringify(generator.toJSON()); } + }; +}; diff --git a/node_modules/uglify-js/lib/transform.js b/node_modules/uglify-js/lib/transform.js new file mode 100644 index 0000000..3018e8f --- /dev/null +++ b/node_modules/uglify-js/lib/transform.js @@ -0,0 +1,218 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. + + ***********************************************************************/ + +"use strict"; + +// Tree transformer helpers. 
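// (Illustrative use of the SourceMap wrapper from sourcemap.js above; it
// assumes the "source-map" package is loaded as MOZ_SourceMap, and the file
// names are examples only:)
//
//     var map = SourceMap({ file: "bundle.min.js", root: "", orig: null });
//     map.add("src/app.js", 1, 0, 10, 4, "handler");
//     var json = map.toString();   // JSON text of the generated map
//
// add() takes (source, gen_line, gen_col, orig_line, orig_col, name); when an
// input map is passed via `orig`, positions are first re-resolved through that
// map before the mapping is emitted.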
+ +function TreeTransformer(before, after) { + TreeWalker.call(this); + this.before = before; + this.after = after; +} +TreeTransformer.prototype = new TreeWalker; + +(function(undefined){ + + function _(node, descend) { + node.DEFMETHOD("transform", function(tw, in_list){ + var x, y; + tw.push(this); + if (tw.before) x = tw.before(this, descend, in_list); + if (x === undefined) { + if (!tw.after) { + x = this; + descend(x, tw); + } else { + tw.stack[tw.stack.length - 1] = x = this; + descend(x, tw); + y = tw.after(x, in_list); + if (y !== undefined) x = y; + } + } + tw.pop(this); + return x; + }); + }; + + function do_list(list, tw) { + return MAP(list, function(node){ + return node.transform(tw, true); + }); + }; + + _(AST_Node, noop); + + _(AST_LabeledStatement, function(self, tw){ + self.label = self.label.transform(tw); + self.body = self.body.transform(tw); + }); + + _(AST_SimpleStatement, function(self, tw){ + self.body = self.body.transform(tw); + }); + + _(AST_Block, function(self, tw){ + self.body = do_list(self.body, tw); + }); + + _(AST_DWLoop, function(self, tw){ + self.condition = self.condition.transform(tw); + self.body = self.body.transform(tw); + }); + + _(AST_For, function(self, tw){ + if (self.init) self.init = self.init.transform(tw); + if (self.condition) self.condition = self.condition.transform(tw); + if (self.step) self.step = self.step.transform(tw); + self.body = self.body.transform(tw); + }); + + _(AST_ForIn, function(self, tw){ + self.init = self.init.transform(tw); + self.object = self.object.transform(tw); + self.body = self.body.transform(tw); + }); + + _(AST_With, function(self, tw){ + self.expression = self.expression.transform(tw); + self.body = self.body.transform(tw); + }); + + _(AST_Exit, function(self, tw){ + if (self.value) self.value = self.value.transform(tw); + }); + + _(AST_LoopControl, function(self, tw){ + if (self.label) self.label = self.label.transform(tw); + }); + + _(AST_If, function(self, tw){ + self.condition = self.condition.transform(tw); + self.body = self.body.transform(tw); + if (self.alternative) self.alternative = self.alternative.transform(tw); + }); + + _(AST_Switch, function(self, tw){ + self.expression = self.expression.transform(tw); + self.body = do_list(self.body, tw); + }); + + _(AST_Case, function(self, tw){ + self.expression = self.expression.transform(tw); + self.body = do_list(self.body, tw); + }); + + _(AST_Try, function(self, tw){ + self.body = do_list(self.body, tw); + if (self.bcatch) self.bcatch = self.bcatch.transform(tw); + if (self.bfinally) self.bfinally = self.bfinally.transform(tw); + }); + + _(AST_Catch, function(self, tw){ + self.argname = self.argname.transform(tw); + self.body = do_list(self.body, tw); + }); + + _(AST_Definitions, function(self, tw){ + self.definitions = do_list(self.definitions, tw); + }); + + _(AST_VarDef, function(self, tw){ + self.name = self.name.transform(tw); + if (self.value) self.value = self.value.transform(tw); + }); + + _(AST_Lambda, function(self, tw){ + if (self.name) self.name = self.name.transform(tw); + self.argnames = do_list(self.argnames, tw); + self.body = do_list(self.body, tw); + }); + + _(AST_Call, function(self, tw){ + self.expression = self.expression.transform(tw); + self.args = do_list(self.args, tw); + }); + + _(AST_Seq, function(self, tw){ + self.car = self.car.transform(tw); + self.cdr = self.cdr.transform(tw); + }); + + _(AST_Dot, function(self, tw){ + self.expression = self.expression.transform(tw); + }); + + _(AST_Sub, function(self, tw){ + 
self.expression = self.expression.transform(tw); + self.property = self.property.transform(tw); + }); + + _(AST_Unary, function(self, tw){ + self.expression = self.expression.transform(tw); + }); + + _(AST_Binary, function(self, tw){ + self.left = self.left.transform(tw); + self.right = self.right.transform(tw); + }); + + _(AST_Conditional, function(self, tw){ + self.condition = self.condition.transform(tw); + self.consequent = self.consequent.transform(tw); + self.alternative = self.alternative.transform(tw); + }); + + _(AST_Array, function(self, tw){ + self.elements = do_list(self.elements, tw); + }); + + _(AST_Object, function(self, tw){ + self.properties = do_list(self.properties, tw); + }); + + _(AST_ObjectProperty, function(self, tw){ + self.value = self.value.transform(tw); + }); + +})(); diff --git a/node_modules/uglify-js/lib/utils.js b/node_modules/uglify-js/lib/utils.js new file mode 100644 index 0000000..78c6dbf --- /dev/null +++ b/node_modules/uglify-js/lib/utils.js @@ -0,0 +1,311 @@ +/*********************************************************************** + + A JavaScript tokenizer / parser / beautifier / compressor. + https://github.com/mishoo/UglifyJS2 + + -------------------------------- (C) --------------------------------- + + Author: Mihai Bazon + + http://mihai.bazon.net/blog + + Distributed under the BSD license: + + Copyright 2012 (c) Mihai Bazon + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above + copyright notice, this list of conditions and the following + disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR + TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF + THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + SUCH DAMAGE. 
+ + ***********************************************************************/ + +"use strict"; + +function array_to_hash(a) { + var ret = Object.create(null); + for (var i = 0; i < a.length; ++i) + ret[a[i]] = true; + return ret; +}; + +function slice(a, start) { + return Array.prototype.slice.call(a, start || 0); +}; + +function characters(str) { + return str.split(""); +}; + +function member(name, array) { + return array.indexOf(name) >= 0; +}; + +function find_if(func, array) { + for (var i = 0, n = array.length; i < n; ++i) { + if (func(array[i])) + return array[i]; + } +}; + +function repeat_string(str, i) { + if (i <= 0) return ""; + if (i == 1) return str; + var d = repeat_string(str, i >> 1); + d += d; + if (i & 1) d += str; + return d; +}; + +function DefaultsError(msg, defs) { + Error.call(this, msg); + this.msg = msg; + this.defs = defs; +}; +DefaultsError.prototype = Object.create(Error.prototype); +DefaultsError.prototype.constructor = DefaultsError; + +DefaultsError.croak = function(msg, defs) { + throw new DefaultsError(msg, defs); +}; + +function defaults(args, defs, croak) { + if (args === true) + args = {}; + var ret = args || {}; + if (croak) for (var i in ret) if (HOP(ret, i) && !HOP(defs, i)) + DefaultsError.croak("`" + i + "` is not a supported option", defs); + for (var i in defs) if (HOP(defs, i)) { + ret[i] = (args && HOP(args, i)) ? args[i] : defs[i]; + } + return ret; +}; + +function merge(obj, ext) { + var count = 0; + for (var i in ext) if (HOP(ext, i)) { + obj[i] = ext[i]; + count++; + } + return count; +}; + +function noop() {}; + +var MAP = (function(){ + function MAP(a, f, backwards) { + var ret = [], top = [], i; + function doit() { + var val = f(a[i], i); + var is_last = val instanceof Last; + if (is_last) val = val.v; + if (val instanceof AtTop) { + val = val.v; + if (val instanceof Splice) { + top.push.apply(top, backwards ? val.v.slice().reverse() : val.v); + } else { + top.push(val); + } + } + else if (val !== skip) { + if (val instanceof Splice) { + ret.push.apply(ret, backwards ? val.v.slice().reverse() : val.v); + } else { + ret.push(val); + } + } + return is_last; + }; + if (a instanceof Array) { + if (backwards) { + for (i = a.length; --i >= 0;) if (doit()) break; + ret.reverse(); + top.reverse(); + } else { + for (i = 0; i < a.length; ++i) if (doit()) break; + } + } + else { + for (i in a) if (HOP(a, i)) if (doit()) break; + } + return top.concat(ret); + }; + MAP.at_top = function(val) { return new AtTop(val) }; + MAP.splice = function(val) { return new Splice(val) }; + MAP.last = function(val) { return new Last(val) }; + var skip = MAP.skip = {}; + function AtTop(val) { this.v = val }; + function Splice(val) { this.v = val }; + function Last(val) { this.v = val }; + return MAP; +})(); + +function push_uniq(array, el) { + if (array.indexOf(el) < 0) + array.push(el); +}; + +function string_template(text, props) { + return text.replace(/\{(.+?)\}/g, function(str, p){ + return props[p]; + }); +}; + +function remove(array, el) { + for (var i = array.length; --i >= 0;) { + if (array[i] === el) array.splice(i, 1); + } +}; + +function mergeSort(array, cmp) { + if (array.length < 2) return array.slice(); + function merge(a, b) { + var r = [], ai = 0, bi = 0, i = 0; + while (ai < a.length && bi < b.length) { + cmp(a[ai], b[bi]) <= 0 + ? 
r[i++] = a[ai++] + : r[i++] = b[bi++]; + } + if (ai < a.length) r.push.apply(r, a.slice(ai)); + if (bi < b.length) r.push.apply(r, b.slice(bi)); + return r; + }; + function _ms(a) { + if (a.length <= 1) + return a; + var m = Math.floor(a.length / 2), left = a.slice(0, m), right = a.slice(m); + left = _ms(left); + right = _ms(right); + return merge(left, right); + }; + return _ms(array); +}; + +function set_difference(a, b) { + return a.filter(function(el){ + return b.indexOf(el) < 0; + }); +}; + +function set_intersection(a, b) { + return a.filter(function(el){ + return b.indexOf(el) >= 0; + }); +}; + +// this function is taken from Acorn [1], written by Marijn Haverbeke +// [1] https://github.com/marijnh/acorn +function makePredicate(words) { + if (!(words instanceof Array)) words = words.split(" "); + var f = "", cats = []; + out: for (var i = 0; i < words.length; ++i) { + for (var j = 0; j < cats.length; ++j) + if (cats[j][0].length == words[i].length) { + cats[j].push(words[i]); + continue out; + } + cats.push([words[i]]); + } + function compareTo(arr) { + if (arr.length == 1) return f += "return str === " + JSON.stringify(arr[0]) + ";"; + f += "switch(str){"; + for (var i = 0; i < arr.length; ++i) f += "case " + JSON.stringify(arr[i]) + ":"; + f += "return true}return false;"; + } + // When there are more than three length categories, an outer + // switch first dispatches on the lengths, to save on comparisons. + if (cats.length > 3) { + cats.sort(function(a, b) {return b.length - a.length;}); + f += "switch(str.length){"; + for (var i = 0; i < cats.length; ++i) { + var cat = cats[i]; + f += "case " + cat[0].length + ":"; + compareTo(cat); + } + f += "}"; + // Otherwise, simply generate a flat `switch` statement. + } else { + compareTo(words); + } + return new Function("str", f); +}; + +function all(array, predicate) { + for (var i = array.length; --i >= 0;) + if (!predicate(array[i])) + return false; + return true; +}; + +function Dictionary() { + this._values = Object.create(null); + this._size = 0; +}; +Dictionary.prototype = { + set: function(key, val) { + if (!this.has(key)) ++this._size; + this._values["$" + key] = val; + return this; + }, + add: function(key, val) { + if (this.has(key)) { + this.get(key).push(val); + } else { + this.set(key, [ val ]); + } + return this; + }, + get: function(key) { return this._values["$" + key] }, + del: function(key) { + if (this.has(key)) { + --this._size; + delete this._values["$" + key]; + } + return this; + }, + has: function(key) { return ("$" + key) in this._values }, + each: function(f) { + for (var i in this._values) + f(this._values[i], i.substr(1)); + }, + size: function() { + return this._size; + }, + map: function(f) { + var ret = []; + for (var i in this._values) + ret.push(f(this._values[i], i.substr(1))); + return ret; + }, + toObject: function() { return this._values } +}; +Dictionary.fromObject = function(obj) { + var dict = new Dictionary(); + dict._size = merge(dict._values, obj); + return dict; +}; + +function HOP(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} diff --git a/node_modules/uglify-js/node_modules/async/LICENSE b/node_modules/uglify-js/node_modules/async/LICENSE new file mode 100644 index 0000000..b7f9d50 --- /dev/null +++ b/node_modules/uglify-js/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in 
the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/uglify-js/node_modules/async/README.md b/node_modules/uglify-js/node_modules/async/README.md new file mode 100644 index 0000000..951f76e --- /dev/null +++ b/node_modules/uglify-js/node_modules/async/README.md @@ -0,0 +1,1425 @@ +# Async.js + +Async is a utility module which provides straight-forward, powerful functions +for working with asynchronous JavaScript. Although originally designed for +use with [node.js](http://nodejs.org), it can also be used directly in the +browser. Also supports [component](https://github.com/component/component). + +Async provides around 20 functions that include the usual 'functional' +suspects (map, reduce, filter, each…) as well as some common patterns +for asynchronous control flow (parallel, series, waterfall…). All these +functions assume you follow the node.js convention of providing a single +callback as the last argument of your async function. + + +## Quick Examples + +```javascript +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); + +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); + +async.parallel([ + function(){ ... }, + function(){ ... } +], callback); + +async.series([ + function(){ ... }, + function(){ ... } +]); +``` + +There are many more functions available so take a look at the docs below for a +full list. This module aims to be comprehensive, so if you feel anything is +missing please create a GitHub issue for it. + +## Common Pitfalls + +### Binding a context to an iterator + +This section is really about bind, not about async. If you are wondering how to +make async execute your iterators in a given context, or are confused as to why +a method of another library isn't working as an iterator, study this example: + +```js +// Here is a simple object with an (unnecessarily roundabout) squaring method +var AsyncSquaringLibrary = { + squareExponent: 2, + square: function(number, callback){ + var result = Math.pow(number, this.squareExponent); + setTimeout(function(){ + callback(null, result); + }, 200); + } +}; + +async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){ + // result is [NaN, NaN, NaN] + // This fails because the `this.squareExponent` expression in the square + // function is not evaluated in the context of AsyncSquaringLibrary, and is + // therefore undefined. 
+}); + +async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){ + // result is [1, 4, 9] + // With the help of bind we can attach a context to the iterator before + // passing it to async. Now the square function will be executed in its + // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent` + // will be as expected. +}); +``` + +## Download + +The source is available for download from +[GitHub](http://github.com/caolan/async). +Alternatively, you can install using Node Package Manager (npm): + + npm install async + +__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed + +## In the Browser + +So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. Usage: + +```html + + +``` + +## Documentation + +### Collections + +* [each](#each) +* [eachSeries](#eachSeries) +* [eachLimit](#eachLimit) +* [map](#map) +* [mapSeries](#mapSeries) +* [mapLimit](#mapLimit) +* [filter](#filter) +* [filterSeries](#filterSeries) +* [reject](#reject) +* [rejectSeries](#rejectSeries) +* [reduce](#reduce) +* [reduceRight](#reduceRight) +* [detect](#detect) +* [detectSeries](#detectSeries) +* [sortBy](#sortBy) +* [some](#some) +* [every](#every) +* [concat](#concat) +* [concatSeries](#concatSeries) + +### Control Flow + +* [series](#series) +* [parallel](#parallel) +* [parallelLimit](#parallellimittasks-limit-callback) +* [whilst](#whilst) +* [doWhilst](#doWhilst) +* [until](#until) +* [doUntil](#doUntil) +* [forever](#forever) +* [waterfall](#waterfall) +* [compose](#compose) +* [applyEach](#applyEach) +* [applyEachSeries](#applyEachSeries) +* [queue](#queue) +* [cargo](#cargo) +* [auto](#auto) +* [iterator](#iterator) +* [apply](#apply) +* [nextTick](#nextTick) +* [times](#times) +* [timesSeries](#timesSeries) + +### Utils + +* [memoize](#memoize) +* [unmemoize](#unmemoize) +* [log](#log) +* [dir](#dir) +* [noConflict](#noConflict) + + +## Collections + + + +### each(arr, iterator, callback) + +Applies an iterator function to each item in an array, in parallel. +The iterator is called with an item from the list and a callback for when it +has finished. If the iterator passes an error to this callback, the main +callback for the each function is immediately called with the error. + +Note, that since this function applies the iterator to each item in parallel +there is no guarantee that the iterator functions will complete in order. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback(err) which must be called once it has + completed. If no error has occured, the callback should be run without + arguments or with an explicit null argument. +* callback(err) - A callback which is called after all the iterator functions + have finished, or an error has occurred. + +__Example__ + +```js +// assuming openFiles is an array of file names and saveFile is a function +// to save the modified contents of that file: + +async.each(openFiles, saveFile, function(err){ + // if any of the saves produced an error, err would equal that error +}); +``` + +--------------------------------------- + + + +### eachSeries(arr, iterator, callback) + +The same as each only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. This means the iterator functions will complete in order. 
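A minimal sketch (editor's addition, not from the upstream README), reusing the hypothetical `openFiles` array and `saveFile` function from the `each` example above:

```js
// files are saved strictly one after another, in array order
async.eachSeries(openFiles, saveFile, function(err){
    // err is the first error a saveFile call reported, or null if all succeeded
});
```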
+ + +--------------------------------------- + + + +### eachLimit(arr, limit, iterator, callback) + +The same as each only no more than "limit" iterators will be simultaneously +running at any time. + +Note that the items are not processed in batches, so there is no guarantee that + the first "limit" iterator functions will complete before any others are +started. + +__Arguments__ + +* arr - An array to iterate over. +* limit - The maximum number of iterators to run at any time. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback(err) which must be called once it has + completed. If no error has occured, the callback should be run without + arguments or with an explicit null argument. +* callback(err) - A callback which is called after all the iterator functions + have finished, or an error has occurred. + +__Example__ + +```js +// Assume documents is an array of JSON objects and requestApi is a +// function that interacts with a rate-limited REST api. + +async.eachLimit(documents, 20, requestApi, function(err){ + // if any of the saves produced an error, err would equal that error +}); +``` + +--------------------------------------- + + +### map(arr, iterator, callback) + +Produces a new array of values by mapping each value in the given array through +the iterator function. The iterator is called with an item from the array and a +callback for when it has finished processing. The callback takes 2 arguments, +an error and the transformed item from the array. If the iterator passes an +error to this callback, the main callback for the map function is immediately +called with the error. + +Note, that since this function applies the iterator to each item in parallel +there is no guarantee that the iterator functions will complete in order, however +the results array will be in the same order as the original array. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback(err, transformed) which must be called once + it has completed with an error (which can be null) and a transformed item. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is an array of the + transformed items from the original array. + +__Example__ + +```js +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +--------------------------------------- + + +### mapSeries(arr, iterator, callback) + +The same as map only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. The results array will be in the same order as the original. + + +--------------------------------------- + + +### mapLimit(arr, limit, iterator, callback) + +The same as map only no more than "limit" iterators will be simultaneously +running at any time. + +Note that the items are not processed in batches, so there is no guarantee that + the first "limit" iterator functions will complete before any others are +started. + +__Arguments__ + +* arr - An array to iterate over. +* limit - The maximum number of iterators to run at any time. +* iterator(item, callback) - A function to apply to each item in the array. 
+ The iterator is passed a callback(err, transformed) which must be called once + it has completed with an error (which can be null) and a transformed item. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is an array of the + transformed items from the original array. + +__Example__ + +```js +async.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +--------------------------------------- + + +### filter(arr, iterator, callback) + +__Alias:__ select + +Returns a new array of all the values which pass an async truth test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like fs.exists. This operation is +performed in parallel, but the results array will be in the same order as the +original. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback(truthValue) which must be called with a + boolean argument once it has completed. +* callback(results) - A callback which is called after all the iterator + functions have finished. + +__Example__ + +```js +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); +``` + +--------------------------------------- + + +### filterSeries(arr, iterator, callback) + +__alias:__ selectSeries + +The same as filter only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. The results array will be in the same order as the original. + +--------------------------------------- + + +### reject(arr, iterator, callback) + +The opposite of filter. Removes values that pass an async truth test. + +--------------------------------------- + + +### rejectSeries(arr, iterator, callback) + +The same as reject, only the iterator is applied to each item in the array +in series. + + +--------------------------------------- + + +### reduce(arr, memo, iterator, callback) + +__aliases:__ inject, foldl + +Reduces a list of values into a single value using an async iterator to return +each successive step. Memo is the initial state of the reduction. This +function only operates in series. For performance reasons, it may make sense to +split a call to this function into a parallel map, then use the normal +Array.prototype.reduce on the results. This function is for situations where +each step in the reduction needs to be async, if you can get the data before +reducing it then it's probably a good idea to do so. + +__Arguments__ + +* arr - An array to iterate over. +* memo - The initial state of the reduction. +* iterator(memo, item, callback) - A function applied to each item in the + array to produce the next step in the reduction. The iterator is passed a + callback(err, reduction) which accepts an optional error as its first + argument, and the state of the reduction as the second. If an error is + passed to the callback, the reduction is stopped and the main callback is + immediately called with the error. +* callback(err, result) - A callback which is called after all the iterator + functions have finished. Result is the reduced value. 
+ +__Example__ + +```js +async.reduce([1,2,3], 0, function(memo, item, callback){ + // pointless async: + process.nextTick(function(){ + callback(null, memo + item) + }); +}, function(err, result){ + // result is now equal to the last value of memo, which is 6 +}); +``` + +--------------------------------------- + + +### reduceRight(arr, memo, iterator, callback) + +__Alias:__ foldr + +Same as reduce, only operates on the items in the array in reverse order. + + +--------------------------------------- + + +### detect(arr, iterator, callback) + +Returns the first value in a list that passes an async truth test. The +iterator is applied in parallel, meaning the first iterator to return true will +fire the detect callback with that result. That means the result might not be +the first item in the original array (in terms of order) that passes the test. + +If order within the original array is important then look at detectSeries. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback(truthValue) which must be called with a + boolean argument once it has completed. +* callback(result) - A callback which is called as soon as any iterator returns + true, or after all the iterator functions have finished. Result will be + the first item in the array that passes the truth test (iterator) or the + value undefined if none passed. + +__Example__ + +```js +async.detect(['file1','file2','file3'], fs.exists, function(result){ + // result now equals the first file in the list that exists +}); +``` + +--------------------------------------- + + +### detectSeries(arr, iterator, callback) + +The same as detect, only the iterator is applied to each item in the array +in series. This means the result is always the first in the original array (in +terms of array order) that passes the truth test. + + +--------------------------------------- + + +### sortBy(arr, iterator, callback) + +Sorts a list by the results of running each value through an async iterator. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback(err, sortValue) which must be called once it + has completed with an error (which can be null) and a value to use as the sort + criteria. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is the items from + the original array sorted by the values returned by the iterator calls. + +__Example__ + +```js +async.sortBy(['file1','file2','file3'], function(file, callback){ + fs.stat(file, function(err, stats){ + callback(err, stats.mtime); + }); +}, function(err, results){ + // results is now the original array of files sorted by + // modified date +}); +``` + +--------------------------------------- + + +### some(arr, iterator, callback) + +__Alias:__ any + +Returns true if at least one element in the array satisfies an async test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like fs.exists. Once any iterator +call returns true, the main callback is immediately called. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. 
+ The iterator is passed a callback(truthValue) which must be called with a + boolean argument once it has completed. +* callback(result) - A callback which is called as soon as any iterator returns + true, or after all the iterator functions have finished. Result will be + either true or false depending on the values of the async tests. + +__Example__ + +```js +async.some(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then at least one of the files exists +}); +``` + +--------------------------------------- + + +### every(arr, iterator, callback) + +__Alias:__ all + +Returns true if every element in the array satisfies an async test. +_The callback for each iterator call only accepts a single argument of true or +false, it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like fs.exists. + +__Arguments__ + +* arr - An array to iterate over. +* iterator(item, callback) - A truth test to apply to each item in the array. + The iterator is passed a callback(truthValue) which must be called with a + boolean argument once it has completed. +* callback(result) - A callback which is called after all the iterator + functions have finished. Result will be either true or false depending on + the values of the async tests. + +__Example__ + +```js +async.every(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then every file exists +}); +``` + +--------------------------------------- + + +### concat(arr, iterator, callback) + +Applies an iterator to each item in a list, concatenating the results. Returns the +concatenated list. The iterators are called in parallel, and the results are +concatenated as they return. There is no guarantee that the results array will +be returned in the original order of the arguments passed to the iterator function. + +__Arguments__ + +* arr - An array to iterate over +* iterator(item, callback) - A function to apply to each item in the array. + The iterator is passed a callback(err, results) which must be called once it + has completed with an error (which can be null) and an array of results. +* callback(err, results) - A callback which is called after all the iterator + functions have finished, or an error has occurred. Results is an array containing + the concatenated results of the iterator function. + +__Example__ + +```js +async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){ + // files is now a list of filenames that exist in the 3 directories +}); +``` + +--------------------------------------- + + +### concatSeries(arr, iterator, callback) + +Same as async.concat, but executes in series instead of parallel. + + +## Control Flow + + +### series(tasks, [callback]) + +Run an array of functions in series, each one running once the previous +function has completed. If any functions in the series pass an error to its +callback, no more functions are run and the callback for the series is +immediately called with the value of the error. Once the tasks have completed, +the results are passed to the final callback as an array. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final callback as an object +instead of an array. This can be a more readable way of handling results from +async.series. 
+ + +__Arguments__ + +* tasks - An array or object containing functions to run, each function is passed + a callback(err, result) it must call on completion with an error (which can + be null) and an optional result value. +* callback(err, results) - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. + +__Example__ + +```js +async.series([ + function(callback){ + // do some stuff ... + callback(null, 'one'); + }, + function(callback){ + // do some more stuff ... + callback(null, 'two'); + } +], +// optional callback +function(err, results){ + // results is now equal to ['one', 'two'] +}); + + +// an example using an object instead of an array +async.series({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equal to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallel(tasks, [callback]) + +Run an array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its +callback, the main callback is immediately called with the value of the error. +Once the tasks have completed, the results are passed to the final callback as an +array. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final callback as an object +instead of an array. This can be a more readable way of handling results from +async.parallel. + + +__Arguments__ + +* tasks - An array or object containing functions to run, each function is passed + a callback(err, result) it must call on completion with an error (which can + be null) and an optional result value. +* callback(err, results) - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. + +__Example__ + +```js +async.parallel([ + function(callback){ + setTimeout(function(){ + callback(null, 'one'); + }, 200); + }, + function(callback){ + setTimeout(function(){ + callback(null, 'two'); + }, 100); + } +], +// optional callback +function(err, results){ + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}); + + +// an example using an object instead of an array +async.parallel({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equals to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallelLimit(tasks, limit, [callback]) + +The same as parallel only the tasks are executed in parallel with a maximum of "limit" +tasks executing at any time. + +Note that the tasks are not executed in batches, so there is no guarantee that +the first "limit" tasks will complete before any others are started. + +__Arguments__ + +* tasks - An array or object containing functions to run, each function is passed + a callback(err, result) it must call on completion with an error (which can + be null) and an optional result value. +* limit - The maximum number of tasks to run at any time. 
+* callback(err, results) - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. + +--------------------------------------- + + +### whilst(test, fn, callback) + +Repeatedly call fn, while test returns true. Calls the callback when stopped, +or an error occurs. + +__Arguments__ + +* test() - synchronous truth test to perform before each execution of fn. +* fn(callback) - A function to call each time the test passes. The function is + passed a callback(err) which must be called once it has completed with an + optional error argument. +* callback(err) - A callback which is called after the test fails and repeated + execution of fn has stopped. + +__Example__ + +```js +var count = 0; + +async.whilst( + function () { return count < 5; }, + function (callback) { + count++; + setTimeout(callback, 1000); + }, + function (err) { + // 5 seconds have passed + } +); +``` + +--------------------------------------- + + +### doWhilst(fn, test, callback) + +The post check version of whilst. To reflect the difference in the order of operations `test` and `fn` arguments are switched. `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + +--------------------------------------- + + +### until(test, fn, callback) + +Repeatedly call fn, until test returns true. Calls the callback when stopped, +or an error occurs. + +The inverse of async.whilst. + +--------------------------------------- + + +### doUntil(fn, test, callback) + +Like doWhilst except the test is inverted. Note the argument ordering differs from `until`. + +--------------------------------------- + + +### forever(fn, callback) + +Calls the asynchronous function 'fn' repeatedly, in series, indefinitely. +If an error is passed to fn's callback then 'callback' is called with the +error, otherwise it will never be called. + +--------------------------------------- + + +### waterfall(tasks, [callback]) + +Runs an array of functions in series, each passing their results to the next in +the array. However, if any of the functions pass an error to the callback, the +next function is not executed and the main callback is immediately called with +the error. + +__Arguments__ + +* tasks - An array of functions to run, each function is passed a + callback(err, result1, result2, ...) it must call on completion. The first + argument is an error (which can be null) and any further arguments will be + passed as arguments in order to the next task. +* callback(err, [results]) - An optional callback to run once all the functions + have completed. This will be passed the results of the last task's callback. + + + +__Example__ + +```js +async.waterfall([ + function(callback){ + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback){ + callback(null, 'three'); + }, + function(arg1, callback){ + // arg1 now equals 'three' + callback(null, 'done'); + } +], function (err, result) { + // result now equals 'done' +}); +``` + +--------------------------------------- + +### compose(fn1, fn2...) + +Creates a function which is a composition of the passed asynchronous +functions. Each function consumes the return value of the function that +follows. Composing functions f(), g() and h() would produce the result of +f(g(h())), only this version uses callbacks to obtain the return values. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* functions... 
- the asynchronous functions to compose + + +__Example__ + +```js +function add1(n, callback) { + setTimeout(function () { + callback(null, n + 1); + }, 10); +} + +function mul3(n, callback) { + setTimeout(function () { + callback(null, n * 3); + }, 10); +} + +var add1mul3 = async.compose(mul3, add1); + +add1mul3(4, function (err, result) { + // result now equals 15 +}); +``` + +--------------------------------------- + +### applyEach(fns, args..., callback) + +Applies the provided arguments to each function in the array, calling the +callback after all functions have completed. If you only provide the first +argument then it will return a function which lets you pass in the +arguments as if it were a single function call. + +__Arguments__ + +* fns - the asynchronous functions to all call with the same arguments +* args... - any number of separate arguments to pass to the function +* callback - the final argument should be the callback, called when all + functions have completed processing + + +__Example__ + +```js +async.applyEach([enableSearch, updateSchema], 'bucket', callback); + +// partial application example: +async.each( + buckets, + async.applyEach([enableSearch, updateSchema]), + callback +); +``` + +--------------------------------------- + + +### applyEachSeries(arr, iterator, callback) + +The same as applyEach only the functions are applied in series. + +--------------------------------------- + + +### queue(worker, concurrency) + +Creates a queue object with the specified concurrency. Tasks added to the +queue will be processed in parallel (up to the concurrency limit). If all +workers are in progress, the task is queued until one is available. Once +a worker has completed a task, the task's callback is called. + +__Arguments__ + +* worker(task, callback) - An asynchronous function for processing a queued + task, which must call its callback(err) argument when finished, with an + optional error as an argument. +* concurrency - An integer for determining how many worker functions should be + run in parallel. + +__Queue objects__ + +The queue object returned by this function has the following properties and +methods: + +* length() - a function returning the number of items waiting to be processed. +* concurrency - an integer for determining how many worker functions should be + run in parallel. This property can be changed after a queue is created to + alter the concurrency on-the-fly. +* push(task, [callback]) - add a new task to the queue, the callback is called + once the worker has finished processing the task. + instead of a single task, an array of tasks can be submitted. the respective callback is used for every task in the list. +* unshift(task, [callback]) - add a new task to the front of the queue. 
+* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued +* empty - a callback that is called when the last item from the queue is given to a worker +* drain - a callback that is called when the last item from the queue has returned from the worker + +__Example__ + +```js +// create a queue object with concurrency 2 + +var q = async.queue(function (task, callback) { + console.log('hello ' + task.name); + callback(); +}, 2); + + +// assign a callback +q.drain = function() { + console.log('all items have been processed'); +} + +// add some items to the queue + +q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); + +// add some items to the queue (batch-wise) + +q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) { + console.log('finished processing bar'); +}); + +// add some items to the front of the queue + +q.unshift({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +``` + +--------------------------------------- + + +### cargo(worker, [payload]) + +Creates a cargo object with the specified payload. Tasks added to the +cargo will be processed altogether (up to the payload limit). If the +worker is in progress, the task is queued until it is available. Once +the worker has completed some tasks, each callback of those tasks is called. + +__Arguments__ + +* worker(tasks, callback) - An asynchronous function for processing an array of + queued tasks, which must call its callback(err) argument when finished, with + an optional error as an argument. +* payload - An optional integer for determining how many tasks should be + processed per round; if omitted, the default is unlimited. + +__Cargo objects__ + +The cargo object returned by this function has the following properties and +methods: + +* length() - a function returning the number of items waiting to be processed. +* payload - an integer for determining how many tasks should be + process per round. This property can be changed after a cargo is created to + alter the payload on-the-fly. +* push(task, [callback]) - add a new task to the queue, the callback is called + once the worker has finished processing the task. + instead of a single task, an array of tasks can be submitted. the respective callback is used for every task in the list. +* saturated - a callback that is called when the queue length hits the concurrency and further tasks will be queued +* empty - a callback that is called when the last item from the queue is given to a worker +* drain - a callback that is called when the last item from the queue has returned from the worker + +__Example__ + +```js +// create a cargo object with payload 2 + +var cargo = async.cargo(function (tasks, callback) { + for(var i=0; i +### auto(tasks, [callback]) + +Determines the best order for running functions based on their requirements. +Each function can optionally depend on other functions being completed first, +and each function is run as soon as its requirements are satisfied. If any of +the functions pass an error to their callback, that function will not complete +(so any other functions depending on it will not run) and the main callback +will be called immediately with the error. Functions also receive an object +containing the results of functions which have completed so far. 
+ +Note, all functions are called with a results object as a second argument, +so it is unsafe to pass functions in the tasks object which cannot handle the +extra argument. For example, this snippet of code: + +```js +async.auto({ + readData: async.apply(fs.readFile, 'data.txt', 'utf-8') +}, callback); +``` + +will have the effect of calling readFile with the results object as the last +argument, which will fail: + +```js +fs.readFile('data.txt', 'utf-8', cb, {}); +``` + +Instead, wrap the call to readFile in a function which does not forward the +results object: + +```js +async.auto({ + readData: function(cb, results){ + fs.readFile('data.txt', 'utf-8', cb); + } +}, callback); +``` + +__Arguments__ + +* tasks - An object literal containing named functions or an array of + requirements, with the function itself the last item in the array. The key + used for each function or array is used when specifying requirements. The + function receives two arguments: (1) a callback(err, result) which must be + called when finished, passing an error (which can be null) and the result of + the function's execution, and (2) a results object, containing the results of + the previously executed functions. +* callback(err, results) - An optional callback which is called when all the + tasks have been completed. The callback will receive an error as an argument + if any tasks pass an error to their callback. Results will always be passed + but if an error occurred, no other tasks will be performed, and the results + object will only contain partial results. + + +__Example__ + +```js +async.auto({ + get_data: function(callback){ + // async code to get some data + }, + make_folder: function(callback){ + // async code to create a directory to store a file in + // this is run at the same time as getting the data + }, + write_file: ['get_data', 'make_folder', function(callback){ + // once there is some data and the directory exists, + // write the data to a file in the directory + callback(null, filename); + }], + email_link: ['write_file', function(callback, results){ + // once the file is written let's email a link to it... + // results.write_file contains the filename returned by write_file. + }] +}); +``` + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + +```js +async.parallel([ + function(callback){ + // async code to get some data + }, + function(callback){ + // async code to create a directory to store a file in + // this is run at the same time as getting the data + } +], +function(err, results){ + async.series([ + function(callback){ + // once there is some data and the directory exists, + // write the data to a file in the directory + }, + function(callback){ + // once the file is written let's email a link to it... + } + ]); +}); +``` + +For a complicated series of async tasks using the auto function makes adding +new tasks much easier and makes the code more readable. + + +--------------------------------------- + + +### iterator(tasks) + +Creates an iterator function which calls the next function in the array, +returning a continuation to call the next one after that. It's also possible to +'peek' the next iterator by doing iterator.next(). + +This function is used internally by the async module but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* tasks - An array of functions to run. 
+ +__Example__ + +```js +var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } +]); + +node> var iterator2 = iterator(); +'one' +node> var iterator3 = iterator2(); +'two' +node> iterator3(); +'three' +node> var nextfn = iterator2.next(); +node> nextfn(); +'three' +``` + +--------------------------------------- + + +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied, a useful +shorthand when combined with other control flow functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to automatically apply when the + continuation is called. + +__Example__ + +```js +// using apply + +async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), +]); + + +// the same process without using apply + +async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + } +]); +``` + +It's possible to pass any number of additional arguments when calling the +continuation: + +```js +node> var fn = async.apply(sys.puts, 'one'); +node> fn('two', 'three'); +one +two +three +``` + +--------------------------------------- + + +### nextTick(callback) + +Calls the callback on a later loop around the event loop. In node.js this just +calls process.nextTick, in the browser it falls back to setImmediate(callback) +if available, otherwise setTimeout(callback, 0), which means other higher priority +events may precede the execution of the callback. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* callback - The function to call on a later loop around the event loop. + +__Example__ + +```js +var call_order = []; +async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two'] +}); +call_order.push('one') +``` + + +### times(n, callback) + +Calls the callback n times and accumulates results in the same manner +you would use with async.map. + +__Arguments__ + +* n - The number of times to run the function. +* callback - The function to call n times. + +__Example__ + +```js +// Pretend this is some complicated async factory +var createUser = function(id, callback) { + callback(null, { + id: 'user' + id + }) +} +// generate 5 users +async.times(5, function(n, next){ + createUser(n, function(err, user) { + next(err, user) + }) +}, function(err, users) { + // we should now have 5 users +}); +``` + + +### timesSeries(n, callback) + +The same as times only the iterator is applied to each item in the array in +series. The next iterator is only called once the current one has completed +processing. The results array will be in the same order as the original. + + +## Utils + + +### memoize(fn, [hasher]) + +Caches the results of an async function. When creating a hash to store function +results against, the callback is omitted from the hash and an optional hash +function can be used. + +The cache of results is exposed as the `memo` property of the function returned +by `memoize`. + +__Arguments__ + +* fn - the function you to proxy and cache results from. 
+* hasher - an optional function for generating a custom hash for storing + results, it has all the arguments applied to it apart from the callback, and + must be synchronous. + +__Example__ + +```js +var slow_fn = function (name, callback) { + // do something + callback(null, result); +}; +var fn = async.memoize(slow_fn); + +// fn can now be used as if it were slow_fn +fn('some name', function () { + // callback +}); +``` + + +### unmemoize(fn) + +Undoes a memoized function, reverting it to the original, unmemoized +form. Comes handy in tests. + +__Arguments__ + +* fn - the memoized function + + +### log(function, arguments) + +Logs the result of an async function to the console. Only works in node.js or +in browsers that support console.log and console.error (such as FF and Chrome). +If multiple arguments are returned from the async function, console.log is +called on each argument in order. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to apply to the function. + +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, 'hello ' + name); + }, 1000); +}; +``` +```js +node> async.log(hello, 'world'); +'hello world' +``` + +--------------------------------------- + + +### dir(function, arguments) + +Logs the result of an async function to the console using console.dir to +display the properties of the resulting object. Only works in node.js or +in browsers that support console.dir and console.error (such as FF and Chrome). +If multiple arguments are returned from the async function, console.dir is +called on each argument in order. + +__Arguments__ + +* function - The function you want to eventually apply all arguments to. +* arguments... - Any number of arguments to apply to the function. + +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, {hello: name}); + }, 1000); +}; +``` +```js +node> async.dir(hello, 'world'); +{hello: 'world'} +``` + +--------------------------------------- + + +### noConflict() + +Changes the value of async back to its original value, returning a reference to the +async object. 
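A short usage sketch (editor's addition, not part of the upstream README), for a browser page where another script had already defined a global `async`:

```js
// restore the previous global `async` and keep a private reference to this one
var asyncLib = async.noConflict();

asyncLib.nextTick(function () {
    console.log('the library still works through the saved reference');
});
```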
diff --git a/node_modules/uglify-js/node_modules/async/component.json b/node_modules/uglify-js/node_modules/async/component.json new file mode 100644 index 0000000..bbb0115 --- /dev/null +++ b/node_modules/uglify-js/node_modules/async/component.json @@ -0,0 +1,11 @@ +{ + "name": "async", + "repo": "caolan/async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "0.1.23", + "keywords": [], + "dependencies": {}, + "development": {}, + "main": "lib/async.js", + "scripts": [ "lib/async.js" ] +} diff --git a/node_modules/uglify-js/node_modules/async/lib/async.js b/node_modules/uglify-js/node_modules/async/lib/async.js new file mode 100755 index 0000000..1eebb15 --- /dev/null +++ b/node_modules/uglify-js/node_modules/async/lib/async.js @@ -0,0 +1,958 @@ +/*global setImmediate: false, setTimeout: false, console: false */ +(function () { + + var async = {}; + + // global on the server, window in the browser + var root, previous_async; + + root = this; + if (root != null) { + previous_async = root.async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + function only_once(fn) { + var called = false; + return function() { + if (called) throw new Error("Callback was already called."); + called = true; + fn.apply(root, arguments); + } + } + + //// cross-browser compatiblity functions //// + + var _each = function (arr, iterator) { + if (arr.forEach) { + return arr.forEach(iterator); + } + for (var i = 0; i < arr.length; i += 1) { + iterator(arr[i], i, arr); + } + }; + + var _map = function (arr, iterator) { + if (arr.map) { + return arr.map(iterator); + } + var results = []; + _each(arr, function (x, i, a) { + results.push(iterator(x, i, a)); + }); + return results; + }; + + var _reduce = function (arr, iterator, memo) { + if (arr.reduce) { + return arr.reduce(iterator, memo); + } + _each(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + }; + + var _keys = function (obj) { + if (Object.keys) { + return Object.keys(obj); + } + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + if (typeof process === 'undefined' || !(process.nextTick)) { + if (typeof setImmediate === 'function') { + async.nextTick = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + async.setImmediate = async.nextTick; + } + else { + async.nextTick = function (fn) { + setTimeout(fn, 0); + }; + async.setImmediate = async.nextTick; + } + } + else { + async.nextTick = process.nextTick; + if (typeof setImmediate !== 'undefined') { + async.setImmediate = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + } + else { + async.setImmediate = async.nextTick; + } + } + + async.each = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + _each(arr, function (x) { + iterator(x, only_once(function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(null); + } + } + })); + }); + }; + async.forEach = async.each; + + async.eachSeries = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + var iterate = 
function () { + iterator(arr[completed], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(null); + } + else { + iterate(); + } + } + }); + }; + iterate(); + }; + async.forEachSeries = async.eachSeries; + + async.eachLimit = function (arr, limit, iterator, callback) { + var fn = _eachLimit(limit); + fn.apply(null, [arr, iterator, callback]); + }; + async.forEachLimit = async.eachLimit; + + var _eachLimit = function (limit) { + + return function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length || limit <= 0) { + return callback(); + } + var completed = 0; + var started = 0; + var running = 0; + + (function replenish () { + if (completed >= arr.length) { + return callback(); + } + + while (running < limit && started < arr.length) { + started += 1; + running += 1; + iterator(arr[started - 1], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + running -= 1; + if (completed >= arr.length) { + callback(); + } + else { + replenish(); + } + } + }); + } + })(); + }; + }; + + + var doParallel = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.each].concat(args)); + }; + }; + var doParallelLimit = function(limit, fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [_eachLimit(limit)].concat(args)); + }; + }; + var doSeries = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.eachSeries].concat(args)); + }; + }; + + + var _asyncMap = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (err, v) { + results[x.index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + }; + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + async.mapLimit = function (arr, limit, iterator, callback) { + return _mapLimit(limit)(arr, iterator, callback); + }; + + var _mapLimit = function(limit) { + return doParallelLimit(limit, _asyncMap); + }; + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. 
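    // (illustrative note, not in the upstream source: each step consumes the
    // memo produced by the previous step, e.g. reduce([1,2,3], 0, add, cb)
    // must compute 0+1, then 1+2, then 3+3, so the calls cannot overlap)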
+ async.reduce = function (arr, memo, iterator, callback) { + async.eachSeries(arr, function (x, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + // inject alias + async.inject = async.reduce; + // foldl alias + async.foldl = async.reduce; + + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, function (x) { + return x; + }).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + // foldr alias + async.foldr = async.reduceRight; + + var _filter = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.filter = doParallel(_filter); + async.filterSeries = doSeries(_filter); + // select alias + async.select = async.filter; + async.selectSeries = async.filterSeries; + + var _reject = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (!v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.reject = doParallel(_reject); + async.rejectSeries = doSeries(_reject); + + var _detect = function (eachfn, arr, iterator, main_callback) { + eachfn(arr, function (x, callback) { + iterator(x, function (result) { + if (result) { + main_callback(x); + main_callback = function () {}; + } + else { + callback(); + } + }); + }, function (err) { + main_callback(); + }); + }; + async.detect = doParallel(_detect); + async.detectSeries = doSeries(_detect); + + async.some = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (v) { + main_callback(true); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(false); + }); + }; + // any alias + async.any = async.some; + + async.every = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (!v) { + main_callback(false); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(true); + }); + }; + // all alias + async.all = async.every; + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + var fn = function (left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + }; + callback(null, _map(results.sort(fn), function (x) { + return x.value; + })); + } + }); + }; + + async.auto = function (tasks, callback) { + callback = callback || function () {}; + var keys = _keys(tasks); + if (!keys.length) { + return callback(null); + } + + var results = {}; + + var listeners = []; + var addListener = function (fn) { + listeners.unshift(fn); + }; + var removeListener = function (fn) { + for (var i = 0; i < listeners.length; i += 1) { + if (listeners[i] === fn) { + listeners.splice(i, 1); + return; + } + } + }; + var taskComplete = function () { + _each(listeners.slice(0), function (fn) { + fn(); + }); + }; + + addListener(function () { + if (_keys(results).length === keys.length) { + callback(null, results); + callback = function () {}; + } + }); + + _each(keys, function (k) { + var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k]; + var taskCallback = function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + if (err) { + var safeResults = {}; + _each(_keys(results), function(rkey) { + safeResults[rkey] = results[rkey]; + }); + safeResults[k] = args; + callback(err, safeResults); + // stop subsequent errors hitting callback multiple times + callback = function () {}; + } + else { + results[k] = args; + async.setImmediate(taskComplete); + } + }; + var requires = task.slice(0, Math.abs(task.length - 1)) || []; + var ready = function () { + return _reduce(requires, function (a, x) { + return (a && results.hasOwnProperty(x)); + }, true) && !results.hasOwnProperty(k); + }; + if (ready()) { + task[task.length - 1](taskCallback, results); + } + else { + var listener = function () { + if (ready()) { + removeListener(listener); + task[task.length - 1](taskCallback, results); + } + }; + addListener(listener); + } + }); + }; + + async.waterfall = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor !== Array) { + var err = new Error('First argument to waterfall must be an array of functions'); + return callback(err); + } + if (!tasks.length) { + return callback(); + } + var wrapIterator = function (iterator) { + return function (err) { + if (err) { + callback.apply(null, arguments); + callback = function () {}; + } + else { + var args = Array.prototype.slice.call(arguments, 1); + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + async.setImmediate(function () { + iterator.apply(null, args); + }); + } + }; + }; + wrapIterator(async.iterator(tasks))(); + }; + + var _parallel = function(eachfn, tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + eachfn.map(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + eachfn.each(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.parallel = function (tasks, callback) { + _parallel({ map: async.map, each: async.each }, tasks, callback); + }; + + async.parallelLimit = function(tasks, limit, callback) { + _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, 
tasks, callback); + }; + + async.series = function (tasks, callback) { + callback = callback || function () {}; + if (tasks.constructor === Array) { + async.mapSeries(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + async.eachSeries(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.iterator = function (tasks) { + var makeCallback = function (index) { + var fn = function () { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + }; + fn.next = function () { + return (index < tasks.length - 1) ? makeCallback(index + 1): null; + }; + return fn; + }; + return makeCallback(0); + }; + + async.apply = function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + return function () { + return fn.apply( + null, args.concat(Array.prototype.slice.call(arguments)) + ); + }; + }; + + var _concat = function (eachfn, arr, fn, callback) { + var r = []; + eachfn(arr, function (x, cb) { + fn(x, function (err, y) { + r = r.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, r); + }); + }; + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + if (test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.whilst(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doWhilst = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + if (test()) { + async.doWhilst(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.until = function (test, iterator, callback) { + if (!test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.until(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doUntil = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + if (!test()) { + async.doUntil(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.queue = function (worker, concurrency) { + if (concurrency === undefined) { + concurrency = 1; + } + function _insert(q, data, pos, callback) { + if(data.constructor !== Array) { + data = [data]; + } + _each(data, function(task) { + var item = { + data: task, + callback: typeof callback === 'function' ? 
callback : null + }; + + if (pos) { + q.tasks.unshift(item); + } else { + q.tasks.push(item); + } + + if (q.saturated && q.tasks.length === concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + var workers = 0; + var q = { + tasks: [], + concurrency: concurrency, + saturated: null, + empty: null, + drain: null, + push: function (data, callback) { + _insert(q, data, false, callback); + }, + unshift: function (data, callback) { + _insert(q, data, true, callback); + }, + process: function () { + if (workers < q.concurrency && q.tasks.length) { + var task = q.tasks.shift(); + if (q.empty && q.tasks.length === 0) { + q.empty(); + } + workers += 1; + var next = function () { + workers -= 1; + if (task.callback) { + task.callback.apply(task, arguments); + } + if (q.drain && q.tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + var cb = only_once(next); + worker(task.data, cb); + } + }, + length: function () { + return q.tasks.length; + }, + running: function () { + return workers; + } + }; + return q; + }; + + async.cargo = function (worker, payload) { + var working = false, + tasks = []; + + var cargo = { + tasks: tasks, + payload: payload, + saturated: null, + empty: null, + drain: null, + push: function (data, callback) { + if(data.constructor !== Array) { + data = [data]; + } + _each(data, function(task) { + tasks.push({ + data: task, + callback: typeof callback === 'function' ? callback : null + }); + if (cargo.saturated && tasks.length === payload) { + cargo.saturated(); + } + }); + async.setImmediate(cargo.process); + }, + process: function process() { + if (working) return; + if (tasks.length === 0) { + if(cargo.drain) cargo.drain(); + return; + } + + var ts = typeof payload === 'number' + ? tasks.splice(0, payload) + : tasks.splice(0); + + var ds = _map(ts, function (task) { + return task.data; + }); + + if(cargo.empty) cargo.empty(); + working = true; + worker(ds, function () { + working = false; + + var args = arguments; + _each(ts, function (data) { + if (data.callback) { + data.callback.apply(null, args); + } + }); + + process(); + }); + }, + length: function () { + return tasks.length; + }, + running: function () { + return working; + } + }; + return cargo; + }; + + var _console_fn = function (name) { + return function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + fn.apply(null, args.concat([function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (typeof console !== 'undefined') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _each(args, function (x) { + console[name](x); + }); + } + } + }])); + }; + }; + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + var queues = {}; + hasher = hasher || function (x) { + return x; + }; + var memoized = function () { + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + var key = hasher.apply(null, args); + if (key in memo) { + callback.apply(null, memo[key]); + } + else if (key in queues) { + queues[key].push(callback); + } + else { + queues[key] = [callback]; + fn.apply(null, args.concat([function () { + memo[key] = arguments; + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i].apply(null, arguments); + } + }])); + } + }; + memoized.memo = memo; + 
memoized.unmemoized = fn; + return memoized; + }; + + async.unmemoize = function (fn) { + return function () { + return (fn.unmemoized || fn).apply(null, arguments); + }; + }; + + async.times = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.map(counter, iterator, callback); + }; + + async.timesSeries = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.mapSeries(counter, iterator, callback); + }; + + async.compose = function (/* functions... */) { + var fns = Array.prototype.reverse.call(arguments); + return function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + async.reduce(fns, args, function (newargs, fn, cb) { + fn.apply(that, newargs.concat([function () { + var err = arguments[0]; + var nextargs = Array.prototype.slice.call(arguments, 1); + cb(err, nextargs); + }])) + }, + function (err, results) { + callback.apply(that, [err].concat(results)); + }); + }; + }; + + var _applyEach = function (eachfn, fns /*args...*/) { + var go = function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + return eachfn(fns, function (fn, cb) { + fn.apply(that, args.concat([cb])); + }, + callback); + }; + if (arguments.length > 2) { + var args = Array.prototype.slice.call(arguments, 2); + return go.apply(this, args); + } + else { + return go; + } + }; + async.applyEach = doParallel(_applyEach); + async.applyEachSeries = doSeries(_applyEach); + + async.forever = function (fn, callback) { + function next(err) { + if (err) { + if (callback) { + return callback(err); + } + throw err; + } + fn(next); + } + next(); + }; + + // AMD / RequireJS + if (typeof define !== 'undefined' && define.amd) { + define([], function () { + return async; + }); + } + // Node.js + else if (typeof module !== 'undefined' && module.exports) { + module.exports = async; + } + // included directly via + + diff --git a/node_modules/uglify-to-browserify/.npmignore b/node_modules/uglify-to-browserify/.npmignore new file mode 100644 index 0000000..66d015b --- /dev/null +++ b/node_modules/uglify-to-browserify/.npmignore @@ -0,0 +1,14 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz +pids +logs +results +npm-debug.log +node_modules +/test/output.js diff --git a/node_modules/uglify-to-browserify/.travis.yml b/node_modules/uglify-to-browserify/.travis.yml new file mode 100644 index 0000000..9a61f6b --- /dev/null +++ b/node_modules/uglify-to-browserify/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" \ No newline at end of file diff --git a/node_modules/uglify-to-browserify/LICENSE b/node_modules/uglify-to-browserify/LICENSE new file mode 100644 index 0000000..dfb0b19 --- /dev/null +++ b/node_modules/uglify-to-browserify/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013 Forbes Lindesay + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the 
Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/uglify-to-browserify/README.md b/node_modules/uglify-to-browserify/README.md new file mode 100644 index 0000000..99685da --- /dev/null +++ b/node_modules/uglify-to-browserify/README.md @@ -0,0 +1,15 @@ +# uglify-to-browserify + +A transform to make UglifyJS work in browserify. + +[![Build Status](https://travis-ci.org/ForbesLindesay/uglify-to-browserify.png?branch=master)](https://travis-ci.org/ForbesLindesay/uglify-to-browserify) +[![Dependency Status](https://gemnasium.com/ForbesLindesay/uglify-to-browserify.png)](https://gemnasium.com/ForbesLindesay/uglify-to-browserify) +[![NPM version](https://badge.fury.io/js/uglify-to-browserify.png)](http://badge.fury.io/js/uglify-to-browserify) + +## Installation + + npm install uglify-to-browserify + +## License + + MIT \ No newline at end of file diff --git a/node_modules/uglify-to-browserify/index.js b/node_modules/uglify-to-browserify/index.js new file mode 100644 index 0000000..2cea629 --- /dev/null +++ b/node_modules/uglify-to-browserify/index.js @@ -0,0 +1,49 @@ +'use strict' + +var fs = require('fs') +var PassThrough = require('stream').PassThrough +var Transform = require('stream').Transform + +if (typeof Transform === 'undefined') { + throw new Error('UglifyJS only supports browserify when using node >= 0.10.x') +} + +var cache = {} +module.exports = transform +function transform(file) { + if (!/tools\/node\.js$/.test(file.replace(/\\/g,'/'))) return new PassThrough(); + if (cache[file]) return makeStream(cache[file]) + var uglify = require(file) + var src = 'var sys = require("util");\nvar MOZ_SourceMap = require("source-map");\nvar UglifyJS = exports;\n' + uglify.FILES.map(function (path) { return fs.readFileSync(path, 'utf8') }).join('\n') + + var ast = uglify.parse(src) + ast.figure_out_scope() + + var variables = ast.variables + .map(function (node, name) { + return name + }) + + src += '\n\n' + variables.map(function (v) { return 'exports.' 
+ v + ' = ' + v + ';' }).join('\n') + '\n\n' + + src += 'exports.AST_Node.warn_function = function (txt) { if (typeof console != "undefined" && typeof console.warn === "function") console.warn(txt) }\n\n' + + src += 'exports.minify = ' + uglify.minify.toString() + ';\n\n' + src += 'exports.describe_ast = ' + uglify.describe_ast.toString() + ';' + + // TODO: remove once https://github.com/substack/node-browserify/issues/631 is resolved + src = src.replace(/"for"/g, '"fo" + "r"') + + cache[file] = src + return makeStream(src); +} + +function makeStream(src) { + var res = new Transform(); + res._transform = function (chunk, encoding, callback) { callback() } + res._flush = function (callback) { + res.push(src) + callback() + } + return res; +} diff --git a/node_modules/uglify-to-browserify/package.json b/node_modules/uglify-to-browserify/package.json new file mode 100644 index 0000000..24eed9b --- /dev/null +++ b/node_modules/uglify-to-browserify/package.json @@ -0,0 +1,20 @@ +{ + "name": "uglify-to-browserify", + "version": "1.0.2", + "description": "A transform to make UglifyJS work in browserify.", + "keywords": [], + "dependencies": {}, + "devDependencies": { + "uglify-js": "~2.4.0", + "source-map": "~0.1.27" + }, + "scripts": { + "test": "node test/index.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/ForbesLindesay/uglify-to-browserify.git" + }, + "author": "ForbesLindesay", + "license": "MIT" +} \ No newline at end of file diff --git a/node_modules/uglify-to-browserify/test/index.js b/node_modules/uglify-to-browserify/test/index.js new file mode 100644 index 0000000..4117894 --- /dev/null +++ b/node_modules/uglify-to-browserify/test/index.js @@ -0,0 +1,22 @@ +var fs = require('fs') +var br = require('../') +var test = fs.readFileSync(require.resolve('uglify-js/test/run-tests.js'), 'utf8') + .replace(/^#.*\n/, '') + +var transform = br(require.resolve('uglify-js')) +transform.pipe(fs.createWriteStream(__dirname + '/output.js')) + .on('close', function () { + Function('module,require', test)({ + filename: require.resolve('uglify-js/test/run-tests.js') + }, + function (name) { + if (name === '../tools/node') { + return require('./output.js') + } else if (/^[a-z]+$/.test(name)) { + return require(name) + } else { + throw new Error('I didn\'t expect you to require ' + name) + } + }) + }) +transform.end(fs.readFileSync(require.resolve('uglify-js'), 'utf8')) \ No newline at end of file diff --git a/node_modules/uid-number/LICENSE b/node_modules/uid-number/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/uid-number/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
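For reference, the vendored `uglify-to-browserify` transform added above can be exercised by hand in the same way its own bundled `test/index.js` does. A minimal sketch (the output filename is illustrative; it assumes `uglify-js` is installed alongside, as it is in this tree):

```js
// Sketch only: drive the transform directly, mirroring test/index.js above.
var fs = require('fs')
var transform = require('uglify-to-browserify')

// uglify-js's entry point resolves to tools/node.js, the one file the
// transform actually rewrites; every other file just gets a PassThrough.
var file = require.resolve('uglify-js')
var stream = transform(file)

// The rewritten, browser-friendly source is emitted when the stream flushes.
stream.pipe(fs.createWriteStream(__dirname + '/uglify-browser.js'))
stream.end(fs.readFileSync(file, 'utf8'))
```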
diff --git a/node_modules/uid-number/README.md b/node_modules/uid-number/README.md new file mode 100644 index 0000000..8116675 --- /dev/null +++ b/node_modules/uid-number/README.md @@ -0,0 +1,17 @@ +Use this module to convert a username/groupname to a uid/gid number. + +Usage: + +``` +npm install uid-number +``` + +Then, in your node program: + +```javascript +var uidNumber = require("uid-number") +uidNumber("isaacs", function (er, uid, gid) { + // gid is null because we didn't ask for a group name + // uid === 24561 because that's my number. +}) +``` diff --git a/node_modules/uid-number/get-uid-gid.js b/node_modules/uid-number/get-uid-gid.js new file mode 100755 index 0000000..0b39174 --- /dev/null +++ b/node_modules/uid-number/get-uid-gid.js @@ -0,0 +1,24 @@ +if (module !== require.main) { + throw new Error("This file should not be loaded with require()") +} + +if (!process.getuid || !process.getgid) { + throw new Error("this file should not be called without uid/gid support") +} + +var argv = process.argv.slice(2) + , user = argv[0] || process.getuid() + , group = argv[1] || process.getgid() + +if (!isNaN(user)) user = +user +if (!isNaN(group)) group = +group + +console.error([user, group]) + +try { + process.setgid(group) + process.setuid(user) + console.log(JSON.stringify({uid:+process.getuid(), gid:+process.getgid()})) +} catch (ex) { + console.log(JSON.stringify({error:ex.message,errno:ex.errno})) +} diff --git a/node_modules/uid-number/package.json b/node_modules/uid-number/package.json new file mode 100644 index 0000000..616223f --- /dev/null +++ b/node_modules/uid-number/package.json @@ -0,0 +1,18 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "uid-number", + "description": "Convert a username/group name to a uid/gid number", + "version": "0.0.6", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/uid-number.git" + }, + "main": "uid-number.js", + "dependencies": {}, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + }, + "license": "ISC" +} diff --git a/node_modules/uid-number/uid-number.js b/node_modules/uid-number/uid-number.js new file mode 100644 index 0000000..bd62184 --- /dev/null +++ b/node_modules/uid-number/uid-number.js @@ -0,0 +1,59 @@ +module.exports = uidNumber + +// This module calls into get-uid-gid.js, which sets the +// uid and gid to the supplied argument, in order to find out their +// numeric value. This can't be done in the main node process, +// because otherwise node would be running as that user from this +// point on. 
+ +var child_process = require("child_process") + , path = require("path") + , uidSupport = process.getuid && process.setuid + , uidCache = {} + , gidCache = {} + +function uidNumber (uid, gid, cb) { + if (!uidSupport) return cb() + if (typeof cb !== "function") cb = gid, gid = null + if (typeof cb !== "function") cb = uid, uid = null + if (gid == null) gid = process.getgid() + if (uid == null) uid = process.getuid() + if (!isNaN(gid)) gid = gidCache[gid] = +gid + if (!isNaN(uid)) uid = uidCache[uid] = +uid + + if (uidCache.hasOwnProperty(uid)) uid = uidCache[uid] + if (gidCache.hasOwnProperty(gid)) gid = gidCache[gid] + + if (typeof gid === "number" && typeof uid === "number") { + return process.nextTick(cb.bind(null, null, uid, gid)) + } + + var getter = require.resolve("./get-uid-gid.js") + + child_process.execFile( process.execPath + , [getter, uid, gid] + , function (code, out, stderr) { + if (code) { + var er = new Error("could not get uid/gid\n" + stderr) + er.code = code + return cb(er) + } + + try { + out = JSON.parse(out+"") + } catch (ex) { + return cb(ex) + } + + if (out.error) { + var er = new Error(out.error) + er.errno = out.errno + return cb(er) + } + + if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error( + "Could not get uid/gid: "+JSON.stringify(out))) + + cb(null, uidCache[uid] = +out.uid, gidCache[gid] = +out.gid) + }) +} diff --git a/node_modules/unpipe/HISTORY.md b/node_modules/unpipe/HISTORY.md new file mode 100644 index 0000000..85e0f8d --- /dev/null +++ b/node_modules/unpipe/HISTORY.md @@ -0,0 +1,4 @@ +1.0.0 / 2015-06-14 +================== + + * Initial release diff --git a/node_modules/unpipe/LICENSE b/node_modules/unpipe/LICENSE new file mode 100644 index 0000000..aed0138 --- /dev/null +++ b/node_modules/unpipe/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/unpipe/README.md b/node_modules/unpipe/README.md new file mode 100644 index 0000000..e536ad2 --- /dev/null +++ b/node_modules/unpipe/README.md @@ -0,0 +1,43 @@ +# unpipe + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Unpipe a stream from all destinations. 
+ +## Installation + +```sh +$ npm install unpipe +``` + +## API + +```js +var unpipe = require('unpipe') +``` + +### unpipe(stream) + +Unpipes all destinations from a given stream. With stream 2+, this is +equivalent to `stream.unpipe()`. When used with streams 1 style streams +(typically Node.js 0.8 and below), this module attempts to undo the +actions done in `stream.pipe(dest)`. + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/unpipe.svg +[npm-url]: https://npmjs.org/package/unpipe +[node-image]: https://img.shields.io/node/v/unpipe.svg +[node-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/stream-utils/unpipe.svg +[travis-url]: https://travis-ci.org/stream-utils/unpipe +[coveralls-image]: https://img.shields.io/coveralls/stream-utils/unpipe.svg +[coveralls-url]: https://coveralls.io/r/stream-utils/unpipe?branch=master +[downloads-image]: https://img.shields.io/npm/dm/unpipe.svg +[downloads-url]: https://npmjs.org/package/unpipe diff --git a/node_modules/unpipe/index.js b/node_modules/unpipe/index.js new file mode 100644 index 0000000..15c3d97 --- /dev/null +++ b/node_modules/unpipe/index.js @@ -0,0 +1,69 @@ +/*! + * unpipe + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = unpipe + +/** + * Determine if there are Node.js pipe-like data listeners. + * @private + */ + +function hasPipeDataListeners(stream) { + var listeners = stream.listeners('data') + + for (var i = 0; i < listeners.length; i++) { + if (listeners[i].name === 'ondata') { + return true + } + } + + return false +} + +/** + * Unpipe a stream from all destinations. + * + * @param {object} stream + * @public + */ + +function unpipe(stream) { + if (!stream) { + throw new TypeError('argument stream is required') + } + + if (typeof stream.unpipe === 'function') { + // new-style + stream.unpipe() + return + } + + // Node.js 0.8 hack + if (!hasPipeDataListeners(stream)) { + return + } + + var listener + var listeners = stream.listeners('close') + + for (var i = 0; i < listeners.length; i++) { + listener = listeners[i] + + if (listener.name !== 'cleanup' && listener.name !== 'onclose') { + continue + } + + // invoke the listener + listener.call(stream) + } +} diff --git a/node_modules/unpipe/package.json b/node_modules/unpipe/package.json new file mode 100644 index 0000000..a2b7358 --- /dev/null +++ b/node_modules/unpipe/package.json @@ -0,0 +1,27 @@ +{ + "name": "unpipe", + "description": "Unpipe a stream from all destinations", + "version": "1.0.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "repository": "stream-utils/unpipe", + "devDependencies": { + "istanbul": "0.3.15", + "mocha": "2.2.5", + "readable-stream": "1.1.13" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/url/.npmignore b/node_modules/url/.npmignore new file mode 100644 index 0000000..ba11471 --- /dev/null +++ b/node_modules/url/.npmignore @@ -0,0 +1 @@ +test-url.js diff --git a/node_modules/url/.travis.yml b/node_modules/url/.travis.yml new file mode 100644 index 0000000..16ed301 --- 
/dev/null +++ b/node_modules/url/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - "0.10" +env: + global: + - secure: OgPRLCzHFh5WbjHEKlghHFW1oOreSF2JVUr3CMaFDi03ngTS2WONSw8mRn8SA6FTldiGGBx1n8orDzUw6cdkB7+tkU3G5B0M0V3vl823NaUFKgxsCM3UGDYfJb3yfAG5cj72rVZoX/ABd1fVuG4vBIlDLxsSlKQFMzUCFoyttr8= + - secure: AiZP8GHbyx83ZBhOvOxxtpNcgNHoP+vo5G1a1OYU78EHCgHg8NRyHKyCdrBnPvw6mV2BI/8frZaXAEicsHMtHMofBYn7nibNlaajBPI8AkHtYfNSc+zO+71Kwv7VOTOKKnkMEIkqhHlc6njFoH3QaBNHsgNlzzplPxaIt8vdUVk= diff --git a/node_modules/url/.zuul.yml b/node_modules/url/.zuul.yml new file mode 100644 index 0000000..feea8b6 --- /dev/null +++ b/node_modules/url/.zuul.yml @@ -0,0 +1,16 @@ +ui: mocha-tdd +browsers: + - name: chrome + version: latest + - name: firefox + version: 24..latest + - name: safari + version: latest + - name: ie + version: 9..latest + - name: iphone + version: oldest..latest + - name: ipad + version: oldest..latest + - name: android + version: oldest..latest diff --git a/node_modules/url/LICENSE b/node_modules/url/LICENSE new file mode 100644 index 0000000..f45bc11 --- /dev/null +++ b/node_modules/url/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright Joyent, Inc. and other Node contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/url/README.md b/node_modules/url/README.md new file mode 100644 index 0000000..8b35460 --- /dev/null +++ b/node_modules/url/README.md @@ -0,0 +1,108 @@ +# node-url + +[![Build Status](https://travis-ci.org/defunctzombie/node-url.svg?branch=master)](https://travis-ci.org/defunctzombie/node-url) + +This module has utilities for URL resolution and parsing meant to have feature parity with node.js core [url](http://nodejs.org/api/url.html) module. + +```js +var url = require('url'); +``` + +## api + +Parsed URL objects have some or all of the following fields, depending on +whether or not they exist in the URL string. Any parts that are not in the URL +string will not be in the parsed object. Examples are shown for the URL + +`'http://user:pass@host.com:8080/p/a/t/h?query=string#hash'` + +* `href`: The full URL that was originally parsed. Both the protocol and host are lowercased. + + Example: `'http://user:pass@host.com:8080/p/a/t/h?query=string#hash'` + +* `protocol`: The request protocol, lowercased. + + Example: `'http:'` + +* `host`: The full lowercased host portion of the URL, including port + information. + + Example: `'host.com:8080'` + +* `auth`: The authentication information portion of a URL. 
+ + Example: `'user:pass'` + +* `hostname`: Just the lowercased hostname portion of the host. + + Example: `'host.com'` + +* `port`: The port number portion of the host. + + Example: `'8080'` + +* `pathname`: The path section of the URL, that comes after the host and + before the query, including the initial slash if present. + + Example: `'/p/a/t/h'` + +* `search`: The 'query string' portion of the URL, including the leading + question mark. + + Example: `'?query=string'` + +* `path`: Concatenation of `pathname` and `search`. + + Example: `'/p/a/t/h?query=string'` + +* `query`: Either the 'params' portion of the query string, or a + querystring-parsed object. + + Example: `'query=string'` or `{'query':'string'}` + +* `hash`: The 'fragment' portion of the URL including the pound-sign. + + Example: `'#hash'` + +The following methods are provided by the URL module: + +### url.parse(urlStr, [parseQueryString], [slashesDenoteHost]) + +Take a URL string, and return an object. + +Pass `true` as the second argument to also parse +the query string using the `querystring` module. +Defaults to `false`. + +Pass `true` as the third argument to treat `//foo/bar` as +`{ host: 'foo', pathname: '/bar' }` rather than +`{ pathname: '//foo/bar' }`. Defaults to `false`. + +### url.format(urlObj) + +Take a parsed URL object, and return a formatted URL string. + +* `href` will be ignored. +* `protocol` is treated the same with or without the trailing `:` (colon). + * The protocols `http`, `https`, `ftp`, `gopher`, `file` will be + postfixed with `://` (colon-slash-slash). + * All other protocols `mailto`, `xmpp`, `aim`, `sftp`, `foo`, etc will + be postfixed with `:` (colon) +* `auth` will be used if present. +* `hostname` will only be used if `host` is absent. +* `port` will only be used if `host` is absent. +* `host` will be used in place of `hostname` and `port` +* `pathname` is treated the same with or without the leading `/` (slash) +* `search` will be used in place of `query` +* `query` (object; see `querystring`) will only be used if `search` is absent. +* `search` is treated the same with or without the leading `?` (question mark) +* `hash` is treated the same with or without the leading `#` (pound sign, anchor) + +### url.resolve(from, to) + +Take a base URL, and a href URL, and resolve them as a browser would for +an anchor tag. Examples: + + url.resolve('/one/two/three', 'four') // '/one/two/four' + url.resolve('http://example.com/', '/one') // 'http://example.com/one' + url.resolve('http://example.com/one', '/two') // 'http://example.com/two' diff --git a/node_modules/url/node_modules/punycode/LICENSE-MIT.txt b/node_modules/url/node_modules/punycode/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/url/node_modules/punycode/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/url/node_modules/punycode/README.md b/node_modules/url/node_modules/punycode/README.md new file mode 100644 index 0000000..831e637 --- /dev/null +++ b/node_modules/url/node_modules/punycode/README.md @@ -0,0 +1,176 @@ +# Punycode.js [![Build status](https://travis-ci.org/bestiejs/punycode.js.svg?branch=master)](https://travis-ci.org/bestiejs/punycode.js) [![Code coverage status](http://img.shields.io/coveralls/bestiejs/punycode.js/master.svg)](https://coveralls.io/r/bestiejs/punycode.js) [![Dependency status](https://gemnasium.com/bestiejs/punycode.js.svg)](https://gemnasium.com/bestiejs/punycode.js) + +A robust Punycode converter that fully complies to [RFC 3492](http://tools.ietf.org/html/rfc3492) and [RFC 5891](http://tools.ietf.org/html/rfc5891), and works on nearly all JavaScript platforms. + +This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm: + +* [The C example code from RFC 3492](http://tools.ietf.org/html/rfc3492#appendix-C) +* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c) +* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c) +* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287) +* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072)) + +This project is [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with [Node.js v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc). + +## Installation + +Via [npm](http://npmjs.org/) (only required for Node.js releases older than v0.6.2): + +```bash +npm install punycode +``` + +Via [Bower](http://bower.io/): + +```bash +bower install punycode +``` + +Via [Component](https://github.com/component/component): + +```bash +component install bestiejs/punycode.js +``` + +In a browser: + +```html + +``` + +In [Narwhal](http://narwhaljs.org/), [Node.js](http://nodejs.org/), and [RingoJS](http://ringojs.org/): + +```js +var punycode = require('punycode'); +``` + +In [Rhino](http://www.mozilla.org/rhino/): + +```js +load('punycode.js'); +``` + +Using an AMD loader like [RequireJS](http://requirejs.org/): + +```js +require( + { + 'paths': { + 'punycode': 'path/to/punycode' + } + }, + ['punycode'], + function(punycode) { + console.log(punycode); + } +); +``` + +## API + +### `punycode.decode(string)` + +Converts a Punycode string of ASCII symbols to a string of Unicode symbols. + +```js +// decode domain name parts +punycode.decode('maana-pta'); // 'mañana' +punycode.decode('--dqo34k'); // '☃-⌘' +``` + +### `punycode.encode(string)` + +Converts a string of Unicode symbols to a Punycode string of ASCII symbols. 
+ +```js +// encode domain name parts +punycode.encode('mañana'); // 'maana-pta' +punycode.encode('☃-⌘'); // '--dqo34k' +``` + +### `punycode.toUnicode(input)` + +Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode. + +```js +// decode domain names +punycode.toUnicode('xn--maana-pta.com'); +// → 'mañana.com' +punycode.toUnicode('xn----dqo34k.com'); +// → '☃-⌘.com' + +// decode email addresses +punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'); +// → 'джумла@джpумлатест.bрфa' +``` + +### `punycode.toASCII(input)` + +Converts a Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that's already in ASCII. + +```js +// encode domain names +punycode.toASCII('mañana.com'); +// → 'xn--maana-pta.com' +punycode.toASCII('☃-⌘.com'); +// → 'xn----dqo34k.com' + +// encode email addresses +punycode.toASCII('джумла@джpумлатест.bрфa'); +// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq' +``` + +### `punycode.ucs2` + +#### `punycode.ucs2.decode(string)` + +Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16. + +```js +punycode.ucs2.decode('abc'); +// → [0x61, 0x62, 0x63] +// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE: +punycode.ucs2.decode('\uD834\uDF06'); +// → [0x1D306] +``` + +#### `punycode.ucs2.encode(codePoints)` + +Creates a string based on an array of numeric code point values. + +```js +punycode.ucs2.encode([0x61, 0x62, 0x63]); +// → 'abc' +punycode.ucs2.encode([0x1D306]); +// → '\uD834\uDF06' +``` + +### `punycode.version` + +A string representing the current Punycode.js version number. + +## Unit tests & code coverage + +After cloning this repository, run `npm install --dev` to install the dependencies needed for Punycode.js development and testing. You may want to install Istanbul _globally_ using `npm install istanbul -g`. + +Once that’s done, you can run the unit tests in Node using `npm test` or `node tests/tests.js`. To run the tests in Rhino, Ringo, Narwhal, PhantomJS, and web browsers as well, use `grunt test`. + +To generate the code coverage report, use `grunt cover`. + +Feel free to fork if you see possible improvements! + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## Contributors + +| [![twitter/jdalton](https://gravatar.com/avatar/299a3d891ff1920b69c364d061007043?s=70)](https://twitter.com/jdalton "Follow @jdalton on Twitter") | +|---| +| [John-David Dalton](http://allyoucanleet.com/) | + +## License + +Punycode.js is available under the [MIT](https://mths.be/mit) license. 
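As a quick illustration of how the vendored `url` and `punycode` packages fit together (a sketch only, assuming both resolve from this node_modules tree), an internationalized hostname can be punycoded and then formatted into a full URL using the `url.format()` rules documented above:

```js
var url = require('url');
var punycode = require('punycode');

// Convert an internationalized hostname to its DNS-safe ASCII form...
var asciiHost = punycode.toASCII('mañana.com'); // 'xn--maana-pta.com'

// ...and build a URL around it; hostname/pathname/search follow the
// format() behaviour described in the url README above.
var href = url.format({
  protocol: 'http',
  hostname: asciiHost,
  pathname: '/p/a/t/h',
  search: '?query=string'
});
console.log(href); // 'http://xn--maana-pta.com/p/a/t/h?query=string'

// toUnicode() restores the display form.
console.log(punycode.toUnicode(asciiHost)); // 'mañana.com'
```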
diff --git a/node_modules/url/node_modules/punycode/package.json b/node_modules/url/node_modules/punycode/package.json new file mode 100644 index 0000000..35f046a --- /dev/null +++ b/node_modules/url/node_modules/punycode/package.json @@ -0,0 +1,53 @@ +{ + "name": "punycode", + "version": "1.3.2", + "description": "A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, and works on nearly all JavaScript platforms.", + "homepage": "https://mths.be/punycode", + "main": "punycode.js", + "keywords": [ + "punycode", + "unicode", + "idn", + "idna", + "dns", + "url", + "domain" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "contributors": [ + { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + { + "name": "John-David Dalton", + "url": "http://allyoucanleet.com/" + } + ], + "repository": { + "type": "git", + "url": "https://github.com/bestiejs/punycode.js.git" + }, + "bugs": "https://github.com/bestiejs/punycode.js/issues", + "files": [ + "LICENSE-MIT.txt", + "punycode.js" + ], + "scripts": { + "test": "node tests/tests.js" + }, + "devDependencies": { + "coveralls": "^2.10.1", + "grunt": "^0.4.5", + "grunt-contrib-uglify": "^0.5.0", + "grunt-shell": "^0.7.0", + "istanbul": "^0.2.13", + "qunit-extras": "^1.2.0", + "qunitjs": "~1.11.0", + "requirejs": "^2.1.14" + } +} diff --git a/node_modules/url/node_modules/punycode/punycode.js b/node_modules/url/node_modules/punycode/punycode.js new file mode 100644 index 0000000..ac68597 --- /dev/null +++ b/node_modules/url/node_modules/punycode/punycode.js @@ -0,0 +1,530 @@ +/*! https://mths.be/punycode v1.3.2 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. 
+ * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). + * @returns {Array} The new array of code points. + */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. 
+ */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * http://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. + */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? 
tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. + + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. 
+ * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.3.2', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. + * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define('punycode', function() { + return punycode; + }); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { // in Node.js or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); diff --git a/node_modules/url/package.json b/node_modules/url/package.json new file mode 100644 index 0000000..ee19fdc --- /dev/null +++ b/node_modules/url/package.json @@ -0,0 +1,24 @@ +{ + "name": "url", + "description": "The core `url` packaged standalone for use with Browserify.", + "version": "0.10.3", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + }, + "main": "./url.js", + "devDependencies": { + "assert": "1.1.1", + "mocha": "1.18.2", + "zuul": "2.0.0" + }, + "scripts": { + "test": "mocha --ui qunit test.js && zuul -- test.js", + "test-local": "zuul --local -- test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/defunctzombie/node-url.git" + }, + "license": "MIT" +} diff --git a/node_modules/url/test.js b/node_modules/url/test.js new file mode 100644 index 0000000..b9663c7 --- /dev/null +++ b/node_modules/url/test.js @@ -0,0 +1,1461 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var assert = require('assert'); +var util = require('util'); + +var url = require('./url'); + +test('god', function() { + +// URLs to parse, and expected data +// { url : parsed } +var parseTests = { + '//some_path' : { + 'href': '//some_path', + 'pathname': '//some_path', + 'path': '//some_path' + }, + + 'HTTP://www.example.com/' : { + 'href': 'http://www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'HTTP://www.example.com' : { + 'href': 'http://www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://www.ExAmPlE.com/' : { + 'href': 'http://www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://user:pw@www.ExAmPlE.com/' : { + 'href': 'http://user:pw@www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pw', + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://USER:PW@www.ExAmPlE.com/' : { + 'href': 'http://USER:PW@www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'auth': 'USER:PW', + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://user@www.example.com/' : { + 'href': 'http://user@www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user', + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://user%3Apw@www.example.com/' : { + 'href': 'http://user:pw@www.example.com/', + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pw', + 'host': 'www.example.com', + 'hostname': 'www.example.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://x.com/path?that\'s#all, folks' : { + 'href': 'http://x.com/path?that%27s#all,%20folks', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x.com', + 'hostname': 'x.com', + 'search': '?that%27s', + 'query': 'that%27s', + 'pathname': '/path', + 'hash': '#all,%20folks', + 'path': '/path?that%27s' + }, + + 'HTTP://X.COM/Y' : { + 'href': 'http://x.com/Y', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x.com', + 'hostname': 'x.com', + 'pathname': '/Y', + 'path': '/Y' + }, + + // an unexpected invalid 
char in the hostname. + 'HtTp://x.y.cOm*a/b/c?d=e#f g<h>i' : { + 'href': 'http://x.y.com/*a/b/c?d=e#f%20g%3Ch%3Ei', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x.y.com', + 'hostname': 'x.y.com', + 'pathname': '/*a/b/c', + 'search': '?d=e', + 'query': 'd=e', + 'hash': '#f%20g%3Ch%3Ei', + 'path': '/*a/b/c?d=e' + }, + + // make sure that we don't accidentally lcast the path parts. + 'HtTp://x.y.cOm*A/b/c?d=e#f g<h>i' : { + 'href': 'http://x.y.com/*A/b/c?d=e#f%20g%3Ch%3Ei', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x.y.com', + 'hostname': 'x.y.com', + 'pathname': '/*A/b/c', + 'search': '?d=e', + 'query': 'd=e', + 'hash': '#f%20g%3Ch%3Ei', + 'path': '/*A/b/c?d=e' + }, + + 'http://x...y...#p': { + 'href': 'http://x...y.../#p', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x...y...', + 'hostname': 'x...y...', + 'hash': '#p', + 'pathname': '/', + 'path': '/' + }, + + 'http://x/p/"quoted"': { + 'href': 'http://x/p/%22quoted%22', + 'protocol': 'http:', + 'slashes': true, + 'host': 'x', + 'hostname': 'x', + 'pathname': '/p/%22quoted%22', + 'path': '/p/%22quoted%22' + }, + + '<http://goo.corn/bread> Is a URL!': { + 'href': '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!', + 'pathname': '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!', + 'path': '%3Chttp://goo.corn/bread%3E%20Is%20a%20URL!' + }, + + 'http://www.narwhaljs.org/blog/categories?id=news' : { + 'href': 'http://www.narwhaljs.org/blog/categories?id=news', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.narwhaljs.org', + 'hostname': 'www.narwhaljs.org', + 'search': '?id=news', + 'query': 'id=news', + 'pathname': '/blog/categories', + 'path': '/blog/categories?id=news' + }, + + 'http://mt0.google.com/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=' : { + 'href': 'http://mt0.google.com/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=', + 'protocol': 'http:', + 'slashes': true, + 'host': 'mt0.google.com', + 'hostname': 'mt0.google.com', + 'pathname': '/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=', + 'path': '/vt/lyrs=m@114&hl=en&src=api&x=2&y=2&z=3&s=' + }, + + 'http://mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=' : { + 'href': 'http://mt0.google.com/vt/lyrs=m@114???&hl=en&src=api' + + '&x=2&y=2&z=3&s=', + 'protocol': 'http:', + 'slashes': true, + 'host': 'mt0.google.com', + 'hostname': 'mt0.google.com', + 'search': '???&hl=en&src=api&x=2&y=2&z=3&s=', + 'query': '??&hl=en&src=api&x=2&y=2&z=3&s=', + 'pathname': '/vt/lyrs=m@114', + 'path': '/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=' + }, + + 'http://user:pass@mt0.google.com/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=': + { + 'href': 'http://user:pass@mt0.google.com/vt/lyrs=m@114???'
+ + '&hl=en&src=api&x=2&y=2&z=3&s=', + 'protocol': 'http:', + 'slashes': true, + 'host': 'mt0.google.com', + 'auth': 'user:pass', + 'hostname': 'mt0.google.com', + 'search': '???&hl=en&src=api&x=2&y=2&z=3&s=', + 'query': '??&hl=en&src=api&x=2&y=2&z=3&s=', + 'pathname': '/vt/lyrs=m@114', + 'path': '/vt/lyrs=m@114???&hl=en&src=api&x=2&y=2&z=3&s=' + }, + + 'file:///etc/passwd' : { + 'href': 'file:///etc/passwd', + 'slashes': true, + 'protocol': 'file:', + 'pathname': '/etc/passwd', + 'hostname': '', + 'host': '', + 'path': '/etc/passwd' + }, + + 'file://localhost/etc/passwd' : { + 'href': 'file://localhost/etc/passwd', + 'protocol': 'file:', + 'slashes': true, + 'pathname': '/etc/passwd', + 'hostname': 'localhost', + 'host': 'localhost', + 'path': '/etc/passwd' + }, + + 'file://foo/etc/passwd' : { + 'href': 'file://foo/etc/passwd', + 'protocol': 'file:', + 'slashes': true, + 'pathname': '/etc/passwd', + 'hostname': 'foo', + 'host': 'foo', + 'path': '/etc/passwd' + }, + + 'file:///etc/node/' : { + 'href': 'file:///etc/node/', + 'slashes': true, + 'protocol': 'file:', + 'pathname': '/etc/node/', + 'hostname': '', + 'host': '', + 'path': '/etc/node/' + }, + + 'file://localhost/etc/node/' : { + 'href': 'file://localhost/etc/node/', + 'protocol': 'file:', + 'slashes': true, + 'pathname': '/etc/node/', + 'hostname': 'localhost', + 'host': 'localhost', + 'path': '/etc/node/' + }, + + 'file://foo/etc/node/' : { + 'href': 'file://foo/etc/node/', + 'protocol': 'file:', + 'slashes': true, + 'pathname': '/etc/node/', + 'hostname': 'foo', + 'host': 'foo', + 'path': '/etc/node/' + }, + + 'http:/baz/../foo/bar' : { + 'href': 'http:/baz/../foo/bar', + 'protocol': 'http:', + 'pathname': '/baz/../foo/bar', + 'path': '/baz/../foo/bar' + }, + + 'http://user:pass@example.com:8000/foo/bar?baz=quux#frag' : { + 'href': 'http://user:pass@example.com:8000/foo/bar?baz=quux#frag', + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com:8000', + 'auth': 'user:pass', + 'port': '8000', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?baz=quux', + 'query': 'baz=quux', + 'pathname': '/foo/bar', + 'path': '/foo/bar?baz=quux' + }, + + '//user:pass@example.com:8000/foo/bar?baz=quux#frag' : { + 'href': '//user:pass@example.com:8000/foo/bar?baz=quux#frag', + 'slashes': true, + 'host': 'example.com:8000', + 'auth': 'user:pass', + 'port': '8000', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?baz=quux', + 'query': 'baz=quux', + 'pathname': '/foo/bar', + 'path': '/foo/bar?baz=quux' + }, + + '/foo/bar?baz=quux#frag' : { + 'href': '/foo/bar?baz=quux#frag', + 'hash': '#frag', + 'search': '?baz=quux', + 'query': 'baz=quux', + 'pathname': '/foo/bar', + 'path': '/foo/bar?baz=quux' + }, + + 'http:/foo/bar?baz=quux#frag' : { + 'href': 'http:/foo/bar?baz=quux#frag', + 'protocol': 'http:', + 'hash': '#frag', + 'search': '?baz=quux', + 'query': 'baz=quux', + 'pathname': '/foo/bar', + 'path': '/foo/bar?baz=quux' + }, + + 'mailto:foo@bar.com?subject=hello' : { + 'href': 'mailto:foo@bar.com?subject=hello', + 'protocol': 'mailto:', + 'host': 'bar.com', + 'auth' : 'foo', + 'hostname' : 'bar.com', + 'search': '?subject=hello', + 'query': 'subject=hello', + 'path': '?subject=hello' + }, + + 'javascript:alert(\'hello\');' : { + 'href': 'javascript:alert(\'hello\');', + 'protocol': 'javascript:', + 'pathname': 'alert(\'hello\');', + 'path': 'alert(\'hello\');' + }, + + 'xmpp:isaacschlueter@jabber.org' : { + 'href': 'xmpp:isaacschlueter@jabber.org', + 'protocol': 'xmpp:', + 'host': 'jabber.org', + 'auth': 
'isaacschlueter', + 'hostname': 'jabber.org' + }, + + 'http://atpass:foo%40bar@127.0.0.1:8080/path?search=foo#bar' : { + 'href' : 'http://atpass:foo%40bar@127.0.0.1:8080/path?search=foo#bar', + 'protocol' : 'http:', + 'slashes': true, + 'host' : '127.0.0.1:8080', + 'auth' : 'atpass:foo@bar', + 'hostname' : '127.0.0.1', + 'port' : '8080', + 'pathname': '/path', + 'search' : '?search=foo', + 'query' : 'search=foo', + 'hash' : '#bar', + 'path': '/path?search=foo' + }, + + 'svn+ssh://foo/bar': { + 'href': 'svn+ssh://foo/bar', + 'host': 'foo', + 'hostname': 'foo', + 'protocol': 'svn+ssh:', + 'pathname': '/bar', + 'path': '/bar', + 'slashes': true + }, + + 'dash-test://foo/bar': { + 'href': 'dash-test://foo/bar', + 'host': 'foo', + 'hostname': 'foo', + 'protocol': 'dash-test:', + 'pathname': '/bar', + 'path': '/bar', + 'slashes': true + }, + + 'dash-test:foo/bar': { + 'href': 'dash-test:foo/bar', + 'host': 'foo', + 'hostname': 'foo', + 'protocol': 'dash-test:', + 'pathname': '/bar', + 'path': '/bar' + }, + + 'dot.test://foo/bar': { + 'href': 'dot.test://foo/bar', + 'host': 'foo', + 'hostname': 'foo', + 'protocol': 'dot.test:', + 'pathname': '/bar', + 'path': '/bar', + 'slashes': true + }, + + 'dot.test:foo/bar': { + 'href': 'dot.test:foo/bar', + 'host': 'foo', + 'hostname': 'foo', + 'protocol': 'dot.test:', + 'pathname': '/bar', + 'path': '/bar' + }, + + // IDNA tests + 'http://www.日本語.com/' : { + 'href': 'http://www.xn--wgv71a119e.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.xn--wgv71a119e.com', + 'hostname': 'www.xn--wgv71a119e.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://example.Bücher.com/' : { + 'href': 'http://example.xn--bcher-kva.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.xn--bcher-kva.com', + 'hostname': 'example.xn--bcher-kva.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://www.Äffchen.com/' : { + 'href': 'http://www.xn--ffchen-9ta.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.xn--ffchen-9ta.com', + 'hostname': 'www.xn--ffchen-9ta.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://www.Äffchen.cOm*A/b/c?d=e#f gi' : { + 'href': 'http://www.xn--ffchen-9ta.com/*A/b/c?d=e#f%20g%3Ch%3Ei', + 'protocol': 'http:', + 'slashes': true, + 'host': 'www.xn--ffchen-9ta.com', + 'hostname': 'www.xn--ffchen-9ta.com', + 'pathname': '/*A/b/c', + 'search': '?d=e', + 'query': 'd=e', + 'hash': '#f%20g%3Ch%3Ei', + 'path': '/*A/b/c?d=e' + }, + + 'http://SÉLIER.COM/' : { + 'href': 'http://xn--slier-bsa.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'xn--slier-bsa.com', + 'hostname': 'xn--slier-bsa.com', + 'pathname': '/', + 'path': '/' + }, + + 'http://ليهمابتكلموشعربي؟.ي؟/' : { + 'href': 'http://xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f', + 'hostname': 'xn--egbpdaj6bu4bxfgehfvwxn.xn--egb9f', + 'pathname': '/', + 'path': '/' + }, + + 'http://➡.ws/➡' : { + 'href': 'http://xn--hgi.ws/➡', + 'protocol': 'http:', + 'slashes': true, + 'host': 'xn--hgi.ws', + 'hostname': 'xn--hgi.ws', + 'pathname': '/➡', + 'path': '/➡' + }, + + 'http://bucket_name.s3.amazonaws.com/image.jpg': { + protocol: 'http:', + slashes: true, + host: 'bucket_name.s3.amazonaws.com', + hostname: 'bucket_name.s3.amazonaws.com', + pathname: '/image.jpg', + href: 'http://bucket_name.s3.amazonaws.com/image.jpg', + 'path': '/image.jpg' + }, + + 'git+http://github.com/joyent/node.git': { + protocol: 'git+http:', + slashes: true, + host: 'github.com', + hostname: 
'github.com', + pathname: '/joyent/node.git', + path: '/joyent/node.git', + href: 'git+http://github.com/joyent/node.git' + }, + + //if local1@domain1 is uses as a relative URL it may + //be parse into auth@hostname, but here there is no + //way to make it work in url.parse, I add the test to be explicit + 'local1@domain1': { + 'pathname': 'local1@domain1', + 'path': 'local1@domain1', + 'href': 'local1@domain1' + }, + + //While this may seem counter-intuitive, a browser will parse + // as a path. + 'www.example.com' : { + 'href': 'www.example.com', + 'pathname': 'www.example.com', + 'path': 'www.example.com' + }, + + // ipv6 support + '[fe80::1]': { + 'href': '[fe80::1]', + 'pathname': '[fe80::1]', + 'path': '[fe80::1]' + }, + + 'coap://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]': { + 'protocol': 'coap:', + 'slashes': true, + 'host': '[fedc:ba98:7654:3210:fedc:ba98:7654:3210]', + 'hostname': 'fedc:ba98:7654:3210:fedc:ba98:7654:3210', + 'href': 'coap://[fedc:ba98:7654:3210:fedc:ba98:7654:3210]/', + 'pathname': '/', + 'path': '/' + }, + + 'coap://[1080:0:0:0:8:800:200C:417A]:61616/': { + 'protocol': 'coap:', + 'slashes': true, + 'host': '[1080:0:0:0:8:800:200c:417a]:61616', + 'port': '61616', + 'hostname': '1080:0:0:0:8:800:200c:417a', + 'href': 'coap://[1080:0:0:0:8:800:200c:417a]:61616/', + 'pathname': '/', + 'path': '/' + }, + + 'http://user:password@[3ffe:2a00:100:7031::1]:8080': { + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:password', + 'host': '[3ffe:2a00:100:7031::1]:8080', + 'port': '8080', + 'hostname': '3ffe:2a00:100:7031::1', + 'href': 'http://user:password@[3ffe:2a00:100:7031::1]:8080/', + 'pathname': '/', + 'path': '/' + }, + + 'coap://u:p@[::192.9.5.5]:61616/.well-known/r?n=Temperature': { + 'protocol': 'coap:', + 'slashes': true, + 'auth': 'u:p', + 'host': '[::192.9.5.5]:61616', + 'port': '61616', + 'hostname': '::192.9.5.5', + 'href': 'coap://u:p@[::192.9.5.5]:61616/.well-known/r?n=Temperature', + 'search': '?n=Temperature', + 'query': 'n=Temperature', + 'pathname': '/.well-known/r', + 'path': '/.well-known/r?n=Temperature' + }, + + // empty port + 'http://example.com:': { + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'href': 'http://example.com/', + 'pathname': '/', + 'path': '/' + }, + + 'http://example.com:/a/b.html': { + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'href': 'http://example.com/a/b.html', + 'pathname': '/a/b.html', + 'path': '/a/b.html' + }, + + 'http://example.com:?a=b': { + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'href': 'http://example.com/?a=b', + 'search': '?a=b', + 'query': 'a=b', + 'pathname': '/', + 'path': '/?a=b' + }, + + 'http://example.com:#abc': { + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'href': 'http://example.com/#abc', + 'hash': '#abc', + 'pathname': '/', + 'path': '/' + }, + + 'http://[fe80::1]:/a/b?a=b#abc': { + 'protocol': 'http:', + 'slashes': true, + 'host': '[fe80::1]', + 'hostname': 'fe80::1', + 'href': 'http://[fe80::1]/a/b?a=b#abc', + 'search': '?a=b', + 'query': 'a=b', + 'hash': '#abc', + 'pathname': '/a/b', + 'path': '/a/b?a=b' + }, + + 'http://-lovemonsterz.tumblr.com/rss': { + 'protocol': 'http:', + 'slashes': true, + 'host': '-lovemonsterz.tumblr.com', + 'hostname': '-lovemonsterz.tumblr.com', + 'href': 'http://-lovemonsterz.tumblr.com/rss', + 'pathname': '/rss', + 'path': '/rss', + }, + + 
'http://-lovemonsterz.tumblr.com:80/rss': { + 'protocol': 'http:', + 'slashes': true, + 'port': '80', + 'host': '-lovemonsterz.tumblr.com:80', + 'hostname': '-lovemonsterz.tumblr.com', + 'href': 'http://-lovemonsterz.tumblr.com:80/rss', + 'pathname': '/rss', + 'path': '/rss', + }, + + 'http://user:pass@-lovemonsterz.tumblr.com/rss': { + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pass', + 'host': '-lovemonsterz.tumblr.com', + 'hostname': '-lovemonsterz.tumblr.com', + 'href': 'http://user:pass@-lovemonsterz.tumblr.com/rss', + 'pathname': '/rss', + 'path': '/rss', + }, + + 'http://user:pass@-lovemonsterz.tumblr.com:80/rss': { + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pass', + 'port': '80', + 'host': '-lovemonsterz.tumblr.com:80', + 'hostname': '-lovemonsterz.tumblr.com', + 'href': 'http://user:pass@-lovemonsterz.tumblr.com:80/rss', + 'pathname': '/rss', + 'path': '/rss', + }, + + 'http://_jabber._tcp.google.com/test': { + 'protocol': 'http:', + 'slashes': true, + 'host': '_jabber._tcp.google.com', + 'hostname': '_jabber._tcp.google.com', + 'href': 'http://_jabber._tcp.google.com/test', + 'pathname': '/test', + 'path': '/test', + }, + + 'http://user:pass@_jabber._tcp.google.com/test': { + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pass', + 'host': '_jabber._tcp.google.com', + 'hostname': '_jabber._tcp.google.com', + 'href': 'http://user:pass@_jabber._tcp.google.com/test', + 'pathname': '/test', + 'path': '/test', + }, + + 'http://_jabber._tcp.google.com:80/test': { + 'protocol': 'http:', + 'slashes': true, + 'port': '80', + 'host': '_jabber._tcp.google.com:80', + 'hostname': '_jabber._tcp.google.com', + 'href': 'http://_jabber._tcp.google.com:80/test', + 'pathname': '/test', + 'path': '/test', + }, + + 'http://user:pass@_jabber._tcp.google.com:80/test': { + 'protocol': 'http:', + 'slashes': true, + 'auth': 'user:pass', + 'port': '80', + 'host': '_jabber._tcp.google.com:80', + 'hostname': '_jabber._tcp.google.com', + 'href': 'http://user:pass@_jabber._tcp.google.com:80/test', + 'pathname': '/test', + 'path': '/test', + }, + + 'http://a@b@c/': { + protocol: 'http:', + slashes: true, + auth: 'a@b', + host: 'c', + hostname: 'c', + href: 'http://a%40b@c/', + path: '/', + pathname: '/' + }, + + 'http://a@b?@c': { + protocol: 'http:', + slashes: true, + auth: 'a', + host: 'b', + hostname: 'b', + href: 'http://a@b/?@c', + path: '/?@c', + pathname: '/', + search: '?@c', + query: '@c' + }, + + 'http://a\r" \t\n<\'b:b@c\r\nd/e?f':{ + protocol: 'http:', + slashes: true, + auth: 'a\r" \t\n<\'b:b', + host: 'c', + port: null, + hostname: 'c', + hash: null, + search: '?f', + query: 'f', + pathname: '%0D%0Ad/e', + path: '%0D%0Ad/e?f', + href: 'http://a%0D%22%20%09%0A%3C\'b:b@c/%0D%0Ad/e?f' + } + +}; + +for (var u in parseTests) { + var actual = url.parse(u), + spaced = url.parse(' \t ' + u + '\n\t'); + expected = parseTests[u]; + + Object.keys(actual).forEach(function (i) { + if (expected[i] === undefined && actual[i] === null) { + expected[i] = null; + } + }); + + assert.deepEqual(actual, expected); + assert.deepEqual(spaced, expected); + + var expected = parseTests[u].href, + actual = url.format(parseTests[u]); + + assert.equal(actual, expected, + 'format(' + u + ') == ' + u + '\nactual:' + actual); +} + +var parseTestsWithQueryString = { + '/foo/bar?baz=quux#frag' : { + 'href': '/foo/bar?baz=quux#frag', + 'hash': '#frag', + 'search': '?baz=quux', + 'query': { + 'baz': 'quux' + }, + 'pathname': '/foo/bar', + 'path': '/foo/bar?baz=quux' + }, + 'http://example.com' : { 
+ 'href': 'http://example.com/', + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'query': {}, + 'search': '', + 'pathname': '/', + 'path': '/' + } +}; +for (var u in parseTestsWithQueryString) { + var actual = url.parse(u, true); + var expected = parseTestsWithQueryString[u]; + for (var i in actual) { + if (actual[i] === null && expected[i] === undefined) { + expected[i] = null; + } + } + + assert.deepEqual(actual, expected); +} + +// some extra formatting tests, just to verify +// that it'll format slightly wonky content to a valid url. +var formatTests = { + 'http://example.com?' : { + 'href': 'http://example.com/?', + 'protocol': 'http:', + 'slashes': true, + 'host': 'example.com', + 'hostname': 'example.com', + 'search': '?', + 'query': {}, + 'pathname': '/' + }, + 'http://example.com?foo=bar#frag' : { + 'href': 'http://example.com/?foo=bar#frag', + 'protocol': 'http:', + 'host': 'example.com', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?foo=bar', + 'query': 'foo=bar', + 'pathname': '/' + }, + 'http://example.com?foo=@bar#frag' : { + 'href': 'http://example.com/?foo=@bar#frag', + 'protocol': 'http:', + 'host': 'example.com', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?foo=@bar', + 'query': 'foo=@bar', + 'pathname': '/' + }, + 'http://example.com?foo=/bar/#frag' : { + 'href': 'http://example.com/?foo=/bar/#frag', + 'protocol': 'http:', + 'host': 'example.com', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?foo=/bar/', + 'query': 'foo=/bar/', + 'pathname': '/' + }, + 'http://example.com?foo=?bar/#frag' : { + 'href': 'http://example.com/?foo=?bar/#frag', + 'protocol': 'http:', + 'host': 'example.com', + 'hostname': 'example.com', + 'hash': '#frag', + 'search': '?foo=?bar/', + 'query': 'foo=?bar/', + 'pathname': '/' + }, + 'http://example.com#frag=?bar/#frag' : { + 'href': 'http://example.com/#frag=?bar/#frag', + 'protocol': 'http:', + 'host': 'example.com', + 'hostname': 'example.com', + 'hash': '#frag=?bar/#frag', + 'pathname': '/' + }, + 'http://google.com" onload="alert(42)/' : { + 'href': 'http://google.com/%22%20onload=%22alert(42)/', + 'protocol': 'http:', + 'host': 'google.com', + 'pathname': '/%22%20onload=%22alert(42)/' + }, + 'http://a.com/a/b/c?s#h' : { + 'href': 'http://a.com/a/b/c?s#h', + 'protocol': 'http', + 'host': 'a.com', + 'pathname': 'a/b/c', + 'hash': 'h', + 'search': 's' + }, + 'xmpp:isaacschlueter@jabber.org' : { + 'href': 'xmpp:isaacschlueter@jabber.org', + 'protocol': 'xmpp:', + 'host': 'jabber.org', + 'auth': 'isaacschlueter', + 'hostname': 'jabber.org' + }, + 'http://atpass:foo%40bar@127.0.0.1/' : { + 'href': 'http://atpass:foo%40bar@127.0.0.1/', + 'auth': 'atpass:foo@bar', + 'hostname': '127.0.0.1', + 'protocol': 'http:', + 'pathname': '/' + }, + 'http://atslash%2F%40:%2F%40@foo/' : { + 'href': 'http://atslash%2F%40:%2F%40@foo/', + 'auth': 'atslash/@:/@', + 'hostname': 'foo', + 'protocol': 'http:', + 'pathname': '/' + }, + 'svn+ssh://foo/bar': { + 'href': 'svn+ssh://foo/bar', + 'hostname': 'foo', + 'protocol': 'svn+ssh:', + 'pathname': '/bar', + 'slashes': true + }, + 'dash-test://foo/bar': { + 'href': 'dash-test://foo/bar', + 'hostname': 'foo', + 'protocol': 'dash-test:', + 'pathname': '/bar', + 'slashes': true + }, + 'dash-test:foo/bar': { + 'href': 'dash-test:foo/bar', + 'hostname': 'foo', + 'protocol': 'dash-test:', + 'pathname': '/bar' + }, + 'dot.test://foo/bar': { + 'href': 'dot.test://foo/bar', + 'hostname': 'foo', + 'protocol': 'dot.test:', + 
'pathname': '/bar', + 'slashes': true + }, + 'dot.test:foo/bar': { + 'href': 'dot.test:foo/bar', + 'hostname': 'foo', + 'protocol': 'dot.test:', + 'pathname': '/bar' + }, + // ipv6 support + 'coap:u:p@[::1]:61616/.well-known/r?n=Temperature': { + 'href': 'coap:u:p@[::1]:61616/.well-known/r?n=Temperature', + 'protocol': 'coap:', + 'auth': 'u:p', + 'hostname': '::1', + 'port': '61616', + 'pathname': '/.well-known/r', + 'search': 'n=Temperature' + }, + 'coap:[fedc:ba98:7654:3210:fedc:ba98:7654:3210]:61616/s/stopButton': { + 'href': 'coap:[fedc:ba98:7654:3210:fedc:ba98:7654:3210]:61616/s/stopButton', + 'protocol': 'coap', + 'host': '[fedc:ba98:7654:3210:fedc:ba98:7654:3210]:61616', + 'pathname': '/s/stopButton' + }, + + // encode context-specific delimiters in path and query, but do not touch + // other non-delimiter chars like `%`. + // + + // `#`,`?` in path + '/path/to/%%23%3F+=&.txt?foo=theA1#bar' : { + href : '/path/to/%%23%3F+=&.txt?foo=theA1#bar', + pathname: '/path/to/%#?+=&.txt', + query: { + foo: 'theA1' + }, + hash: "#bar" + }, + + // `#`,`?` in path + `#` in query + '/path/to/%%23%3F+=&.txt?foo=the%231#bar' : { + href : '/path/to/%%23%3F+=&.txt?foo=the%231#bar', + pathname: '/path/to/%#?+=&.txt', + query: { + foo: 'the#1' + }, + hash: "#bar" + }, + + // `?` and `#` in path and search + 'http://ex.com/foo%3F100%m%23r?abc=the%231?&foo=bar#frag': { + href: 'http://ex.com/foo%3F100%m%23r?abc=the%231?&foo=bar#frag', + protocol: 'http:', + hostname: 'ex.com', + hash: '#frag', + search: '?abc=the#1?&foo=bar', + pathname: '/foo?100%m#r', + }, + + // `?` and `#` in search only + 'http://ex.com/fooA100%mBr?abc=the%231?&foo=bar#frag': { + href: 'http://ex.com/fooA100%mBr?abc=the%231?&foo=bar#frag', + protocol: 'http:', + hostname: 'ex.com', + hash: '#frag', + search: '?abc=the#1?&foo=bar', + pathname: '/fooA100%mBr', + } +}; +for (var u in formatTests) { + var expect = formatTests[u].href; + delete formatTests[u].href; + var actual = url.format(u); + var actualObj = url.format(formatTests[u]); + assert.equal(actual, expect, + 'wonky format(' + u + ') == ' + expect + + '\nactual:' + actual); + assert.equal(actualObj, expect, + 'wonky format(' + JSON.stringify(formatTests[u]) + + ') == ' + expect + + '\nactual: ' + actualObj); +} + +/* + [from, path, expected] +*/ +var relativeTests = [ + ['/foo/bar/baz', 'quux', '/foo/bar/quux'], + ['/foo/bar/baz', 'quux/asdf', '/foo/bar/quux/asdf'], + ['/foo/bar/baz', 'quux/baz', '/foo/bar/quux/baz'], + ['/foo/bar/baz', '../quux/baz', '/foo/quux/baz'], + ['/foo/bar/baz', '/bar', '/bar'], + ['/foo/bar/baz/', 'quux', '/foo/bar/baz/quux'], + ['/foo/bar/baz/', 'quux/baz', '/foo/bar/baz/quux/baz'], + ['/foo/bar/baz', '../../../../../../../../quux/baz', '/quux/baz'], + ['/foo/bar/baz', '../../../../../../../quux/baz', '/quux/baz'], + ['foo/bar', '../../../baz', '../../baz'], + ['foo/bar/', '../../../baz', '../baz'], + ['http://example.com/b//c//d;p?q#blarg', 'https:#hash2', 'https:///#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 'https:/p/a/t/h?s#hash2', + 'https://p/a/t/h?s#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 'https://u:p@h.com/p/a/t/h?s#hash2', + 'https://u:p@h.com/p/a/t/h?s#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 'https:/a/b/c/d', + 'https://a/b/c/d'], + ['http://example.com/b//c//d;p?q#blarg', + 'http:#hash2', + 'http://example.com/b//c//d;p?q#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 'http:/p/a/t/h?s#hash2', + 'http://example.com/p/a/t/h?s#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 
'http://u:p@h.com/p/a/t/h?s#hash2', + 'http://u:p@h.com/p/a/t/h?s#hash2'], + ['http://example.com/b//c//d;p?q#blarg', + 'http:/a/b/c/d', + 'http://example.com/a/b/c/d'], + ['/foo/bar/baz', '/../etc/passwd', '/etc/passwd'] +]; +relativeTests.forEach(function(relativeTest) { + var a = url.resolve(relativeTest[0], relativeTest[1]), + e = relativeTest[2]; + assert.equal(a, e, + 'resolve(' + [relativeTest[0], relativeTest[1]] + ') == ' + e + + '\n actual=' + a); +}); + + +// https://github.com/joyent/node/issues/568 +[ + undefined, + null, + true, + false, + 0.0, + 0, + [], + {} +].forEach(function(val) { + assert.throws(function() { url.parse(val); }, TypeError); +}); + + +// +// Tests below taken from Chiron +// http://code.google.com/p/chironjs/source/browse/trunk/src/test/http/url.js +// +// Copyright (c) 2002-2008 Kris Kowal +// used with permission under MIT License +// +// Changes marked with @isaacs + +var bases = [ + 'http://a/b/c/d;p?q', + 'http://a/b/c/d;p?q=1/2', + 'http://a/b/c/d;p=1/2?q', + 'fred:///s//a/b/c', + 'http:///s//a/b/c' +]; + +//[to, from, result] +var relativeTests2 = [ + // http://lists.w3.org/Archives/Public/uri/2004Feb/0114.html + ['../c', 'foo:a/b', 'foo:c'], + ['foo:.', 'foo:a', 'foo:'], + ['/foo/../../../bar', 'zz:abc', 'zz:/bar'], + ['/foo/../bar', 'zz:abc', 'zz:/bar'], + // @isaacs Disagree. Not how web browsers resolve this. + ['foo/../../../bar', 'zz:abc', 'zz:bar'], + // ['foo/../../../bar', 'zz:abc', 'zz:../../bar'], // @isaacs Added + ['foo/../bar', 'zz:abc', 'zz:bar'], + ['zz:.', 'zz:abc', 'zz:'], + ['/.', bases[0], 'http://a/'], + ['/.foo', bases[0], 'http://a/.foo'], + ['.foo', bases[0], 'http://a/b/c/.foo'], + + // http://gbiv.com/protocols/uri/test/rel_examples1.html + // examples from RFC 2396 + ['g:h', bases[0], 'g:h'], + ['g', bases[0], 'http://a/b/c/g'], + ['./g', bases[0], 'http://a/b/c/g'], + ['g/', bases[0], 'http://a/b/c/g/'], + ['/g', bases[0], 'http://a/g'], + ['//g', bases[0], 'http://g/'], + // changed with RFC 2396bis + //('?y', bases[0], 'http://a/b/c/d;p?y'], + ['?y', bases[0], 'http://a/b/c/d;p?y'], + ['g?y', bases[0], 'http://a/b/c/g?y'], + // changed with RFC 2396bis + //('#s', bases[0], CURRENT_DOC_URI + '#s'], + ['#s', bases[0], 'http://a/b/c/d;p?q#s'], + ['g#s', bases[0], 'http://a/b/c/g#s'], + ['g?y#s', bases[0], 'http://a/b/c/g?y#s'], + [';x', bases[0], 'http://a/b/c/;x'], + ['g;x', bases[0], 'http://a/b/c/g;x'], + ['g;x?y#s', bases[0], 'http://a/b/c/g;x?y#s'], + // changed with RFC 2396bis + //('', bases[0], CURRENT_DOC_URI], + ['', bases[0], 'http://a/b/c/d;p?q'], + ['.', bases[0], 'http://a/b/c/'], + ['./', bases[0], 'http://a/b/c/'], + ['..', bases[0], 'http://a/b/'], + ['../', bases[0], 'http://a/b/'], + ['../g', bases[0], 'http://a/b/g'], + ['../..', bases[0], 'http://a/'], + ['../../', bases[0], 'http://a/'], + ['../../g', bases[0], 'http://a/g'], + ['../../../g', bases[0], ('http://a/../g', 'http://a/g')], + ['../../../../g', bases[0], ('http://a/../../g', 'http://a/g')], + // changed with RFC 2396bis + //('/./g', bases[0], 'http://a/./g'], + ['/./g', bases[0], 'http://a/g'], + // changed with RFC 2396bis + //('/../g', bases[0], 'http://a/../g'], + ['/../g', bases[0], 'http://a/g'], + ['g.', bases[0], 'http://a/b/c/g.'], + ['.g', bases[0], 'http://a/b/c/.g'], + ['g..', bases[0], 'http://a/b/c/g..'], + ['..g', bases[0], 'http://a/b/c/..g'], + ['./../g', bases[0], 'http://a/b/g'], + ['./g/.', bases[0], 'http://a/b/c/g/'], + ['g/./h', bases[0], 'http://a/b/c/g/h'], + ['g/../h', bases[0], 'http://a/b/c/h'], + ['g;x=1/./y', 
bases[0], 'http://a/b/c/g;x=1/y'], + ['g;x=1/../y', bases[0], 'http://a/b/c/y'], + ['g?y/./x', bases[0], 'http://a/b/c/g?y/./x'], + ['g?y/../x', bases[0], 'http://a/b/c/g?y/../x'], + ['g#s/./x', bases[0], 'http://a/b/c/g#s/./x'], + ['g#s/../x', bases[0], 'http://a/b/c/g#s/../x'], + ['http:g', bases[0], ('http:g', 'http://a/b/c/g')], + ['http:', bases[0], ('http:', bases[0])], + // not sure where this one originated + ['/a/b/c/./../../g', bases[0], 'http://a/a/g'], + + // http://gbiv.com/protocols/uri/test/rel_examples2.html + // slashes in base URI's query args + ['g', bases[1], 'http://a/b/c/g'], + ['./g', bases[1], 'http://a/b/c/g'], + ['g/', bases[1], 'http://a/b/c/g/'], + ['/g', bases[1], 'http://a/g'], + ['//g', bases[1], 'http://g/'], + // changed in RFC 2396bis + //('?y', bases[1], 'http://a/b/c/?y'], + ['?y', bases[1], 'http://a/b/c/d;p?y'], + ['g?y', bases[1], 'http://a/b/c/g?y'], + ['g?y/./x', bases[1], 'http://a/b/c/g?y/./x'], + ['g?y/../x', bases[1], 'http://a/b/c/g?y/../x'], + ['g#s', bases[1], 'http://a/b/c/g#s'], + ['g#s/./x', bases[1], 'http://a/b/c/g#s/./x'], + ['g#s/../x', bases[1], 'http://a/b/c/g#s/../x'], + ['./', bases[1], 'http://a/b/c/'], + ['../', bases[1], 'http://a/b/'], + ['../g', bases[1], 'http://a/b/g'], + ['../../', bases[1], 'http://a/'], + ['../../g', bases[1], 'http://a/g'], + + // http://gbiv.com/protocols/uri/test/rel_examples3.html + // slashes in path params + // all of these changed in RFC 2396bis + ['g', bases[2], 'http://a/b/c/d;p=1/g'], + ['./g', bases[2], 'http://a/b/c/d;p=1/g'], + ['g/', bases[2], 'http://a/b/c/d;p=1/g/'], + ['g?y', bases[2], 'http://a/b/c/d;p=1/g?y'], + [';x', bases[2], 'http://a/b/c/d;p=1/;x'], + ['g;x', bases[2], 'http://a/b/c/d;p=1/g;x'], + ['g;x=1/./y', bases[2], 'http://a/b/c/d;p=1/g;x=1/y'], + ['g;x=1/../y', bases[2], 'http://a/b/c/d;p=1/y'], + ['./', bases[2], 'http://a/b/c/d;p=1/'], + ['../', bases[2], 'http://a/b/c/'], + ['../g', bases[2], 'http://a/b/c/g'], + ['../../', bases[2], 'http://a/b/'], + ['../../g', bases[2], 'http://a/b/g'], + + // http://gbiv.com/protocols/uri/test/rel_examples4.html + // double and triple slash, unknown scheme + ['g:h', bases[3], 'g:h'], + ['g', bases[3], 'fred:///s//a/b/g'], + ['./g', bases[3], 'fred:///s//a/b/g'], + ['g/', bases[3], 'fred:///s//a/b/g/'], + ['/g', bases[3], 'fred:///g'], // may change to fred:///s//a/g + ['//g', bases[3], 'fred://g'], // may change to fred:///s//g + ['//g/x', bases[3], 'fred://g/x'], // may change to fred:///s//g/x + ['///g', bases[3], 'fred:///g'], + ['./', bases[3], 'fred:///s//a/b/'], + ['../', bases[3], 'fred:///s//a/'], + ['../g', bases[3], 'fred:///s//a/g'], + + ['../../', bases[3], 'fred:///s//'], + ['../../g', bases[3], 'fred:///s//g'], + ['../../../g', bases[3], 'fred:///s/g'], + // may change to fred:///s//a/../../../g + ['../../../../g', bases[3], 'fred:///g'], + + // http://gbiv.com/protocols/uri/test/rel_examples5.html + // double and triple slash, well-known scheme + ['g:h', bases[4], 'g:h'], + ['g', bases[4], 'http:///s//a/b/g'], + ['./g', bases[4], 'http:///s//a/b/g'], + ['g/', bases[4], 'http:///s//a/b/g/'], + ['/g', bases[4], 'http:///g'], // may change to http:///s//a/g + ['//g', bases[4], 'http://g/'], // may change to http:///s//g + ['//g/x', bases[4], 'http://g/x'], // may change to http:///s//g/x + ['///g', bases[4], 'http:///g'], + ['./', bases[4], 'http:///s//a/b/'], + ['../', bases[4], 'http:///s//a/'], + ['../g', bases[4], 'http:///s//a/g'], + ['../../', bases[4], 'http:///s//'], + ['../../g', bases[4], 'http:///s//g'], + // 
may change to http:///s//a/../../g + ['../../../g', bases[4], 'http:///s/g'], + // may change to http:///s//a/../../../g + ['../../../../g', bases[4], 'http:///g'], + + // from Dan Connelly's tests in http://www.w3.org/2000/10/swap/uripath.py + ['bar:abc', 'foo:xyz', 'bar:abc'], + ['../abc', 'http://example/x/y/z', 'http://example/x/abc'], + ['http://example/x/abc', 'http://example2/x/y/z', 'http://example/x/abc'], + ['../r', 'http://ex/x/y/z', 'http://ex/x/r'], + ['q/r', 'http://ex/x/y', 'http://ex/x/q/r'], + ['q/r#s', 'http://ex/x/y', 'http://ex/x/q/r#s'], + ['q/r#s/t', 'http://ex/x/y', 'http://ex/x/q/r#s/t'], + ['ftp://ex/x/q/r', 'http://ex/x/y', 'ftp://ex/x/q/r'], + ['', 'http://ex/x/y', 'http://ex/x/y'], + ['', 'http://ex/x/y/', 'http://ex/x/y/'], + ['', 'http://ex/x/y/pdq', 'http://ex/x/y/pdq'], + ['z/', 'http://ex/x/y/', 'http://ex/x/y/z/'], + ['#Animal', + 'file:/swap/test/animal.rdf', + 'file:/swap/test/animal.rdf#Animal'], + ['../abc', 'file:/e/x/y/z', 'file:/e/x/abc'], + ['/example/x/abc', 'file:/example2/x/y/z', 'file:/example/x/abc'], + ['../r', 'file:/ex/x/y/z', 'file:/ex/x/r'], + ['/r', 'file:/ex/x/y/z', 'file:/r'], + ['q/r', 'file:/ex/x/y', 'file:/ex/x/q/r'], + ['q/r#s', 'file:/ex/x/y', 'file:/ex/x/q/r#s'], + ['q/r#', 'file:/ex/x/y', 'file:/ex/x/q/r#'], + ['q/r#s/t', 'file:/ex/x/y', 'file:/ex/x/q/r#s/t'], + ['ftp://ex/x/q/r', 'file:/ex/x/y', 'ftp://ex/x/q/r'], + ['', 'file:/ex/x/y', 'file:/ex/x/y'], + ['', 'file:/ex/x/y/', 'file:/ex/x/y/'], + ['', 'file:/ex/x/y/pdq', 'file:/ex/x/y/pdq'], + ['z/', 'file:/ex/x/y/', 'file:/ex/x/y/z/'], + ['file://meetings.example.com/cal#m1', + 'file:/devel/WWW/2000/10/swap/test/reluri-1.n3', + 'file://meetings.example.com/cal#m1'], + ['file://meetings.example.com/cal#m1', + 'file:/home/connolly/w3ccvs/WWW/2000/10/swap/test/reluri-1.n3', + 'file://meetings.example.com/cal#m1'], + ['./#blort', 'file:/some/dir/foo', 'file:/some/dir/#blort'], + ['./#', 'file:/some/dir/foo', 'file:/some/dir/#'], + // Ryan Lee + ['./', 'http://example/x/abc.efg', 'http://example/x/'], + + + // Graham Klyne's tests + // http://www.ninebynine.org/Software/HaskellUtils/Network/UriTest.xls + // 01-31 are from Connelly's cases + + // 32-49 + ['./q:r', 'http://ex/x/y', 'http://ex/x/q:r'], + ['./p=q:r', 'http://ex/x/y', 'http://ex/x/p=q:r'], + ['?pp/rr', 'http://ex/x/y?pp/qq', 'http://ex/x/y?pp/rr'], + ['y/z', 'http://ex/x/y?pp/qq', 'http://ex/x/y/z'], + ['local/qual@domain.org#frag', + 'mailto:local', + 'mailto:local/qual@domain.org#frag'], + ['more/qual2@domain2.org#frag', + 'mailto:local/qual1@domain1.org', + 'mailto:local/more/qual2@domain2.org#frag'], + ['y?q', 'http://ex/x/y?q', 'http://ex/x/y?q'], + ['/x/y?q', 'http://ex?p', 'http://ex/x/y?q'], + ['c/d', 'foo:a/b', 'foo:a/c/d'], + ['/c/d', 'foo:a/b', 'foo:/c/d'], + ['', 'foo:a/b?c#d', 'foo:a/b?c'], + ['b/c', 'foo:a', 'foo:b/c'], + ['../b/c', 'foo:/a/y/z', 'foo:/a/b/c'], + ['./b/c', 'foo:a', 'foo:b/c'], + ['/./b/c', 'foo:a', 'foo:/b/c'], + ['../../d', 'foo://a//b/c', 'foo://a/d'], + ['.', 'foo:a', 'foo:'], + ['..', 'foo:a', 'foo:'], + + // 50-57[cf. 
TimBL comments -- + // http://lists.w3.org/Archives/Public/uri/2003Feb/0028.html, + // http://lists.w3.org/Archives/Public/uri/2003Jan/0008.html) + ['abc', 'http://example/x/y%2Fz', 'http://example/x/abc'], + ['../../x%2Fabc', 'http://example/a/x/y/z', 'http://example/a/x%2Fabc'], + ['../x%2Fabc', 'http://example/a/x/y%2Fz', 'http://example/a/x%2Fabc'], + ['abc', 'http://example/x%2Fy/z', 'http://example/x%2Fy/abc'], + ['q%3Ar', 'http://ex/x/y', 'http://ex/x/q%3Ar'], + ['/x%2Fabc', 'http://example/x/y%2Fz', 'http://example/x%2Fabc'], + ['/x%2Fabc', 'http://example/x/y/z', 'http://example/x%2Fabc'], + ['/x%2Fabc', 'http://example/x/y%2Fz', 'http://example/x%2Fabc'], + + // 70-77 + ['local2@domain2', 'mailto:local1@domain1?query1', 'mailto:local2@domain2'], + ['local2@domain2?query2', + 'mailto:local1@domain1', + 'mailto:local2@domain2?query2'], + ['local2@domain2?query2', + 'mailto:local1@domain1?query1', + 'mailto:local2@domain2?query2'], + ['?query2', 'mailto:local@domain?query1', 'mailto:local@domain?query2'], + ['local@domain?query2', 'mailto:?query1', 'mailto:local@domain?query2'], + ['?query2', 'mailto:local@domain?query1', 'mailto:local@domain?query2'], + ['http://example/a/b?c/../d', 'foo:bar', 'http://example/a/b?c/../d'], + ['http://example/a/b#c/../d', 'foo:bar', 'http://example/a/b#c/../d'], + + // 82-88 + // @isaacs Disagree. Not how browsers do it. + // ['http:this', 'http://example.org/base/uri', 'http:this'], + // @isaacs Added + ['http:this', 'http://example.org/base/uri', 'http://example.org/base/this'], + ['http:this', 'http:base', 'http:this'], + ['.//g', 'f:/a', 'f://g'], + ['b/c//d/e', 'f://example.org/base/a', 'f://example.org/base/b/c//d/e'], + ['m2@example.ord/c2@example.org', + 'mid:m@example.ord/c@example.org', + 'mid:m@example.ord/m2@example.ord/c2@example.org'], + ['mini1.xml', + 'file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/', + 'file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/mini1.xml'], + ['../b/c', 'foo:a/y/z', 'foo:a/b/c'], + + //changeing auth + ['http://diff:auth@www.example.com', + 'http://asdf:qwer@www.example.com', + 'http://diff:auth@www.example.com/'] +]; +relativeTests2.forEach(function(relativeTest) { + var a = url.resolve(relativeTest[1], relativeTest[0]), + e = relativeTest[2]; + assert.equal(a, e, + 'resolve(' + [relativeTest[1], relativeTest[0]] + ') == ' + e + + '\n actual=' + a); +}); + +//if format and parse are inverse operations then +//resolveObject(parse(x), y) == parse(resolve(x, y)) + +//host and hostname are special, in this case a '' value is important +var emptyIsImportant = {'host': true, 'hostname': ''}; + +//format: [from, path, expected] +relativeTests.forEach(function(relativeTest) { + var actual = url.resolveObject(url.parse(relativeTest[0]), relativeTest[1]), + expected = url.parse(relativeTest[2]); + + + assert.deepEqual(actual, expected); + + expected = relativeTest[2]; + actual = url.format(actual); + + assert.equal(actual, expected, + 'format(' + actual + ') == ' + expected + '\nactual:' + actual); +}); + +//format: [to, from, result] +// the test: ['.//g', 'f:/a', 'f://g'] is a fundimental problem +// url.parse('f:/a') does not have a host +// url.resolve('f:/a', './/g') does not have a host becuase you have moved +// down to the g directory. i.e. f: //g, however when this url is parsed +// f:// will indicate that the host is g which is not the case. 
+// it is unclear to me how to keep this information from being lost +// it may be that a pathname of ////g should colapse to /g but this seems +// to be a lot of work for an edge case. Right now I remove the test +if (relativeTests2[181][0] === './/g' && + relativeTests2[181][1] === 'f:/a' && + relativeTests2[181][2] === 'f://g') { + relativeTests2.splice(181, 1); +} +relativeTests2.forEach(function(relativeTest) { + var actual = url.resolveObject(url.parse(relativeTest[1]), relativeTest[0]), + expected = url.parse(relativeTest[2]); + + assert.deepEqual(actual, expected); + + var expected = relativeTest[2], + actual = url.format(actual); + + assert.equal(actual, expected, + 'format(' + relativeTest[1] + ') == ' + expected + + '\nactual:' + actual); +}); + +}); diff --git a/node_modules/url/url.js b/node_modules/url/url.js new file mode 100644 index 0000000..ddc4ade --- /dev/null +++ b/node_modules/url/url.js @@ -0,0 +1,707 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var punycode = require('punycode'); + +exports.parse = urlParse; +exports.resolve = urlResolve; +exports.resolveObject = urlResolveObject; +exports.format = urlFormat; + +exports.Url = Url; + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. +var protocolPattern = /^([a-z0-9.+-]+:)/i, + portPattern = /:[0-9]*$/, + + // RFC 2396: characters reserved for delimiting URLs. + // We actually just auto-escape these. + delims = ['<', '>', '"', '`', ' ', '\r', '\n', '\t'], + + // RFC 2396: characters not allowed for various reasons. + unwise = ['{', '}', '|', '\\', '^', '`'].concat(delims), + + // Allowed by RFCs, but cause of XSS attacks. Always escape these. + autoEscape = ['\''].concat(unwise), + // Characters that are never ever allowed in a hostname. + // Note that any invalid chars are also handled, but these + // are the ones that are *expected* to be seen, so we fast-path + // them. 
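+ // For example (per the parse tests in node_modules/url/test.js above): in
+ // 'http://x.com/path?that\'s#all, folks' the apostrophe is left alone by
+ // encodeURIComponent, so the auto-escape pass in parse() falls back to
+ // escape() and the search becomes '?that%27s', while the space in the
+ // fragment is rewritten to '%20'.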
+ nonHostChars = ['%', '/', '?', ';', '#'].concat(autoEscape), + hostEndingChars = ['/', '?', '#'], + hostnameMaxLen = 255, + hostnamePartPattern = /^[a-z0-9A-Z_-]{0,63}$/, + hostnamePartStart = /^([a-z0-9A-Z_-]{0,63})(.*)$/, + // protocols that can allow "unsafe" and "unwise" chars. + unsafeProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that never have a hostname. + hostlessProtocol = { + 'javascript': true, + 'javascript:': true + }, + // protocols that always contain a // bit. + slashedProtocol = { + 'http': true, + 'https': true, + 'ftp': true, + 'gopher': true, + 'file': true, + 'http:': true, + 'https:': true, + 'ftp:': true, + 'gopher:': true, + 'file:': true + }, + querystring = require('querystring'); + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (url && isObject(url) && url instanceof Url) return url; + + var u = new Url; + u.parse(url, parseQueryString, slashesDenoteHost); + return u; +} + +Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { + if (!isString(url)) { + throw new TypeError("Parameter 'url' must be a string, not " + typeof url); + } + + var rest = url; + + // trim before proceeding. + // This is to support parse stuff like " http://foo.com \n" + rest = rest.trim(); + + var proto = protocolPattern.exec(rest); + if (proto) { + proto = proto[0]; + var lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.substr(proto.length); + } + + // figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. + if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) { + var slashes = rest.substr(0, 2) === '//'; + if (slashes && !(proto && hostlessProtocol[proto])) { + rest = rest.substr(2); + this.slashes = true; + } + } + + if (!hostlessProtocol[proto] && + (slashes || (proto && !slashedProtocol[proto]))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. + // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:c path:/?@c + + // v0.12 TODO(isaacs): This is not quite how Chrome does things. + // Review our test case against browsers more comprehensively. + + // find the first instance of any hostEndingChars + var hostEnd = -1; + for (var i = 0; i < hostEndingChars.length; i++) { + var hec = rest.indexOf(hostEndingChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + + // at this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. + var auth, atSign; + if (hostEnd === -1) { + // atSign can be anywhere. + atSign = rest.lastIndexOf('@'); + } else { + // atSign must be in auth portion. + // http://a@b/c@d => host:b auth:a path:/c@d + atSign = rest.lastIndexOf('@', hostEnd); + } + + // Now we have a portion which is definitely the auth. + // Pull that off. 
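+ // Note that the auth portion is percent-decoded once it has been pulled
+ // off, so (per test.js above) 'http://user%3Apw@www.example.com/' parses
+ // with auth 'user:pw'.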
+ if (atSign !== -1) { + auth = rest.slice(0, atSign); + rest = rest.slice(atSign + 1); + this.auth = decodeURIComponent(auth); + } + + // the host is the remaining to the left of the first non-host char + hostEnd = -1; + for (var i = 0; i < nonHostChars.length; i++) { + var hec = rest.indexOf(nonHostChars[i]); + if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) + hostEnd = hec; + } + // if we still have not hit it, then the entire thing is a host. + if (hostEnd === -1) + hostEnd = rest.length; + + this.host = rest.slice(0, hostEnd); + rest = rest.slice(hostEnd); + + // pull out port. + this.parseHost(); + + // we've indicated that there is a hostname, + // so even if it's empty, it has to be present. + this.hostname = this.hostname || ''; + + // if hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + var ipv6Hostname = this.hostname[0] === '[' && + this.hostname[this.hostname.length - 1] === ']'; + + // validate a little. + if (!ipv6Hostname) { + var hostparts = this.hostname.split(/\./); + for (var i = 0, l = hostparts.length; i < l; i++) { + var part = hostparts[i]; + if (!part) continue; + if (!part.match(hostnamePartPattern)) { + var newpart = ''; + for (var j = 0, k = part.length; j < k; j++) { + if (part.charCodeAt(j) > 127) { + // we replace non-ASCII char with a temporary placeholder + // we need this to make sure size of hostname is not + // broken by replacing non-ASCII by nothing + newpart += 'x'; + } else { + newpart += part[j]; + } + } + // we test again with ASCII char only + if (!newpart.match(hostnamePartPattern)) { + var validParts = hostparts.slice(0, i); + var notHost = hostparts.slice(i + 1); + var bit = part.match(hostnamePartStart); + if (bit) { + validParts.push(bit[1]); + notHost.unshift(bit[2]); + } + if (notHost.length) { + rest = '/' + notHost.join('.') + rest; + } + this.hostname = validParts.join('.'); + break; + } + } + } + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (!ipv6Hostname) { + // IDNA Support: Returns a puny coded representation of "domain". + // It only converts the part of the domain name that + // has non ASCII characters. I.e. it dosent matter if + // you call it with a domain that already is in ASCII. + var domainArray = this.hostname.split('.'); + var newOut = []; + for (var i = 0; i < domainArray.length; ++i) { + var s = domainArray[i]; + newOut.push(s.match(/[^A-Za-z0-9_-]/) ? + 'xn--' + punycode.encode(s) : s); + } + this.hostname = newOut.join('.'); + } + + var p = this.port ? ':' + this.port : ''; + var h = this.hostname || ''; + this.host = h + p; + this.href += this.host; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.substr(1, this.hostname.length - 2); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // now rest is set to the post-host stuff. + // chop off any delim chars. + if (!unsafeProtocol[lowerProto]) { + + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + for (var i = 0, l = autoEscape.length; i < l; i++) { + var ae = autoEscape[i]; + var esc = encodeURIComponent(ae); + if (esc === ae) { + esc = escape(ae); + } + rest = rest.split(ae).join(esc); + } + } + + + // chop off from the tail first. + var hash = rest.indexOf('#'); + if (hash !== -1) { + // got a fragment string. 
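+ // Everything from the first '#' (inclusive) becomes the hash; the query
+ // is only looked for in what remains, so a '?' after the '#' stays in the
+ // hash. E.g. (from the format tests in test.js above)
+ // 'http://example.com#frag=?bar/#frag' keeps '#frag=?bar/#frag' as its
+ // hash and has no search.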
+ this.hash = rest.substr(hash); + rest = rest.slice(0, hash); + } + var qm = rest.indexOf('?'); + if (qm !== -1) { + this.search = rest.substr(qm); + this.query = rest.substr(qm + 1); + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + rest = rest.slice(0, qm); + } else if (parseQueryString) { + // no query string, but parseQueryString still requested + this.search = ''; + this.query = {}; + } + if (rest) this.pathname = rest; + if (slashedProtocol[lowerProto] && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + //to support http.request + if (this.pathname || this.search) { + var p = this.pathname || ''; + var s = this.search || ''; + this.path = p + s; + } + + // finally, reconstruct the href based on what has been validated. + this.href = this.format(); + return this; +}; + +// format a parsed object into a url string +function urlFormat(obj) { + // ensure it's an object, and not a string url. + // If it's an obj, this is a no-op. + // this way, you can call url_format() on strings + // to clean up potentially wonky urls. + if (isString(obj)) obj = urlParse(obj); + if (!(obj instanceof Url)) return Url.prototype.format.call(obj); + return obj.format(); +} + +Url.prototype.format = function() { + var auth = this.auth || ''; + if (auth) { + auth = encodeURIComponent(auth); + auth = auth.replace(/%3A/i, ':'); + auth += '@'; + } + + var protocol = this.protocol || '', + pathname = this.pathname || '', + hash = this.hash || '', + host = false, + query = ''; + + if (this.host) { + host = auth + this.host; + } else if (this.hostname) { + host = auth + (this.hostname.indexOf(':') === -1 ? + this.hostname : + '[' + this.hostname + ']'); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query && + isObject(this.query) && + Object.keys(this.query).length) { + query = querystring.stringify(this.query); + } + + var search = this.search || (query && ('?' + query)) || ''; + + if (protocol && protocol.substr(-1) !== ':') protocol += ':'; + + // only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. + if (this.slashes || + (!protocol || slashedProtocol[protocol]) && host !== false) { + host = '//' + (host || ''); + if (pathname && pathname.charAt(0) !== '/') pathname = '/' + pathname; + } else if (!host) { + host = ''; + } + + if (hash && hash.charAt(0) !== '#') hash = '#' + hash; + if (search && search.charAt(0) !== '?') search = '?' + search; + + pathname = pathname.replace(/[?#]/g, function(match) { + return encodeURIComponent(match); + }); + search = search.replace('#', '%23'); + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function(relative) { + if (isString(relative)) { + var rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + var result = new Url(); + Object.keys(this).forEach(function(k) { + result[k] = this[k]; + }, this); + + // hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // if the relative url is empty, then there's nothing left to do here. 
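+ // e.g. (from test.js above) url.resolve('foo:a/b?c#d', '') gives
+ // 'foo:a/b?c': the fragment is dropped, the rest is returned unchanged.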
+ if (relative.href === '') { + result.href = result.format(); + return result; + } + + // hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // take everything except the protocol from relative + Object.keys(relative).forEach(function(k) { + if (k !== 'protocol') + result[k] = relative[k]; + }); + + //urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol[result.protocol] && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // if it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. + // anything else is assumed to be absolute. + if (!slashedProtocol[relative.protocol]) { + Object.keys(relative).forEach(function(k) { + result[k] = relative[k]; + }); + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && !hostlessProtocol[relative.protocol]) { + var relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + if (!relative.host) relative.host = ''; + if (!relative.hostname) relative.hostname = ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // to support http.request + if (result.pathname || result.search) { + var p = result.pathname || ''; + var s = result.search || ''; + result.path = p + s; + } + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; + } + + var isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'), + isRelAbs = ( + relative.host || + relative.pathname && relative.pathname.charAt(0) === '/' + ), + mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)), + removeAllDots = mustEndAbs, + srcPath = result.pathname && result.pathname.split('/') || [], + relPath = relative.pathname && relative.pathname.split('/') || [], + psychotic = result.protocol && !slashedProtocol[result.protocol]; + + // if the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. + if (psychotic) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs = mustEndAbs && (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + result.host = (relative.host || relative.host === '') ? 
+ relative.host : result.host; + result.hostname = (relative.hostname || relative.hostname === '') ? + relative.hostname : result.hostname; + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + if (!srcPath) srcPath = []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (!isNullOrUndefined(relative.search)) { + // just pull out the search. + // like href='?foo'. + // Put this after the other two cases because it simplifies the booleans + if (psychotic) { + result.hostname = result.host = srcPath.shift(); + //occationaly the auth can get stuck only in host + //this especialy happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + //to support http.request + if (!isNull(result.pathname) || !isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // no path at all. easy. + // we've already handled the other stuff above. + result.pathname = null; + //to support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // if a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + var last = srcPath.slice(-1)[0]; + var hasTrailingSlash = ( + (result.host || relative.host) && (last === '.' || last === '..') || + last === ''); + + // strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = srcPath.length; i >= 0; i--) { + last = srcPath[i]; + if (last == '.') { + srcPath.splice(i, 1); + } else if (last === '..') { + srcPath.splice(i, 1); + up++; + } else if (up) { + srcPath.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + for (; up--; up) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && (srcPath.join('/').substr(-1) !== '/')) { + srcPath.push(''); + } + + var isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (psychotic) { + result.hostname = result.host = isAbsolute ? '' : + srcPath.length ? srcPath.shift() : ''; + //occationaly the auth can get stuck only in host + //this especialy happens in cases like + //url.resolveObject('mailto:local1@domain1', 'local2@domain2') + var authInHost = result.host && result.host.indexOf('@') > 0 ? 
+ result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs = mustEndAbs || (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + //to support request.http + if (!isNull(result.pathname) || !isNull(result.search)) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes = result.slashes || relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function() { + var host = this.host; + var port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.substr(1); + } + host = host.substr(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + +function isString(arg) { + return typeof arg === "string"; +} + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} + +function isNull(arg) { + return arg === null; +} +function isNullOrUndefined(arg) { + return arg == null; +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. 
via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. 
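+ * For example, setting `localStorage.noDeprecation = true` in the browser
+ * console makes `deprecate()` return the original function with no warning wrapper.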
+ * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..2e79f89 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/util/.npmignore b/node_modules/util/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/util/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/util/.travis.yml b/node_modules/util/.travis.yml new file mode 100644 index 0000000..ded625c --- /dev/null +++ b/node_modules/util/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: +- '0.8' +- '0.10' +env: + global: + - secure: AdUubswCR68/eGD+WWjwTHgFbelwQGnNo81j1IOaUxKw+zgFPzSnFEEtDw7z98pWgg7p9DpCnyzzSnSllP40wq6AG19OwyUJjSLoZK57fp+r8zwTQwWiSqUgMu2YSMmKJPIO/aoSGpRQXT+L1nRrHoUJXgFodyIZgz40qzJeZjc= + - secure: heQuxPVsQ7jBbssoVKimXDpqGjQFiucm6W5spoujmspjDG7oEcHD9ANo9++LoRPrsAmNx56SpMK5fNfVmYediw6SvhXm4Mxt56/fYCrLDBtgGG+1neCeffAi8z1rO8x48m77hcQ6YhbUL5R9uBimUjMX92fZcygAt8Rg804zjFo= diff --git a/node_modules/util/.zuul.yml b/node_modules/util/.zuul.yml new file mode 100644 index 0000000..2105010 --- /dev/null +++ b/node_modules/util/.zuul.yml @@ -0,0 +1,10 @@ +ui: mocha-qunit +browsers: + - name: chrome + version: 27..latest + - name: firefox + version: latest + - name: safari + version: latest + - name: ie + version: 9..latest diff --git a/node_modules/util/LICENSE b/node_modules/util/LICENSE new file mode 100644 index 0000000..e3d4e69 --- /dev/null +++ b/node_modules/util/LICENSE @@ -0,0 +1,18 @@ +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/util/README.md b/node_modules/util/README.md new file mode 100644 index 0000000..1c473d2 --- /dev/null +++ b/node_modules/util/README.md @@ -0,0 +1,15 @@ +# util + +[![Build Status](https://travis-ci.org/defunctzombie/node-util.png?branch=master)](https://travis-ci.org/defunctzombie/node-util) + +node.js [util](http://nodejs.org/api/util.html) module as a module + +## install via [npm](npmjs.org) + +```shell +npm install util +``` + +## browser support + +This module also works in modern browsers. If you need legacy browser support you will need to polyfill ES5 features. diff --git a/node_modules/util/node_modules/inherits/LICENSE b/node_modules/util/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/node_modules/util/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/util/node_modules/inherits/README.md b/node_modules/util/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/node_modules/util/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. 
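+
+A rough sketch of the usual pattern (hypothetical `Parent`/`Child` constructors,
+just to illustrate what the exported function does):
+
+```js
+var inherits = require('inherits');
+
+function Parent() {}
+
+function Child() {
+  Parent.call(this); // run the superclass constructor first
+}
+
+inherits(Child, Parent);
+// Child.prototype now delegates to Parent.prototype,
+// and Child.super_ === Parent for convenience.
+```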
+ +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/util/node_modules/inherits/inherits.js b/node_modules/util/node_modules/inherits/inherits.js new file mode 100644 index 0000000..29f5e24 --- /dev/null +++ b/node_modules/util/node_modules/inherits/inherits.js @@ -0,0 +1 @@ +module.exports = require('util').inherits diff --git a/node_modules/util/node_modules/inherits/inherits_browser.js b/node_modules/util/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..c1e78a7 --- /dev/null +++ b/node_modules/util/node_modules/inherits/inherits_browser.js @@ -0,0 +1,23 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } +} diff --git a/node_modules/util/node_modules/inherits/package.json b/node_modules/util/node_modules/inherits/package.json new file mode 100644 index 0000000..0e36965 --- /dev/null +++ b/node_modules/util/node_modules/inherits/package.json @@ -0,0 +1,22 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.1", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "node test" + } +} diff --git a/node_modules/util/node_modules/inherits/test.js b/node_modules/util/node_modules/inherits/test.js new file mode 100644 index 0000000..fc53012 --- /dev/null +++ b/node_modules/util/node_modules/inherits/test.js @@ -0,0 +1,25 @@ +var inherits = require('./inherits.js') +var assert = require('assert') + +function test(c) { + assert(c.constructor === Child) + assert(c.constructor.super_ === Parent) + assert(Object.getPrototypeOf(c) === Child.prototype) + assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype) + assert(c 
instanceof Child) + assert(c instanceof Parent) +} + +function Child() { + Parent.call(this) + test(this) +} + +function Parent() {} + +inherits(Child, Parent) + +var c = new Child +test(c) + +console.log('ok') diff --git a/node_modules/util/package.json b/node_modules/util/package.json new file mode 100644 index 0000000..738d0a8 --- /dev/null +++ b/node_modules/util/package.json @@ -0,0 +1,31 @@ +{ + "author": { + "name": "Joyent", + "url": "http://www.joyent.com" + }, + "name": "util", + "description": "Node.JS util module", + "keywords": [ + "util" + ], + "version": "0.10.3", + "homepage": "https://github.com/defunctzombie/node-util", + "repository": { + "type": "git", + "url": "git://github.com/defunctzombie/node-util" + }, + "main": "./util.js", + "scripts": { + "test": "node test/node/*.js && zuul test/browser/*.js" + }, + "dependencies": { + "inherits": "2.0.1" + }, + "license": "MIT", + "devDependencies": { + "zuul": "~1.0.9" + }, + "browser": { + "./support/isBuffer.js": "./support/isBufferBrowser.js" + } +} diff --git a/node_modules/util/support/isBuffer.js b/node_modules/util/support/isBuffer.js new file mode 100644 index 0000000..ace9ac0 --- /dev/null +++ b/node_modules/util/support/isBuffer.js @@ -0,0 +1,3 @@ +module.exports = function isBuffer(arg) { + return arg instanceof Buffer; +} diff --git a/node_modules/util/support/isBufferBrowser.js b/node_modules/util/support/isBufferBrowser.js new file mode 100644 index 0000000..0e1bee1 --- /dev/null +++ b/node_modules/util/support/isBufferBrowser.js @@ -0,0 +1,6 @@ +module.exports = function isBuffer(arg) { + return arg && typeof arg === 'object' + && typeof arg.copy === 'function' + && typeof arg.fill === 'function' + && typeof arg.readUInt8 === 'function'; +} \ No newline at end of file diff --git a/node_modules/util/test/browser/inspect.js b/node_modules/util/test/browser/inspect.js new file mode 100644 index 0000000..91af3b0 --- /dev/null +++ b/node_modules/util/test/browser/inspect.js @@ -0,0 +1,41 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var assert = require('assert'); +var util = require('../../'); + +suite('inspect'); + +test('util.inspect - test for sparse array', function () { + var a = ['foo', 'bar', 'baz']; + assert.equal(util.inspect(a), '[ \'foo\', \'bar\', \'baz\' ]'); + delete a[1]; + assert.equal(util.inspect(a), '[ \'foo\', , \'baz\' ]'); + assert.equal(util.inspect(a, true), '[ \'foo\', , \'baz\', [length]: 3 ]'); + assert.equal(util.inspect(new Array(5)), '[ , , , , ]'); +}); + +test('util.inspect - exceptions should print the error message, not \'{}\'', function () { + assert.equal(util.inspect(new Error()), '[Error]'); + assert.equal(util.inspect(new Error('FAIL')), '[Error: FAIL]'); + assert.equal(util.inspect(new TypeError('FAIL')), '[TypeError: FAIL]'); + assert.equal(util.inspect(new SyntaxError('FAIL')), '[SyntaxError: FAIL]'); +}); diff --git a/node_modules/util/test/browser/is.js b/node_modules/util/test/browser/is.js new file mode 100644 index 0000000..f63bff9 --- /dev/null +++ b/node_modules/util/test/browser/is.js @@ -0,0 +1,91 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var assert = require('assert'); + +var util = require('../../'); + +suite('is'); + +test('util.isArray', function () { + assert.equal(true, util.isArray([])); + assert.equal(true, util.isArray(Array())); + assert.equal(true, util.isArray(new Array())); + assert.equal(true, util.isArray(new Array(5))); + assert.equal(true, util.isArray(new Array('with', 'some', 'entries'))); + assert.equal(false, util.isArray({})); + assert.equal(false, util.isArray({ push: function() {} })); + assert.equal(false, util.isArray(/regexp/)); + assert.equal(false, util.isArray(new Error())); + assert.equal(false, util.isArray(Object.create(Array.prototype))); +}); + +test('util.isRegExp', function () { + assert.equal(true, util.isRegExp(/regexp/)); + assert.equal(true, util.isRegExp(RegExp())); + assert.equal(true, util.isRegExp(new RegExp())); + assert.equal(false, util.isRegExp({})); + assert.equal(false, util.isRegExp([])); + assert.equal(false, util.isRegExp(new Date())); + assert.equal(false, util.isRegExp(Object.create(RegExp.prototype))); +}); + +test('util.isDate', function () { + assert.equal(true, util.isDate(new Date())); + assert.equal(true, util.isDate(new Date(0))); + assert.equal(false, util.isDate(Date())); + assert.equal(false, util.isDate({})); + assert.equal(false, util.isDate([])); + assert.equal(false, util.isDate(new Error())); + assert.equal(false, util.isDate(Object.create(Date.prototype))); +}); + +test('util.isError', function () { + assert.equal(true, util.isError(new Error())); + assert.equal(true, util.isError(new TypeError())); + assert.equal(true, util.isError(new SyntaxError())); + assert.equal(false, util.isError({})); + assert.equal(false, util.isError({ name: 'Error', message: '' })); + assert.equal(false, util.isError([])); + assert.equal(true, util.isError(Object.create(Error.prototype))); +}); + +test('util._extend', function () { + assert.deepEqual(util._extend({a:1}), {a:1}); + assert.deepEqual(util._extend({a:1}, []), {a:1}); + assert.deepEqual(util._extend({a:1}, null), {a:1}); + assert.deepEqual(util._extend({a:1}, true), {a:1}); + assert.deepEqual(util._extend({a:1}, false), {a:1}); + assert.deepEqual(util._extend({a:1}, {b:2}), {a:1, b:2}); + assert.deepEqual(util._extend({a:1, b:2}, {b:3}), {a:1, b:3}); +}); + +test('util.isBuffer', function () { + assert.equal(true, util.isBuffer(new Buffer(4))); + assert.equal(true, util.isBuffer(Buffer(4))); + assert.equal(true, util.isBuffer(new Buffer(4))); + assert.equal(true, util.isBuffer(new Buffer([1, 2, 3, 4]))); + assert.equal(false, util.isBuffer({})); + assert.equal(false, util.isBuffer([])); + assert.equal(false, util.isBuffer(new Error())); + assert.equal(false, util.isRegExp(new Date())); + assert.equal(true, util.isBuffer(Object.create(Buffer.prototype))); +}); diff --git a/node_modules/util/test/node/debug.js b/node_modules/util/test/node/debug.js new file mode 100644 index 0000000..ef5f69f --- /dev/null +++ b/node_modules/util/test/node/debug.js @@ -0,0 +1,86 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var assert = require('assert'); +var util = require('../../'); + +if (process.argv[2] === 'child') + child(); +else + parent(); + +function parent() { + test('foo,tud,bar', true); + test('foo,tud', true); + test('tud,bar', true); + test('tud', true); + test('foo,bar', false); + test('', false); +} + +function test(environ, shouldWrite) { + var expectErr = ''; + if (shouldWrite) { + expectErr = 'TUD %PID%: this { is: \'a\' } /debugging/\n' + + 'TUD %PID%: number=1234 string=asdf obj={"foo":"bar"}\n'; + } + var expectOut = 'ok\n'; + var didTest = false; + + var spawn = require('child_process').spawn; + var child = spawn(process.execPath, [__filename, 'child'], { + env: { NODE_DEBUG: environ } + }); + + expectErr = expectErr.split('%PID%').join(child.pid); + + var err = ''; + child.stderr.setEncoding('utf8'); + child.stderr.on('data', function(c) { + err += c; + }); + + var out = ''; + child.stdout.setEncoding('utf8'); + child.stdout.on('data', function(c) { + out += c; + }); + + child.on('close', function(c) { + assert(!c); + assert.equal(err, expectErr); + assert.equal(out, expectOut); + didTest = true; + console.log('ok %j %j', environ, shouldWrite); + }); + + process.on('exit', function() { + assert(didTest); + }); +} + + +function child() { + var debug = util.debuglog('tud'); + debug('this', { is: 'a' }, /debugging/); + debug('number=%d string=%s obj=%j', 1234, 'asdf', { foo: 'bar' }); + console.log('ok'); +} diff --git a/node_modules/util/test/node/format.js b/node_modules/util/test/node/format.js new file mode 100644 index 0000000..f2d1862 --- /dev/null +++ b/node_modules/util/test/node/format.js @@ -0,0 +1,77 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + + +var assert = require('assert'); +var util = require('../../'); + +assert.equal(util.format(), ''); +assert.equal(util.format(''), ''); +assert.equal(util.format([]), '[]'); +assert.equal(util.format({}), '{}'); +assert.equal(util.format(null), 'null'); +assert.equal(util.format(true), 'true'); +assert.equal(util.format(false), 'false'); +assert.equal(util.format('test'), 'test'); + +// CHECKME this is for console.log() compatibility - but is it *right*? +assert.equal(util.format('foo', 'bar', 'baz'), 'foo bar baz'); + +assert.equal(util.format('%d', 42.0), '42'); +assert.equal(util.format('%d', 42), '42'); +assert.equal(util.format('%s', 42), '42'); +assert.equal(util.format('%j', 42), '42'); + +assert.equal(util.format('%d', '42.0'), '42'); +assert.equal(util.format('%d', '42'), '42'); +assert.equal(util.format('%s', '42'), '42'); +assert.equal(util.format('%j', '42'), '"42"'); + +assert.equal(util.format('%%s%s', 'foo'), '%sfoo'); + +assert.equal(util.format('%s'), '%s'); +assert.equal(util.format('%s', undefined), 'undefined'); +assert.equal(util.format('%s', 'foo'), 'foo'); +assert.equal(util.format('%s:%s'), '%s:%s'); +assert.equal(util.format('%s:%s', undefined), 'undefined:%s'); +assert.equal(util.format('%s:%s', 'foo'), 'foo:%s'); +assert.equal(util.format('%s:%s', 'foo', 'bar'), 'foo:bar'); +assert.equal(util.format('%s:%s', 'foo', 'bar', 'baz'), 'foo:bar baz'); +assert.equal(util.format('%%%s%%', 'hi'), '%hi%'); +assert.equal(util.format('%%%s%%%%', 'hi'), '%hi%%'); + +(function() { + var o = {}; + o.o = o; + assert.equal(util.format('%j', o), '[Circular]'); +})(); + +// Errors +assert.equal(util.format(new Error('foo')), '[Error: foo]'); +function CustomError(msg) { + Error.call(this); + Object.defineProperty(this, 'message', { value: msg, enumerable: false }); + Object.defineProperty(this, 'name', { value: 'CustomError', enumerable: false }); +} +util.inherits(CustomError, Error); +assert.equal(util.format(new CustomError('bar')), '[CustomError: bar]'); diff --git a/node_modules/util/test/node/inspect.js b/node_modules/util/test/node/inspect.js new file mode 100644 index 0000000..f766d11 --- /dev/null +++ b/node_modules/util/test/node/inspect.js @@ -0,0 +1,195 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + + +var assert = require('assert'); +var util = require('../../'); + +// test the internal isDate implementation +var Date2 = require('vm').runInNewContext('Date'); +var d = new Date2(); +var orig = util.inspect(d); +Date2.prototype.foo = 'bar'; +var after = util.inspect(d); +assert.equal(orig, after); + +// test for sparse array +var a = ['foo', 'bar', 'baz']; +assert.equal(util.inspect(a), '[ \'foo\', \'bar\', \'baz\' ]'); +delete a[1]; +assert.equal(util.inspect(a), '[ \'foo\', , \'baz\' ]'); +assert.equal(util.inspect(a, true), '[ \'foo\', , \'baz\', [length]: 3 ]'); +assert.equal(util.inspect(new Array(5)), '[ , , , , ]'); + +// test for property descriptors +var getter = Object.create(null, { + a: { + get: function() { return 'aaa'; } + } +}); +var setter = Object.create(null, { + b: { + set: function() {} + } +}); +var getterAndSetter = Object.create(null, { + c: { + get: function() { return 'ccc'; }, + set: function() {} + } +}); +assert.equal(util.inspect(getter, true), '{ [a]: [Getter] }'); +assert.equal(util.inspect(setter, true), '{ [b]: [Setter] }'); +assert.equal(util.inspect(getterAndSetter, true), '{ [c]: [Getter/Setter] }'); + +// exceptions should print the error message, not '{}' +assert.equal(util.inspect(new Error()), '[Error]'); +assert.equal(util.inspect(new Error('FAIL')), '[Error: FAIL]'); +assert.equal(util.inspect(new TypeError('FAIL')), '[TypeError: FAIL]'); +assert.equal(util.inspect(new SyntaxError('FAIL')), '[SyntaxError: FAIL]'); +try { + undef(); +} catch (e) { + assert.equal(util.inspect(e), '[ReferenceError: undef is not defined]'); +} +var ex = util.inspect(new Error('FAIL'), true); +assert.ok(ex.indexOf('[Error: FAIL]') != -1); +assert.ok(ex.indexOf('[stack]') != -1); +assert.ok(ex.indexOf('[message]') != -1); + +// GH-1941 +// should not throw: +assert.equal(util.inspect(Object.create(Date.prototype)), '{}'); + +// GH-1944 +assert.doesNotThrow(function() { + var d = new Date(); + d.toUTCString = null; + util.inspect(d); +}); + +assert.doesNotThrow(function() { + var r = /regexp/; + r.toString = null; + util.inspect(r); +}); + +// bug with user-supplied inspect function returns non-string +assert.doesNotThrow(function() { + util.inspect([{ + inspect: function() { return 123; } + }]); +}); + +// GH-2225 +var x = { inspect: util.inspect }; +assert.ok(util.inspect(x).indexOf('inspect') != -1); + +// util.inspect.styles and util.inspect.colors +function test_color_style(style, input, implicit) { + var color_name = util.inspect.styles[style]; + var color = ['', '']; + if(util.inspect.colors[color_name]) + color = util.inspect.colors[color_name]; + + var without_color = util.inspect(input, false, 0, false); + var with_color = util.inspect(input, false, 0, true); + var expect = '\u001b[' + color[0] + 'm' + without_color + + '\u001b[' + color[1] + 'm'; + assert.equal(with_color, expect, 'util.inspect color for style '+style); +} + +test_color_style('special', function(){}); +test_color_style('number', 123.456); +test_color_style('boolean', true); 
+test_color_style('undefined', undefined); +test_color_style('null', null); +test_color_style('string', 'test string'); +test_color_style('date', new Date); +test_color_style('regexp', /regexp/); + +// an object with "hasOwnProperty" overwritten should not throw +assert.doesNotThrow(function() { + util.inspect({ + hasOwnProperty: null + }); +}); + +// new API, accepts an "options" object +var subject = { foo: 'bar', hello: 31, a: { b: { c: { d: 0 } } } }; +Object.defineProperty(subject, 'hidden', { enumerable: false, value: null }); + +assert(util.inspect(subject, { showHidden: false }).indexOf('hidden') === -1); +assert(util.inspect(subject, { showHidden: true }).indexOf('hidden') !== -1); +assert(util.inspect(subject, { colors: false }).indexOf('\u001b[32m') === -1); +assert(util.inspect(subject, { colors: true }).indexOf('\u001b[32m') !== -1); +assert(util.inspect(subject, { depth: 2 }).indexOf('c: [Object]') !== -1); +assert(util.inspect(subject, { depth: 0 }).indexOf('a: [Object]') !== -1); +assert(util.inspect(subject, { depth: null }).indexOf('{ d: 0 }') !== -1); + +// "customInspect" option can enable/disable calling inspect() on objects +subject = { inspect: function() { return 123; } }; + +assert(util.inspect(subject, { customInspect: true }).indexOf('123') !== -1); +assert(util.inspect(subject, { customInspect: true }).indexOf('inspect') === -1); +assert(util.inspect(subject, { customInspect: false }).indexOf('123') === -1); +assert(util.inspect(subject, { customInspect: false }).indexOf('inspect') !== -1); + +// custom inspect() functions should be able to return other Objects +subject.inspect = function() { return { foo: 'bar' }; }; + +assert.equal(util.inspect(subject), '{ foo: \'bar\' }'); + +subject.inspect = function(depth, opts) { + assert.strictEqual(opts.customInspectOptions, true); +}; + +util.inspect(subject, { customInspectOptions: true }); + +// util.inspect with "colors" option should produce as many lines as without it +function test_lines(input) { + var count_lines = function(str) { + return (str.match(/\n/g) || []).length; + } + + var without_color = util.inspect(input); + var with_color = util.inspect(input, {colors: true}); + assert.equal(count_lines(without_color), count_lines(with_color)); +} + +test_lines([1, 2, 3, 4, 5, 6, 7]); +test_lines(function() { + var big_array = []; + for (var i = 0; i < 100; i++) { + big_array.push(i); + } + return big_array; +}()); +test_lines({foo: 'bar', baz: 35, b: {a: 35}}); +test_lines({ + foo: 'bar', + baz: 35, + b: {a: 35}, + very_long_key: 'very_long_value', + even_longer_key: ['with even longer value in array'] +}); diff --git a/node_modules/util/test/node/log.js b/node_modules/util/test/node/log.js new file mode 100644 index 0000000..6bd96d1 --- /dev/null +++ b/node_modules/util/test/node/log.js @@ -0,0 +1,58 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + +var assert = require('assert'); +var util = require('../../'); + +assert.ok(process.stdout.writable); +assert.ok(process.stderr.writable); + +var stdout_write = global.process.stdout.write; +var strings = []; +global.process.stdout.write = function(string) { + strings.push(string); +}; +console._stderr = process.stdout; + +var tests = [ + {input: 'foo', output: 'foo'}, + {input: undefined, output: 'undefined'}, + {input: null, output: 'null'}, + {input: false, output: 'false'}, + {input: 42, output: '42'}, + {input: function(){}, output: '[Function]'}, + {input: parseInt('not a number', 10), output: 'NaN'}, + {input: {answer: 42}, output: '{ answer: 42 }'}, + {input: [1,2,3], output: '[ 1, 2, 3 ]'} +]; + +// test util.log() +tests.forEach(function(test) { + util.log(test.input); + var result = strings.shift().trim(), + re = (/[0-9]{1,2} [A-Z][a-z]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} - (.+)$/), + match = re.exec(result); + assert.ok(match); + assert.equal(match[1], test.output); +}); + +global.process.stdout.write = stdout_write; diff --git a/node_modules/util/test/node/util.js b/node_modules/util/test/node/util.js new file mode 100644 index 0000000..633ba69 --- /dev/null +++ b/node_modules/util/test/node/util.js @@ -0,0 +1,83 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ + +var assert = require('assert'); +var context = require('vm').runInNewContext; + +var util = require('../../'); + +// isArray +assert.equal(true, util.isArray([])); +assert.equal(true, util.isArray(Array())); +assert.equal(true, util.isArray(new Array())); +assert.equal(true, util.isArray(new Array(5))); +assert.equal(true, util.isArray(new Array('with', 'some', 'entries'))); +assert.equal(true, util.isArray(context('Array')())); +assert.equal(false, util.isArray({})); +assert.equal(false, util.isArray({ push: function() {} })); +assert.equal(false, util.isArray(/regexp/)); +assert.equal(false, util.isArray(new Error)); +assert.equal(false, util.isArray(Object.create(Array.prototype))); + +// isRegExp +assert.equal(true, util.isRegExp(/regexp/)); +assert.equal(true, util.isRegExp(RegExp())); +assert.equal(true, util.isRegExp(new RegExp())); +assert.equal(true, util.isRegExp(context('RegExp')())); +assert.equal(false, util.isRegExp({})); +assert.equal(false, util.isRegExp([])); +assert.equal(false, util.isRegExp(new Date())); +assert.equal(false, util.isRegExp(Object.create(RegExp.prototype))); + +// isDate +assert.equal(true, util.isDate(new Date())); +assert.equal(true, util.isDate(new Date(0))); +assert.equal(true, util.isDate(new (context('Date')))); +assert.equal(false, util.isDate(Date())); +assert.equal(false, util.isDate({})); +assert.equal(false, util.isDate([])); +assert.equal(false, util.isDate(new Error)); +assert.equal(false, util.isDate(Object.create(Date.prototype))); + +// isError +assert.equal(true, util.isError(new Error)); +assert.equal(true, util.isError(new TypeError)); +assert.equal(true, util.isError(new SyntaxError)); +assert.equal(true, util.isError(new (context('Error')))); +assert.equal(true, util.isError(new (context('TypeError')))); +assert.equal(true, util.isError(new (context('SyntaxError')))); +assert.equal(false, util.isError({})); +assert.equal(false, util.isError({ name: 'Error', message: '' })); +assert.equal(false, util.isError([])); +assert.equal(true, util.isError(Object.create(Error.prototype))); + +// isObject +assert.ok(util.isObject({}) === true); + +// _extend +assert.deepEqual(util._extend({a:1}), {a:1}); +assert.deepEqual(util._extend({a:1}, []), {a:1}); +assert.deepEqual(util._extend({a:1}, null), {a:1}); +assert.deepEqual(util._extend({a:1}, true), {a:1}); +assert.deepEqual(util._extend({a:1}, false), {a:1}); +assert.deepEqual(util._extend({a:1}, {b:2}), {a:1, b:2}); +assert.deepEqual(util._extend({a:1, b:2}, {b:3}), {a:1, b:3}); diff --git a/node_modules/util/util.js b/node_modules/util/util.js new file mode 100644 index 0000000..e0ea321 --- /dev/null +++ b/node_modules/util/util.js @@ -0,0 +1,586 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var formatRegExp = /%[sdj%]/g; +exports.format = function(f) { + if (!isString(f)) { + var objects = []; + for (var i = 0; i < arguments.length; i++) { + objects.push(inspect(arguments[i])); + } + return objects.join(' '); + } + + var i = 1; + var args = arguments; + var len = args.length; + var str = String(f).replace(formatRegExp, function(x) { + if (x === '%%') return '%'; + if (i >= len) return x; + switch (x) { + case '%s': return String(args[i++]); + case '%d': return Number(args[i++]); + case '%j': + try { + return JSON.stringify(args[i++]); + } catch (_) { + return '[Circular]'; + } + default: + return x; + } + }); + for (var x = args[i]; i < len; x = args[++i]) { + if (isNull(x) || !isObject(x)) { + str += ' ' + x; + } else { + str += ' ' + inspect(x); + } + } + return str; +}; + + +// Mark that a method should not be used. +// Returns a modified function which warns once by default. +// If --no-deprecation is set, then it is a no-op. +exports.deprecate = function(fn, msg) { + // Allow for deprecating things in the process of starting up. + if (isUndefined(global.process)) { + return function() { + return exports.deprecate(fn, msg).apply(this, arguments); + }; + } + + if (process.noDeprecation === true) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (process.throwDeprecation) { + throw new Error(msg); + } else if (process.traceDeprecation) { + console.trace(msg); + } else { + console.error(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +}; + + +var debugs = {}; +var debugEnviron; +exports.debuglog = function(set) { + if (isUndefined(debugEnviron)) + debugEnviron = process.env.NODE_DEBUG || ''; + set = set.toUpperCase(); + if (!debugs[set]) { + if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { + var pid = process.pid; + debugs[set] = function() { + var msg = exports.format.apply(exports, arguments); + console.error('%s %d: %s', set, pid, msg); + }; + } else { + debugs[set] = function() {}; + } + } + return debugs[set]; +}; + + +/** + * Echos the value of a value. Trys to print the value out + * in the best way possible given the different types. + * + * @param {Object} obj The object to print out. + * @param {Object} opts Optional options object that alters the output. + */ +/* legacy: obj, showHidden, depth, colors*/ +function inspect(obj, opts) { + // default options + var ctx = { + seen: [], + stylize: stylizeNoColor + }; + // legacy... + if (arguments.length >= 3) ctx.depth = arguments[2]; + if (arguments.length >= 4) ctx.colors = arguments[3]; + if (isBoolean(opts)) { + // legacy... 
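+ // (a lone boolean here means the old inspect(obj, showHidden, depth, colors)
+ // call style, so it is treated as the showHidden flag)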
+ ctx.showHidden = opts; + } else if (opts) { + // got an "options" object + exports._extend(ctx, opts); + } + // set default options + if (isUndefined(ctx.showHidden)) ctx.showHidden = false; + if (isUndefined(ctx.depth)) ctx.depth = 2; + if (isUndefined(ctx.colors)) ctx.colors = false; + if (isUndefined(ctx.customInspect)) ctx.customInspect = true; + if (ctx.colors) ctx.stylize = stylizeWithColor; + return formatValue(ctx, obj, ctx.depth); +} +exports.inspect = inspect; + + +// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics +inspect.colors = { + 'bold' : [1, 22], + 'italic' : [3, 23], + 'underline' : [4, 24], + 'inverse' : [7, 27], + 'white' : [37, 39], + 'grey' : [90, 39], + 'black' : [30, 39], + 'blue' : [34, 39], + 'cyan' : [36, 39], + 'green' : [32, 39], + 'magenta' : [35, 39], + 'red' : [31, 39], + 'yellow' : [33, 39] +}; + +// Don't use 'blue' not visible on cmd.exe +inspect.styles = { + 'special': 'cyan', + 'number': 'yellow', + 'boolean': 'yellow', + 'undefined': 'grey', + 'null': 'bold', + 'string': 'green', + 'date': 'magenta', + // "name": intentionally not styling + 'regexp': 'red' +}; + + +function stylizeWithColor(str, styleType) { + var style = inspect.styles[styleType]; + + if (style) { + return '\u001b[' + inspect.colors[style][0] + 'm' + str + + '\u001b[' + inspect.colors[style][1] + 'm'; + } else { + return str; + } +} + + +function stylizeNoColor(str, styleType) { + return str; +} + + +function arrayToHash(array) { + var hash = {}; + + array.forEach(function(val, idx) { + hash[val] = true; + }); + + return hash; +} + + +function formatValue(ctx, value, recurseTimes) { + // Provide a hook for user-specified inspect functions. + // Check that value is an object with an inspect function on it + if (ctx.customInspect && + value && + isFunction(value.inspect) && + // Filter out the util module, it's inspect function is special + value.inspect !== exports.inspect && + // Also filter out any prototype objects using the circular check. + !(value.constructor && value.constructor.prototype === value)) { + var ret = value.inspect(recurseTimes, ctx); + if (!isString(ret)) { + ret = formatValue(ctx, ret, recurseTimes); + } + return ret; + } + + // Primitive types cannot have properties + var primitive = formatPrimitive(ctx, value); + if (primitive) { + return primitive; + } + + // Look up the keys of the object. + var keys = Object.keys(value); + var visibleKeys = arrayToHash(keys); + + if (ctx.showHidden) { + keys = Object.getOwnPropertyNames(value); + } + + // IE doesn't make error fields non-enumerable + // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx + if (isError(value) + && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) { + return formatError(value); + } + + // Some type of object without properties can be shortcutted. + if (keys.length === 0) { + if (isFunction(value)) { + var name = value.name ? ': ' + value.name : ''; + return ctx.stylize('[Function' + name + ']', 'special'); + } + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } + if (isDate(value)) { + return ctx.stylize(Date.prototype.toString.call(value), 'date'); + } + if (isError(value)) { + return formatError(value); + } + } + + var base = '', array = false, braces = ['{', '}']; + + // Make Array say that they are Array + if (isArray(value)) { + array = true; + braces = ['[', ']']; + } + + // Make functions say that they are functions + if (isFunction(value)) { + var n = value.name ? 
': ' + value.name : ''; + base = ' [Function' + n + ']'; + } + + // Make RegExps say that they are RegExps + if (isRegExp(value)) { + base = ' ' + RegExp.prototype.toString.call(value); + } + + // Make dates with properties first say the date + if (isDate(value)) { + base = ' ' + Date.prototype.toUTCString.call(value); + } + + // Make error with message first say the error + if (isError(value)) { + base = ' ' + formatError(value); + } + + if (keys.length === 0 && (!array || value.length == 0)) { + return braces[0] + base + braces[1]; + } + + if (recurseTimes < 0) { + if (isRegExp(value)) { + return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); + } else { + return ctx.stylize('[Object]', 'special'); + } + } + + ctx.seen.push(value); + + var output; + if (array) { + output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); + } else { + output = keys.map(function(key) { + return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); + }); + } + + ctx.seen.pop(); + + return reduceToSingleString(output, base, braces); +} + + +function formatPrimitive(ctx, value) { + if (isUndefined(value)) + return ctx.stylize('undefined', 'undefined'); + if (isString(value)) { + var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') + .replace(/'/g, "\\'") + .replace(/\\"/g, '"') + '\''; + return ctx.stylize(simple, 'string'); + } + if (isNumber(value)) + return ctx.stylize('' + value, 'number'); + if (isBoolean(value)) + return ctx.stylize('' + value, 'boolean'); + // For some reason typeof null is "object", so special case here. + if (isNull(value)) + return ctx.stylize('null', 'null'); +} + + +function formatError(value) { + return '[' + Error.prototype.toString.call(value) + ']'; +} + + +function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { + var output = []; + for (var i = 0, l = value.length; i < l; ++i) { + if (hasOwnProperty(value, String(i))) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + String(i), true)); + } else { + output.push(''); + } + } + keys.forEach(function(key) { + if (!key.match(/^\d+$/)) { + output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, + key, true)); + } + }); + return output; +} + + +function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { + var name, str, desc; + desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; + if (desc.get) { + if (desc.set) { + str = ctx.stylize('[Getter/Setter]', 'special'); + } else { + str = ctx.stylize('[Getter]', 'special'); + } + } else { + if (desc.set) { + str = ctx.stylize('[Setter]', 'special'); + } + } + if (!hasOwnProperty(visibleKeys, key)) { + name = '[' + key + ']'; + } + if (!str) { + if (ctx.seen.indexOf(desc.value) < 0) { + if (isNull(recurseTimes)) { + str = formatValue(ctx, desc.value, null); + } else { + str = formatValue(ctx, desc.value, recurseTimes - 1); + } + if (str.indexOf('\n') > -1) { + if (array) { + str = str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n').substr(2); + } else { + str = '\n' + str.split('\n').map(function(line) { + return ' ' + line; + }).join('\n'); + } + } + } else { + str = ctx.stylize('[Circular]', 'special'); + } + } + if (isUndefined(name)) { + if (array && key.match(/^\d+$/)) { + return str; + } + name = JSON.stringify('' + key); + if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { + name = name.substr(1, name.length - 2); + name = ctx.stylize(name, 'name'); + } else { + name = name.replace(/'/g, "\\'") + .replace(/\\"/g, '"') + 
.replace(/(^"|"$)/g, "'"); + name = ctx.stylize(name, 'string'); + } + } + + return name + ': ' + str; +} + + +function reduceToSingleString(output, base, braces) { + var numLinesEst = 0; + var length = output.reduce(function(prev, cur) { + numLinesEst++; + if (cur.indexOf('\n') >= 0) numLinesEst++; + return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; + }, 0); + + if (length > 60) { + return braces[0] + + (base === '' ? '' : base + '\n ') + + ' ' + + output.join(',\n ') + + ' ' + + braces[1]; + } + + return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; +} + + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. +function isArray(ar) { + return Array.isArray(ar); +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return isObject(re) && objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return isObject(d) && objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return isObject(e) && + (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = require('./support/isBuffer'); + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + + +function pad(n) { + return n < 10 ? '0' + n.toString(10) : n.toString(10); +} + + +var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', + 'Oct', 'Nov', 'Dec']; + +// 26 Feb 16:19:34 +function timestamp() { + var d = new Date(); + var time = [pad(d.getHours()), + pad(d.getMinutes()), + pad(d.getSeconds())].join(':'); + return [d.getDate(), months[d.getMonth()], time].join(' '); +} + + +// log is just a thin wrapper to console.log that prepends a timestamp +exports.log = function() { + console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); +}; + + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). 
+ * + * @param {function} ctor Constructor function which needs to inherit the + * prototype. + * @param {function} superCtor Constructor function to inherit prototype from. + */ +exports.inherits = require('inherits'); + +exports._extend = function(origin, add) { + // Don't do anything if add isn't an object + if (!add || !isObject(add)) return origin; + + var keys = Object.keys(add); + var i = keys.length; + while (i--) { + origin[keys[i]] = add[keys[i]]; + } + return origin; +}; + +function hasOwnProperty(obj, prop) { + return Object.prototype.hasOwnProperty.call(obj, prop); +} diff --git a/node_modules/utils-merge/.travis.yml b/node_modules/utils-merge/.travis.yml new file mode 100644 index 0000000..af92b02 --- /dev/null +++ b/node_modules/utils-merge/.travis.yml @@ -0,0 +1,6 @@ +language: "node_js" +node_js: + - "0.4" + - "0.6" + - "0.8" + - "0.10" diff --git a/node_modules/utils-merge/LICENSE b/node_modules/utils-merge/LICENSE new file mode 100644 index 0000000..e33bd10 --- /dev/null +++ b/node_modules/utils-merge/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2013 Jared Hanson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/utils-merge/README.md b/node_modules/utils-merge/README.md new file mode 100644 index 0000000..2f94e9b --- /dev/null +++ b/node_modules/utils-merge/README.md @@ -0,0 +1,34 @@ +# utils-merge + +Merges the properties from a source object into a destination object. + +## Install + + $ npm install utils-merge + +## Usage + +```javascript +var a = { foo: 'bar' } + , b = { bar: 'baz' }; + +merge(a, b); +// => { foo: 'bar', bar: 'baz' } +``` + +## Tests + + $ npm install + $ npm test + +[![Build Status](https://secure.travis-ci.org/jaredhanson/utils-merge.png)](http://travis-ci.org/jaredhanson/utils-merge) + +## Credits + + - [Jared Hanson](http://github.com/jaredhanson) + +## License + +[The MIT License](http://opensource.org/licenses/MIT) + +Copyright (c) 2013 Jared Hanson <[http://jaredhanson.net/](http://jaredhanson.net/)> diff --git a/node_modules/utils-merge/index.js b/node_modules/utils-merge/index.js new file mode 100644 index 0000000..4265c69 --- /dev/null +++ b/node_modules/utils-merge/index.js @@ -0,0 +1,23 @@ +/** + * Merge object b with object a. 
+ * + * var a = { foo: 'bar' } + * , b = { bar: 'baz' }; + * + * merge(a, b); + * // => { foo: 'bar', bar: 'baz' } + * + * @param {Object} a + * @param {Object} b + * @return {Object} + * @api public + */ + +exports = module.exports = function(a, b){ + if (a && b) { + for (var key in b) { + a[key] = b[key]; + } + } + return a; +}; diff --git a/node_modules/utils-merge/package.json b/node_modules/utils-merge/package.json new file mode 100644 index 0000000..a02940d --- /dev/null +++ b/node_modules/utils-merge/package.json @@ -0,0 +1,39 @@ +{ + "name": "utils-merge", + "version": "1.0.0", + "description": "merge() utility function", + "keywords": [ + "util" + ], + "repository": { + "type": "git", + "url": "git://github.com/jaredhanson/utils-merge.git" + }, + "bugs": { + "url": "http://github.com/jaredhanson/utils-merge/issues" + }, + "author": { + "name": "Jared Hanson", + "email": "jaredhanson@gmail.com", + "url": "http://www.jaredhanson.net/" + }, + "licenses": [ + { + "type": "MIT", + "url": "http://www.opensource.org/licenses/MIT" + } + ], + "main": "./index", + "dependencies": { + }, + "devDependencies": { + "mocha": "1.x.x", + "chai": "1.x.x" + }, + "scripts": { + "test": "node_modules/.bin/mocha --reporter spec --require test/bootstrap/node test/*.test.js" + }, + "engines": { + "node": ">= 0.4.0" + } +} diff --git a/node_modules/vary/HISTORY.md b/node_modules/vary/HISTORY.md new file mode 100644 index 0000000..ed68118 --- /dev/null +++ b/node_modules/vary/HISTORY.md @@ -0,0 +1,29 @@ +1.1.0 / 2015-09-29 +================== + + * Only accept valid field names in the `field` argument + - Ensures the resulting string is a valid HTTP header value + +1.0.1 / 2015-07-08 +================== + + * Fix setting empty header from empty `field` + * perf: enable strict mode + * perf: remove argument reassignments + +1.0.0 / 2014-08-10 +================== + + * Accept valid `Vary` header string as `field` + * Add `vary.append` for low-level string manipulation + * Move to `jshttp` orgainzation + +0.1.0 / 2014-06-05 +================== + + * Support array of fields to set + +0.0.0 / 2014-06-04 +================== + + * Initial release diff --git a/node_modules/vary/LICENSE b/node_modules/vary/LICENSE new file mode 100644 index 0000000..142ede3 --- /dev/null +++ b/node_modules/vary/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/vary/README.md b/node_modules/vary/README.md new file mode 100644 index 0000000..5966542 --- /dev/null +++ b/node_modules/vary/README.md @@ -0,0 +1,91 @@ +# vary + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Manipulate the HTTP Vary header + +## Installation + +```sh +$ npm install vary +``` + +## API + +```js +var vary = require('vary') +``` + +### vary(res, field) + +Adds the given header `field` to the `Vary` response header of `res`. +This can be a string of a single field, a string of a valid `Vary` +header, or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. + +```js +// Append "Origin" to the Vary header of the response +vary(res, 'Origin') +``` + +### vary.append(header, field) + +Adds the given header `field` to the `Vary` response header string `header`. +This can be a string of a single field, a string of a valid `Vary` header, +or an array of multiple fields. + +This will append the header if not already listed, otherwise leaves +it listed in the current location. The new header string is returned. + +```js +// Get header string appending "Origin" to "Accept, User-Agent" +vary.append('Accept, User-Agent', 'Origin') +``` + +## Examples + +### Updating the Vary header when content is based on it + +```js +var http = require('http') +var vary = require('vary') + +http.createServer(function onRequest(req, res) { + // about to user-agent sniff + vary(res, 'User-Agent') + + var ua = req.headers['user-agent'] || '' + var isMobile = /mobi|android|touch|mini/i.test(ua) + + // serve site, depending on isMobile + res.setHeader('Content-Type', 'text/html') + res.end('You are (probably) ' + (isMobile ? '' : 'not ') + 'a mobile user') +}) +``` + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/vary.svg +[npm-url]: https://npmjs.org/package/vary +[node-version-image]: https://img.shields.io/node/v/vary.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/jshttp/vary/master.svg +[travis-url]: https://travis-ci.org/jshttp/vary +[coveralls-image]: https://img.shields.io/coveralls/jshttp/vary/master.svg +[coveralls-url]: https://coveralls.io/r/jshttp/vary +[downloads-image]: https://img.shields.io/npm/dm/vary.svg +[downloads-url]: https://npmjs.org/package/vary diff --git a/node_modules/vary/index.js b/node_modules/vary/index.js new file mode 100644 index 0000000..21dbaf1 --- /dev/null +++ b/node_modules/vary/index.js @@ -0,0 +1,124 @@ +/*! + * vary + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + */ + +module.exports = vary; +module.exports.append = append; + +/** + * RegExp to match field-name in RFC 7230 sec 3.2 + * + * field-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + */ + +var fieldNameRegExp = /^[!#$%&'\*\+\-\.\^_`\|~0-9A-Za-z]+$/ + +/** + * Append a field to a vary header. 
+ * + * @param {String} header + * @param {String|Array} field + * @return {String} + * @api public + */ + +function append(header, field) { + if (typeof header !== 'string') { + throw new TypeError('header argument is required'); + } + + if (!field) { + throw new TypeError('field argument is required'); + } + + // get fields array + var fields = !Array.isArray(field) + ? parse(String(field)) + : field; + + // assert on invalid field names + for (var i = 0; i < fields.length; i++) { + if (!fieldNameRegExp.test(fields[i])) { + throw new TypeError('field argument contains an invalid header name'); + } + } + + // existing, unspecified vary + if (header === '*') { + return header; + } + + // enumerate current values + var val = header; + var vals = parse(header.toLowerCase()); + + // unspecified vary + if (fields.indexOf('*') !== -1 || vals.indexOf('*') !== -1) { + return '*'; + } + + for (var i = 0; i < fields.length; i++) { + var fld = fields[i].toLowerCase(); + + // append value (case-preserving) + if (vals.indexOf(fld) === -1) { + vals.push(fld); + val = val + ? val + ', ' + fields[i] + : fields[i]; + } + } + + return val; +} + +/** + * Parse a vary header into an array. + * + * @param {String} header + * @return {Array} + * @api private + */ + +function parse(header) { + return header.trim().split(/ *, */); +} + +/** + * Mark that a request is varied on a header field. + * + * @param {Object} res + * @param {String|Array} field + * @api public + */ + +function vary(res, field) { + if (!res || !res.getHeader || !res.setHeader) { + // quack quack + throw new TypeError('res argument is required'); + } + + // get existing header + var val = res.getHeader('Vary') || '' + var header = Array.isArray(val) + ? val.join(', ') + : String(val); + + // set new header + if ((val = append(header, field))) { + res.setHeader('Vary', val); + } +} diff --git a/node_modules/vary/package.json b/node_modules/vary/package.json new file mode 100644 index 0000000..10e9188 --- /dev/null +++ b/node_modules/vary/package.json @@ -0,0 +1,32 @@ +{ + "name": "vary", + "description": "Manipulate the HTTP Vary header", + "version": "1.1.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "http", + "res", + "vary" + ], + "repository": "jshttp/vary", + "devDependencies": { + "istanbul": "0.3.21", + "mocha": "2.3.3", + "supertest": "1.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/" + } +} diff --git a/node_modules/verror/.gitmodules b/node_modules/verror/.gitmodules new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/verror/.npmignore b/node_modules/verror/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/verror/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/verror/LICENSE b/node_modules/verror/LICENSE new file mode 100644 index 0000000..cbc0bb3 --- /dev/null +++ b/node_modules/verror/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012, Joyent, Inc. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/node_modules/verror/Makefile b/node_modules/verror/Makefile new file mode 100644 index 0000000..00faa97 --- /dev/null +++ b/node_modules/verror/Makefile @@ -0,0 +1,35 @@ +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile: top-level Makefile +# +# This Makefile contains only repo-specific logic and uses included makefiles +# to supply common targets (javascriptlint, jsstyle, restdown, etc.), which are +# used by other repos as well. +# + +# +# Tools +# +NPM = npm + +# +# Files +# +JS_FILES := $(shell find lib examples tests -name '*.js') +JSL_FILES_NODE = $(JS_FILES) +JSSTYLE_FILES = $(JS_FILES) +JSL_CONF_NODE = jsl.node.conf + +.PHONY: all +all: + $(NPM) install + +.PHONY: test +test: + node tests/tst.inherit.js + node tests/tst.verror.js + node tests/tst.werror.js + @echo all tests passed + +include ./Makefile.targ diff --git a/node_modules/verror/Makefile.targ b/node_modules/verror/Makefile.targ new file mode 100644 index 0000000..2a64fe7 --- /dev/null +++ b/node_modules/verror/Makefile.targ @@ -0,0 +1,285 @@ +# -*- mode: makefile -*- +# +# Copyright (c) 2012, Joyent, Inc. All rights reserved. +# +# Makefile.targ: common targets. +# +# NOTE: This makefile comes from the "eng" repo. It's designed to be dropped +# into other repos as-is without requiring any modifications. If you find +# yourself changing this file, you should instead update the original copy in +# eng.git and then update your repo to use the new version. +# +# This Makefile defines several useful targets and rules. You can use it by +# including it from a Makefile that specifies some of the variables below. +# +# Targets defined in this Makefile: +# +# check Checks JavaScript files for lint and style +# Checks bash scripts for syntax +# Checks SMF manifests for validity against the SMF DTD +# +# clean Removes built files +# +# docs Builds restdown documentation in docs/ +# +# prepush Depends on "check" and "test" +# +# test Does nothing (you should override this) +# +# xref Generates cscope (source cross-reference index) +# +# For details on what these targets are supposed to do, see the Joyent +# Engineering Guide. +# +# To make use of these targets, you'll need to set some of these variables. Any +# variables left unset will simply not be used. +# +# BASH_FILES Bash scripts to check for syntax +# (paths relative to top-level Makefile) +# +# CLEAN_FILES Files to remove as part of the "clean" target. 
Note +# that files generated by targets in this Makefile are +# automatically included in CLEAN_FILES. These include +# restdown-generated HTML and JSON files. +# +# DOC_FILES Restdown (documentation source) files. These are +# assumed to be contained in "docs/", and must NOT +# contain the "docs/" prefix. +# +# JSL_CONF_NODE Specify JavaScriptLint configuration files +# JSL_CONF_WEB (paths relative to top-level Makefile) +# +# Node.js and Web configuration files are separate +# because you'll usually want different global variable +# configurations. If no file is specified, none is given +# to jsl, which causes it to use a default configuration, +# which probably isn't what you want. +# +# JSL_FILES_NODE JavaScript files to check with Node config file. +# JSL_FILES_WEB JavaScript files to check with Web config file. +# +# You can also override these variables: +# +# BASH Path to bash (default: bash) +# +# CSCOPE_DIRS Directories to search for source files for the cscope +# index. (default: ".") +# +# JSL Path to JavaScriptLint (default: "jsl") +# +# JSL_FLAGS_NODE Additional flags to pass through to JSL +# JSL_FLAGS_WEB +# JSL_FLAGS +# +# JSSTYLE Path to jsstyle (default: jsstyle) +# +# JSSTYLE_FLAGS Additional flags to pass through to jsstyle +# + +# +# Defaults for the various tools we use. +# +BASH ?= bash +BASHSTYLE ?= tools/bashstyle +CP ?= cp +CSCOPE ?= cscope +CSCOPE_DIRS ?= . +JSL ?= jsl +JSSTYLE ?= jsstyle +MKDIR ?= mkdir -p +MV ?= mv +RESTDOWN_FLAGS ?= +RMTREE ?= rm -rf +JSL_FLAGS ?= --nologo --nosummary + +ifeq ($(shell uname -s),SunOS) + TAR ?= gtar +else + TAR ?= tar +endif + + +# +# Defaults for other fixed values. +# +BUILD = build +DISTCLEAN_FILES += $(BUILD) +DOC_BUILD = $(BUILD)/docs/public + +# +# Configure JSL_FLAGS_{NODE,WEB} based on JSL_CONF_{NODE,WEB}. +# +ifneq ($(origin JSL_CONF_NODE), undefined) + JSL_FLAGS_NODE += --conf=$(JSL_CONF_NODE) +endif + +ifneq ($(origin JSL_CONF_WEB), undefined) + JSL_FLAGS_WEB += --conf=$(JSL_CONF_WEB) +endif + +# +# Targets. For descriptions on what these are supposed to do, see the +# Joyent Engineering Guide. +# + +# +# Instruct make to keep around temporary files. We have rules below that +# automatically update git submodules as needed, but they employ a deps/*/.git +# temporary file. Without this directive, make tries to remove these .git +# directories after the build has completed. +# +.SECONDARY: $($(wildcard deps/*):%=%/.git) + +# +# This rule enables other rules that use files from a git submodule to have +# those files depend on deps/module/.git and have "make" automatically check +# out the submodule as needed. +# +deps/%/.git: + git submodule update --init deps/$* + +# +# These recipes make heavy use of dynamically-created phony targets. The parent +# Makefile defines a list of input files like BASH_FILES. We then say that each +# of these files depends on a fake target called filename.bashchk, and then we +# define a pattern rule for those targets that runs bash in check-syntax-only +# mode. This mechanism has the nice properties that if you specify zero files, +# the rule becomes a noop (unlike a single rule to check all bash files, which +# would invoke bash with zero files), and you can check individual files from +# the command line with "make filename.bashchk". 
+# +.PHONY: check-bash +check-bash: $(BASH_FILES:%=%.bashchk) $(BASH_FILES:%=%.bashstyle) + +%.bashchk: % + $(BASH) -n $^ + +%.bashstyle: % + $(BASHSTYLE) $^ + +.PHONY: check-jsl check-jsl-node check-jsl-web +check-jsl: check-jsl-node check-jsl-web + +check-jsl-node: $(JSL_FILES_NODE:%=%.jslnodechk) + +check-jsl-web: $(JSL_FILES_WEB:%=%.jslwebchk) + +%.jslnodechk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_NODE) $< + +%.jslwebchk: % $(JSL_EXEC) + $(JSL) $(JSL_FLAGS) $(JSL_FLAGS_WEB) $< + +.PHONY: check-jsstyle +check-jsstyle: $(JSSTYLE_FILES:%=%.jsstylechk) + +%.jsstylechk: % $(JSSTYLE_EXEC) + $(JSSTYLE) $(JSSTYLE_FLAGS) $< + +.PHONY: check +check: check-jsl check-jsstyle check-bash + @echo check ok + +.PHONY: clean +clean:: + -$(RMTREE) $(CLEAN_FILES) + +.PHONY: distclean +distclean:: clean + -$(RMTREE) $(DISTCLEAN_FILES) + +CSCOPE_FILES = cscope.in.out cscope.out cscope.po.out +CLEAN_FILES += $(CSCOPE_FILES) + +.PHONY: xref +xref: cscope.files + $(CSCOPE) -bqR + +.PHONY: cscope.files +cscope.files: + find $(CSCOPE_DIRS) -name '*.c' -o -name '*.h' -o -name '*.cc' \ + -o -name '*.js' -o -name '*.s' -o -name '*.cpp' > $@ + +# +# The "docs" target is complicated because we do several things here: +# +# (1) Use restdown to build HTML and JSON files from each of DOC_FILES. +# +# (2) Copy these files into $(DOC_BUILD) (build/docs/public), which +# functions as a complete copy of the documentation that could be +# mirrored or served over HTTP. +# +# (3) Then copy any directories and media from docs/media into +# $(DOC_BUILD)/media. This allows projects to include their own media, +# including files that will override same-named files provided by +# restdown. +# +# Step (3) is the surprisingly complex part: in order to do this, we need to +# identify the subdirectories in docs/media, recreate them in +# $(DOC_BUILD)/media, then do the same with the files. +# +DOC_MEDIA_DIRS := $(shell find docs/media -type d 2>/dev/null | grep -v "^docs/media$$") +DOC_MEDIA_DIRS := $(DOC_MEDIA_DIRS:docs/media/%=%) +DOC_MEDIA_DIRS_BUILD := $(DOC_MEDIA_DIRS:%=$(DOC_BUILD)/media/%) + +DOC_MEDIA_FILES := $(shell find docs/media -type f 2>/dev/null) +DOC_MEDIA_FILES := $(DOC_MEDIA_FILES:docs/media/%=%) +DOC_MEDIA_FILES_BUILD := $(DOC_MEDIA_FILES:%=$(DOC_BUILD)/media/%) + +# +# Like the other targets, "docs" just depends on the final files we want to +# create in $(DOC_BUILD), leveraging other targets and recipes to define how +# to get there. +# +.PHONY: docs +docs: \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.html) \ + $(DOC_FILES:%.restdown=$(DOC_BUILD)/%.json) \ + $(DOC_MEDIA_FILES_BUILD) + +# +# We keep the intermediate files so that the next build can see whether the +# files in DOC_BUILD are up to date. +# +.PRECIOUS: \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%json) + +# +# We do clean those intermediate files, as well as all of DOC_BUILD. +# +CLEAN_FILES += \ + $(DOC_BUILD) \ + $(DOC_FILES:%.restdown=docs/%.html) \ + $(DOC_FILES:%.restdown=docs/%.json) + +# +# Before installing the files, we must make sure the directories exist. The | +# syntax tells make that the dependency need only exist, not be up to date. +# Otherwise, it might try to rebuild spuriously because the directory itself +# appears out of date. 
+# +$(DOC_MEDIA_FILES_BUILD): | $(DOC_MEDIA_DIRS_BUILD) + +$(DOC_BUILD)/%: docs/% | $(DOC_BUILD) + $(CP) $< $@ + +docs/%.json docs/%.html: docs/%.restdown | $(DOC_BUILD) $(RESTDOWN_EXEC) + $(RESTDOWN) $(RESTDOWN_FLAGS) -m $(DOC_BUILD) $< + +$(DOC_BUILD): + $(MKDIR) $@ + +$(DOC_MEDIA_DIRS_BUILD): + $(MKDIR) $@ + +# +# The default "test" target does nothing. This should usually be overridden by +# the parent Makefile. It's included here so we can define "prepush" without +# requiring the repo to define "test". +# +.PHONY: test +test: + +.PHONY: prepush +prepush: check test diff --git a/node_modules/verror/README.md b/node_modules/verror/README.md new file mode 100644 index 0000000..e9b5497 --- /dev/null +++ b/node_modules/verror/README.md @@ -0,0 +1,120 @@ +# verror: richer JavaScript errors + +This module provides two classes: VError, for accretive errors, and WError, for +wrapping errors. Both support printf-style error messages using extsprintf. + +## Printf-style errors + +At the most basic level, VError is just like JavaScript's Error class, but with +printf-style arguments: + + var verror = require('verror'); + + var opname = 'read'; + var err = new verror.VError('"%s" operation failed', opname); + console.log(err.message); + console.log(err.stack); + +This prints: + + "read" operation failed + "read" operation failed + at Object. (/Users/dap/node-verror/examples/varargs.js:4:11) + at Module._compile (module.js:449:26) + at Object.Module._extensions..js (module.js:467:10) + at Module.load (module.js:356:32) + at Function.Module._load (module.js:312:12) + at Module.runMain (module.js:492:10) + at process.startup.processNextTick.process._tickCallback (node.js:244:9) + + +## VError for accretive error messages + +More interestingly, you can use VError to build up an error describing what +happened at various levels in the stack. For example, suppose you have a +request handler that stats a file and fails if it doesn't exist: + + var fs = require('fs'); + var verror = require('verror'); + + function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + }); + } + + function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) { + /* Annotate the "checkFile" error with what we were doing. */ + return (callback(new verror.VError(err, 'request failed'))); + } + + /* ... */ + }); + } + + handleRequest('/nonexistent', function (err) { + if (err) + console.log(err.message); + /* ... */ + }); + +Since the file "/nonexistent" doesn't exist, this prints out: + + request failed: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + +The idea here is that the lowest level (Node's "fs.stat" function) generates an +arbitrary error, and each higher level (request handler and stat callback) +creates a new VError that annotates the previous error with what it was doing, +so that the result is a clear message explaining what failed at each level. 
+ +This plays nicely with extsprintf's "%r" specifier, which prints out a +Java-style stacktrace with the whole chain of exceptions: + + EXCEPTION: VError: request failed: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + at /Users/dap/work/node-verror/examples/levels.js:21:21 + at /Users/dap/work/node-verror/examples/levels.js:9:12 + at Object.oncomplete (fs.js:297:15) + Caused by: EXCEPTION: VError: failed to check "/nonexistent": ENOENT, stat '/nonexistent' + at /Users/dap/work/node-verror/examples/levels.js:9:21 + at Object.oncomplete (fs.js:297:15) + Caused by: EXCEPTION: Error: Error: ENOENT, stat '/nonexistent' + + +## WError for wrapped errors + +Sometimes you don't want an Error's "message" field to include the details of +all of the low-level errors, but you still want to be able to get at them +programmatically. For example, in an HTTP server, you probably don't want to +spew all of the low-level errors back to the client, but you do want to include +them in the audit log entry for the request. In that case, you can use a +WError, which is created exactly like VError (and also supports both +printf-style arguments and an optional cause), but the resulting "message" only +contains the top-level error. It's also more verbose, including the class +associated with each error in the cause chain. Using the same example above, +but replacing the VError in handleRequest with WError, we get this output: + + request failed + +That's what we wanted -- just a high-level summary for the client. But we can +get the object's toString() for the full details: + + WError: request failed; caused by WError: failed to check "/nonexistent"; + caused by Error: ENOENT, stat '/nonexistent' + +# Contributing + +Contributions welcome. Code should be "make check" clean. To run "make check", +you'll need these tools: + +* https://github.com/davepacheco/jsstyle +* https://github.com/davepacheco/javascriptlint + +If you're changing something non-trivial or user-facing, you may want to submit +an issue first. diff --git a/node_modules/verror/examples/levels-verror.js b/node_modules/verror/examples/levels-verror.js new file mode 100644 index 0000000..53a7022 --- /dev/null +++ b/node_modules/verror/examples/levels-verror.js @@ -0,0 +1,36 @@ +var extsprintf = require('extsprintf'); +var fs = require('fs'); +var verror = require('../lib/verror'); + +function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + return (callback()); + }); +} + +function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) + /* Annotate the "checkFile" error. */ + return (callback(new verror.VError( + err, 'request failed'))); + + /* ... */ + return (callback()); + }); +} + +handleRequest('/nonexistent', function (err) { + if (err) { + console.log(err.message); + console.log(extsprintf.sprintf('%r', err)); + } + + /* ... 
*/ +}); diff --git a/node_modules/verror/examples/levels-werror.js b/node_modules/verror/examples/levels-werror.js new file mode 100644 index 0000000..7e57075 --- /dev/null +++ b/node_modules/verror/examples/levels-werror.js @@ -0,0 +1,34 @@ +var extsprintf = require('extsprintf'); +var fs = require('fs'); +var verror = require('../lib/verror'); + +function checkFile(filename, callback) { + fs.stat(filename, function (err) { + if (err) + /* Annotate the "stat" error with what we were doing. */ + return (callback(new verror.VError(err, + 'failed to check "%s"', filename))); + + /* ... */ + return (callback()); + }); +} + +function handleRequest(filename, callback) { + checkFile('/nonexistent', function (err) { + if (err) + /* Wrap the "checkFile" error. */ + return (callback(new verror.WError( + err, 'request failed'))); + + /* ... */ + return (callback()); + }); +} + +handleRequest('/nonexistent', function (err) { + if (err) { + console.log(err.message); + console.log(err.toString()); + } +}); diff --git a/node_modules/verror/examples/varargs.js b/node_modules/verror/examples/varargs.js new file mode 100644 index 0000000..2e14ee4 --- /dev/null +++ b/node_modules/verror/examples/varargs.js @@ -0,0 +1,6 @@ +var verror = require('../lib/verror'); + +var opname = 'read'; +var err = new verror.VError('"%s" operation failed', opname); +console.log(err.message); +console.log(err.stack); diff --git a/node_modules/verror/examples/verror.js b/node_modules/verror/examples/verror.js new file mode 100644 index 0000000..887b181 --- /dev/null +++ b/node_modules/verror/examples/verror.js @@ -0,0 +1,13 @@ +var mod_fs = require('fs'); +var mod_verror = require('../lib/verror'); + +var filename = '/nonexistent'; + +mod_fs.stat(filename, function (err1) { + var err2 = new mod_verror.VError(err1, 'failed to stat "%s"', filename); + + /* The following would normally be higher up the stack. */ + var err3 = new mod_verror.VError(err2, 'failed to handle request'); + console.log(err3.message); + console.log(err3.stack); +}); diff --git a/node_modules/verror/examples/werror.js b/node_modules/verror/examples/werror.js new file mode 100644 index 0000000..f55e532 --- /dev/null +++ b/node_modules/verror/examples/werror.js @@ -0,0 +1,14 @@ +var mod_fs = require('fs'); +var mod_verror = require('../lib/verror'); + +var filename = '/nonexistent'; + +mod_fs.stat(filename, function (err1) { + var err2 = new mod_verror.WError(err1, 'failed to stat "%s"', filename); + + /* The following would normally be higher up the stack. */ + var err3 = new mod_verror.WError(err2, 'failed to handle request'); + console.log(err3.message); + console.log(err3.toString()); + console.log(err3.stack); +}); diff --git a/node_modules/verror/jsl.node.conf b/node_modules/verror/jsl.node.conf new file mode 100644 index 0000000..bd724a2 --- /dev/null +++ b/node_modules/verror/jsl.node.conf @@ -0,0 +1,139 @@ +# +# Configuration File for JavaScript Lint +# +# This configuration file can be used to lint a collection of scripts, or to enable +# or disable warnings for scripts that are linted via the command line. +# + +### Warnings +# Enable or disable warnings based on requirements. +# Use "+WarningName" to display or "-WarningName" to suppress. 
+# ++ambiguous_else_stmt # the else statement could be matched with one of multiple if statements (use curly braces to indicate intent ++ambiguous_nested_stmt # block statements containing block statements should use curly braces to resolve ambiguity ++ambiguous_newline # unexpected end of line; it is ambiguous whether these lines are part of the same statement ++anon_no_return_value # anonymous function does not always return value ++assign_to_function_call # assignment to a function call +-block_without_braces # block statement without curly braces ++comma_separated_stmts # multiple statements separated by commas (use semicolons?) ++comparison_type_conv # comparisons against null, 0, true, false, or an empty string allowing implicit type conversion (use === or !==) ++default_not_at_end # the default case is not at the end of the switch statement ++dup_option_explicit # duplicate "option explicit" control comment ++duplicate_case_in_switch # duplicate case in switch statement ++duplicate_formal # duplicate formal argument {name} ++empty_statement # empty statement or extra semicolon ++identifier_hides_another # identifer {name} hides an identifier in a parent scope +-inc_dec_within_stmt # increment (++) and decrement (--) operators used as part of greater statement ++incorrect_version # Expected /*jsl:content-type*/ control comment. The script was parsed with the wrong version. ++invalid_fallthru # unexpected "fallthru" control comment ++invalid_pass # unexpected "pass" control comment ++jsl_cc_not_understood # couldn't understand control comment using /*jsl:keyword*/ syntax ++leading_decimal_point # leading decimal point may indicate a number or an object member ++legacy_cc_not_understood # couldn't understand control comment using /*@keyword@*/ syntax ++meaningless_block # meaningless block; curly braces have no impact ++mismatch_ctrl_comments # mismatched control comment; "ignore" and "end" control comments must have a one-to-one correspondence ++misplaced_regex # regular expressions should be preceded by a left parenthesis, assignment, colon, or comma ++missing_break # missing break statement ++missing_break_for_last_case # missing break statement for last case in switch ++missing_default_case # missing default case in switch statement ++missing_option_explicit # the "option explicit" control comment is missing ++missing_semicolon # missing semicolon ++missing_semicolon_for_lambda # missing semicolon for lambda assignment ++multiple_plus_minus # unknown order of operations for successive plus (e.g. x+++y) or minus (e.g. 
x---y) signs ++nested_comment # nested comment ++no_return_value # function {name} does not always return a value ++octal_number # leading zeros make an octal number ++parseint_missing_radix # parseInt missing radix parameter ++partial_option_explicit # the "option explicit" control comment, if used, must be in the first script tag ++redeclared_var # redeclaration of {name} ++trailing_comma_in_array # extra comma is not recommended in array initializers ++trailing_decimal_point # trailing decimal point may indicate a number or an object member ++undeclared_identifier # undeclared identifier: {name} ++unreachable_code # unreachable code +-unreferenced_argument # argument declared but never referenced: {name} +-unreferenced_function # function is declared but never referenced: {name} ++unreferenced_variable # variable is declared but never referenced: {name} ++unsupported_version # JavaScript {version} is not supported ++use_of_label # use of label ++useless_assign # useless assignment ++useless_comparison # useless comparison; comparing identical expressions +-useless_quotes # the quotation marks are unnecessary ++useless_void # use of the void type may be unnecessary (void is always undefined) ++var_hides_arg # variable {name} hides argument ++want_assign_or_call # expected an assignment or function call ++with_statement # with statement hides undeclared variables; use temporary variable instead + + +### Output format +# Customize the format of the error message. +# __FILE__ indicates current file path +# __FILENAME__ indicates current file name +# __LINE__ indicates current line +# __COL__ indicates current column +# __ERROR__ indicates error message (__ERROR_PREFIX__: __ERROR_MSG__) +# __ERROR_NAME__ indicates error name (used in configuration file) +# __ERROR_PREFIX__ indicates error prefix +# __ERROR_MSG__ indicates error message +# +# For machine-friendly output, the output format can be prefixed with +# "encode:". If specified, all items will be encoded with C-slashes. +# +# Visual Studio syntax (default): ++output-format __FILE__(__LINE__): __ERROR__ +# Alternative syntax: +#+output-format __FILE__:__LINE__: __ERROR__ + + +### Context +# Show the in-line position of the error. +# Use "+context" to display or "-context" to suppress. +# ++context + + +### Control Comments +# Both JavaScript Lint and the JScript interpreter confuse each other with the syntax for +# the /*@keyword@*/ control comments and JScript conditional comments. (The latter is +# enabled in JScript with @cc_on@). The /*jsl:keyword*/ syntax is preferred for this reason, +# although legacy control comments are enabled by default for backward compatibility. +# +-legacy_control_comments + + +### Defining identifiers +# By default, "option explicit" is enabled on a per-file basis. +# To enable this for all files, use "+always_use_option_explicit" +-always_use_option_explicit + +# Define certain identifiers of which the lint is not aware. +# (Use this in conjunction with the "undeclared identifier" warning.) 
+# +# Common uses for webpages might be: ++define __dirname ++define clearInterval ++define clearTimeout ++define console ++define exports ++define global ++define process ++define require ++define setInterval ++define setTimeout ++define Buffer ++define JSON ++define Math ++define __dirname ++define __filename + +### JavaScript Version +# To change the default JavaScript version: +#+default-type text/javascript;version=1.5 +#+default-type text/javascript;e4x=1 + +### Files +# Specify which files to lint +# Use "+recurse" to enable recursion (disabled by default). +# To add a set of files, use "+process FileName", "+process Folder\Path\*.js", +# or "+process Folder\Path\*.htm". +# + diff --git a/node_modules/verror/lib/verror.js b/node_modules/verror/lib/verror.js new file mode 100644 index 0000000..9ca087b --- /dev/null +++ b/node_modules/verror/lib/verror.js @@ -0,0 +1,157 @@ +/* + * verror.js: richer JavaScript errors + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_extsprintf = require('extsprintf'); + +/* + * Public interface + */ +exports.VError = VError; +exports.WError = WError; +exports.MultiError = MultiError; + +/* + * Like JavaScript's built-in Error class, but supports a "cause" argument and a + * printf-style message. The cause argument can be null. + */ +function VError(options) +{ + var args, causedBy, ctor, tailmsg; + + if (options instanceof Error || typeof (options) === 'object') { + args = Array.prototype.slice.call(arguments, 1); + } else { + args = Array.prototype.slice.call(arguments, 0); + options = undefined; + } + + tailmsg = args.length > 0 ? + mod_extsprintf.sprintf.apply(null, args) : ''; + this.jse_shortmsg = tailmsg; + this.jse_summary = tailmsg; + + if (options) { + causedBy = options.cause; + + if (!causedBy || !(options.cause instanceof Error)) + causedBy = options; + + if (causedBy && (causedBy instanceof Error)) { + this.jse_cause = causedBy; + this.jse_summary += ': ' + causedBy.message; + } + } + + this.message = this.jse_summary; + Error.call(this, this.jse_summary); + + if (Error.captureStackTrace) { + ctor = options ? options.constructorOpt : undefined; + ctor = ctor || arguments.callee; + Error.captureStackTrace(this, ctor); + } +} + +mod_util.inherits(VError, Error); +VError.prototype.name = 'VError'; + +VError.prototype.toString = function ve_toString() +{ + var str = (this.hasOwnProperty('name') && this.name || + this.constructor.name || this.constructor.prototype.name); + if (this.message) + str += ': ' + this.message; + + return (str); +}; + +VError.prototype.cause = function ve_cause() +{ + return (this.jse_cause); +}; + + +/* + * Represents a collection of errors for the purpose of consumers that generally + * only deal with one error. Callers can extract the individual errors + * contained in this object, but may also just treat it as a normal single + * error, in which case a summary message will be printed. + */ +function MultiError(errors) +{ + mod_assert.ok(errors.length > 0); + this.ase_errors = errors; + + VError.call(this, errors[0], 'first of %d error%s', + errors.length, errors.length == 1 ? '' : 's'); +} + +mod_util.inherits(MultiError, VError); + + + +/* + * Like JavaScript's built-in Error class, but supports a "cause" argument which + * is wrapped, not "folded in" as with VError. Accepts a printf-style message. + * The cause argument can be null. 
+ */ +function WError(options) +{ + Error.call(this); + + var args, cause, ctor; + if (typeof (options) === 'object') { + args = Array.prototype.slice.call(arguments, 1); + } else { + args = Array.prototype.slice.call(arguments, 0); + options = undefined; + } + + if (args.length > 0) { + this.message = mod_extsprintf.sprintf.apply(null, args); + } else { + this.message = ''; + } + + if (options) { + if (options instanceof Error) { + cause = options; + } else { + cause = options.cause; + ctor = options.constructorOpt; + } + } + + Error.captureStackTrace(this, ctor || this.constructor); + if (cause) + this.cause(cause); + +} + +mod_util.inherits(WError, Error); +WError.prototype.name = 'WError'; + + +WError.prototype.toString = function we_toString() +{ + var str = (this.hasOwnProperty('name') && this.name || + this.constructor.name || this.constructor.prototype.name); + if (this.message) + str += ': ' + this.message; + if (this.we_cause && this.we_cause.message) + str += '; caused by ' + this.we_cause.toString(); + + return (str); +}; + +WError.prototype.cause = function we_cause(c) +{ + if (c instanceof Error) + this.we_cause = c; + + return (this.we_cause); +}; diff --git a/node_modules/verror/package.json b/node_modules/verror/package.json new file mode 100644 index 0000000..c0b9090 --- /dev/null +++ b/node_modules/verror/package.json @@ -0,0 +1,21 @@ +{ + "name": "verror", + "version": "1.3.6", + "description": "richer JavaScript errors", + "main": "./lib/verror.js", + + "repository": { + "type": "git", + "url": "git://github.com/davepacheco/node-verror.git" + }, + + "dependencies": { + "extsprintf": "1.0.2" + }, + + "engines": [ "node >=0.6.0" ], + + "scripts": { + "test": "make test" + } +} diff --git a/node_modules/verror/tests/tst.inherit.js b/node_modules/verror/tests/tst.inherit.js new file mode 100644 index 0000000..0f0d70b --- /dev/null +++ b/node_modules/verror/tests/tst.inherit.js @@ -0,0 +1,100 @@ +/* + * tst.inherit.js: test that inheriting from VError and WError work as expected. + */ + +var mod_assert = require('assert'); +var mod_util = require('util'); + +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; +var err, suberr; + +function VErrorChild() +{ + VError.apply(this, Array.prototype.slice.call(arguments)); +} + +mod_util.inherits(VErrorChild, VError); +VErrorChild.prototype.name = 'VErrorChild'; + + +function WErrorChild() +{ + WError.apply(this, Array.prototype.slice.call(arguments)); +} + +mod_util.inherits(WErrorChild, WError); +WErrorChild.prototype.name = 'WErrorChild'; + + +suberr = new Error('root cause'); +err = new VErrorChild(suberr, 'top'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof VError); +mod_assert.ok(err instanceof VErrorChild); +mod_assert.equal(err.cause(), suberr); +mod_assert.equal(err.message, 'top: root cause'); +mod_assert.equal(err.toString(), 'VErrorChild: top: root cause'); +mod_assert.equal(err.stack.split('\n')[0], 'VErrorChild: top: root cause'); + +suberr = new Error('root cause'); +err = new WErrorChild(suberr, 'top'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof WError); +mod_assert.ok(err instanceof WErrorChild); +mod_assert.equal(err.cause(), suberr); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), + 'WErrorChild: top; caused by Error: root cause'); +mod_assert.equal(err.stack.split('\n')[0], + 'WErrorChild: top; caused by Error: root cause'); + + +// Test that `.toString()` uses the ctor name. I.e. 
setting +// `.prototype.name` isn't necessary. +function VErrorChildNoName() { + VError.apply(this, Array.prototype.slice.call(arguments)); +} +mod_util.inherits(VErrorChildNoName, VError); +err = new VErrorChildNoName('top'); +mod_assert.equal(err.toString(), 'VErrorChildNoName: top'); + +function WErrorChildNoName() { + WError.apply(this, Array.prototype.slice.call(arguments)); +} +mod_util.inherits(WErrorChildNoName, WError); +err = new WErrorChildNoName('top'); +mod_assert.equal(err.toString(), 'WErrorChildNoName: top'); + + +// Test that `.prototype.name` can be used for the `.toString()` +// when the ctor is anonymous. +var VErrorChildAnon = function () { + VError.apply(this, Array.prototype.slice.call(arguments)); +}; +mod_util.inherits(VErrorChildAnon, VError); +VErrorChildAnon.prototype.name = 'VErrorChildAnon'; +err = new VErrorChildAnon('top'); +mod_assert.equal(err.toString(), 'VErrorChildAnon: top'); + +var WErrorChildAnon = function () { + WError.apply(this, Array.prototype.slice.call(arguments)); +}; +mod_util.inherits(WErrorChildAnon, WError); +WErrorChildAnon.prototype.name = 'WErrorChildAnon'; +err = new WErrorChildAnon('top'); +mod_assert.equal(err.toString(), 'WErrorChildAnon: top'); + + +// Test get appropriate exception name in `.toString()` when reconstituting +// an error instance a la: +// https://github.com/mcavage/node-fast/blob/master/lib/client.js#L215 +err = new VError('top'); +err.name = 'CustomNameError'; +mod_assert.equal(err.toString(), 'CustomNameError: top'); + +err = new WError('top'); +err.name = 'CustomNameError'; +mod_assert.equal(err.toString(), 'CustomNameError: top'); diff --git a/node_modules/verror/tests/tst.verror.js b/node_modules/verror/tests/tst.verror.js new file mode 100644 index 0000000..ee937cd --- /dev/null +++ b/node_modules/verror/tests/tst.verror.js @@ -0,0 +1,156 @@ +/* + * tst.verror.js: tests basic functionality of the VError class. + */ + +var mod_assert = require('assert'); +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; + +var err, suberr, stack, substack; + +/* + * Remove full paths and relative line numbers from stack traces so that we can + * compare against "known-good" output. + */ +function cleanStack(stacktxt) +{ + var re = new RegExp(__filename + ':\\d+:\\d+', 'gm'); + stacktxt = stacktxt.replace(re, 'tst.verror.js'); + return (stacktxt); +} + +/* + * Save the generic parts of all stack traces so we can avoid hardcoding + * Node-specific implementation details in our testing of stack traces. + */ +var nodestack = new Error().stack.split('\n').slice(2).join('\n'); + +/* no arguments */ +err = new VError(); +mod_assert.equal(err.name, 'VError'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof VError); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* options-argument form */ +err = new VError({}); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); + +/* simple message */ +err = new VError('my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: my error', + ' at Object. 
(tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({}, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); + +/* printf-style message */ +err = new VError('%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +err = new VError({}, '%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +/* caused by another error, with no additional message */ +suberr = new Error('root cause'); +err = new VError(suberr); +mod_assert.equal(err.message, ': root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new VError({ 'cause': suberr }); +mod_assert.equal(err.message, ': root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by another error, with annotation */ +err = new VError(suberr, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: proximate cause: 3 issues: root cause', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({ 'cause': suberr }, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: proximate cause: 3 issues: root cause', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* caused by another VError, with annotation. */ +suberr = err; +err = new VError(suberr, 'top'); +mod_assert.equal(err.message, 'top: proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new VError({ 'cause': suberr }, 'top'); +mod_assert.equal(err.message, 'top: proximate cause: 3 issues: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by a WError */ +suberr = new WError(new Error('root cause'), 'mid'); +err = new VError(suberr, 'top'); +mod_assert.equal(err.message, 'top: mid'); +mod_assert.ok(err.cause() === suberr); + +/* null cause (for backwards compatibility with older versions) */ +err = new VError(null, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: my error', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = new VError({ 'cause': null }, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.ok(err.cause() === undefined); + +err = new VError(null); +mod_assert.equal(err.message, ''); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +/* constructorOpt */ +function makeErr(options) { + return (new VError(options, 'test error')); +} +err = makeErr({}); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: test error', + ' at makeErr (tst.verror.js)', + ' at Object. (tst.verror.js)' +].join('\n') + '\n' + nodestack); + +err = makeErr({ 'constructorOpt': makeErr }); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'VError: test error', + ' at Object. 
(tst.verror.js)' +].join('\n') + '\n' + nodestack); diff --git a/node_modules/verror/tests/tst.werror.js b/node_modules/verror/tests/tst.werror.js new file mode 100644 index 0000000..c8cdc61 --- /dev/null +++ b/node_modules/verror/tests/tst.werror.js @@ -0,0 +1,179 @@ +/* + * tst.werror.js: tests basic functionality of the WError class. + */ + +var mod_assert = require('assert'); +var mod_verror = require('../lib/verror'); + +var VError = mod_verror.VError; +var WError = mod_verror.WError; + +var err, suberr, stack, substack; + +/* + * Remove full paths and relative line numbers from stack traces so that we can + * compare against "known-good" output. + */ +function cleanStack(stacktxt) +{ + var re = new RegExp(__filename + ':\\d+:\\d+', 'gm'); + stacktxt = stacktxt.replace(re, 'tst.werror.js'); + return (stacktxt); +} + +/* + * Save the generic parts of all stack traces so we can avoid hardcoding + * Node-specific implementation details in our testing of stack traces. + */ +var nodestack = new Error().stack.split('\n').slice(2).join('\n'); + +/* no arguments */ +err = new WError(); +mod_assert.equal(err.name, 'WError'); +mod_assert.ok(err instanceof Error); +mod_assert.ok(err instanceof WError); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* options-argument form */ +err = new WError({}); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); + +/* simple message */ +err = new WError('my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: my error', + ' at Object. 
(tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({}, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); + +/* printf-style message */ +err = new WError('%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.equal(err.toString(), 'WError: very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +err = new WError({}, '%s error: %3d problems', 'very bad', 15); +mod_assert.equal(err.message, 'very bad error: 15 problems'); +mod_assert.equal(err.toString(), 'WError: very bad error: 15 problems'); +mod_assert.ok(err.cause() === undefined); + +/* caused by another error, with no additional message */ +suberr = new Error('root cause'); +err = new WError(suberr); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new WError({ 'cause': suberr }); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by another error, with annotation */ +err = new WError(suberr, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues'); +mod_assert.equal(err.toString(), 'WError: proximate cause: 3 issues; ' + + 'caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: proximate cause: 3 issues; caused by Error: root cause', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({ 'cause': suberr }, 'proximate cause: %d issues', 3); +mod_assert.equal(err.message, 'proximate cause: 3 issues'); +mod_assert.equal(err.toString(), 'WError: proximate cause: 3 issues; ' + + 'caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: proximate cause: 3 issues; caused by Error: root cause', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* caused by another WError, with annotation. */ +suberr = err; +err = new WError(suberr, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), 'WError: top; caused by WError: ' + + 'proximate cause: 3 issues; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +err = new WError({ 'cause': suberr }, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), 'WError: top; caused by WError: ' + + 'proximate cause: 3 issues; caused by Error: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* caused by a VError */ +suberr = new VError(new Error('root cause'), 'mid'); +err = new WError(suberr, 'top'); +mod_assert.equal(err.message, 'top'); +mod_assert.equal(err.toString(), + 'WError: top; caused by VError: mid: root cause'); +mod_assert.ok(err.cause() === suberr); + +/* null cause (for backwards compatibility with older versions) */ +err = new WError(null, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: my error', + ' at Object. 
(tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = new WError({ 'cause': null }, 'my error'); +mod_assert.equal(err.message, 'my error'); +mod_assert.equal(err.toString(), 'WError: my error'); +mod_assert.ok(err.cause() === undefined); + +err = new WError(null); +mod_assert.equal(err.message, ''); +mod_assert.equal(err.toString(), 'WError'); +mod_assert.ok(err.cause() === undefined); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +/* constructorOpt */ +function makeErr(options) { + return (new WError(options, 'test error')); +} +err = makeErr({}); +mod_assert.equal(err.toString(), 'WError: test error'); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: test error', + ' at makeErr (tst.werror.js)', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); + +err = makeErr({ 'constructorOpt': makeErr }); +mod_assert.equal(err.toString(), 'WError: test error'); +stack = cleanStack(err.stack); +mod_assert.equal(stack, [ + 'WError: test error', + ' at Object. (tst.werror.js)' +].join('\n') + '\n' + nodestack); diff --git a/node_modules/vm-browserify/LICENSE b/node_modules/vm-browserify/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/vm-browserify/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/vm-browserify/example/run/bundle.js b/node_modules/vm-browserify/example/run/bundle.js new file mode 100644 index 0000000..5dbc43f --- /dev/null +++ b/node_modules/vm-browserify/example/run/bundle.js @@ -0,0 +1,473 @@ +var require = function (file, cwd) { + var resolved = require.resolve(file, cwd || '/'); + var mod = require.modules[resolved]; + if (!mod) throw new Error( + 'Failed to resolve module ' + file + ', tried ' + resolved + ); + var res = mod._cached ? 
mod._cached : mod(); + return res; +} + +require.paths = []; +require.modules = {}; +require.extensions = [".js",".coffee"]; + +require._core = { + 'assert': true, + 'events': true, + 'fs': true, + 'path': true, + 'vm': true +}; + +require.resolve = (function () { + return function (x, cwd) { + if (!cwd) cwd = '/'; + + if (require._core[x]) return x; + var path = require.modules.path(); + var y = cwd || '.'; + + if (x.match(/^(?:\.\.?\/|\/)/)) { + var m = loadAsFileSync(path.resolve(y, x)) + || loadAsDirectorySync(path.resolve(y, x)); + if (m) return m; + } + + var n = loadNodeModulesSync(x, y); + if (n) return n; + + throw new Error("Cannot find module '" + x + "'"); + + function loadAsFileSync (x) { + if (require.modules[x]) { + return x; + } + + for (var i = 0; i < require.extensions.length; i++) { + var ext = require.extensions[i]; + if (require.modules[x + ext]) return x + ext; + } + } + + function loadAsDirectorySync (x) { + x = x.replace(/\/+$/, ''); + var pkgfile = x + '/package.json'; + if (require.modules[pkgfile]) { + var pkg = require.modules[pkgfile](); + var b = pkg.browserify; + if (typeof b === 'object' && b.main) { + var m = loadAsFileSync(path.resolve(x, b.main)); + if (m) return m; + } + else if (typeof b === 'string') { + var m = loadAsFileSync(path.resolve(x, b)); + if (m) return m; + } + else if (pkg.main) { + var m = loadAsFileSync(path.resolve(x, pkg.main)); + if (m) return m; + } + } + + return loadAsFileSync(x + '/index'); + } + + function loadNodeModulesSync (x, start) { + var dirs = nodeModulesPathsSync(start); + for (var i = 0; i < dirs.length; i++) { + var dir = dirs[i]; + var m = loadAsFileSync(dir + '/' + x); + if (m) return m; + var n = loadAsDirectorySync(dir + '/' + x); + if (n) return n; + } + + var m = loadAsFileSync(x); + if (m) return m; + } + + function nodeModulesPathsSync (start) { + var parts; + if (start === '/') parts = [ '' ]; + else parts = path.normalize(start).split('/'); + + var dirs = []; + for (var i = parts.length - 1; i >= 0; i--) { + if (parts[i] === 'node_modules') continue; + var dir = parts.slice(0, i + 1).join('/') + '/node_modules'; + dirs.push(dir); + } + + return dirs; + } + }; +})(); + +require.alias = function (from, to) { + var path = require.modules.path(); + var res = null; + try { + res = require.resolve(from + '/package.json', '/'); + } + catch (err) { + res = require.resolve(from, '/'); + } + var basedir = path.dirname(res); + + var keys = (Object.keys || function (obj) { + var res = []; + for (var key in obj) res.push(key) + return res; + })(require.modules); + + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (key.slice(0, basedir.length + 1) === basedir + '/') { + var f = key.slice(basedir.length); + require.modules[to + f] = require.modules[basedir + f]; + } + else if (key === basedir) { + require.modules[to] = require.modules[basedir]; + } + } +}; + +require.define = function (filename, fn) { + var dirname = require._core[filename] + ? 
'' + : require.modules.path().dirname(filename) + ; + + var require_ = function (file) { + return require(file, dirname) + }; + require_.resolve = function (name) { + return require.resolve(name, dirname); + }; + require_.modules = require.modules; + require_.define = require.define; + var module_ = { exports : {} }; + + require.modules[filename] = function () { + require.modules[filename]._cached = module_.exports; + fn.call( + module_.exports, + require_, + module_, + module_.exports, + dirname, + filename + ); + require.modules[filename]._cached = module_.exports; + return module_.exports; + }; +}; + +if (typeof process === 'undefined') process = {}; + +if (!process.nextTick) process.nextTick = (function () { + var queue = []; + var canPost = typeof window !== 'undefined' + && window.postMessage && window.addEventListener + ; + + if (canPost) { + window.addEventListener('message', function (ev) { + if (ev.source === window && ev.data === 'browserify-tick') { + ev.stopPropagation(); + if (queue.length > 0) { + var fn = queue.shift(); + fn(); + } + } + }, true); + } + + return function (fn) { + if (canPost) { + queue.push(fn); + window.postMessage('browserify-tick', '*'); + } + else setTimeout(fn, 0); + }; +})(); + +if (!process.title) process.title = 'browser'; + +if (!process.binding) process.binding = function (name) { + if (name === 'evals') return require('vm') + else throw new Error('No such module') +}; + +if (!process.cwd) process.cwd = function () { return '.' }; + +require.define("path", function (require, module, exports, __dirname, __filename) { +function filter (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + if (fn(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length; i >= 0; i--) { + var last = parts[i]; + if (last == '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// Regex to split a filename into [*, dir, basename, ext] +// posix version +var splitPathRe = /^(.+\/(?!$)|\/)?((?:.+?)?(\.[^.]*)?)$/; + +// path.resolve([from ...], to) +// posix version +exports.resolve = function() { +var resolvedPath = '', + resolvedAbsolute = false; + +for (var i = arguments.length; i >= -1 && !resolvedAbsolute; i--) { + var path = (i >= 0) + ? arguments[i] + : process.cwd(); + + // Skip empty and invalid entries + if (typeof path !== 'string' || !path) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charAt(0) === '/'; +} + +// At this point the path should be resolved to a full absolute path, but +// handle relative paths to be safe (might happen when process.cwd() fails) + +// Normalize the path +resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { + return !!p; + }), !resolvedAbsolute).join('/'); + + return ((resolvedAbsolute ? 
'/' : '') + resolvedPath) || '.'; +}; + +// path.normalize(path) +// posix version +exports.normalize = function(path) { +var isAbsolute = path.charAt(0) === '/', + trailingSlash = path.slice(-1) === '/'; + +// Normalize the path +path = normalizeArray(filter(path.split('/'), function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? '/' : '') + path; +}; + + +// posix version +exports.join = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalize(filter(paths, function(p, index) { + return p && typeof p === 'string'; + }).join('/')); +}; + + +exports.dirname = function(path) { + var dir = splitPathRe.exec(path)[1] || ''; + var isWindows = false; + if (!dir) { + // No dirname + return '.'; + } else if (dir.length === 1 || + (isWindows && dir.length <= 3 && dir.charAt(1) === ':')) { + // It is just a slash or a drive letter with a slash + return dir; + } else { + // It is a full dirname, strip trailing slash + return dir.substring(0, dir.length - 1); + } +}; + + +exports.basename = function(path, ext) { + var f = splitPathRe.exec(path)[2] || ''; + // TODO: make this comparison case-insensitive on windows? + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +}; + + +exports.extname = function(path) { + return splitPathRe.exec(path)[3] || ''; +}; + +}); + +require.define("vm", function (require, module, exports, __dirname, __filename) { +var Object_keys = function (obj) { + if (Object.keys) return Object.keys(obj) + else { + var res = []; + for (var key in obj) res.push(key) + return res; + } +}; + +var forEach = function (xs, fn) { + if (xs.forEach) return xs.forEach(fn) + else for (var i = 0; i < xs.length; i++) { + fn(xs[i], i, xs); + } +}; + +var Script = exports.Script = function NodeScript (code) { + if (!(this instanceof Script)) return new Script(code); + this.code = code; +}; + +var iframe = document.createElement('iframe'); +if (!iframe.style) iframe.style = {}; +iframe.style.display = 'none'; + +var iframeCapable = true; // until proven otherwise +if (navigator.appName === 'Microsoft Internet Explorer') { + var m = navigator.appVersion.match(/\bMSIE (\d+\.\d+);/); + if (m && parseFloat(m[1]) <= 9.0) { + iframeCapable = false; + } +} + +Script.prototype.runInNewContext = function (context) { + if (!context) context = {}; + + if (!iframeCapable) { + var keys = Object_keys(context); + var args = []; + for (var i = 0; i < keys.length; i++) { + args.push(context[keys[i]]); + } + + var fn = new Function(keys, 'return ' + this.code); + return fn.apply(null, args); + } + + document.body.appendChild(iframe); + + var win = iframe.contentWindow + || (window.frames && window.frames[window.frames.length - 1]) + || window[window.length - 1] + ; + + forEach(Object_keys(context), function (key) { + win[key] = context[key]; + iframe[key] = context[key]; + }); + + if (win.eval) { + // chrome and ff can just .eval() + var res = win.eval(this.code); + } + else { + // this works in IE9 but not anything newer + iframe.setAttribute('src', + 'javascript:__browserifyVmResult=(' + this.code + ')' + ); + if ('__browserifyVmResult' in win) { + var res = win.__browserifyVmResult; + } + else { + iframeCapable = false; + res = this.runInThisContext(context); + } + } + + forEach(Object_keys(win), function (key) { + context[key] = win[key]; + }); + + document.body.removeChild(iframe); + + return 
res; +}; + +Script.prototype.runInThisContext = function () { + return eval(this.code); // maybe... +}; + +Script.prototype.runInContext = function (context) { + // seems to be just runInNewContext on magical context objects which are + // otherwise indistinguishable from objects except plain old objects + // for the parameter segfaults node + return this.runInNewContext(context); +}; + +forEach(Object_keys(Script.prototype), function (name) { + exports[name] = Script[name] = function (code) { + var s = Script(code); + return s[name].apply(s, [].slice.call(arguments, 1)); + }; +}); + +exports.createScript = function (code) { + return exports.Script(code); +}; + +exports.createContext = Script.createContext = function (context) { + // not really sure what this one does + // seems to just make a shallow copy + var copy = {}; + forEach(Object_keys(context), function (key) { + copy[key] = context[key]; + }); + return copy; +}; + +}); + +require.define("/entry.js", function (require, module, exports, __dirname, __filename) { + var vm = require('vm'); + +$(function () { + var res = vm.runInNewContext('a + 5', { a : 100 }); + $('#res').text(res); +}); + +}); +require("/entry.js"); diff --git a/node_modules/vm-browserify/example/run/entry.js b/node_modules/vm-browserify/example/run/entry.js new file mode 100644 index 0000000..c7d3891 --- /dev/null +++ b/node_modules/vm-browserify/example/run/entry.js @@ -0,0 +1,6 @@ +var vm = require('vm'); + +$(function () { + var res = vm.runInNewContext('a + 5', { a : 100 }); + $('#res').text(res); +}); diff --git a/node_modules/vm-browserify/example/run/index.html b/node_modules/vm-browserify/example/run/index.html new file mode 100644 index 0000000..1ea0942 --- /dev/null +++ b/node_modules/vm-browserify/example/run/index.html @@ -0,0 +1,9 @@ + + + + + + + result = + + diff --git a/node_modules/vm-browserify/example/run/server.js b/node_modules/vm-browserify/example/run/server.js new file mode 100644 index 0000000..339d3ee --- /dev/null +++ b/node_modules/vm-browserify/example/run/server.js @@ -0,0 +1,6 @@ +var ecstatic = require('ecstatic')(__dirname); +var http = require('http'); +http.createServer(ecstatic).listen(8000); + +console.log('listening on :8000'); +console.log('# remember to run browserify entry.js -o bundle.js'); diff --git a/node_modules/vm-browserify/index.js b/node_modules/vm-browserify/index.js new file mode 100644 index 0000000..644efd1 --- /dev/null +++ b/node_modules/vm-browserify/index.js @@ -0,0 +1,138 @@ +var indexOf = require('indexof'); + +var Object_keys = function (obj) { + if (Object.keys) return Object.keys(obj) + else { + var res = []; + for (var key in obj) res.push(key) + return res; + } +}; + +var forEach = function (xs, fn) { + if (xs.forEach) return xs.forEach(fn) + else for (var i = 0; i < xs.length; i++) { + fn(xs[i], i, xs); + } +}; + +var defineProp = (function() { + try { + Object.defineProperty({}, '_', {}); + return function(obj, name, value) { + Object.defineProperty(obj, name, { + writable: true, + enumerable: false, + configurable: true, + value: value + }) + }; + } catch(e) { + return function(obj, name, value) { + obj[name] = value; + }; + } +}()); + +var globals = ['Array', 'Boolean', 'Date', 'Error', 'EvalError', 'Function', +'Infinity', 'JSON', 'Math', 'NaN', 'Number', 'Object', 'RangeError', +'ReferenceError', 'RegExp', 'String', 'SyntaxError', 'TypeError', 'URIError', +'decodeURI', 'decodeURIComponent', 'encodeURI', 'encodeURIComponent', 'escape', +'eval', 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 
'undefined', 'unescape']; + +function Context() {} +Context.prototype = {}; + +var Script = exports.Script = function NodeScript (code) { + if (!(this instanceof Script)) return new Script(code); + this.code = code; +}; + +Script.prototype.runInContext = function (context) { + if (!(context instanceof Context)) { + throw new TypeError("needs a 'context' argument."); + } + + var iframe = document.createElement('iframe'); + if (!iframe.style) iframe.style = {}; + iframe.style.display = 'none'; + + document.body.appendChild(iframe); + + var win = iframe.contentWindow; + var wEval = win.eval, wExecScript = win.execScript; + + if (!wEval && wExecScript) { + // win.eval() magically appears when this is called in IE: + wExecScript.call(win, 'null'); + wEval = win.eval; + } + + forEach(Object_keys(context), function (key) { + win[key] = context[key]; + }); + forEach(globals, function (key) { + if (context[key]) { + win[key] = context[key]; + } + }); + + var winKeys = Object_keys(win); + + var res = wEval.call(win, this.code); + + forEach(Object_keys(win), function (key) { + // Avoid copying circular objects like `top` and `window` by only + // updating existing context properties or new properties in the `win` + // that was only introduced after the eval. + if (key in context || indexOf(winKeys, key) === -1) { + context[key] = win[key]; + } + }); + + forEach(globals, function (key) { + if (!(key in context)) { + defineProp(context, key, win[key]); + } + }); + + document.body.removeChild(iframe); + + return res; +}; + +Script.prototype.runInThisContext = function () { + return eval(this.code); // maybe... +}; + +Script.prototype.runInNewContext = function (context) { + var ctx = Script.createContext(context); + var res = this.runInContext(ctx); + + forEach(Object_keys(ctx), function (key) { + context[key] = ctx[key]; + }); + + return res; +}; + +forEach(Object_keys(Script.prototype), function (name) { + exports[name] = Script[name] = function (code) { + var s = Script(code); + return s[name].apply(s, [].slice.call(arguments, 1)); + }; +}); + +exports.createScript = function (code) { + return exports.Script(code); +}; + +exports.createContext = Script.createContext = function (context) { + var copy = new Context(); + if(typeof context === 'object') { + forEach(Object_keys(context), function (key) { + copy[key] = context[key]; + }); + } + return copy; +}; diff --git a/node_modules/vm-browserify/package.json b/node_modules/vm-browserify/package.json new file mode 100644 index 0000000..dd948a2 --- /dev/null +++ b/node_modules/vm-browserify/package.json @@ -0,0 +1,46 @@ +{ + "name": "vm-browserify", + "version": "0.0.4", + "description": "vm module for the browser", + "main": "index.js", + "repository": { + "type": "git", + "url": "http://github.com/substack/vm-browserify.git" + }, + "keywords": [ + "vm", + "browser", + "eval" + ], + "dependencies": { + "indexof": "0.0.1" + }, + "devDependencies": { + "tape": "~2.3.2" + }, + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "scripts": { + "test": "tap test/*.js" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/vm-browserify/readme.markdown b/node_modules/vm-browserify/readme.markdown 
new file mode 100644 index 0000000..1ad139b --- /dev/null +++ b/node_modules/vm-browserify/readme.markdown @@ -0,0 +1,67 @@ +# vm-browserify + +emulate node's vm module for the browser + +[![testling badge](https://ci.testling.com/substack/vm-browserify.png)](https://ci.testling.com/substack/vm-browserify) + +# example + +Just write some client-side javascript: + +``` js +var vm = require('vm'); + +$(function () { + var res = vm.runInNewContext('a + 5', { a : 100 }); + $('#res').text(res); +}); +``` + +compile it with [browserify](http://github.com/substack/node-browserify): + +``` +browserify entry.js -o bundle.js +``` + +then whip up some html: + +``` html + + + + + + + result = + + +``` + +and when you load the page you should see: + +``` +result = 105 +``` + +# methods + +## vm.runInNewContext(code, context={}) + +Evaluate some `code` in a new iframe with a `context`. + +Contexts are like wrapping your code in a `with()` except slightly less terrible +because the code is sandboxed into a new iframe. + +# install + +This module is depended upon by browserify, so you should just be able to +`require('vm')` and it will just work. However if you want to use this module +directly you can install it with [npm](http://npmjs.org): + +``` +npm install vm-browserify +``` + +# license + +MIT diff --git a/node_modules/vm-browserify/test/vm.js b/node_modules/vm-browserify/test/vm.js new file mode 100644 index 0000000..ea8cd31 --- /dev/null +++ b/node_modules/vm-browserify/test/vm.js @@ -0,0 +1,35 @@ +var test = require('tape'); +var vm = require('../'); + +test('vmRunInNewContext', function (t) { + t.plan(6); + + t.equal(vm.runInNewContext('a + 5', { a : 100 }), 105); + + (function () { + var vars = { x : 10 }; + t.equal(vm.runInNewContext('x++', vars), 10); + t.equal(vars.x, 11); + })(); + + (function () { + var vars = { x : 10 }; + t.equal(vm.runInNewContext('var y = 3; y + x++', vars), 13); + t.equal(vars.x, 11); + t.equal(vars.y, 3); + })(); + + t.end(); +}); + +test('vmRunInContext', function (t) { + t.plan(2); + + var context = vm.createContext({ foo: 1 }); + + vm.runInContext('var x = 1', context); + t.deepEqual(context, { foo: 1, x: 1 }); + + vm.runInContext('var y = 1', context); + t.deepEqual(context, { foo: 1, x: 1, y: 1 }); +}); diff --git a/node_modules/watchpack/.eslintrc b/node_modules/watchpack/.eslintrc new file mode 100644 index 0000000..c31e30e --- /dev/null +++ b/node_modules/watchpack/.eslintrc @@ -0,0 +1,10 @@ +{ + "env": { + "node": true + }, + "rules": { + "strict": 0, + "curly": 0, + "no-underscore-dangle": 0 + } +} diff --git a/node_modules/watchpack/.gitattributes b/node_modules/watchpack/.gitattributes new file mode 100644 index 0000000..dd6c9a6 --- /dev/null +++ b/node_modules/watchpack/.gitattributes @@ -0,0 +1,3 @@ +* text=auto +examples/* eol=lf +bin/* eol=lf \ No newline at end of file diff --git a/node_modules/watchpack/.npmignore b/node_modules/watchpack/.npmignore new file mode 100644 index 0000000..017a83d --- /dev/null +++ b/node_modules/watchpack/.npmignore @@ -0,0 +1,5 @@ +/node_modules +/test/fixtures +/playground/folder +/coverage +.DS_Store \ No newline at end of file diff --git a/node_modules/watchpack/.travis.yml b/node_modules/watchpack/.travis.yml new file mode 100644 index 0000000..7dc8f5e --- /dev/null +++ b/node_modules/watchpack/.travis.yml @@ -0,0 +1,13 @@ +sudo: false +language: node_js +node_js: + - "0.10" + - "0.12" + - "node" + - "iojs" +script: npm run travis + +before_install: + - '[ "${TRAVIS_NODE_VERSION}" != "0.10" ] || npm install -g 
npm' + +after_success: cat ./coverage/lcov.info | node_modules/.bin/coveralls --verbose && rm -rf ./coverage diff --git a/node_modules/watchpack/README.md b/node_modules/watchpack/README.md new file mode 100644 index 0000000..424d1e6 --- /dev/null +++ b/node_modules/watchpack/README.md @@ -0,0 +1,68 @@ +# watchpack + +Wrapper library for directory and file watching. + +[![Build Status](https://travis-ci.org/webpack/watchpack.svg?branch=master)](https://travis-ci.org/webpack/watchpack) [![Build status](https://ci.appveyor.com/api/projects/status/e5u2qvmugtv0r647/branch/master?svg=true)](https://ci.appveyor.com/project/sokra/watchpack/branch/master) [![Test coverage][coveralls-image]][coveralls-url] + +## Concept + +watchpack high level API don't map directly to watchers. Instead a three level architecture ensures that for each directory only a single watcher exists. + +* The high level API requests `DirectoryWatchers` from a `WatcherManager`, which ensures that only a single `DirectoryWatcher` per directory is created. +* A user-faced `Watcher` can be obtained from a `DirectoryWatcher` and provides a filtered view on the `DirectoryWatcher`. +* Reference-counting is used on the `DirectoryWatcher` and `Watcher` to decide when to close them. +* The real watchers (currently chokidar) are created by the `DirectoryWatcher`. +* Files are never watched directly. This should keep the watcher count low. +* Watching can be started in the past. This way watching can start after file reading. +* Symlinks are not followed, instead the symlink is watched. + +## API + +``` javascript +var Watchpack = require("watchpack"); + +var wp = new Watchpack({ + // options: + aggregateTimeout: 1000 + // fire "aggregated" event when after a change for 1000ms no additonal change occured + // aggregated defaults to undefined, which doesn't fire an "aggregated" event + + poll: true + // poll: true - use polling with the default interval + // poll: 10000 - use polling with an interval of 10s + // poll defaults to undefined, which prefer native watching methods + // Note: enable polling when watching on a network path +}); + +// Watchpack.prototype.watch(string[] files, string[] directories, [number startTime]) +wp.watch(listOfFiles, listOfDirectories, Date.now() - 10000); +// starts watching these files and directories +// calling this again will override the files and directories + +wp.on("change", function(filePath, mtime) { + // filePath: the changed file + // mtime: last modified time for the changed file +}); + +wp.on("aggregated", function(changes) { + // changes: an array of all changed files +}); + +// Watchpack.prototype.pause() +wp.pause(); +// stops emitting events, but keeps watchers open +// next "watch" call can reuse the watchers + +// Watchpack.prototype.close() +wp.close(); +// stops emitting events and closes all watchers + +// Watchpack.prototype.getTimes() +var fileTimes = wp.getTimes(); +// returns an object with all know change times for files +// this include timestamps from files not directly watched +// key: absolute path, value: timestamp as number +``` + +[coveralls-url]: https://coveralls.io/r/webpack/watchpack/ +[coveralls-image]: https://img.shields.io/coveralls/webpack/watchpack.svg diff --git a/node_modules/watchpack/appveyor.yml b/node_modules/watchpack/appveyor.yml new file mode 100644 index 0000000..b6d127c --- /dev/null +++ b/node_modules/watchpack/appveyor.yml @@ -0,0 +1,27 @@ +# appveyor file +# http://www.appveyor.com/docs/appveyor-yml + +init: + - git config --global core.autocrlf 
input + +# what combinations to test +environment: + matrix: + - nodejs_version: 0.10 + - nodejs_version: 0.11 + +# combinations having this can fail +matrix: + allow_failures: + - nodejs_version: 0.11 + +install: + - ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version) + - npm install + +build: off + +test_script: + - node --version + - npm --version + - cmd: npm test diff --git a/node_modules/watchpack/lib/DirectoryWatcher.js b/node_modules/watchpack/lib/DirectoryWatcher.js new file mode 100644 index 0000000..ce141e5 --- /dev/null +++ b/node_modules/watchpack/lib/DirectoryWatcher.js @@ -0,0 +1,325 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var EventEmitter = require("events").EventEmitter; +var async = require("async"); +var chokidar = require("chokidar"); +var fs = require("graceful-fs"); +var path = require("path"); + +var watcherManager = require("./watcherManager"); + +var FS_ACCURENCY = 10000; + + +function withoutCase(str) { + return str.toLowerCase(); +} + + +function Watcher(directoryWatcher, filePath, startTime) { + EventEmitter.call(this); + this.directoryWatcher = directoryWatcher; + this.path = filePath; + this.startTime = startTime && +startTime; + this.data = 0; +} + +Watcher.prototype = Object.create(EventEmitter.prototype); +Watcher.prototype.constructor = Watcher; + +Watcher.prototype.checkStartTime = function checkStartTime(mtime, initial) { + if(typeof this.startTime !== "number") return !initial; + var startTime = this.startTime && Math.floor(this.startTime / FS_ACCURENCY) * FS_ACCURENCY; + return startTime <= mtime; +}; + +Watcher.prototype.close = function close() { + this.emit("closed"); +}; + + +function DirectoryWatcher(directoryPath, options) { + EventEmitter.call(this); + this.path = directoryPath; + this.files = {}; + this.directories = {}; + this.watcher = chokidar.watch(directoryPath, { + ignoreInitial: true, + persistent: true, + followSymlinks: false, + depth: 0, + atomic: false, + alwaysStat: true, + ignorePermissionErrors: true, + usePolling: options.poll ? true : undefined, + interval: typeof options.poll === "number" ? options.poll : undefined + }); + this.watcher.on("add", this.onFileAdded.bind(this)); + this.watcher.on("addDir", this.onDirectoryAdded.bind(this)); + this.watcher.on("change", this.onChange.bind(this)); + this.watcher.on("unlink", this.onFileUnlinked.bind(this)); + this.watcher.on("unlinkDir", this.onDirectoryUnlinked.bind(this)); + this.watcher.on("error", this.onWatcherError.bind(this)); + this.initialScan = true; + this.nestedWatching = false; + this.initialScanRemoved = []; + this.doInitialScan(); + this.watchers = {}; + this.refs = 0; +} +module.exports = DirectoryWatcher; + +DirectoryWatcher.prototype = Object.create(EventEmitter.prototype); +DirectoryWatcher.prototype.constructor = DirectoryWatcher; + +DirectoryWatcher.prototype.setFileTime = function setFileTime(filePath, mtime, initial, type) { + var now = Date.now(); + var old = this.files[filePath]; + this.files[filePath] = [initial ? 
Math.min(now, mtime) : now, mtime]; + if(!old) { + if(mtime) { + if(this.watchers[withoutCase(filePath)]) { + this.watchers[withoutCase(filePath)].forEach(function(w) { + if(!initial || w.checkStartTime(mtime, initial)) { + w.emit("change", mtime); + } + }); + } + } + } else if(!initial && mtime && type !== "add") { + if(this.watchers[withoutCase(filePath)]) { + this.watchers[withoutCase(filePath)].forEach(function(w) { + w.emit("change", mtime); + }); + } + } else if(!initial && !mtime) { + if(this.watchers[withoutCase(filePath)]) { + this.watchers[withoutCase(filePath)].forEach(function(w) { + w.emit("remove"); + }); + } + } + if(this.watchers[withoutCase(this.path)]) { + this.watchers[withoutCase(this.path)].forEach(function(w) { + if(!initial || w.checkStartTime(mtime, initial)) { + w.emit("change", filePath, mtime); + } + }); + } +}; + +DirectoryWatcher.prototype.setDirectory = function setDirectory(directoryPath, exist, initial) { + var old = this.directories[directoryPath]; + if(!old) { + if(exist) { + if(this.nestedWatching) { + this.createNestedWatcher(directoryPath); + } else { + this.directories[directoryPath] = true; + } + } + } else { + if(!exist) { + if(this.nestedWatching) + this.directories[directoryPath].close(); + delete this.directories[directoryPath]; + if(!initial && this.watchers[withoutCase(this.path)]) { + this.watchers[withoutCase(this.path)].forEach(function(w) { + w.emit("change", directoryPath, w.data); + }); + } + } + } +}; + +DirectoryWatcher.prototype.createNestedWatcher = function(directoryPath) { + this.directories[directoryPath] = watcherManager.watchDirectory(directoryPath, this.options, 1); + this.directories[directoryPath].on("change", function(filePath, mtime) { + if(this.watchers[withoutCase(this.path)]) { + this.watchers[withoutCase(this.path)].forEach(function(w) { + if(w.checkStartTime(mtime, false)) { + w.emit("change", filePath, mtime); + } + }); + } + }.bind(this)); +}; + +DirectoryWatcher.prototype.setNestedWatching = function(flag) { + if(this.nestedWatching !== !!flag) { + this.nestedWatching = !!flag; + if(this.nestedWatching) { + Object.keys(this.directories).forEach(function(directory) { + this.createNestedWatcher(directory); + }, this); + } else { + Object.keys(this.directories).forEach(function(directory) { + this.directories[directory].close(); + this.directories[directory] = true; + }, this); + } + } +}; + +DirectoryWatcher.prototype.watch = function watch(filePath, startTime) { + this.watchers[withoutCase(filePath)] = this.watchers[withoutCase(filePath)] || []; + this.refs++; + var watcher = new Watcher(this, filePath, startTime); + watcher.on("closed", function() { + var idx = this.watchers[withoutCase(filePath)].indexOf(watcher); + this.watchers[withoutCase(filePath)].splice(idx, 1); + if(this.watchers[withoutCase(filePath)].length === 0) { + delete this.watchers[withoutCase(filePath)]; + if(this.path === filePath) + this.setNestedWatching(false); + } + if(--this.refs <= 0) + this.close(); + }.bind(this)); + this.watchers[withoutCase(filePath)].push(watcher); + var data; + if(filePath === this.path) { + this.setNestedWatching(true); + data = false; + Object.keys(this.files).forEach(function(file) { + var d = this.files[file]; + if(!data) + data = d; + else + data = [Math.max(data[0], d[0]), Math.max(data[1], d[1])]; + }, this); + } else { + data = this.files[filePath]; + } + process.nextTick(function() { + if(data) { + if(data[0] > startTime) + watcher.emit("change", data[1]); + } else if(this.initialScan && 
this.initialScanRemoved.indexOf(filePath) >= 0) { + watcher.emit("remove"); + } + }.bind(this)); + return watcher; +}; + +DirectoryWatcher.prototype.onFileAdded = function onFileAdded(filePath, stat) { + if(filePath.indexOf(this.path) !== 0) return; + if(/[\\\/]/.test(filePath.substr(this.path.length + 1))) return; + + this.setFileTime(filePath, +stat.mtime, false, "add"); +}; + +DirectoryWatcher.prototype.onDirectoryAdded = function onDirectoryAdded(directoryPath /*, stat */) { + if(directoryPath.indexOf(this.path) !== 0) return; + if(/[\\\/]/.test(directoryPath.substr(this.path.length + 1))) return; + this.setDirectory(directoryPath, true, false); +}; + +DirectoryWatcher.prototype.onChange = function onChange(filePath, stat) { + if(filePath.indexOf(this.path) !== 0) return; + if(/[\\\/]/.test(filePath.substr(this.path.length + 1))) return; + var mtime = +stat.mtime; + if(FS_ACCURENCY > 1 && mtime % 1 !== 0) + FS_ACCURENCY = 1; + else if(FS_ACCURENCY > 10 && mtime % 10 !== 0) + FS_ACCURENCY = 10; + else if(FS_ACCURENCY > 100 && mtime % 100 !== 0) + FS_ACCURENCY = 100; + else if(FS_ACCURENCY > 1000 && mtime % 1000 !== 0) + FS_ACCURENCY = 1000; + else if(FS_ACCURENCY > 2000 && mtime % 2000 !== 0) + FS_ACCURENCY = 2000; + this.setFileTime(filePath, mtime, false, "change"); +}; + +DirectoryWatcher.prototype.onFileUnlinked = function onFileUnlinked(filePath) { + if(filePath.indexOf(this.path) !== 0) return; + if(/[\\\/]/.test(filePath.substr(this.path.length + 1))) return; + this.setFileTime(filePath, null, false, "unlink"); + if(this.initialScan) { + this.initialScanRemoved.push(filePath); + } +}; + +DirectoryWatcher.prototype.onDirectoryUnlinked = function onDirectoryUnlinked(directoryPath) { + if(directoryPath.indexOf(this.path) !== 0) return; + if(/[\\\/]/.test(directoryPath.substr(this.path.length + 1))) return; + this.setDirectory(directoryPath, false, false); + if(this.initialScan) { + this.initialScanRemoved.push(directoryPath); + } +}; + +DirectoryWatcher.prototype.onWatcherError = function onWatcherError(/* err */) { +}; + +DirectoryWatcher.prototype.doInitialScan = function doInitialScan() { + fs.readdir(this.path, function(err, items) { + if(err) { + this.initialScan = false; + return; + } + async.forEach(items, function(item, callback) { + var itemPath = path.join(this.path, item); + fs.stat(itemPath, function(err2, stat) { + if(!this.initialScan) return; + if(err2) { + callback(); + return; + } + if(stat.isFile()) { + if(!this.files[itemPath]) + this.setFileTime(itemPath, +stat.mtime, true); + } else if(stat.isDirectory()) { + if(!this.directories[itemPath]) + this.setDirectory(itemPath, true, true); + } + callback(); + }.bind(this)); + }.bind(this), function() { + this.initialScan = false; + this.initialScanRemoved = null; + }.bind(this)); + }.bind(this)); +}; + +DirectoryWatcher.prototype.getTimes = function() { + var obj = {}; + var selfTime = 0; + Object.keys(this.files).forEach(function(file) { + var data = this.files[file]; + if(data[1]) { + var time = Math.max(data[0], data[1]); + obj[file] = time; + if(time > selfTime) + selfTime = time; + } + }, this); + if(this.nestedWatching) { + Object.keys(this.directories).forEach(function(dir) { + var w = this.directories[dir]; + var times = w.directoryWatcher.getTimes(); + Object.keys(times).forEach(function(file) { + var time = times[file]; + obj[file] = time; + if(time > selfTime) + selfTime = time; + }); + }, this); + obj[this.path] = selfTime; + } + return obj; +}; + +DirectoryWatcher.prototype.close = function() { + 
this.initialScan = false; + this.watcher.close(); + if(this.nestedWatching) { + Object.keys(this.directories).forEach(function(dir) { + this.directories[dir].close(); + }, this); + } + this.emit("closed"); +}; diff --git a/node_modules/watchpack/lib/watcherManager.js b/node_modules/watchpack/lib/watcherManager.js new file mode 100644 index 0000000..128ac42 --- /dev/null +++ b/node_modules/watchpack/lib/watcherManager.js @@ -0,0 +1,33 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var path = require("path"); + +function WatcherManager() { + this.directoryWatchers = {}; +} + +WatcherManager.prototype.getDirectoryWatcher = function(directory, options) { + var DirectoryWatcher = require("./DirectoryWatcher"); + options = options || {}; + var key = directory + " " + JSON.stringify(options); + if(!this.directoryWatchers[key]) { + this.directoryWatchers[key] = new DirectoryWatcher(directory, options); + this.directoryWatchers[key].on("closed", function() { + delete this.directoryWatchers[key]; + }.bind(this)); + } + return this.directoryWatchers[key]; +}; + +WatcherManager.prototype.watchFile = function watchFile(p, options, startTime) { + var directory = path.dirname(p); + return this.getDirectoryWatcher(directory, options).watch(p, startTime); +}; + +WatcherManager.prototype.watchDirectory = function watchDirectory(directory, options, startTime) { + return this.getDirectoryWatcher(directory, options).watch(directory, startTime); +}; + +module.exports = new WatcherManager(); diff --git a/node_modules/watchpack/lib/watchpack.js b/node_modules/watchpack/lib/watchpack.js new file mode 100644 index 0000000..187a94b --- /dev/null +++ b/node_modules/watchpack/lib/watchpack.js @@ -0,0 +1,122 @@ +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var watcherManager = require("./watcherManager"); +var EventEmitter = require("events").EventEmitter; + +function Watchpack(options) { + EventEmitter.call(this); + if(!options) options = {}; + if(!options.aggregateTimeout) options.aggregateTimeout = 200; + this.options = options; + this.watcherOptions = { + poll: options.poll + }; + this.fileWatchers = []; + this.dirWatchers = []; + this.mtimes = {}; + this.paused = false; + this.aggregatedChanges = []; + this.aggregateTimeout = 0; + this._onTimeout = this._onTimeout.bind(this); +} + +module.exports = Watchpack; + +Watchpack.prototype = Object.create(EventEmitter.prototype); + +Watchpack.prototype.watch = function watch(files, directories, startTime) { + this.paused = false; + var oldFileWatchers = this.fileWatchers; + var oldDirWatchers = this.dirWatchers; + this.fileWatchers = files.map(function(file) { + return this._fileWatcher(file, watcherManager.watchFile(file, this.watcherOptions, startTime)); + }, this); + this.dirWatchers = directories.map(function(dir) { + return this._dirWatcher(dir, watcherManager.watchDirectory(dir, this.watcherOptions, startTime)); + }, this); + oldFileWatchers.forEach(function(w) { + w.close(); + }, this); + oldDirWatchers.forEach(function(w) { + w.close(); + }, this); +}; + +Watchpack.prototype.close = function resume() { + this.paused = true; + if(this.aggregateTimeout) + clearTimeout(this.aggregateTimeout); + this.fileWatchers.forEach(function(w) { + w.close(); + }, this); + this.dirWatchers.forEach(function(w) { + w.close(); + }, this); + this.fileWatchers.length = 0; + this.dirWatchers.length = 0; +}; + +Watchpack.prototype.pause = function pause() { + 
this.paused = true; + if(this.aggregateTimeout) + clearTimeout(this.aggregateTimeout); +}; + +function addWatchersToArray(watchers, array) { + watchers.forEach(function(w) { + if(array.indexOf(w.directoryWatcher) < 0) { + array.push(w.directoryWatcher); + addWatchersToArray(Object.keys(w.directoryWatcher.directories).reduce(function(a, dir) { + if(w.directoryWatcher.directories[dir] !== true) + a.push(w.directoryWatcher.directories[dir]); + return a; + }, []), array); + } + }); +} + +Watchpack.prototype.getTimes = function() { + var directoryWatchers = []; + addWatchersToArray(this.fileWatchers.concat(this.dirWatchers), directoryWatchers); + var obj = {}; + directoryWatchers.forEach(function(w) { + var times = w.getTimes(); + Object.keys(times).forEach(function(file) { + obj[file] = times[file]; + }); + }); + return obj; +}; + +Watchpack.prototype._fileWatcher = function _fileWatcher(file, watcher) { + watcher.on("change", this._onChange.bind(this, file)); + return watcher; +}; + +Watchpack.prototype._dirWatcher = function _dirWatcher(item, watcher) { + watcher.on("change", function(file, mtime) { + this._onChange(item, mtime, file); + }.bind(this)); + return watcher; +}; + +Watchpack.prototype._onChange = function _onChange(item, mtime, file) { + file = file || item; + this.mtimes[file] = mtime; + if(this.paused) return; + this.emit("change", file, mtime); + if(this.aggregateTimeout) + clearTimeout(this.aggregateTimeout); + if(this.aggregatedChanges.indexOf(item) < 0) + this.aggregatedChanges.push(item); + this.aggregateTimeout = setTimeout(this._onTimeout, this.options.aggregateTimeout); +}; + +Watchpack.prototype._onTimeout = function _onTimeout() { + this.aggregateTimeout = 0; + var changes = this.aggregatedChanges; + this.aggregatedChanges = []; + this.emit("aggregated", changes); +}; diff --git a/node_modules/watchpack/node_modules/async/.travis.yml b/node_modules/watchpack/node_modules/async/.travis.yml new file mode 100644 index 0000000..6064ca0 --- /dev/null +++ b/node_modules/watchpack/node_modules/async/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - "0.10" + - "0.12" + - "iojs" diff --git a/node_modules/watchpack/node_modules/async/LICENSE b/node_modules/watchpack/node_modules/async/LICENSE new file mode 100644 index 0000000..8f29698 --- /dev/null +++ b/node_modules/watchpack/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010-2014 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
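
The `_onChange`/`_onTimeout` pair in the `lib/watchpack.js` hunk above implements a simple debounce: every change resets a timer, and only after no further change arrives within `aggregateTimeout` milliseconds is a single `"aggregated"` event emitted with the batched file list. Below is a minimal standalone sketch of that pattern; names such as `ChangeAggregator` are illustrative only and are not part of the watchpack API.

``` js
// Minimal sketch of watchpack-style change aggregation (illustrative only;
// the class and method names here are not part of the watchpack API).
var EventEmitter = require("events").EventEmitter;

function ChangeAggregator(aggregateTimeout) {
	EventEmitter.call(this);
	this.aggregateTimeout = aggregateTimeout || 200;
	this.changes = [];   // files changed since the last "aggregated" event
	this.timer = null;
	this._flush = this._flush.bind(this);
}
ChangeAggregator.prototype = Object.create(EventEmitter.prototype);

ChangeAggregator.prototype.change = function(file, mtime) {
	this.emit("change", file, mtime);          // immediate per-file event
	if(this.changes.indexOf(file) < 0)
		this.changes.push(file);
	if(this.timer)
		clearTimeout(this.timer);              // a new change restarts the quiet period
	this.timer = setTimeout(this._flush, this.aggregateTimeout);
};

ChangeAggregator.prototype._flush = function() {
	this.timer = null;
	var batch = this.changes;
	this.changes = [];
	this.emit("aggregated", batch);            // one event for the whole burst
};

// usage: three rapid changes produce a single aggregated batch
var agg = new ChangeAggregator(200);
agg.on("aggregated", function(files) {
	console.log("aggregated:", files);         // -> aggregated: [ 'a.js', 'b.js' ]
});
agg.change("a.js", Date.now());
agg.change("b.js", Date.now());
agg.change("a.js", Date.now());
```

The real `Watchpack` additionally suppresses emission while `this.paused` is set and records the latest `mtimes` per file, as the hunk above shows; this sketch only isolates the timer-based batching.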
diff --git a/node_modules/watchpack/node_modules/async/README.md b/node_modules/watchpack/node_modules/async/README.md new file mode 100644 index 0000000..6cfb922 --- /dev/null +++ b/node_modules/watchpack/node_modules/async/README.md @@ -0,0 +1,1647 @@ +# Async.js + +[![Build Status via Travis CI](https://travis-ci.org/caolan/async.svg?branch=master)](https://travis-ci.org/caolan/async) + + +Async is a utility module which provides straight-forward, powerful functions +for working with asynchronous JavaScript. Although originally designed for +use with [Node.js](http://nodejs.org) and installable via `npm install async`, +it can also be used directly in the browser. + +Async is also installable via: + +- [bower](http://bower.io/): `bower install async` +- [component](https://github.com/component/component): `component install + caolan/async` +- [jam](http://jamjs.org/): `jam install async` +- [spm](http://spmjs.io/): `spm install async` + +Async provides around 20 functions that include the usual 'functional' +suspects (`map`, `reduce`, `filter`, `each`…) as well as some common patterns +for asynchronous control flow (`parallel`, `series`, `waterfall`…). All these +functions assume you follow the Node.js convention of providing a single +callback as the last argument of your `async` function. + + +## Quick Examples + +```javascript +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); + +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); + +async.parallel([ + function(){ ... }, + function(){ ... } +], callback); + +async.series([ + function(){ ... }, + function(){ ... } +]); +``` + +There are many more functions available so take a look at the docs below for a +full list. This module aims to be comprehensive, so if you feel anything is +missing please create a GitHub issue for it. + +## Common Pitfalls + +### Binding a context to an iterator + +This section is really about `bind`, not about `async`. If you are wondering how to +make `async` execute your iterators in a given context, or are confused as to why +a method of another library isn't working as an iterator, study this example: + +```js +// Here is a simple object with an (unnecessarily roundabout) squaring method +var AsyncSquaringLibrary = { + squareExponent: 2, + square: function(number, callback){ + var result = Math.pow(number, this.squareExponent); + setTimeout(function(){ + callback(null, result); + }, 200); + } +}; + +async.map([1, 2, 3], AsyncSquaringLibrary.square, function(err, result){ + // result is [NaN, NaN, NaN] + // This fails because the `this.squareExponent` expression in the square + // function is not evaluated in the context of AsyncSquaringLibrary, and is + // therefore undefined. +}); + +async.map([1, 2, 3], AsyncSquaringLibrary.square.bind(AsyncSquaringLibrary), function(err, result){ + // result is [1, 4, 9] + // With the help of bind we can attach a context to the iterator before + // passing it to async. Now the square function will be executed in its + // 'home' AsyncSquaringLibrary context and the value of `this.squareExponent` + // will be as expected. +}); +``` + +## Download + +The source is available for download from +[GitHub](http://github.com/caolan/async). 
+Alternatively, you can install using Node Package Manager (`npm`): + + npm install async + +__Development:__ [async.js](https://github.com/caolan/async/raw/master/lib/async.js) - 29.6kb Uncompressed + +## In the Browser + +So far it's been tested in IE6, IE7, IE8, FF3.6 and Chrome 5. + +Usage: + +```html + + +``` + +## Documentation + +### Collections + +* [`each`](#each) +* [`eachSeries`](#eachSeries) +* [`eachLimit`](#eachLimit) +* [`map`](#map) +* [`mapSeries`](#mapSeries) +* [`mapLimit`](#mapLimit) +* [`filter`](#filter) +* [`filterSeries`](#filterSeries) +* [`reject`](#reject) +* [`rejectSeries`](#rejectSeries) +* [`reduce`](#reduce) +* [`reduceRight`](#reduceRight) +* [`detect`](#detect) +* [`detectSeries`](#detectSeries) +* [`sortBy`](#sortBy) +* [`some`](#some) +* [`every`](#every) +* [`concat`](#concat) +* [`concatSeries`](#concatSeries) + +### Control Flow + +* [`series`](#seriestasks-callback) +* [`parallel`](#parallel) +* [`parallelLimit`](#parallellimittasks-limit-callback) +* [`whilst`](#whilst) +* [`doWhilst`](#doWhilst) +* [`until`](#until) +* [`doUntil`](#doUntil) +* [`forever`](#forever) +* [`waterfall`](#waterfall) +* [`compose`](#compose) +* [`seq`](#seq) +* [`applyEach`](#applyEach) +* [`applyEachSeries`](#applyEachSeries) +* [`queue`](#queue) +* [`priorityQueue`](#priorityQueue) +* [`cargo`](#cargo) +* [`auto`](#auto) +* [`retry`](#retry) +* [`iterator`](#iterator) +* [`apply`](#apply) +* [`nextTick`](#nextTick) +* [`times`](#times) +* [`timesSeries`](#timesSeries) + +### Utils + +* [`memoize`](#memoize) +* [`unmemoize`](#unmemoize) +* [`log`](#log) +* [`dir`](#dir) +* [`noConflict`](#noConflict) + + +## Collections + + + +### each(arr, iterator, callback) + +Applies the function `iterator` to each item in `arr`, in parallel. +The `iterator` is called with an item from the list, and a callback for when it +has finished. If the `iterator` passes an error to its `callback`, the main +`callback` (for the `each` function) is immediately called with the error. + +Note, that since this function applies `iterator` to each item in parallel, +there is no guarantee that the iterator functions will complete in order. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err)` which must be called once it has + completed. If no error has occurred, the `callback` should be run without + arguments or with an explicit `null` argument. +* `callback(err)` - A callback which is called when all `iterator` functions + have finished, or an error occurs. + +__Examples__ + + +```js +// assuming openFiles is an array of file names and saveFile is a function +// to save the modified contents of that file: + +async.each(openFiles, saveFile, function(err){ + // if any of the saves produced an error, err would equal that error +}); +``` + +```js +// assuming openFiles is an array of file names + +async.each(openFiles, function(file, callback) { + + // Perform operation on file here. + console.log('Processing file ' + file); + + if( file.length > 32 ) { + console.log('This file name is too long'); + callback('File name too long'); + } else { + // Do work to process file here + console.log('File processed'); + callback(); + } +}, function(err){ + // if any of the file processing produced an error, err would equal that error + if( err ) { + // One of the iterations produced an error. + // All processing will now stop. 
+ console.log('A file failed to process'); + } else { + console.log('All files have been processed successfully'); + } +}); +``` + +--------------------------------------- + + + +### eachSeries(arr, iterator, callback) + +The same as [`each`](#each), only `iterator` is applied to each item in `arr` in +series. The next `iterator` is only called once the current one has completed. +This means the `iterator` functions will complete in order. + + +--------------------------------------- + + + +### eachLimit(arr, limit, iterator, callback) + +The same as [`each`](#each), only no more than `limit` `iterator`s will be simultaneously +running at any time. + +Note that the items in `arr` are not processed in batches, so there is no guarantee that +the first `limit` `iterator` functions will complete before any others are started. + +__Arguments__ + +* `arr` - An array to iterate over. +* `limit` - The maximum number of `iterator`s to run at any time. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err)` which must be called once it has + completed. If no error has occurred, the callback should be run without + arguments or with an explicit `null` argument. +* `callback(err)` - A callback which is called when all `iterator` functions + have finished, or an error occurs. + +__Example__ + +```js +// Assume documents is an array of JSON objects and requestApi is a +// function that interacts with a rate-limited REST api. + +async.eachLimit(documents, 20, requestApi, function(err){ + // if any of the saves produced an error, err would equal that error +}); +``` + +--------------------------------------- + + +### map(arr, iterator, callback) + +Produces a new array of values by mapping each value in `arr` through +the `iterator` function. The `iterator` is called with an item from `arr` and a +callback for when it has finished processing. Each of these callback takes 2 arguments: +an `error`, and the transformed item from `arr`. If `iterator` passes an error to his +callback, the main `callback` (for the `map` function) is immediately called with the error. + +Note, that since this function applies the `iterator` to each item in parallel, +there is no guarantee that the `iterator` functions will complete in order. +However, the results array will be in the same order as the original `arr`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, transformed)` which must be called once + it has completed with an error (which can be `null`) and a transformed item. +* `callback(err, results)` - A callback which is called when all `iterator` + functions have finished, or an error occurs. Results is an array of the + transformed items from the `arr`. + +__Example__ + +```js +async.map(['file1','file2','file3'], fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +--------------------------------------- + + +### mapSeries(arr, iterator, callback) + +The same as [`map`](#map), only the `iterator` is applied to each item in `arr` in +series. The next `iterator` is only called once the current one has completed. +The results array will be in the same order as the original. + + +--------------------------------------- + + +### mapLimit(arr, limit, iterator, callback) + +The same as [`map`](#map), only no more than `limit` `iterator`s will be simultaneously +running at any time. 
+ +Note that the items are not processed in batches, so there is no guarantee that +the first `limit` `iterator` functions will complete before any others are started. + +__Arguments__ + +* `arr` - An array to iterate over. +* `limit` - The maximum number of `iterator`s to run at any time. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, transformed)` which must be called once + it has completed with an error (which can be `null`) and a transformed item. +* `callback(err, results)` - A callback which is called when all `iterator` + calls have finished, or an error occurs. The result is an array of the + transformed items from the original `arr`. + +__Example__ + +```js +async.mapLimit(['file1','file2','file3'], 1, fs.stat, function(err, results){ + // results is now an array of stats for each file +}); +``` + +--------------------------------------- + + + +### filter(arr, iterator, callback) + +__Alias:__ `select` + +Returns a new array of all the values in `arr` which pass an async truth test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. This operation is +performed in parallel, but the results array will be in the same order as the +original. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The `iterator` is passed a `callback(truthValue)`, which must be called with a + boolean argument once it has completed. +* `callback(results)` - A callback which is called after all the `iterator` + functions have finished. + +__Example__ + +```js +async.filter(['file1','file2','file3'], fs.exists, function(results){ + // results now equals an array of the existing files +}); +``` + +--------------------------------------- + + + +### filterSeries(arr, iterator, callback) + +__Alias:__ `selectSeries` + +The same as [`filter`](#filter) only the `iterator` is applied to each item in `arr` in +series. The next `iterator` is only called once the current one has completed. +The results array will be in the same order as the original. + +--------------------------------------- + + +### reject(arr, iterator, callback) + +The opposite of [`filter`](#filter). Removes values that pass an `async` truth test. + +--------------------------------------- + + +### rejectSeries(arr, iterator, callback) + +The same as [`reject`](#reject), only the `iterator` is applied to each item in `arr` +in series. + + +--------------------------------------- + + +### reduce(arr, memo, iterator, callback) + +__Aliases:__ `inject`, `foldl` + +Reduces `arr` into a single value using an async `iterator` to return +each successive step. `memo` is the initial state of the reduction. +This function only operates in series. + +For performance reasons, it may make sense to split a call to this function into +a parallel map, and then use the normal `Array.prototype.reduce` on the results. +This function is for situations where each step in the reduction needs to be async; +if you can get the data before reducing it, then it's probably a good idea to do so. + +__Arguments__ + +* `arr` - An array to iterate over. +* `memo` - The initial state of the reduction. +* `iterator(memo, item, callback)` - A function applied to each item in the + array to produce the next step in the reduction. 
The `iterator` is passed a + `callback(err, reduction)` which accepts an optional error as its first + argument, and the state of the reduction as the second. If an error is + passed to the callback, the reduction is stopped and the main `callback` is + immediately called with the error. +* `callback(err, result)` - A callback which is called after all the `iterator` + functions have finished. Result is the reduced value. + +__Example__ + +```js +async.reduce([1,2,3], 0, function(memo, item, callback){ + // pointless async: + process.nextTick(function(){ + callback(null, memo + item) + }); +}, function(err, result){ + // result is now equal to the last value of memo, which is 6 +}); +``` + +--------------------------------------- + + +### reduceRight(arr, memo, iterator, callback) + +__Alias:__ `foldr` + +Same as [`reduce`](#reduce), only operates on `arr` in reverse order. + + +--------------------------------------- + + +### detect(arr, iterator, callback) + +Returns the first value in `arr` that passes an async truth test. The +`iterator` is applied in parallel, meaning the first iterator to return `true` will +fire the detect `callback` with that result. That means the result might not be +the first item in the original `arr` (in terms of order) that passes the test. + +If order within the original `arr` is important, then look at [`detectSeries`](#detectSeries). + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in `arr`. + The iterator is passed a `callback(truthValue)` which must be called with a + boolean argument once it has completed. +* `callback(result)` - A callback which is called as soon as any iterator returns + `true`, or after all the `iterator` functions have finished. Result will be + the first item in the array that passes the truth test (iterator) or the + value `undefined` if none passed. + +__Example__ + +```js +async.detect(['file1','file2','file3'], fs.exists, function(result){ + // result now equals the first file in the list that exists +}); +``` + +--------------------------------------- + + +### detectSeries(arr, iterator, callback) + +The same as [`detect`](#detect), only the `iterator` is applied to each item in `arr` +in series. This means the result is always the first in the original `arr` (in +terms of array order) that passes the truth test. + + +--------------------------------------- + + +### sortBy(arr, iterator, callback) + +Sorts a list by the results of running each `arr` value through an async `iterator`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. + The iterator is passed a `callback(err, sortValue)` which must be called once it + has completed with an error (which can be `null`) and a value to use as the sort + criteria. +* `callback(err, results)` - A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is the items from + the original `arr` sorted by the values returned by the `iterator` calls. 
+ +__Example__ + +```js +async.sortBy(['file1','file2','file3'], function(file, callback){ + fs.stat(file, function(err, stats){ + callback(err, stats.mtime); + }); +}, function(err, results){ + // results is now the original array of files sorted by + // modified date +}); +``` + +__Sort Order__ + +By modifying the callback parameter the sorting order can be influenced: + +```js +//ascending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x); +}, function(err,result){ + //result callback +} ); + +//descending order +async.sortBy([1,9,3,5], function(x, callback){ + callback(null, x*-1); //<- x*-1 instead of x, turns the order around +}, function(err,result){ + //result callback +} ); +``` + +--------------------------------------- + + +### some(arr, iterator, callback) + +__Alias:__ `any` + +Returns `true` if at least one element in the `arr` satisfies an async test. +_The callback for each iterator call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. Once any iterator +call returns `true`, the main `callback` is immediately called. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. The iterator is passed a callback(truthValue) which must be + called with a boolean argument once it has completed. +* `callback(result)` - A callback which is called as soon as any iterator returns + `true`, or after all the iterator functions have finished. Result will be + either `true` or `false` depending on the values of the async tests. + +__Example__ + +```js +async.some(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then at least one of the files exists +}); +``` + +--------------------------------------- + + +### every(arr, iterator, callback) + +__Alias:__ `all` + +Returns `true` if every element in `arr` satisfies an async test. +_The callback for each `iterator` call only accepts a single argument of `true` or +`false`; it does not accept an error argument first!_ This is in-line with the +way node libraries work with truth tests like `fs.exists`. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A truth test to apply to each item in the array + in parallel. The iterator is passed a callback(truthValue) which must be + called with a boolean argument once it has completed. +* `callback(result)` - A callback which is called after all the `iterator` + functions have finished. Result will be either `true` or `false` depending on + the values of the async tests. + +__Example__ + +```js +async.every(['file1','file2','file3'], fs.exists, function(result){ + // if result is true then every file exists +}); +``` + +--------------------------------------- + + +### concat(arr, iterator, callback) + +Applies `iterator` to each item in `arr`, concatenating the results. Returns the +concatenated list. The `iterator`s are called in parallel, and the results are +concatenated as they return. There is no guarantee that the results array will +be returned in the original order of `arr` passed to the `iterator` function. + +__Arguments__ + +* `arr` - An array to iterate over. +* `iterator(item, callback)` - A function to apply to each item in `arr`. 
+ The iterator is passed a `callback(err, results)` which must be called once it + has completed with an error (which can be `null`) and an array of results. +* `callback(err, results)` - A callback which is called after all the `iterator` + functions have finished, or an error occurs. Results is an array containing + the concatenated results of the `iterator` function. + +__Example__ + +```js +async.concat(['dir1','dir2','dir3'], fs.readdir, function(err, files){ + // files is now a list of filenames that exist in the 3 directories +}); +``` + +--------------------------------------- + + +### concatSeries(arr, iterator, callback) + +Same as [`concat`](#concat), but executes in series instead of parallel. + + +## Control Flow + + +### series(tasks, [callback]) + +Run the functions in the `tasks` array in series, each one running once the previous +function has completed. If any functions in the series pass an error to its +callback, no more functions are run, and `callback` is immediately called with the value of the error. +Otherwise, `callback` receives an array of results when `tasks` have completed. + +It is also possible to use an object instead of an array. Each property will be +run as a function, and the results will be passed to the final `callback` as an object +instead of an array. This can be a more readable way of handling results from +[`series`](#series). + +**Note** that while many implementations preserve the order of object properties, the +[ECMAScript Language Specifcation](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) +explicitly states that + +> The mechanics and order of enumerating the properties is not specified. + +So if you rely on the order in which your series of functions are executed, and want +this to work on all platforms, consider using an array. + +__Arguments__ + +* `tasks` - An array or object containing functions to run, each function is passed + a `callback(err, result)` it must call on completion with an error `err` (which can + be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the `task` callbacks. + +__Example__ + +```js +async.series([ + function(callback){ + // do some stuff ... + callback(null, 'one'); + }, + function(callback){ + // do some more stuff ... + callback(null, 'two'); + } +], +// optional callback +function(err, results){ + // results is now equal to ['one', 'two'] +}); + + +// an example using an object instead of an array +async.series({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equal to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its +callback, the main `callback` is immediately called with the value of the error. +Once the `tasks` have completed, the results are passed to the final `callback` as an +array. + +It is also possible to use an object instead of an array. Each property will be +run as a function and the results will be passed to the final `callback` as an object +instead of an array. 
This can be a more readable way of handling results from +[`parallel`](#parallel). + + +__Arguments__ + +* `tasks` - An array or object containing functions to run. Each function is passed + a `callback(err, result)` which it must call on completion with an error `err` + (which can be `null`) and an optional `result` value. +* `callback(err, results)` - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the task callbacks. + +__Example__ + +```js +async.parallel([ + function(callback){ + setTimeout(function(){ + callback(null, 'one'); + }, 200); + }, + function(callback){ + setTimeout(function(){ + callback(null, 'two'); + }, 100); + } +], +// optional callback +function(err, results){ + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}); + + +// an example using an object instead of an array +async.parallel({ + one: function(callback){ + setTimeout(function(){ + callback(null, 1); + }, 200); + }, + two: function(callback){ + setTimeout(function(){ + callback(null, 2); + }, 100); + } +}, +function(err, results) { + // results is now equals to: {one: 1, two: 2} +}); +``` + +--------------------------------------- + + +### parallelLimit(tasks, limit, [callback]) + +The same as [`parallel`](#parallel), only `tasks` are executed in parallel +with a maximum of `limit` tasks executing at any time. + +Note that the `tasks` are not executed in batches, so there is no guarantee that +the first `limit` tasks will complete before any others are started. + +__Arguments__ + +* `tasks` - An array or object containing functions to run, each function is passed + a `callback(err, result)` it must call on completion with an error `err` (which can + be `null`) and an optional `result` value. +* `limit` - The maximum number of `tasks` to run at any time. +* `callback(err, results)` - An optional callback to run once all the functions + have completed. This function gets a results array (or object) containing all + the result arguments passed to the `task` callbacks. + +--------------------------------------- + + +### whilst(test, fn, callback) + +Repeatedly call `fn`, while `test` returns `true`. Calls `callback` when stopped, +or an error occurs. + +__Arguments__ + +* `test()` - synchronous truth test to perform before each execution of `fn`. +* `fn(callback)` - A function which is called each time `test` passes. The function is + passed a `callback(err)`, which must be called once it has completed with an + optional `err` argument. +* `callback(err)` - A callback which is called after the test fails and repeated + execution of `fn` has stopped. + +__Example__ + +```js +var count = 0; + +async.whilst( + function () { return count < 5; }, + function (callback) { + count++; + setTimeout(callback, 1000); + }, + function (err) { + // 5 seconds have passed + } +); +``` + +--------------------------------------- + + +### doWhilst(fn, test, callback) + +The post-check version of [`whilst`](#whilst). To reflect the difference in +the order of operations, the arguments `test` and `fn` are switched. + +`doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + +--------------------------------------- + + +### until(test, fn, callback) + +Repeatedly call `fn` until `test` returns `true`. Calls `callback` when stopped, +or an error occurs. + +The inverse of [`whilst`](#whilst). 
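+
+A minimal sketch of usage, mirroring the `whilst` example above (the `count`
+counter, the five-iteration limit and the one-second delay are purely illustrative):
+
+```js
+var count = 0;
+
+async.until(
+    function () { return count === 5; },   // stop once this returns true
+    function (callback) {
+        count++;
+        setTimeout(callback, 1000);        // async work goes here
+    },
+    function (err) {
+        // called once the test has returned true, or an error occurred
+    }
+);
+```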
+ +--------------------------------------- + + +### doUntil(fn, test, callback) + +Like [`doWhilst`](#doWhilst), except the `test` is inverted. Note the argument ordering differs from `until`. + +--------------------------------------- + + +### forever(fn, errback) + +Calls the asynchronous function `fn` with a callback parameter that allows it to +call itself again, in series, indefinitely. + +If an error is passed to the callback then `errback` is called with the +error, and execution stops, otherwise it will never be called. + +```js +async.forever( + function(next) { + // next is suitable for passing to things that need a callback(err [, whatever]); + // it will result in this function being called again. + }, + function(err) { + // if next is called with a value in its first parameter, it will appear + // in here as 'err', and execution will stop. + } +); +``` + +--------------------------------------- + + +### waterfall(tasks, [callback]) + +Runs the `tasks` array of functions in series, each passing their results to the next in +the array. However, if any of the `tasks` pass an error to their own callback, the +next function is not executed, and the main `callback` is immediately called with +the error. + +__Arguments__ + +* `tasks` - An array of functions to run, each function is passed a + `callback(err, result1, result2, ...)` it must call on completion. The first + argument is an error (which can be `null`) and any further arguments will be + passed as arguments in order to the next task. +* `callback(err, [results])` - An optional callback to run once all the functions + have completed. This will be passed the results of the last task's callback. + + + +__Example__ + +```js +async.waterfall([ + function(callback) { + callback(null, 'one', 'two'); + }, + function(arg1, arg2, callback) { + // arg1 now equals 'one' and arg2 now equals 'two' + callback(null, 'three'); + }, + function(arg1, callback) { + // arg1 now equals 'three' + callback(null, 'done'); + } +], function (err, result) { + // result now equals 'done' +}); +``` + +--------------------------------------- + +### compose(fn1, fn2...) + +Creates a function which is a composition of the passed asynchronous +functions. Each function consumes the return value of the function that +follows. Composing functions `f()`, `g()`, and `h()` would produce the result of +`f(g(h()))`, only this version uses callbacks to obtain the return values. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +function add1(n, callback) { + setTimeout(function () { + callback(null, n + 1); + }, 10); +} + +function mul3(n, callback) { + setTimeout(function () { + callback(null, n * 3); + }, 10); +} + +var add1mul3 = async.compose(mul3, add1); + +add1mul3(4, function (err, result) { + // result now equals 15 +}); +``` + +--------------------------------------- + +### seq(fn1, fn2...) + +Version of the compose function that is more natural to read. +Each function consumes the return value of the previous function. +It is the equivalent of [`compose`](#compose) with the arguments reversed. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* functions... - the asynchronous functions to compose + + +__Example__ + +```js +// Requires lodash (or underscore), express3 and dresende's orm2. +// Part of an app, that fetches cats of the logged user. 
+// This example uses `seq` function to avoid overnesting and error +// handling clutter. +app.get('/cats', function(request, response) { + var User = request.models.User; + async.seq( + _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + function(user, fn) { + user.getCats(fn); // 'getCats' has signature (callback(err, data)) + } + )(req.session.user_id, function (err, cats) { + if (err) { + console.error(err); + response.json({ status: 'error', message: err.message }); + } else { + response.json({ status: 'ok', message: 'Cats found', data: cats }); + } + }); +}); +``` + +--------------------------------------- + +### applyEach(fns, args..., callback) + +Applies the provided arguments to each function in the array, calling +`callback` after all functions have completed. If you only provide the first +argument, then it will return a function which lets you pass in the +arguments as if it were a single function call. + +__Arguments__ + +* `fns` - the asynchronous functions to all call with the same arguments +* `args...` - any number of separate arguments to pass to the function +* `callback` - the final argument should be the callback, called when all + functions have completed processing + + +__Example__ + +```js +async.applyEach([enableSearch, updateSchema], 'bucket', callback); + +// partial application example: +async.each( + buckets, + async.applyEach([enableSearch, updateSchema]), + callback +); +``` + +--------------------------------------- + + +### applyEachSeries(arr, iterator, callback) + +The same as [`applyEach`](#applyEach) only the functions are applied in series. + +--------------------------------------- + + +### queue(worker, concurrency) + +Creates a `queue` object with the specified `concurrency`. Tasks added to the +`queue` are processed in parallel (up to the `concurrency` limit). If all +`worker`s are in progress, the task is queued until one becomes available. +Once a `worker` completes a `task`, that `task`'s callback is called. + +__Arguments__ + +* `worker(task, callback)` - An asynchronous function for processing a queued + task, which must call its `callback(err)` argument when finished, with an + optional `error` as an argument. +* `concurrency` - An `integer` for determining how many `worker` functions should be + run in parallel. + +__Queue objects__ + +The `queue` object returned by this function has the following properties and +methods: + +* `length()` - a function returning the number of items waiting to be processed. +* `started` - a function returning whether or not any items have been pushed and processed by the queue +* `running()` - a function returning the number of items currently being processed. +* `idle()` - a function returning false if there are items waiting or being processed, or true if not. +* `concurrency` - an integer for determining how many `worker` functions should be + run in parallel. This property can be changed after a `queue` is created to + alter the concurrency on-the-fly. +* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once + the `worker` has finished processing the task. Instead of a single task, a `tasks` array + can be submitted. The respective callback is used for every task in the list. +* `unshift(task, [callback])` - add a new task to the front of the `queue`. +* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit, + and further tasks will be queued. 
+* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`. +* `paused` - a boolean for determining whether the queue is in a paused state +* `pause()` - a function that pauses the processing of tasks until `resume()` is called. +* `resume()` - a function that resumes the processing of queued tasks when the queue is paused. +* `kill()` - a function that removes the `drain` callback and empties remaining tasks from the queue forcing it to go idle. + +__Example__ + +```js +// create a queue object with concurrency 2 + +var q = async.queue(function (task, callback) { + console.log('hello ' + task.name); + callback(); +}, 2); + + +// assign a callback +q.drain = function() { + console.log('all items have been processed'); +} + +// add some items to the queue + +q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); + +// add some items to the queue (batch-wise) + +q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) { + console.log('finished processing item'); +}); + +// add some items to the front of the queue + +q.unshift({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +``` + + +--------------------------------------- + + +### priorityQueue(worker, concurrency) + +The same as [`queue`](#queue) only tasks are assigned a priority and completed in ascending priority order. There are two differences between `queue` and `priorityQueue` objects: + +* `push(task, priority, [callback])` - `priority` should be a number. If an array of + `tasks` is given, all tasks will be assigned the same priority. +* The `unshift` method was removed. + +--------------------------------------- + + +### cargo(worker, [payload]) + +Creates a `cargo` object with the specified payload. Tasks added to the +cargo will be processed altogether (up to the `payload` limit). If the +`worker` is in progress, the task is queued until it becomes available. Once +the `worker` has completed some tasks, each callback of those tasks is called. +Check out [this animation](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) for how `cargo` and `queue` work. + +While [queue](#queue) passes only one task to one of a group of workers +at a time, cargo passes an array of tasks to a single worker, repeating +when the worker is finished. + +__Arguments__ + +* `worker(tasks, callback)` - An asynchronous function for processing an array of + queued tasks, which must call its `callback(err)` argument when finished, with + an optional `err` argument. +* `payload` - An optional `integer` for determining how many tasks should be + processed per round; if omitted, the default is unlimited. + +__Cargo objects__ + +The `cargo` object returned by this function has the following properties and +methods: + +* `length()` - A function returning the number of items waiting to be processed. +* `payload` - An `integer` for determining how many tasks should be + process per round. This property can be changed after a `cargo` is created to + alter the payload on-the-fly. +* `push(task, [callback])` - Adds `task` to the `queue`. 
The callback is called
+  once the `worker` has finished processing the task. Instead of a single task, an array of `tasks`
+  can be submitted. The respective callback is used for every task in the list.
+* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued.
+* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`.
+* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`.
+
+__Example__
+
+```js
+// create a cargo object with payload 2
+
+var cargo = async.cargo(function (tasks, callback) {
+    for(var i=0; i<tasks.length; i++){
+      console.log('hello ' + tasks[i].name);
+    }
+    callback();
+}, 2);
+
+
+// add some items
+
+cargo.push({name: 'foo'}, function (err) {
+    console.log('finished processing foo');
+});
+cargo.push({name: 'bar'}, function (err) {
+    console.log('finished processing bar');
+});
+```
+
+---------------------------------------
+
+
+### auto(tasks, [callback])
+
+Determines the best order for running the functions in `tasks`, based on their
+requirements. Each function can optionally depend on other functions being completed
+first, and each function is run as soon as its requirements are satisfied.
+
+If any of the functions pass an error to their callback, the sequence will not
+complete (so any other functions depending on the failed task will not run), and the main
+`callback` is immediately called with the error. Functions also receive an
+object containing the results of functions which have completed so far.
+
+Note, all functions are called with a `results` object as a second argument,
+so it is unsafe to pass functions in the `tasks` object which cannot handle the
+extra argument.
+
+For example, this snippet of code:
+
+```js
+async.auto({
+  readData: async.apply(fs.readFile, 'data.txt', 'utf-8')
+}, callback);
+```
+
+will have the effect of calling `readFile` with the results object as the last
+argument, which will fail:
+
+```js
+fs.readFile('data.txt', 'utf-8', cb, {});
+```
+
+Instead, wrap the call to `readFile` in a function which does not forward the
+`results` object:
+
+```js
+async.auto({
+  readData: function(cb, results){
+    fs.readFile('data.txt', 'utf-8', cb);
+  }
+}, callback);
+```
+
+__Arguments__
+
+* `tasks` - An object. Each of its properties is either a function or an array of
+  requirements, with the function itself the last item in the array. The object's key
+  of a property serves as the name of the task defined by that property,
+  i.e. it can be used when specifying requirements for other tasks.
+  The function receives two arguments: (1) a `callback(err, result)` which must be
+  called when finished, passing an `error` (which can be `null`) and the result of
+  the function's execution, and (2) a `results` object, containing the results of
+  the previously executed functions.
+* `callback(err, results)` - An optional callback which is called when all the
+  tasks have been completed. It receives the `err` argument if any `tasks`
+  pass an error to their callback. Results are always returned; however, if
+  an error occurs, no further `tasks` will be performed, and the results
+  object will only contain partial results.
+ + +__Example__ + +```js +async.auto({ + get_data: function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + make_folder: function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + }, + write_file: ['get_data', 'make_folder', function(callback, results){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + callback(null, 'filename'); + }], + email_link: ['write_file', function(callback, results){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + // results.write_file contains the filename returned by write_file. + callback(null, {'file':results.write_file, 'email':'user@example.com'}); + }] +}, function(err, results) { + console.log('err = ', err); + console.log('results = ', results); +}); +``` + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + +```js +async.parallel([ + function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + } +], +function(err, results){ + async.series([ + function(callback){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + results.push('filename'); + callback(null); + }, + function(callback){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + callback(null, {'file':results.pop(), 'email':'user@example.com'}); + } + ]); +}); +``` + +For a complicated series of `async` tasks, using the [`auto`](#auto) function makes adding +new tasks much easier (and the code more readable). + + +--------------------------------------- + + +### retry([times = 5], task, [callback]) + +Attempts to get a successful response from `task` no more than `times` times before +returning an error. If the task is successful, the `callback` will be passed the result +of the successful task. If all attempts fail, the callback will be passed the error and +result (if any) of the final attempt. + +__Arguments__ + +* `times` - An integer indicating how many times to attempt the `task` before giving up. Defaults to 5. +* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)` + which must be called when finished, passing `err` (which can be `null`) and the `result` of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions (if nested inside another control flow). +* `callback(err, results)` - An optional callback which is called when the + task has succeeded, or after the final failed attempt. It receives the `err` and `result` arguments of the last attempt at completing the `task`. 
+ +The [`retry`](#retry) function can be used as a stand-alone control flow by passing a +callback, as shown below: + +```js +async.retry(3, apiMethod, function(err, result) { + // do something with the result +}); +``` + +It can also be embeded within other control flow functions to retry individual methods +that are not as reliable, like this: + +```js +async.auto({ + users: api.getUsers.bind(api), + payments: async.retry(3, api.getPayments.bind(api)) +}, function(err, results) { + // do something with the results +}); +``` + + +--------------------------------------- + + +### iterator(tasks) + +Creates an iterator function which calls the next function in the `tasks` array, +returning a continuation to call the next one after that. It's also possible to +“peek” at the next iterator with `iterator.next()`. + +This function is used internally by the `async` module, but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* `tasks` - An array of functions to run. + +__Example__ + +```js +var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } +]); + +node> var iterator2 = iterator(); +'one' +node> var iterator3 = iterator2(); +'two' +node> iterator3(); +'three' +node> var nextfn = iterator2.next(); +node> nextfn(); +'three' +``` + +--------------------------------------- + + +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied. + +Useful as a shorthand when combined with other control flow functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to automatically apply when the + continuation is called. + +__Example__ + +```js +// using apply + +async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), +]); + + +// the same process without using apply + +async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + } +]); +``` + +It's possible to pass any number of additional arguments when calling the +continuation: + +```js +node> var fn = async.apply(sys.puts, 'one'); +node> fn('two', 'three'); +one +two +three +``` + +--------------------------------------- + + +### nextTick(callback), setImmediate(callback) + +Calls `callback` on a later loop around the event loop. In Node.js this just +calls `process.nextTick`; in the browser it falls back to `setImmediate(callback)` +if available, otherwise `setTimeout(callback, 0)`, which means other higher priority +events may precede the execution of `callback`. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* `callback` - The function to call on a later loop around the event loop. + +__Example__ + +```js +var call_order = []; +async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two'] +}); +call_order.push('one') +``` + + +### times(n, callback) + +Calls the `callback` function `n` times, and accumulates results in the same manner +you would use with [`map`](#map). + +__Arguments__ + +* `n` - The number of times to run the function. +* `callback` - The function to call `n` times. 
+
+__Example__
+
+```js
+// Pretend this is some complicated async factory
+var createUser = function(id, callback) {
+  callback(null, {
+    id: 'user' + id
+  })
+}
+// generate 5 users
+async.times(5, function(n, next){
+    createUser(n, function(err, user) {
+      next(err, user)
+    })
+}, function(err, users) {
+  // we should now have 5 users
+});
+```
+
+
+### timesSeries(n, callback)
+
+The same as [`times`](#times), only the `iterator` is applied in series.
+The next `iterator` is only called once the current one has completed.
+The results array will be in the same order as the original.
+
+
+## Utils
+
+
+### memoize(fn, [hasher])
+
+Caches the results of an `async` function. When creating a hash to store function
+results against, the callback is omitted from the hash and an optional hash
+function can be used.
+
+The cache of results is exposed as the `memo` property of the function returned
+by `memoize`.
+
+__Arguments__
+
+* `fn` - The function to proxy and cache results from.
+* `hasher` - An optional function for generating a custom hash for storing
+  results. It has all the arguments applied to it apart from the callback, and
+  must be synchronous.
+
+__Example__
+
+```js
+var slow_fn = function (name, callback) {
+    // do something
+    callback(null, result);
+};
+var fn = async.memoize(slow_fn);
+
+// fn can now be used as if it were slow_fn
+fn('some name', function () {
+    // callback
+});
+```
+
+
+### unmemoize(fn)
+
+Undoes a [`memoize`](#memoize)d function, reverting it to the original, unmemoized
+form. Handy for testing.
+
+__Arguments__
+
+* `fn` - the memoized function
+
+
+### log(function, arguments)
+
+Logs the result of an `async` function to the `console`. Only works in Node.js or
+in browsers that support `console.log` and `console.error` (such as FF and Chrome).
+If multiple arguments are returned from the async function, `console.log` is
+called on each argument in order.
+
+__Arguments__
+
+* `function` - The function you want to eventually apply all arguments to.
+* `arguments...` - Any number of arguments to apply to the function.
+
+__Example__
+
+```js
+var hello = function(name, callback){
+    setTimeout(function(){
+        callback(null, 'hello ' + name);
+    }, 1000);
+};
+```
+```js
+node> async.log(hello, 'world');
+'hello world'
+```
+
+---------------------------------------
+
+
+### dir(function, arguments)
+
+Logs the result of an `async` function to the `console` using `console.dir` to
+display the properties of the resulting object. Only works in Node.js or
+in browsers that support `console.dir` and `console.error` (such as FF and Chrome).
+If multiple arguments are returned from the async function, `console.dir` is
+called on each argument in order.
+
+__Arguments__
+
+* `function` - The function you want to eventually apply all arguments to.
+* `arguments...` - Any number of arguments to apply to the function.
+
+__Example__
+
+```js
+var hello = function(name, callback){
+    setTimeout(function(){
+        callback(null, {hello: name});
+    }, 1000);
+};
+```
+```js
+node> async.dir(hello, 'world');
+{hello: 'world'}
+```
+
+---------------------------------------
+
+
+### noConflict()
+
+Changes the value of `async` back to its original value, returning a reference to the
+`async` object.
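+
+A minimal sketch of typical browser usage, assuming another script on the page also
+wants the global `async` name (the `asyncLib` variable below is just illustrative):
+
+```js
+// keep a private reference, then hand the global `async` back to whatever
+// defined it before this script was loaded
+var asyncLib = async.noConflict();
+
+asyncLib.each(['file1', 'file2'], function (file, callback) {
+    // process the item, then signal completion
+    callback();
+}, function (err) {
+    // all items have been processed
+});
+```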
diff --git a/node_modules/watchpack/node_modules/async/bower.json b/node_modules/watchpack/node_modules/async/bower.json new file mode 100644 index 0000000..1817688 --- /dev/null +++ b/node_modules/watchpack/node_modules/async/bower.json @@ -0,0 +1,38 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "0.9.2", + "main": "lib/async.js", + "keywords": [ + "async", + "callback", + "utility", + "module" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "devDependencies": { + "nodeunit": ">0.0.0", + "uglify-js": "1.2.x", + "nodelint": ">0.0.0", + "lodash": ">=2.4.1" + }, + "moduleType": [ + "amd", + "globals", + "node" + ], + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ], + "authors": [ + "Caolan McMahon" + ] +} \ No newline at end of file diff --git a/node_modules/watchpack/node_modules/async/component.json b/node_modules/watchpack/node_modules/async/component.json new file mode 100644 index 0000000..5003a7c --- /dev/null +++ b/node_modules/watchpack/node_modules/async/component.json @@ -0,0 +1,16 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "0.9.2", + "keywords": [ + "async", + "callback", + "utility", + "module" + ], + "license": "MIT", + "repository": "caolan/async", + "scripts": [ + "lib/async.js" + ] +} \ No newline at end of file diff --git a/node_modules/watchpack/node_modules/async/lib/async.js b/node_modules/watchpack/node_modules/async/lib/async.js new file mode 100644 index 0000000..394c41c --- /dev/null +++ b/node_modules/watchpack/node_modules/async/lib/async.js @@ -0,0 +1,1123 @@ +/*! + * async + * https://github.com/caolan/async + * + * Copyright 2010-2014 Caolan McMahon + * Released under the MIT license + */ +/*jshint onevar: false, indent:4 */ +/*global setImmediate: false, setTimeout: false, console: false */ +(function () { + + var async = {}; + + // global on the server, window in the browser + var root, previous_async; + + root = this; + if (root != null) { + previous_async = root.async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + function only_once(fn) { + var called = false; + return function() { + if (called) throw new Error("Callback was already called."); + called = true; + fn.apply(root, arguments); + } + } + + //// cross-browser compatiblity functions //// + + var _toString = Object.prototype.toString; + + var _isArray = Array.isArray || function (obj) { + return _toString.call(obj) === '[object Array]'; + }; + + var _each = function (arr, iterator) { + for (var i = 0; i < arr.length; i += 1) { + iterator(arr[i], i, arr); + } + }; + + var _map = function (arr, iterator) { + if (arr.map) { + return arr.map(iterator); + } + var results = []; + _each(arr, function (x, i, a) { + results.push(iterator(x, i, a)); + }); + return results; + }; + + var _reduce = function (arr, iterator, memo) { + if (arr.reduce) { + return arr.reduce(iterator, memo); + } + _each(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + }; + + var _keys = function (obj) { + if (Object.keys) { + return Object.keys(obj); + } + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + if (typeof 
process === 'undefined' || !(process.nextTick)) { + if (typeof setImmediate === 'function') { + async.nextTick = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + async.setImmediate = async.nextTick; + } + else { + async.nextTick = function (fn) { + setTimeout(fn, 0); + }; + async.setImmediate = async.nextTick; + } + } + else { + async.nextTick = process.nextTick; + if (typeof setImmediate !== 'undefined') { + async.setImmediate = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + } + else { + async.setImmediate = async.nextTick; + } + } + + async.each = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + _each(arr, function (x) { + iterator(x, only_once(done) ); + }); + function done(err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(); + } + } + } + }; + async.forEach = async.each; + + async.eachSeries = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + var iterate = function () { + iterator(arr[completed], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(); + } + else { + iterate(); + } + } + }); + }; + iterate(); + }; + async.forEachSeries = async.eachSeries; + + async.eachLimit = function (arr, limit, iterator, callback) { + var fn = _eachLimit(limit); + fn.apply(null, [arr, iterator, callback]); + }; + async.forEachLimit = async.eachLimit; + + var _eachLimit = function (limit) { + + return function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length || limit <= 0) { + return callback(); + } + var completed = 0; + var started = 0; + var running = 0; + + (function replenish () { + if (completed >= arr.length) { + return callback(); + } + + while (running < limit && started < arr.length) { + started += 1; + running += 1; + iterator(arr[started - 1], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + running -= 1; + if (completed >= arr.length) { + callback(); + } + else { + replenish(); + } + } + }); + } + })(); + }; + }; + + + var doParallel = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.each].concat(args)); + }; + }; + var doParallelLimit = function(limit, fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [_eachLimit(limit)].concat(args)); + }; + }; + var doSeries = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.eachSeries].concat(args)); + }; + }; + + + var _asyncMap = function (eachfn, arr, iterator, callback) { + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + if (!callback) { + eachfn(arr, function (x, callback) { + iterator(x.value, function (err) { + callback(err); + }); + }); + } else { + var results = []; + eachfn(arr, function (x, callback) { + iterator(x.value, function (err, v) { + results[x.index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + async.mapLimit = function (arr, 
limit, iterator, callback) { + return _mapLimit(limit)(arr, iterator, callback); + }; + + var _mapLimit = function(limit) { + return doParallelLimit(limit, _asyncMap); + }; + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. + async.reduce = function (arr, memo, iterator, callback) { + async.eachSeries(arr, function (x, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + // inject alias + async.inject = async.reduce; + // foldl alias + async.foldl = async.reduce; + + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, function (x) { + return x; + }).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + // foldr alias + async.foldr = async.reduceRight; + + var _filter = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.filter = doParallel(_filter); + async.filterSeries = doSeries(_filter); + // select alias + async.select = async.filter; + async.selectSeries = async.filterSeries; + + var _reject = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (!v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.reject = doParallel(_reject); + async.rejectSeries = doSeries(_reject); + + var _detect = function (eachfn, arr, iterator, main_callback) { + eachfn(arr, function (x, callback) { + iterator(x, function (result) { + if (result) { + main_callback(x); + main_callback = function () {}; + } + else { + callback(); + } + }); + }, function (err) { + main_callback(); + }); + }; + async.detect = doParallel(_detect); + async.detectSeries = doSeries(_detect); + + async.some = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (v) { + main_callback(true); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(false); + }); + }; + // any alias + async.any = async.some; + + async.every = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (!v) { + main_callback(false); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(true); + }); + }; + // all alias + async.all = async.every; + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + var fn = function (left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + }; + callback(null, _map(results.sort(fn), function (x) { + return x.value; + })); + } + }); + }; + + async.auto = function (tasks, callback) { + callback = callback || function () {}; + var keys = _keys(tasks); + var remainingTasks = keys.length + if (!remainingTasks) { + return callback(); + } + + var results = {}; + + var listeners = []; + var addListener = function (fn) { + listeners.unshift(fn); + }; + var removeListener = function (fn) { + for (var i = 0; i < listeners.length; i += 1) { + if (listeners[i] === fn) { + listeners.splice(i, 1); + return; + } + } + }; + var taskComplete = function () { + remainingTasks-- + _each(listeners.slice(0), function (fn) { + fn(); + }); + }; + + addListener(function () { + if (!remainingTasks) { + var theCallback = callback; + // prevent final callback from calling itself if it errors + callback = function () {}; + + theCallback(null, results); + } + }); + + _each(keys, function (k) { + var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]]; + var taskCallback = function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + if (err) { + var safeResults = {}; + _each(_keys(results), function(rkey) { + safeResults[rkey] = results[rkey]; + }); + safeResults[k] = args; + callback(err, safeResults); + // stop subsequent errors hitting callback multiple times + callback = function () {}; + } + else { + results[k] = args; + async.setImmediate(taskComplete); + } + }; + var requires = task.slice(0, Math.abs(task.length - 1)) || []; + var ready = function () { + return _reduce(requires, function (a, x) { + return (a && results.hasOwnProperty(x)); + }, true) && !results.hasOwnProperty(k); + }; + if (ready()) { + task[task.length - 1](taskCallback, results); + } + else { + var listener = function () { + if (ready()) { + removeListener(listener); + task[task.length - 1](taskCallback, results); + } + }; + addListener(listener); + } + }); + }; + + async.retry = function(times, task, callback) { + var DEFAULT_TIMES = 5; + var attempts = []; + // Use defaults if times not passed + if (typeof times === 'function') { + callback = task; + task = times; + times = DEFAULT_TIMES; + } + // Make sure times is a number + times = parseInt(times, 10) || DEFAULT_TIMES; + var wrappedTask = function(wrappedCallback, wrappedResults) { + var retryAttempt = function(task, finalAttempt) { + return function(seriesCallback) { + task(function(err, result){ + seriesCallback(!err || finalAttempt, {err: err, result: result}); + }, wrappedResults); + }; + }; + while (times) { + attempts.push(retryAttempt(task, !(times-=1))); + } + async.series(attempts, function(done, data){ + data = data[data.length - 1]; + (wrappedCallback || callback)(data.err, data.result); + }); + } + // If a callback is passed, run this as a controll flow + return callback ? 
wrappedTask() : wrappedTask + }; + + async.waterfall = function (tasks, callback) { + callback = callback || function () {}; + if (!_isArray(tasks)) { + var err = new Error('First argument to waterfall must be an array of functions'); + return callback(err); + } + if (!tasks.length) { + return callback(); + } + var wrapIterator = function (iterator) { + return function (err) { + if (err) { + callback.apply(null, arguments); + callback = function () {}; + } + else { + var args = Array.prototype.slice.call(arguments, 1); + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + async.setImmediate(function () { + iterator.apply(null, args); + }); + } + }; + }; + wrapIterator(async.iterator(tasks))(); + }; + + var _parallel = function(eachfn, tasks, callback) { + callback = callback || function () {}; + if (_isArray(tasks)) { + eachfn.map(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + eachfn.each(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.parallel = function (tasks, callback) { + _parallel({ map: async.map, each: async.each }, tasks, callback); + }; + + async.parallelLimit = function(tasks, limit, callback) { + _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback); + }; + + async.series = function (tasks, callback) { + callback = callback || function () {}; + if (_isArray(tasks)) { + async.mapSeries(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + async.eachSeries(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.iterator = function (tasks) { + var makeCallback = function (index) { + var fn = function () { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + }; + fn.next = function () { + return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; + }; + return fn; + }; + return makeCallback(0); + }; + + async.apply = function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + return function () { + return fn.apply( + null, args.concat(Array.prototype.slice.call(arguments)) + ); + }; + }; + + var _concat = function (eachfn, arr, fn, callback) { + var r = []; + eachfn(arr, function (x, cb) { + fn(x, function (err, y) { + r = r.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, r); + }); + }; + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + if (test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.whilst(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doWhilst = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + var args = Array.prototype.slice.call(arguments, 1); + if (test.apply(null, args)) { + async.doWhilst(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.until = function (test, iterator, callback) { + if (!test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.until(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doUntil = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + var args = Array.prototype.slice.call(arguments, 1); + if (!test.apply(null, args)) { + async.doUntil(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.queue = function (worker, concurrency) { + if (concurrency === undefined) { + concurrency = 1; + } + function _insert(q, data, pos, callback) { + if (!q.started){ + q.started = true; + } + if (!_isArray(data)) { + data = [data]; + } + if(data.length == 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + if (q.drain) { + q.drain(); + } + }); + } + _each(data, function(task) { + var item = { + data: task, + callback: typeof callback === 'function' ? 
callback : null + }; + + if (pos) { + q.tasks.unshift(item); + } else { + q.tasks.push(item); + } + + if (q.saturated && q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + var workers = 0; + var q = { + tasks: [], + concurrency: concurrency, + saturated: null, + empty: null, + drain: null, + started: false, + paused: false, + push: function (data, callback) { + _insert(q, data, false, callback); + }, + kill: function () { + q.drain = null; + q.tasks = []; + }, + unshift: function (data, callback) { + _insert(q, data, true, callback); + }, + process: function () { + if (!q.paused && workers < q.concurrency && q.tasks.length) { + var task = q.tasks.shift(); + if (q.empty && q.tasks.length === 0) { + q.empty(); + } + workers += 1; + var next = function () { + workers -= 1; + if (task.callback) { + task.callback.apply(task, arguments); + } + if (q.drain && q.tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + var cb = only_once(next); + worker(task.data, cb); + } + }, + length: function () { + return q.tasks.length; + }, + running: function () { + return workers; + }, + idle: function() { + return q.tasks.length + workers === 0; + }, + pause: function () { + if (q.paused === true) { return; } + q.paused = true; + }, + resume: function () { + if (q.paused === false) { return; } + q.paused = false; + // Need to call q.process once per concurrent + // worker to preserve full concurrency after pause + for (var w = 1; w <= q.concurrency; w++) { + async.setImmediate(q.process); + } + } + }; + return q; + }; + + async.priorityQueue = function (worker, concurrency) { + + function _compareTasks(a, b){ + return a.priority - b.priority; + }; + + function _binarySearch(sequence, item, compare) { + var beg = -1, + end = sequence.length - 1; + while (beg < end) { + var mid = beg + ((end - beg + 1) >>> 1); + if (compare(item, sequence[mid]) >= 0) { + beg = mid; + } else { + end = mid - 1; + } + } + return beg; + } + + function _insert(q, data, priority, callback) { + if (!q.started){ + q.started = true; + } + if (!_isArray(data)) { + data = [data]; + } + if(data.length == 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + if (q.drain) { + q.drain(); + } + }); + } + _each(data, function(task) { + var item = { + data: task, + priority: priority, + callback: typeof callback === 'function' ? callback : null + }; + + q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item); + + if (q.saturated && q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + // Start with a normal queue + var q = async.queue(worker, concurrency); + + // Override push to accept second parameter representing priority + q.push = function (data, priority, callback) { + _insert(q, data, priority, callback); + }; + + // Remove unshift function + delete q.unshift; + + return q; + }; + + async.cargo = function (worker, payload) { + var working = false, + tasks = []; + + var cargo = { + tasks: tasks, + payload: payload, + saturated: null, + empty: null, + drain: null, + drained: true, + push: function (data, callback) { + if (!_isArray(data)) { + data = [data]; + } + _each(data, function(task) { + tasks.push({ + data: task, + callback: typeof callback === 'function' ? 
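+                        // descriptive comment (added for readability): every task pushed in this
+                        // call shares the same callback; cargo.process applies the worker's
+                        // arguments to each stored callback once the batch completes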
callback : null + }); + cargo.drained = false; + if (cargo.saturated && tasks.length === payload) { + cargo.saturated(); + } + }); + async.setImmediate(cargo.process); + }, + process: function process() { + if (working) return; + if (tasks.length === 0) { + if(cargo.drain && !cargo.drained) cargo.drain(); + cargo.drained = true; + return; + } + + var ts = typeof payload === 'number' + ? tasks.splice(0, payload) + : tasks.splice(0, tasks.length); + + var ds = _map(ts, function (task) { + return task.data; + }); + + if(cargo.empty) cargo.empty(); + working = true; + worker(ds, function () { + working = false; + + var args = arguments; + _each(ts, function (data) { + if (data.callback) { + data.callback.apply(null, args); + } + }); + + process(); + }); + }, + length: function () { + return tasks.length; + }, + running: function () { + return working; + } + }; + return cargo; + }; + + var _console_fn = function (name) { + return function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + fn.apply(null, args.concat([function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (typeof console !== 'undefined') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _each(args, function (x) { + console[name](x); + }); + } + } + }])); + }; + }; + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + var queues = {}; + hasher = hasher || function (x) { + return x; + }; + var memoized = function () { + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + var key = hasher.apply(null, args); + if (key in memo) { + async.nextTick(function () { + callback.apply(null, memo[key]); + }); + } + else if (key in queues) { + queues[key].push(callback); + } + else { + queues[key] = [callback]; + fn.apply(null, args.concat([function () { + memo[key] = arguments; + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i].apply(null, arguments); + } + }])); + } + }; + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + }; + + async.unmemoize = function (fn) { + return function () { + return (fn.unmemoized || fn).apply(null, arguments); + }; + }; + + async.times = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.map(counter, iterator, callback); + }; + + async.timesSeries = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.mapSeries(counter, iterator, callback); + }; + + async.seq = function (/* functions... */) { + var fns = arguments; + return function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + async.reduce(fns, args, function (newargs, fn, cb) { + fn.apply(that, newargs.concat([function () { + var err = arguments[0]; + var nextargs = Array.prototype.slice.call(arguments, 1); + cb(err, nextargs); + }])) + }, + function (err, results) { + callback.apply(that, [err].concat(results)); + }); + }; + }; + + async.compose = function (/* functions... 
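+       note: compose(f, g, h) applies h first, then g, then f;
+       it is simply seq with the argument order reversed, as the body below shows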
*/) { + return async.seq.apply(null, Array.prototype.reverse.call(arguments)); + }; + + var _applyEach = function (eachfn, fns /*args...*/) { + var go = function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + return eachfn(fns, function (fn, cb) { + fn.apply(that, args.concat([cb])); + }, + callback); + }; + if (arguments.length > 2) { + var args = Array.prototype.slice.call(arguments, 2); + return go.apply(this, args); + } + else { + return go; + } + }; + async.applyEach = doParallel(_applyEach); + async.applyEachSeries = doSeries(_applyEach); + + async.forever = function (fn, callback) { + function next(err) { + if (err) { + if (callback) { + return callback(err); + } + throw err; + } + fn(next); + } + next(); + }; + + // Node.js + if (typeof module !== 'undefined' && module.exports) { + module.exports = async; + } + // AMD / RequireJS + else if (typeof define !== 'undefined' && define.amd) { + define([], function () { + return async; + }); + } + // included directly via